diff --git a/.dockerignore b/.dockerignore
new file mode 100644
index 00000000000..559b271bf38
--- /dev/null
+++ b/.dockerignore
@@ -0,0 +1,27 @@
+Dockerfile*
+.dockerignore
+
+# Yarn
+.pnp.*
+.yarn/*
+!.yarn/patches
+!.yarn/plugins
+!.yarn/releases
+!.yarn/sdks
+!.yarn/versions
+
+packages
+**/package.tgz
+**/target
+**/node_modules
+**/outputs
+
+# Noir.js
+tooling/noir_js/lib
+
+# Wasm build artifacts
+compiler/wasm/nodejs
+compiler/wasm/web
+tooling/noirc_abi_wasm/nodejs
+tooling/noirc_abi_wasm/web
+tooling/noir_js/lib
diff --git a/.eslintrc.js b/.eslintrc.js
index f47f171561c..0f20016d862 100644
--- a/.eslintrc.js
+++ b/.eslintrc.js
@@ -1,4 +1,9 @@
 module.exports = {
+  env: {
+    browser: true,
+    es6: true,
+    node: true,
+  },
   root: true,
   parser: '@typescript-eslint/parser',
   plugins: ['@typescript-eslint', 'prettier'],
diff --git a/.github/DEAD_LINKS_IN_DOCS.md b/.github/DEAD_LINKS_IN_DOCS.md
new file mode 100644
index 00000000000..b1671276dcf
--- /dev/null
+++ b/.github/DEAD_LINKS_IN_DOCS.md
@@ -0,0 +1,11 @@
+---
+title: "Docs contains dead links"
+assignees: signorecello catmcgee critesjosh jzaki Savio-Sou
+labels: documentation
+---
+
+Some of the external links in the docs are now dead. This is likely due to the thing being linked to being moved.
+
+Check the [Check Markdown links]({{env.WORKFLOW_URL}}) workflow for details.
+
+This issue was raised by the workflow `{{env.WORKFLOW_NAME}}`
diff --git a/.github/NIGHTLY_TEST_FAILURE.md b/.github/NIGHTLY_TEST_FAILURE.md
deleted file mode 100644
index 05772d82a51..00000000000
--- a/.github/NIGHTLY_TEST_FAILURE.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-title: "nightly test-integration failed"
-assignees: kobyhallx, tomafrench, jonybur
-labels: bug
----
-
-Something broke our nightly integration test.
-
-Check the [test]({{env.WORKFLOW_URL}}) workflow for details.
-
-This issue was raised by the workflow `{{env.WORKFLOW_NAME}}`
diff --git a/.github/actions/docs/build-status/script.sh b/.github/actions/docs/build-status/script.sh
index 0b282557cf2..2e86de6c173 100755
--- a/.github/actions/docs/build-status/script.sh
+++ b/.github/actions/docs/build-status/script.sh
@@ -1,4 +1,4 @@
-#!/bin/bash
+#!/usr/bin/env bash
 
 BRANCH_NAME=$(echo "$BRANCH_NAME" | sed -e "s#refs/[^/]*/##")
 DEPLOY_STATUS=$(curl -X GET "https://api.netlify.com/api/v1/sites/$SITE_ID/deploys?branch=$BRANCH_NAME" | jq -r '.[] | select(.created_at != null) | .state' | head -1)
diff --git a/.github/actions/setup/action.yml b/.github/actions/setup/action.yml
index 5efe115ddcf..8e24b6738a9 100644
--- a/.github/actions/setup/action.yml
+++ b/.github/actions/setup/action.yml
@@ -4,13 +4,13 @@ description: Installs the workspace's yarn dependencies and caches them
 runs:
   using: composite
   steps:
-    - name: Cache
-      uses: actions/cache@v3
-      id: cache
+    - uses: actions/setup-node@v3
+      id: node
       with:
-        path: "**/node_modules"
-        key: yarn-v1-${{ hashFiles('**/yarn.lock') }}
+        node-version: 18.17.1
+        cache: 'yarn'
+        cache-dependency-path: 'yarn.lock'
+
     - name: Install
       run: yarn --immutable
       shell: bash
-      if: steps.cache.outputs.cache-hit != 'true'
diff --git a/.github/scripts/acvm_js-build.sh b/.github/scripts/acvm_js-build.sh
new file mode 100755
index 00000000000..0565a9bb89f
--- /dev/null
+++ b/.github/scripts/acvm_js-build.sh
@@ -0,0 +1,5 @@
+#!/bin/bash
+set -eu
+
+.github/scripts/install_wasm-bindgen.sh
+yarn workspace @noir-lang/acvm_js build
diff --git a/.github/scripts/acvm_js-test-browser.sh b/.github/scripts/acvm_js-test-browser.sh
new file mode 100755
index 00000000000..598c98dadf2
--- /dev/null
+++ b/.github/scripts/acvm_js-test-browser.sh
@@ -0,0 +1,5 @@
+#!/bin/bash
+set -eu
+
+npx playwright install && npx playwright install-deps
+yarn workspace @noir-lang/acvm_js test:browser
diff --git a/.github/scripts/acvm_js-test.sh b/.github/scripts/acvm_js-test.sh
new file mode 100755
index 00000000000..d5519d26cc4
--- /dev/null
+++ b/.github/scripts/acvm_js-test.sh
@@ -0,0 +1,4 @@
+#!/bin/bash
+set -eu
+
+yarn workspace @noir-lang/acvm_js test
diff --git a/.github/scripts/backend-barretenberg-build.sh b/.github/scripts/backend-barretenberg-build.sh
new file mode 100755
index 00000000000..d90995397d8
--- /dev/null
+++ b/.github/scripts/backend-barretenberg-build.sh
@@ -0,0 +1,4 @@
+#!/bin/bash
+set -eu
+
+yarn workspace @noir-lang/backend_barretenberg build
diff --git a/.github/scripts/backend-barretenberg-test.sh b/.github/scripts/backend-barretenberg-test.sh
new file mode 100755
index 00000000000..1bd6f8e410d
--- /dev/null
+++ b/.github/scripts/backend-barretenberg-test.sh
@@ -0,0 +1,4 @@
+#!/bin/bash
+set -eu
+
+yarn workspace @noir-lang/backend_barretenberg test
diff --git a/.github/scripts/install_wasm-bindgen.sh b/.github/scripts/install_wasm-bindgen.sh
new file mode 100755
index 00000000000..b8c41393ab0
--- /dev/null
+++ b/.github/scripts/install_wasm-bindgen.sh
@@ -0,0 +1,5 @@
+#!/bin/bash
+set -eu
+
+curl -L --proto '=https' --tlsv1.2 -sSf https://raw.githubusercontent.com/cargo-bins/cargo-binstall/main/install-from-binstall-release.sh | bash
+cargo-binstall wasm-bindgen-cli --version 0.2.86 -y
diff --git a/.github/scripts/integration-test.sh b/.github/scripts/integration-test.sh
new file mode 100755
index 00000000000..4e1b52cedf9
--- /dev/null
+++ b/.github/scripts/integration-test.sh
@@ -0,0 +1,6 @@
+#!/bin/bash
+set -eu
+
+apt-get install libc++-dev -y
+npx playwright install && npx playwright install-deps
+yarn workspace integration-tests test
\ No newline at end of file
diff --git a/.github/scripts/nargo-build.sh b/.github/scripts/nargo-build.sh
new file mode 100755
index 00000000000..2115732ab7e
--- /dev/null
+++ b/.github/scripts/nargo-build.sh
@@ -0,0 +1,8 @@
+#!/bin/bash
+set -eu
+
+export SOURCE_DATE_EPOCH=$(date +%s)
+export GIT_DIRTY=false
+export GIT_COMMIT=$(git rev-parse --verify HEAD)
+
+cargo build --release
diff --git a/.github/scripts/nargo-test.sh b/.github/scripts/nargo-test.sh
new file mode 100755
index 00000000000..9234df7bf5c
--- /dev/null
+++ b/.github/scripts/nargo-test.sh
@@ -0,0 +1,10 @@
+#!/bin/bash
+set -eu
+
+apt-get install -y curl libc++-dev
+
+export SOURCE_DATE_EPOCH=$(date +%s)
+export GIT_DIRTY=false
+export GIT_COMMIT=$(git rev-parse --verify HEAD)
+
+cargo test --workspace --locked --release
\ No newline at end of file
diff --git a/.github/scripts/noir-codegen-build.sh b/.github/scripts/noir-codegen-build.sh
new file mode 100755
index 00000000000..d42be4d676e
--- /dev/null
+++ b/.github/scripts/noir-codegen-build.sh
@@ -0,0 +1,4 @@
+#!/bin/bash
+set -eu
+
+yarn workspace @noir-lang/noir_codegen build
diff --git a/.github/scripts/noir-codegen-test.sh b/.github/scripts/noir-codegen-test.sh
new file mode 100755
index 00000000000..6f603f65507
--- /dev/null
+++ b/.github/scripts/noir-codegen-test.sh
@@ -0,0 +1,4 @@
+#!/bin/bash
+set -eu
+
+yarn workspace @noir-lang/noir_codegen test
\ No newline at end of file
diff --git a/.github/scripts/noir-js-build.sh b/.github/scripts/noir-js-build.sh
new file mode 100755
index 00000000000..04367e41342
--- /dev/null
+++ b/.github/scripts/noir-js-build.sh
@@ -0,0 +1,4 @@
+#!/bin/bash
+set -eu
+
+yarn workspace @noir-lang/noir_js build
diff --git a/.github/scripts/noir-js-test.sh b/.github/scripts/noir-js-test.sh
new file mode 100755
index 00000000000..b5fe34038fe
--- /dev/null
+++ b/.github/scripts/noir-js-test.sh
@@ -0,0 +1,6 @@
+#!/bin/bash
+set -eu
+
+./scripts/nargo_compile_noir_js_assert_lt.sh
+rm -rf /usr/src/noir/tooling/noir_js/test/noir_compiled_examples/assert_lt/target/debug_assert_lt.json
+yarn workspace @noir-lang/noir_js test
\ No newline at end of file
diff --git a/.github/scripts/noir-js-types-build.sh b/.github/scripts/noir-js-types-build.sh
new file mode 100755
index 00000000000..77b08651d68
--- /dev/null
+++ b/.github/scripts/noir-js-types-build.sh
@@ -0,0 +1,4 @@
+#!/bin/bash
+set -eu
+
+yarn workspace @noir-lang/types build
\ No newline at end of file
diff --git a/.github/scripts/noir-wasm-build.sh b/.github/scripts/noir-wasm-build.sh
new file mode 100755
index 00000000000..ccd49863f77
--- /dev/null
+++ b/.github/scripts/noir-wasm-build.sh
@@ -0,0 +1,7 @@
+#!/bin/bash
+set -eu
+
+.github/scripts/noirc-abi-build.sh
+
+.github/scripts/install_wasm-bindgen.sh
+yarn workspace @noir-lang/noir_wasm build
diff --git a/.github/scripts/noir-wasm-test-browser.sh b/.github/scripts/noir-wasm-test-browser.sh
new file mode 100755
index 00000000000..4b584abce23
--- /dev/null
+++ b/.github/scripts/noir-wasm-test-browser.sh
@@ -0,0 +1,6 @@
+#!/bin/bash
+set -eu
+
+./scripts/nargo_compile_wasm_fixtures.sh
+npx playwright install && npx playwright install-deps
+yarn workspace @noir-lang/noir_wasm test:browser
\ No newline at end of file
diff --git a/.github/scripts/noir-wasm-test.sh b/.github/scripts/noir-wasm-test.sh
new file mode 100755
index 00000000000..03e1bac2330
--- /dev/null
+++ b/.github/scripts/noir-wasm-test.sh
@@ -0,0 +1,7 @@
+#!/bin/bash
+set -eu
+
+./scripts/nargo_compile_wasm_fixtures.sh
+yarn workspace @noir-lang/noir_wasm test:node
+npx playwright install && npx playwright install-deps
+yarn workspace @noir-lang/noir_wasm test:browser
diff --git a/.github/scripts/noirc-abi-build.sh b/.github/scripts/noirc-abi-build.sh
new file mode 100755
index 00000000000..d5da6deaa0f
--- /dev/null
+++ b/.github/scripts/noirc-abi-build.sh
@@ -0,0 +1,5 @@
+#!/bin/bash
+set -eu
+
+.github/scripts/install_wasm-bindgen.sh
+yarn workspace @noir-lang/noirc_abi build
diff --git a/.github/scripts/noirc-abi-test-browser.sh b/.github/scripts/noirc-abi-test-browser.sh
new file mode 100755
index 00000000000..7a966cb5e94
--- /dev/null
+++ b/.github/scripts/noirc-abi-test-browser.sh
@@ -0,0 +1,5 @@
+#!/bin/bash
+set -eu
+
+npx playwright install && npx playwright install-deps
+yarn workspace @noir-lang/noirc_abi test:browser
diff --git a/.github/scripts/noirc-abi-test.sh b/.github/scripts/noirc-abi-test.sh
new file mode 100755
index 00000000000..39ca0a44b07
--- /dev/null
+++ b/.github/scripts/noirc-abi-test.sh
@@ -0,0 +1,4 @@
+#!/bin/bash
+set -eu
+
+yarn workspace @noir-lang/noirc_abi test
diff --git a/.github/workflows/auto-pr-rebuild-script.yml b/.github/workflows/auto-pr-rebuild-script.yml
deleted file mode 100644
index 4937de98e04..00000000000
--- a/.github/workflows/auto-pr-rebuild-script.yml
+++ /dev/null
@@ -1,130 +0,0 @@
-name: Rebuild ACIR artifacts
-
-on:
-  pull_request:
-  push:
-    branches:
-      - master
-
-jobs:
-  check-artifacts-requested:
-    name: Check if artifacts should be published
-    runs-on: ubuntu-22.04
-    outputs:
-      publish: ${{ steps.check.outputs.result }}
-
-    steps:
-      - name: Check if artifacts should be published
-        id: check
-        uses: actions/github-script@v6
-        with:
-          github-token: ${{ secrets.GITHUB_TOKEN }}
-          script: |
-            const { REF_NAME } = process.env;
-            if (REF_NAME == "master") {
-              console.log(`publish = true`)
-              return true;
-            }
-
-            const labels = context.payload.pull_request.labels.map(label => label.name);
-            const publish = labels.includes('publish-acir');
-
-            console.log(`publish = ${publish}`)
-            return publish;
-          result-encoding: string
-        env:
-          REF_NAME: ${{ github.ref_name }}
-
-  build-nargo:
-    name: Build nargo binary
-    if: ${{ needs.check-artifacts-requested.outputs.publish == 'true' }}
-    runs-on: ubuntu-22.04
-    needs: [check-artifacts-requested]
-    strategy:
-      matrix:
-        target: [x86_64-unknown-linux-gnu]
-
-    steps:
-      - name: Checkout Noir repo
-        uses: actions/checkout@v4
-
-      - name: Setup toolchain
-        uses: dtolnay/rust-toolchain@1.71.1
-
-      - uses: Swatinem/rust-cache@v2
-        with:
-          key: ${{ matrix.target }}
-          cache-on-failure: true
-          save-if: ${{ github.event_name != 'merge_group' }}
-
-      - name: Build Nargo
-        run: cargo build --package nargo_cli --release
-
-      - name: Package artifacts
-        run: |
-          mkdir dist
-          cp ./target/release/nargo ./dist/nargo
-          7z a -ttar -so -an ./dist/* | 7z a -si ./nargo-x86_64-unknown-linux-gnu.tar.gz
-
-      - name: Upload artifact
-        uses: actions/upload-artifact@v3
-        with:
-          name: nargo
-          path: ./dist/*
-          retention-days: 3
-
-  auto-pr-rebuild-script:
-    name: Rebuild ACIR artifacts
-    needs: [build-nargo]
-    runs-on: ubuntu-latest
-
-    steps:
-      - name: Check out code
-        uses: actions/checkout@v4
-
-      - name: Download nargo binary
-        uses: actions/download-artifact@v3
-        with:
-          name: nargo
-          path: ./nargo
-
-      - name: Add Nargo to $PATH
-        run: |
-          chmod +x ${{ github.workspace }}/nargo/nargo
-          echo "${{ github.workspace }}/nargo" >> $GITHUB_PATH
-
-      - name: Set up Git user (Github Action)
-        run: |
-          git config
--local user.name kevaundray - git config --local user.email kevtheappdev@gmail.com - - - name: Run rebuild script - working-directory: tooling/nargo_cli/tests - run: | - chmod +x ./rebuild.sh - ./rebuild.sh - - - name: Upload ACIR artifacts - uses: actions/upload-artifact@v3 - with: - name: acir-artifacts - path: ./tooling/nargo_cli/tests/acir_artifacts - retention-days: 10 - - - name: Check for changes in acir_artifacts directory - id: check_changes - if: ${{ github.ref_name }} == "master" - run: | - git diff --quiet tooling/nargo_cli/tests/acir_artifacts/ || echo "::set-output name=changes::true" - - - name: Create or Update PR - if: steps.check_changes.outputs.changes == 'true' - uses: peter-evans/create-pull-request@v3 - with: - token: ${{ secrets.NOIR_REPO_TOKEN }} - commit-message: "chore: update acir artifacts" - title: "chore: Update ACIR artifacts" - body: "Automatic PR to update acir artifacts" - add-paths: tooling/nargo_cli/tests/acir_artifacts/*.gz - labels: "auto-pr" - branch: "auto-pr-rebuild-script-branch" diff --git a/.github/workflows/build-aztec-feature-flag.yml b/.github/workflows/build-aztec-feature-flag.yml deleted file mode 100644 index 888a88a7f88..00000000000 --- a/.github/workflows/build-aztec-feature-flag.yml +++ /dev/null @@ -1,45 +0,0 @@ -name: Build with aztec feature flag - -on: - pull_request: - merge_group: - push: - branches: - - master - -# This will cancel previous runs when a branch or PR is updated -concurrency: - group: ${{ github.workflow }}-${{ github.head_ref || github.ref || github.run_id }} - cancel-in-progress: true - -jobs: - build-aztec-feature-flag: - name: Test on ${{ matrix.os }} - runs-on: ${{ matrix.runner }} - timeout-minutes: 30 - - strategy: - fail-fast: false - matrix: - include: - - os: ubuntu - runner: ubuntu-latest - target: x86_64-unknown-linux-gnu - - steps: - - name: Checkout - uses: actions/checkout@v4 - - - name: Setup toolchain - uses: dtolnay/rust-toolchain@1.71.1 - with: - targets: ${{ matrix.target }} - - - uses: Swatinem/rust-cache@v2 - with: - key: ${{ matrix.target }} - cache-on-failure: true - save-if: ${{ github.event_name != 'merge_group' }} - - - name: Build with feature flag - run: cargo build --features="noirc_frontend/aztec" diff --git a/.github/workflows/docker-test-flow.yml b/.github/workflows/docker-test-flow.yml new file mode 100644 index 00000000000..ad93d08e852 --- /dev/null +++ b/.github/workflows/docker-test-flow.yml @@ -0,0 +1,733 @@ +name: Test Nargo and JS packages + +on: + push: + branches: + - 'master' + +concurrency: + group: ${{ github.workflow }}-${{ github.head_ref || github.ref || github.run_id }} + cancel-in-progress: true + +jobs: + build-base-nargo: + name: Build base nargo docker image + runs-on: ubuntu-latest + steps: + - name: Checkout code + uses: actions/checkout@v4 + - name: Get current date + id: date + run: echo "date=$(date +'%Y.%m.%d.%H.%M')" >> $GITHUB_STATE + - name: prepare docker images tags + id: prep + run: | + REGISTRY="ghcr.io" + IMG_RAW="${REGISTRY}/${{ github.repository }}" + IMAGE=$(echo "$IMG_RAW" | tr '[:upper:]' '[:lower:]') + TAGS="${IMAGE}:${{ github.sha }}-nargo" + FULL_TAGS="${TAGS},${IMAGE}:latest-nargo,${IMAGE}:v${{ steps.date.outputs.date }}-nargo" + echo "tags=$FULL_TAGS" >> $GITHUB_OUTPUT + echo "image=$IMAGE" >> $GITHUB_OUTPUT + - name: Set up Docker Buildx + id: buildx + uses: docker/setup-buildx-action@v3 + - name: Login to GitHub Container Registry + uses: docker/login-action@v3 + with: + registry: ghcr.io + username: ${{ github.actor }} + password: ${{ 
secrets.GITHUB_TOKEN }} + - name: Build nargo base dockerfile + uses: docker/build-push-action@v5 + with: + context: . + file: Dockerfile.ci + tags: ${{ steps.prep.outputs.tags }} + target: base-nargo + cache-from: type=gha + cache-to: type=gha,mode=max + push: true + + build-base-js: + name: Build base js docker image + runs-on: ubuntu-latest + steps: + - name: Checkout code + uses: actions/checkout@v4 + - name: Get current date + id: date + run: echo "date=$(date +'%Y.%m.%d.%H.%M')" >> $GITHUB_STATE + - name: Prepare docker image tags + id: prep + run: | + REGISTRY="ghcr.io" + IMG_RAW="${REGISTRY}/${{ github.repository }}" + IMAGE=$(echo "$IMG_RAW" | tr '[:upper:]' '[:lower:]') + TAGS="${IMAGE}:${{ github.sha }}-js" + FULL_TAGS="${TAGS},${IMAGE}:latest-js,${IMAGE}:v${{ steps.date.outputs.date }}-js" + echo "tags=$FULL_TAGS" >> $GITHUB_OUTPUT + echo "image=$IMAGE" >> $GITHUB_OUTPUT + - name: Set up Docker Buildx + id: buildx + uses: docker/setup-buildx-action@v3 + - name: Login to GitHub Container Registry + uses: docker/login-action@v3 + with: + registry: ghcr.io + username: ${{ github.actor }} + password: ${{ secrets.GITHUB_TOKEN }} + - name: Build js base dockerfile + uses: docker/build-push-action@v5 + with: + context: . + file: Dockerfile.ci + tags: ${{ steps.prep.outputs.tags }} + target: base-js + cache-from: type=gha + cache-to: type=gha,mode=max + push: true + + artifact-nargo: + name: Artifact nargo + runs-on: ubuntu-latest + needs: [build-base-nargo] + container: + image: ghcr.io/noir-lang/noir:${{ github.sha }}-nargo + credentials: + username: ${{ github.actor }} + password: ${{ secrets.github_token }} + steps: + - name: Artifact nargo + uses: actions/upload-artifact@v4 + with: + name: nargo + path: /usr/src/noir/target/release/nargo + if-no-files-found: error + compression-level: 0 + + test-nargo: + name: Test nargo + runs-on: ubuntu-latest + needs: [build-base-nargo] + container: + image: ghcr.io/noir-lang/noir:${{ github.sha }}-nargo + credentials: + username: ${{ github.actor }} + password: ${{ secrets.github_token }} + steps: + - name: Test + working-directory: /usr/src/noir + run: | + .github/scripts/nargo-test.sh + + build-noir-wasm: + name: Build noir wasm + runs-on: ubuntu-latest + needs: [build-base-js] + container: + image: ghcr.io/noir-lang/noir:${{ github.sha }}-js + credentials: + username: ${{ github.actor }} + password: ${{ secrets.github_token }} + steps: + - name: Build + working-directory: /usr/src/noir + run: | + ./.github/scripts/noir-wasm-build.sh + - name: Artifact + uses: actions/upload-artifact@v4 + with: + name: noir_wasm + path: /usr/src/noir/compiler/wasm + retention-days: 10 + + test-noir-wasm: + name: Test noir wasm + runs-on: ubuntu-latest + needs: [build-base-js, artifact-nargo, build-noir-wasm] + container: + image: ghcr.io/noir-lang/noir:${{ github.sha }}-js + credentials: + username: ${{ github.actor }} + password: ${{ secrets.github_token }} + steps: + - name: Download nargo + uses: actions/download-artifact@v4 + with: + name: nargo + path: /usr/src/noir/target/release + - name: Prep downloaded artifact + run: | + chmod +x /usr/src/noir/target/release/nargo + - name: Download noir_wasm artifact + uses: actions/download-artifact@v4 + with: + name: noir_wasm + path: /usr/src/noir/compiler/wasm + - name: Test + working-directory: /usr/src/noir + run: | + ./.github/scripts/noir-wasm-test.sh + + test-noir-wasm-browser: + name: Test noir wasm browser + runs-on: ubuntu-latest + needs: [build-base-js, artifact-nargo, build-noir-wasm] + container: + 
image: ghcr.io/noir-lang/noir:${{ github.sha }}-js + credentials: + username: ${{ github.actor }} + password: ${{ secrets.github_token }} + steps: + - name: Download nargo + uses: actions/download-artifact@v4 + with: + name: nargo + path: /usr/src/noir/target/release + - name: Prep downloaded artifact + run: | + chmod +x /usr/src/noir/target/release/nargo + - name: Download noir_wasm artifact + uses: actions/download-artifact@v4 + with: + name: noir_wasm + path: /usr/src/noir/compiler/wasm + - name: Test + working-directory: /usr/src/noir + run: | + ./.github/scripts/noir-wasm-test-browser.sh + + build-acvm_js: + name: Build acvm js + runs-on: ubuntu-latest + needs: [build-base-js] + container: + image: ghcr.io/noir-lang/noir:${{ github.sha }}-js + credentials: + username: ${{ github.actor }} + password: ${{ secrets.github_token }} + steps: + - name: Build + working-directory: /usr/src/noir + run: | + ./.github/scripts/acvm_js-build.sh + - name: Artifact + uses: actions/upload-artifact@v4 + with: + name: acvm_js + path: + /usr/src/noir/acvm-repo/acvm_js/outputs/out/acvm_js + if-no-files-found: error + compression-level: 0 + + test-acvm_js: + name: Test acvm js + runs-on: ubuntu-latest + needs: [build-base-js, build-acvm_js] + container: + image: ghcr.io/noir-lang/noir:${{ github.sha }}-js + credentials: + username: ${{ github.actor }} + password: ${{ secrets.github_token }} + steps: + - name: Download acvm js + uses: actions/download-artifact@v4 + with: + name: acvm_js + path: | + /usr/src/noir/acvm-repo/acvm_js + - name: Test + working-directory: /usr/src/noir + run: | + ./.github/scripts/acvm_js-test.sh + + test-acvm_js-browser: + name: Test acvm js browser + runs-on: ubuntu-latest + needs: [build-base-js, build-acvm_js] + container: + image: ghcr.io/noir-lang/noir:${{ github.sha }}-js + credentials: + username: ${{ github.actor }} + password: ${{ secrets.github_token }} + steps: + - name: Download acvm js + uses: actions/download-artifact@v4 + with: + name: acvm_js + path: | + /usr/src/noir/acvm-repo/acvm_js + - name: Test + working-directory: /usr/src/noir + run: | + ./.github/scripts/acvm_js-test-browser.sh + + build-noirc-abi: + name: Build noirc abi + runs-on: ubuntu-latest + needs: [build-base-js] + container: + image: ghcr.io/noir-lang/noir:${{ github.sha }}-js + credentials: + username: ${{ github.actor }} + password: ${{ secrets.github_token }} + steps: + - name: Build + working-directory: /usr/src/noir + run: | + ./.github/scripts/noirc-abi-build.sh + - name: Artifact + uses: actions/upload-artifact@v4 + with: + name: noirc_abi_wasm + path: + /usr/src/noir/tooling/noirc_abi_wasm/outputs/out/noirc_abi_wasm + if-no-files-found: error + compression-level: 0 + + test-noirc-abi: + name: Test noirc abi + runs-on: ubuntu-latest + needs: [build-base-js, build-noirc-abi] + container: + image: ghcr.io/noir-lang/noir:${{ github.sha }}-js + credentials: + username: ${{ github.actor }} + password: ${{ secrets.github_token }} + steps: + - name: Download noirc abi + uses: actions/download-artifact@v4 + with: + name: noirc_abi_wasm + path: | + /usr/src/noir/tooling/noirc_abi_wasm + - name: Test + working-directory: /usr/src/noir + run: | + ./.github/scripts/noirc-abi-test.sh + + test-noirc-abi-browser: + name: Test noirc abi browser + runs-on: ubuntu-latest + needs: [build-base-js, build-noirc-abi] + container: + image: ghcr.io/noir-lang/noir:${{ github.sha }}-js + credentials: + username: ${{ github.actor }} + password: ${{ secrets.github_token }} + steps: + - name: Download noirc abi + uses: 
actions/download-artifact@v4 + with: + name: noirc_abi_wasm + path: | + /usr/src/noir/tooling/noirc_abi_wasm + - name: Test + working-directory: /usr/src/noir + run: | + ./.github/scripts/noirc-abi-test-browser.sh + + build-noir-js-types: + name: Build noir js types + runs-on: ubuntu-latest + needs: [build-base-js, build-noirc-abi] + container: + image: ghcr.io/noir-lang/noir:${{ github.sha }}-js + credentials: + username: ${{ github.actor }} + password: ${{ secrets.github_token }} + steps: + - name: Download noirc abi + uses: actions/download-artifact@v4 + with: + name: noirc_abi_wasm + path: | + /usr/src/noir/tooling/noirc_abi_wasm + - name: Build + working-directory: /usr/src/noir + run: | + ./.github/scripts/noir-js-types-build.sh + - name: Artifact + uses: actions/upload-artifact@v4 + with: + name: noir-js-types + path: | + /usr/src/noir/tooling/noir_js_types/lib + if-no-files-found: error + compression-level: 0 + + build-barretenberg-backend: + name: Build Barretenberg backend + runs-on: ubuntu-latest + needs: [build-base-js, build-noirc-abi, build-noir-js-types] + container: + image: ghcr.io/noir-lang/noir:${{ github.sha }}-js + credentials: + username: ${{ github.actor }} + password: ${{ secrets.github_token }} + steps: + - name: Download noirc abi + uses: actions/download-artifact@v4 + with: + name: noirc_abi_wasm + path: | + /usr/src/noir/tooling/noirc_abi_wasm + - name: Download noir js types + uses: actions/download-artifact@v4 + with: + name: noir-js-types + path: /usr/src/noir/tooling/noir_js_types/lib/ + - name: Build + working-directory: /usr/src/noir + run: | + ./.github/scripts/backend-barretenberg-build.sh + - name: Artifact + uses: actions/upload-artifact@v4 + with: + name: barretenberg-backend + path: + /usr/src/noir/tooling/noir_js_backend_barretenberg/lib + if-no-files-found: error + compression-level: 0 + + test-barretenberg-backend: + name: Test Barretenberg backend + runs-on: ubuntu-latest + needs: [build-base-js, build-noirc-abi, build-noir-js-types, build-barretenberg-backend] + container: + image: ghcr.io/noir-lang/noir:${{ github.sha }}-js + credentials: + username: ${{ github.actor }} + password: ${{ secrets.github_token }} + steps: + - name: Download artifact + uses: actions/download-artifact@v4 + with: + name: noirc_abi_wasm + path: | + /usr/src/noir/tooling/noirc_abi_wasm + - name: Download noir js types + uses: actions/download-artifact@v4 + with: + name: noir-js-types + path: /usr/src/noir/tooling/noir_js_types/lib/ + - name: Download Backend barretenberg + uses: actions/download-artifact@v4 + with: + name: barretenberg-backend + path: + /usr/src/noir/tooling/noir_js_backend_barretenberg/lib + - name: Test + working-directory: /usr/src/noir + run: | + ./.github/scripts/backend-barretenberg-test.sh + + build-noir_js: + name: Build noirjs + runs-on: ubuntu-latest + needs: [build-base-js, artifact-nargo, build-noirc-abi, build-acvm_js, build-barretenberg-backend, build-noir-js-types] + container: + image: ghcr.io/noir-lang/noir:${{ github.sha }}-js + credentials: + username: ${{ github.actor }} + password: ${{ secrets.github_token }} + steps: + - name: Download nargo + uses: actions/download-artifact@v4 + with: + name: nargo + path: /usr/src/noir/target/release + - name: prep downloaded artifact + run: | + chmod +x /usr/src/noir/target/release/nargo + - name: Download noirc abi + uses: actions/download-artifact@v4 + with: + name: noirc_abi_wasm + path: | + /usr/src/noir/tooling/noirc_abi_wasm + - name: Download acvm js + uses: actions/download-artifact@v4 + 
with: + name: acvm_js + path: | + /usr/src/noir/acvm-repo/acvm_js + - name: Download Barretenberg backend + uses: actions/download-artifact@v4 + with: + name: barretenberg-backend + path: + /usr/src/noir/tooling/noir_js_backend_barretenberg/lib + - name: Download noir js types + uses: actions/download-artifact@v4 + with: + name: noir-js-types + path: | + /usr/src/noir/tooling/noir_js_types/lib + - name: Build + working-directory: /usr/src/noir + run: | + ./.github/scripts/noir-js-build.sh + - name: Artifact + uses: actions/upload-artifact@v4 + with: + name: noir_js + path: + /usr/src/noir/tooling/noir_js/lib + + test-noir_js: + name: Test noirjs + runs-on: ubuntu-latest + needs: [ + build-base-js, + build-noirc-abi, + artifact-nargo, + build-acvm_js, + build-barretenberg-backend, + build-noir_js, + build-noir-js-types + ] + container: + image: ghcr.io/noir-lang/noir:${{ github.sha }}-js + credentials: + username: ${{ github.actor }} + password: ${{ secrets.github_token }} + steps: + - name: Download nargo + uses: actions/download-artifact@v4 + with: + name: nargo + path: /usr/src/noir/target/release + - name: Prep downloaded artifact + run: | + chmod +x /usr/src/noir/target/release/nargo + - name: Download noirc abi + uses: actions/download-artifact@v4 + with: + name: noirc_abi_wasm + path: | + /usr/src/noir/tooling/noirc_abi_wasm + - name: Download acvm js + uses: actions/download-artifact@v4 + with: + name: acvm_js + path: | + /usr/src/noir/acvm-repo/acvm_js + - name: Download Barretenberg backend + uses: actions/download-artifact@v4 + with: + name: barretenberg-backend + path: + /usr/src/noir/tooling/noir_js_backend_barretenberg/lib + - name: Download noir js types + uses: actions/download-artifact@v4 + with: + name: noir-js-types + path: | + /usr/src/noir/tooling/noir_js_types/lib + - name: Download noir js + uses: actions/download-artifact@v4 + with: + name: noir_js + path: + /usr/src/noir/tooling/noir_js/lib + - name: Test + working-directory: /usr/src/noir + run: | + ./.github/scripts/noir-js-test.sh + + build-noir_codegen: + name: Build noir codegen + runs-on: ubuntu-latest + needs: [build-base-js, build-noirc-abi, build-acvm_js, build-noir-js-types, build-noir_js] + container: + image: ghcr.io/noir-lang/noir:${{ github.sha }}-js + credentials: + username: ${{ github.actor }} + password: ${{ secrets.github_token }} + steps: + - name: Download nargo + uses: actions/download-artifact@v4 + with: + name: nargo + path: /usr/src/noir/target/release + - name: Prep downloaded artifact + run: | + chmod +x /usr/src/noir/target/release/nargo + - name: Download noirc abi package + uses: actions/download-artifact@v4 + with: + name: noirc_abi_wasm + path: /usr/src/noir/tooling/noirc_abi_wasm + - name: Download acvm js + uses: actions/download-artifact@v4 + with: + name: acvm_js + path: /usr/src/noir/acvm-repo/acvm_js + - name: Download noir js types + uses: actions/download-artifact@v4 + with: + name: noir-js-types + path: | + /usr/src/noir/tooling/noir_js_types/lib + - name: Download noir js + uses: actions/download-artifact@v4 + with: + name: noir_js + path: + /usr/src/noir/tooling/noir_js/lib + - name: Build + working-directory: /usr/src/noir + run: | + ./.github/scripts/noir-codegen-build.sh + - name: Artifact + uses: actions/upload-artifact@v4 + with: + name: noir_codegen + path: + /usr/src/noir/tooling/noir_codegen/lib + + test-noir_codegen: + name: Test noir codegen + runs-on: ubuntu-latest + needs: [build-base-js, artifact-nargo, build-noirc-abi, build-acvm_js, build-noir-js-types, 
build-noir_js, build-noir_codegen] + container: + image: ghcr.io/noir-lang/noir:${{ github.sha }}-js + credentials: + username: ${{ github.actor }} + password: ${{ secrets.github_token }} + steps: + - name: Download nargo + uses: actions/download-artifact@v4 + with: + name: nargo + path: /usr/src/noir/target/release + - name: Prep downloaded artifact + run: | + chmod +x /usr/src/noir/target/release/nargo + - name: Download noirc abi + uses: actions/download-artifact@v4 + with: + name: noirc_abi_wasm + path: /usr/src/noir/tooling/noirc_abi_wasm + - name: Download acvm js + uses: actions/download-artifact@v4 + with: + name: acvm_js + path: /usr/src/noir/acvm-repo/acvm_js + - name: Download noir js types + uses: actions/download-artifact@v4 + with: + name: noir-js-types + path: | + /usr/src/noir/tooling/noir_js_types/lib + - name: Download noir js + uses: actions/download-artifact@v4 + with: + name: noir_js + path: + /usr/src/noir/tooling/noir_js/lib + - name: Download noir codegen + uses: actions/download-artifact@v4 + with: + name: noir_codegen + path: + /usr/src/noir/tooling/noir_codegen/lib + - name: Test + working-directory: /usr/src/noir + run: | + ./.github/scripts/noir-codegen-test.sh + + test-integration: + name: Integration test + runs-on: ubuntu-latest + needs: [ + build-base-js, + artifact-nargo, + build-noir-wasm, + build-noirc-abi, + build-acvm_js, + build-noir-js-types, + build-noir_js, + build-barretenberg-backend + ] + container: + image: ghcr.io/noir-lang/noir:${{ github.sha }}-js + credentials: + username: ${{ github.actor }} + password: ${{ secrets.github_token }} + steps: + - name: Download nargo + uses: actions/download-artifact@v4 + with: + name: nargo + path: /usr/src/noir/target/release + - name: Prep downloaded artifact + run: | + chmod +x /usr/src/noir/target/release/nargo + - name: Download noir wasm + uses: actions/download-artifact@v4 + with: + name: noir_wasm + path: /usr/src/noir/compiler/wasm + - name: Download noirc abi + uses: actions/download-artifact@v4 + with: + name: noirc_abi_wasm + path: /usr/src/noir/tooling/noirc_abi_wasm + - name: Download acvm js + uses: actions/download-artifact@v4 + with: + name: acvm_js + path: /usr/src/noir/acvm-repo/acvm_js + - name: Download noir js types + uses: actions/download-artifact@v4 + with: + name: noir-js-types + path: | + /usr/src/noir/tooling/noir_js_types/lib + - name: Download noir js + uses: actions/download-artifact@v4 + with: + name: noir_js + path: + /usr/src/noir/tooling/noir_js/lib + - name: Download Barretenberg backend + uses: actions/download-artifact@v4 + with: + name: barretenberg-backend + path: + /usr/src/noir/tooling/noir_js_backend_barretenberg/lib + - name: Test + working-directory: /usr/src/noir + run: | + ./.github/scripts/integration-test.sh + + tests-end: + name: End + runs-on: ubuntu-latest + if: ${{ always() }} + needs: + - test-nargo + - test-noirc-abi + - test-noirc-abi-browser + - test-noir-wasm + - test-noir-wasm-browser + - test-integration + - test-noir_codegen + - test-acvm_js + - test-acvm_js-browser + - test-barretenberg-backend + - test-noir_js + + steps: + - name: Report overall success + run: | + if [[ $FAIL == true ]]; then + exit 1 + else + exit 0 + fi + env: + FAIL: ${{ contains(needs.*.result, 'failure') || contains(needs.*.result, 'skipped') }} diff --git a/.github/workflows/docs-dead-links.yml b/.github/workflows/docs-dead-links.yml new file mode 100644 index 00000000000..ffb18fa0eb2 --- /dev/null +++ b/.github/workflows/docs-dead-links.yml @@ -0,0 +1,35 @@ +name: Check 
Markdown links + +on: + schedule: + # Run a check at 9 AM UTC + - cron: "0 9 * * *" + +# This will cancel previous runs when a branch or PR is updated +concurrency: + group: ${{ github.workflow }}-${{ github.head_ref || github.ref || github.run_id }} + cancel-in-progress: true + + +jobs: + markdown-link-check: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@master + - uses: gaurav-nelson/github-action-markdown-link-check@v1 + with: + use-quiet-mode: 'yes' + config-file: ./docs/link-check.config.json + folder-path: ./docs + + # Raise an issue if the previous step failed due to dead links being found + - name: Alert on dead links + uses: JasonEtco/create-an-issue@v2 + if: ${{ failure() }} + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + WORKFLOW_NAME: ${{ github.workflow }} + WORKFLOW_URL: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}/job/${{ github.job }} + with: + update_existing: true + filename: .github/DEAD_LINKS_IN_DOCS.md diff --git a/.github/workflows/docs-new-version.yml b/.github/workflows/docs-new-version.yml deleted file mode 100644 index 9b109e170bb..00000000000 --- a/.github/workflows/docs-new-version.yml +++ /dev/null @@ -1,112 +0,0 @@ -name: Cut a new version of the docs - -on: - workflow_dispatch: - inputs: - tag: - description: The tag to build Docs for - required: false - -jobs: - publish-docs: - runs-on: ubuntu-latest - if: ${{ inputs.tag != '' }} - permissions: - pull-requests: write - contents: write - steps: - - name: Checkout sources - uses: actions/checkout@v4 - with: - ref: ${{ inputs.tag }} - - - name: Create new branch - run: | - git checkout -b new-docs-version-${{ github.event.inputs.tag }} - - - name: Setup Node.js - uses: actions/setup-node@v2 - with: - node-version: '18' - - - name: Install wasm-bindgen-cli - uses: taiki-e/install-action@v2 - with: - tool: wasm-bindgen-cli@0.2.86 - - - name: Install wasm-opt - run: | - npm i wasm-opt -g - - - name: Install Yarn - run: npm install -g yarn - - - name: Install Yarn dependencies - run: yarn - - - name: Build acvm_js - run: yarn workspace @noir-lang/acvm_js build - - - name: Build noirc_abi - run: yarn workspace @noir-lang/noirc_abi build - - - name: Build noir_js_types - run: yarn workspace @noir-lang/types build - - - name: Build barretenberg wrapper - run: yarn workspace @noir-lang/backend_barretenberg build - - - name: Run noir_js - run: | - yarn workspace @noir-lang/noir_js build - - - name: Build docs - run: - yarn workspace docs build - - - name: Cut a new version - working-directory: ./docs - run: yarn docusaurus docs:version ${{ inputs.tag }} - - - name: Remove pre-releases - id: get_version - run: | - cd docs && yarn setStable - - - name: Commit new documentation version - run: | - git config --local user.name 'signorecello' - git config --local user.email 'github@zepedro.me' - git add . - git commit -m "chore(docs): cut new docs version for tag ${{ github.event.inputs.tag }}" - - - name: Push changes to new branch - run: git push origin new-docs-version-${{ github.event.inputs.tag }} - - - name: Create Pull Request - run: | - gh pr create \ - --title "chore(docs): docs for ${{ github.event.inputs.tag }}" \ - --body "Updates documentation to new version for tag ${{ github.event.inputs.tag }}." 
\ - --base master \ - --head new-docs-version-${{ github.event.inputs.tag }} \ - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - - - name: Build docs - run: yarn workspace docs build - - - name: Deploy to Netlify - uses: nwtgck/actions-netlify@v2.1 - with: - publish-dir: './docs/build' - production-branch: master - production-deploy: true - github-token: ${{ secrets.GITHUB_TOKEN }} - enable-github-deployment: false - deploy-message: "Deploy from GitHub Actions for tag ${{ inputs.tag }}" - env: - NETLIFY_AUTH_TOKEN: ${{ secrets.NETLIFY_AUTH_TOKEN }} - NETLIFY_SITE_ID: ${{ secrets.NETLIFY_SITE_ID }} - timeout-minutes: 1 - diff --git a/.github/workflows/docs-pr.yml b/.github/workflows/docs-pr.yml index 02044c82224..a16487a49ef 100644 --- a/.github/workflows/docs-pr.yml +++ b/.github/workflows/docs-pr.yml @@ -55,7 +55,7 @@ jobs: if: needs.add_label.outputs.has_label == 'true' steps: - name: Checkout code - uses: actions/checkout@v2 + uses: actions/checkout@v4 - name: Setup Node.js uses: actions/setup-node@v2 @@ -71,34 +71,12 @@ jobs: run: | npm i wasm-opt -g - - name: Install dependencies - run: yarn + - name: Install Yarn dependencies + uses: ./.github/actions/setup - - name: Build acvm_js - run: yarn workspace @noir-lang/acvm_js build - - - name: Build noirc_abi - run: yarn workspace @noir-lang/noirc_abi build - - - name: Build noir_js_types - run: yarn workspace @noir-lang/types build - - - name: Build barretenberg wrapper - run: yarn workspace @noir-lang/backend_barretenberg build - - - name: Run noir_js - run: | - yarn workspace @noir-lang/noir_js build - - - name: Remove pre-releases - working-directory: docs - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - run: yarn setStable - - name: Build docs run: - yarn workspace docs build + yarn workspaces foreach -Rt run build - name: Deploy to Netlify uses: nwtgck/actions-netlify@v2.1 diff --git a/.github/workflows/docs-release.yml b/.github/workflows/docs-release.yml deleted file mode 100644 index 4cd9d9998cb..00000000000 --- a/.github/workflows/docs-release.yml +++ /dev/null @@ -1,72 +0,0 @@ -name: Rebuild docs with the latest release - -on: - release: - types: [released] - workflow_dispatch: - -jobs: - build_and_deploy: - runs-on: ubuntu-latest - permissions: - pull-requests: write - steps: - - name: Checkout code - uses: actions/checkout@v2 - - - name: Setup Node.js - uses: actions/setup-node@v2 - with: - node-version: '18' - - - name: Install wasm-bindgen-cli - uses: taiki-e/install-action@v2 - with: - tool: wasm-bindgen-cli@0.2.86 - - - name: Install wasm-opt - run: | - npm i wasm-opt -g - - - name: Install dependencies - run: yarn - - - name: Build acvm_js - run: yarn workspace @noir-lang/acvm_js build - - - name: Build noirc_abi - run: yarn workspace @noir-lang/noirc_abi build - - - name: Build noir_js_types - run: yarn workspace @noir-lang/types build - - - name: Build barretenberg wrapper - run: yarn workspace @noir-lang/backend_barretenberg build - - - name: Run noir_js - run: | - yarn workspace @noir-lang/noir_js build - - - name: Remove pre-releases - working-directory: docs - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - run: yarn setStable - - - name: Build docs - run: - yarn workspace docs build - - - name: Deploy to Netlify - uses: nwtgck/actions-netlify@v2.1 - with: - publish-dir: './docs/build' - production-branch: master - production-deploy: true - github-token: ${{ secrets.GITHUB_TOKEN }} - enable-github-deployment: false - deploy-message: "Deploy from GitHub Actions for release" - env: - NETLIFY_AUTH_TOKEN: ${{ 
secrets.NETLIFY_AUTH_TOKEN }} - NETLIFY_SITE_ID: ${{ secrets.NETLIFY_SITE_ID }} - timeout-minutes: 1 diff --git a/.github/workflows/gates_report.yml b/.github/workflows/gates_report.yml index 41a68c65852..8e3ef768828 100644 --- a/.github/workflows/gates_report.yml +++ b/.github/workflows/gates_report.yml @@ -50,9 +50,7 @@ jobs: pull-requests: write steps: - - uses: actions/checkout@v3 - with: - submodules: recursive + - uses: actions/checkout@v4 - name: Download nargo binary uses: actions/download-artifact@v3 @@ -69,10 +67,10 @@ jobs: nargo -V - name: Generate gates report - working-directory: ./tooling/nargo_cli/tests + working-directory: ./test_programs run: | ./gates_report.sh - mv gates_report.json ../../../gates_report.json + mv gates_report.json ../gates_report.json - name: Compare gates reports id: gates_diff diff --git a/.github/workflows/publish-acvm.yml b/.github/workflows/publish-acvm.yml index ded669f13d0..0251aaa0377 100644 --- a/.github/workflows/publish-acvm.yml +++ b/.github/workflows/publish-acvm.yml @@ -3,7 +3,7 @@ name: Publish ACVM crates on: workflow_dispatch: inputs: - acvm-ref: + noir-ref: description: The acvm reference to checkout required: true @@ -15,7 +15,7 @@ jobs: - name: Checkout sources uses: actions/checkout@v4 with: - ref: ${{ inputs.acvm-ref }} + ref: ${{ inputs.noir-ref }} - name: Setup toolchain uses: dtolnay/rust-toolchain@1.71.1 @@ -45,15 +45,9 @@ jobs: env: CARGO_REGISTRY_TOKEN: ${{ secrets.ACVM_CRATES_IO_TOKEN }} - - name: Publish barretenberg_blackbox_solver + - name: Publish bn254_blackbox_solver run: | - cargo publish --package barretenberg_blackbox_solver - env: - CARGO_REGISTRY_TOKEN: ${{ secrets.ACVM_CRATES_IO_TOKEN }} - - - name: Publish acvm_stdlib - run: | - cargo publish --package acvm_stdlib + cargo publish --package bn254_blackbox_solver env: CARGO_REGISTRY_TOKEN: ${{ secrets.ACVM_CRATES_IO_TOKEN }} diff --git a/.github/workflows/publish-docs.yml b/.github/workflows/publish-docs.yml new file mode 100644 index 00000000000..07b39d7627c --- /dev/null +++ b/.github/workflows/publish-docs.yml @@ -0,0 +1,53 @@ +name: Publish documentation + +on: + push: + branches: + - master + paths: [docs/**] + +jobs: + publish-docs: + name: Publish docs + runs-on: ubuntu-latest + + steps: + - name: Checkout release branch + uses: actions/checkout@v4 + + - name: Setup Node.js + uses: actions/setup-node@v2 + with: + node-version: '18' + + - name: Install wasm-bindgen-cli + uses: taiki-e/install-action@v2 + with: + tool: wasm-bindgen-cli@0.2.86 + + - name: Install wasm-opt + run: | + npm i wasm-opt -g + + - name: Install Yarn dependencies + uses: ./.github/actions/setup + + - name: Build docs for deploying + working-directory: docs + run: + yarn workspaces foreach -Rt run build + + - name: Deploy to Netlify + uses: nwtgck/actions-netlify@v2.1 + with: + publish-dir: './docs/build' + production-branch: master + production-deploy: true + github-token: ${{ secrets.GITHUB_TOKEN }} + enable-github-deployment: false + deploy-message: "Deploy from GitHub Actions for tag ${{ inputs.noir-ref }}" + enable-commit-comment: false + env: + NETLIFY_AUTH_TOKEN: ${{ secrets.NETLIFY_AUTH_TOKEN }} + NETLIFY_SITE_ID: ${{ secrets.NETLIFY_SITE_ID }} + timeout-minutes: 1 diff --git a/.github/workflows/publish-es-packages.yml b/.github/workflows/publish-es-packages.yml index 2e88ee2b77f..e360654b46a 100644 --- a/.github/workflows/publish-es-packages.yml +++ b/.github/workflows/publish-es-packages.yml @@ -30,11 +30,6 @@ jobs: nix-cache-name: "noir" cachix-auth-token: ${{ 
secrets.CACHIXAUTHTOKEN }} - - name: Enable aztec features - if: ${{ inputs.npm-tag == 'aztec' }} - run: | - echo $'\n'"default = [\"aztec\"]"$'\n' >> compiler/noirc_frontend/Cargo.toml - - name: Build wasm package run: | nix build -L .#noir_wasm diff --git a/.github/workflows/publish-nargo.yml b/.github/workflows/publish-nargo.yml index 59d4fe4cc48..3bcc9a13570 100644 --- a/.github/workflows/publish-nargo.yml +++ b/.github/workflows/publish-nargo.yml @@ -30,6 +30,7 @@ jobs: strategy: matrix: target: [x86_64-apple-darwin, aarch64-apple-darwin] + timeout-minutes: 30 steps: - name: Checkout @@ -120,6 +121,7 @@ jobs: fail-fast: false matrix: target: [x86_64-unknown-linux-gnu, x86_64-unknown-linux-musl] + timeout-minutes: 30 steps: - name: Checkout diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 95da6792f04..f9f6fe2fc54 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -11,6 +11,7 @@ jobs: outputs: release-pr: ${{ steps.release.outputs.pr }} tag-name: ${{ steps.release.outputs.tag_name }} + pending-release-semver: v${{ steps.release.outputs.major }}.${{steps.release.outputs.minor}}.${{steps.release.outputs.patch}} runs-on: ubuntu-latest steps: - name: Run release-please @@ -20,8 +21,8 @@ jobs: token: ${{ secrets.NOIR_RELEASES_TOKEN }} command: manifest - update-lockfile: - name: Update lockfile + update-acvm-workspace-package-versions: + name: Update acvm workspace package versions needs: [release-please] if: ${{ needs.release-please.outputs.release-pr }} runs-on: ubuntu-latest @@ -35,9 +36,13 @@ jobs: - name: Setup toolchain uses: dtolnay/rust-toolchain@1.65.0 + - name: Update ACVM workspace root versions + run: | + ./scripts/update-acvm-workspace-versions.sh + - name: Update lockfile run: | - cargo update --workspace + cargo update --workspace - name: Configure git run: | @@ -46,8 +51,44 @@ jobs: - name: Commit updates run: | - git add Cargo.lock - git commit -m 'chore: Update lockfile' + git add . + git commit -m 'chore: Update root workspace acvm versions and lockfile' + git push + + update-docs: + name: Update docs + needs: [release-please, update-acvm-workspace-package-versions] + if: ${{ needs.release-please.outputs.release-pr }} + runs-on: ubuntu-latest + + steps: + - name: Checkout release branch + uses: actions/checkout@v4 + with: + ref: ${{ fromJSON(needs.release-please.outputs.release-pr).headBranchName }} + token: ${{ secrets.NOIR_RELEASES_TOKEN }} + + - name: Setup Node.js + uses: actions/setup-node@v2 + with: + node-version: '18' + + - name: Install Yarn dependencies + uses: ./.github/actions/setup + + - name: Cut a new version + working-directory: ./docs + run: yarn docusaurus docs:version ${{ needs.release-please.outputs.pending-release-semver }} + + - name: Configure git + run: | + git config --local user.name 'kevaundray' + git config --local user.email 'kevtheappdev@gmail.com' + + - name: Commit new documentation version + run: | + git add . 
+ git commit -m "chore(docs): cut new docs version for tag ${{ needs.release-please.outputs.pending-release-semver }}" git push build-binaries: @@ -78,19 +119,18 @@ jobs: ref: master token: ${{ secrets.NOIR_REPO_TOKEN }} inputs: '{ "noir-ref": "${{ needs.release-please.outputs.tag-name }}", "npm-tag": "latest" }' - - publish-docs: - name: Publish docs + publish-acvm: + name: Publish acvm needs: [release-please] if: ${{ needs.release-please.outputs.tag-name }} runs-on: ubuntu-latest + steps: - - name: Dispatch to publish workflow + - name: Dispatch to publish-acvm uses: benc-uk/workflow-dispatch@v1 with: - workflow: docs-new-version.yml - repo: noir-lang/noir + workflow: publish-acvm.yml ref: master - token: ${{ secrets.GITHUB_TOKEN }} - inputs: '{ "tag": "${{ needs.release-please.outputs.tag-name }}"}' + token: ${{ secrets.NOIR_REPO_TOKEN }} + inputs: '{ "noir-ref": "${{ needs.release-please.outputs.tag-name }}" }' diff --git a/.github/workflows/spellcheck.yml b/.github/workflows/spellcheck.yml index 89fcdd12d12..83d67325775 100644 --- a/.github/workflows/spellcheck.yml +++ b/.github/workflows/spellcheck.yml @@ -8,6 +8,32 @@ concurrency: cancel-in-progress: true jobs: - spellcheck: - name: Spellcheck - uses: noir-lang/.github/.github/workflows/spellcheck.yml@main + code: + name: Code + runs-on: ubuntu-latest + steps: + - name: Checkout sources + uses: actions/checkout@v4 + + - name: Check spelling + uses: streetsidesoftware/cspell-action@v2 + with: + strict: false # Do not fail, if a spelling mistake is found (This can be annoying for contributors) + incremental_files_only: true # Run this action on files which have changed in PR + files: | + **/*.{md,rs} + + docs: + name: Documentation + runs-on: ubuntu-latest + steps: + - name: Checkout sources + uses: actions/checkout@v4 + + - name: Check spelling + uses: streetsidesoftware/cspell-action@v2 + with: + strict: true # Documentation has higher standards for correctness. 
+ incremental_files_only: true # Run this action on files which have changed in PR + files: | + ./docs/**/*.md diff --git a/.github/workflows/test-abi_wasm.yml b/.github/workflows/test-abi_wasm.yml deleted file mode 100644 index 52223d874bf..00000000000 --- a/.github/workflows/test-abi_wasm.yml +++ /dev/null @@ -1,71 +0,0 @@ -name: ABI Wasm test - -on: - pull_request: - merge_group: - push: - branches: - - master - -# This will cancel previous runs when a branch or PR is updated -concurrency: - group: ${{ github.workflow }}-${{ github.head_ref || github.ref || github.run_id }} - cancel-in-progress: true - -jobs: - build: - runs-on: ubuntu-latest - - steps: - - name: Checkout sources - uses: actions/checkout@v3 - - - name: Setup Nix - uses: ./.github/actions/nix - with: - github-token: ${{ secrets.GITHUB_TOKEN }} - nix-cache-name: "noir" - cachix-auth-token: ${{ secrets.CACHIXAUTHTOKEN }} - - - name: Build noirc_abi_wasm - run: | - nix build -L .#noirc_abi_wasm - cp -r ./result/noirc_abi_wasm/nodejs ./tooling/noirc_abi_wasm - cp -r ./result/noirc_abi_wasm/web ./tooling/noirc_abi_wasm - - - name: Dereference symlink - run: echo "UPLOAD_PATH=$(readlink -f ./result/noirc_abi_wasm)" >> $GITHUB_ENV - - - name: Upload artifact - uses: actions/upload-artifact@v3 - with: - name: noirc_abi_wasm - path: ${{ env.UPLOAD_PATH }} - retention-days: 10 - - test: - runs-on: ubuntu-latest - needs: [build] - - steps: - - name: Checkout sources - uses: actions/checkout@v3 - - - name: Download wasm package artifact - uses: actions/download-artifact@v3 - with: - name: noirc_abi_wasm - path: ./tooling/noirc_abi_wasm - - - name: Install Yarn dependencies - uses: ./.github/actions/setup - - - name: Run node tests - run: yarn workspace @noir-lang/noirc_abi test - - - name: Install Playwright - uses: ./.github/actions/install-playwright - - - name: Run browser tests - run: yarn workspace @noir-lang/noirc_abi test:browser - diff --git a/.github/workflows/test-acvm-js.yml b/.github/workflows/test-acvm-js.yml deleted file mode 100644 index 14ce5d916c0..00000000000 --- a/.github/workflows/test-acvm-js.yml +++ /dev/null @@ -1,99 +0,0 @@ -name: Test acvm_js - -on: - pull_request: - merge_group: - push: - branches: - - master - -# This will cancel previous runs when a branch or PR is updated -concurrency: - group: ${{ github.workflow }}-${{ github.head_ref || github.ref || github.run_id }} - cancel-in-progress: true - -jobs: - build-acvm-js-package: - runs-on: ubuntu-latest - steps: - - name: Checkout sources - uses: actions/checkout@v4 - - - name: Setup Nix - uses: ./.github/actions/nix - with: - github-token: ${{ secrets.GITHUB_TOKEN }} - nix-cache-name: "noir" - cachix-auth-token: ${{ secrets.CACHIXAUTHTOKEN }} - - - name: Build acvm-js - run: | - nix build -L .#acvm_js - - - name: Dereference symlink - run: echo "UPLOAD_PATH=$(readlink -f result)" >> $GITHUB_ENV - - - name: Upload artifact - uses: actions/upload-artifact@v3 - with: - name: acvm-js - path: ${{ env.UPLOAD_PATH }} - retention-days: 3 - - test-acvm_js-node: - needs: [build-acvm-js-package] - name: Node.js Tests - runs-on: ubuntu-latest - - steps: - - name: Checkout sources - uses: actions/checkout@v4 - - - name: Download artifact - uses: actions/download-artifact@v3 - with: - name: acvm-js - path: ./result - - - name: Move build artifacts - run: | - mv ./result/acvm_js/nodejs ./acvm-repo/acvm_js/nodejs - mv ./result/acvm_js/web ./acvm-repo/acvm_js/web - - - name: Set up test environment - uses: ./.github/actions/setup - - - name: Run node tests - run: yarn 
workspace @noir-lang/acvm_js test - - test-acvm_js-browser: - needs: [build-acvm-js-package] - name: Browser Tests - runs-on: ubuntu-latest - - steps: - - name: Checkout sources - uses: actions/checkout@v4 - - - name: Download artifact - uses: actions/download-artifact@v3 - with: - name: acvm-js - path: ./result - - - name: Move build artifacts - run: | - mv ./result/acvm_js/nodejs ./acvm-repo/acvm_js/nodejs - mv ./result/acvm_js/web ./acvm-repo/acvm_js/web - - - name: Set up test environment - uses: ./.github/actions/setup - - - name: Install playwright deps - run: | - npx playwright install - npx playwright install-deps - - - name: Run browser tests - working-directory: ./acvm-repo/acvm_js - run: yarn workspace @noir-lang/acvm_js test:browser diff --git a/.github/workflows/test-integration.yml b/.github/workflows/test-integration.yml deleted file mode 100644 index ad28d9c8e86..00000000000 --- a/.github/workflows/test-integration.yml +++ /dev/null @@ -1,199 +0,0 @@ -name: test-integration - -on: - workflow_dispatch: - pull_request: - merge_group: - schedule: - - cron: "0 2 * * *" # Run nightly at 2 AM UTC - -jobs: - build-nargo: - runs-on: ubuntu-latest - strategy: - matrix: - target: [x86_64-unknown-linux-gnu] - - steps: - - name: Checkout Noir repo - uses: actions/checkout@v4 - - - name: Setup toolchain - uses: dtolnay/rust-toolchain@1.71.1 - - - uses: Swatinem/rust-cache@v2 - with: - key: ${{ matrix.target }} - cache-on-failure: true - save-if: ${{ github.event_name != 'merge_group' }} - - - name: Build Nargo - run: cargo build --package nargo_cli --release - - - name: Package artifacts - run: | - mkdir dist - cp ./target/release/nargo ./dist/nargo - 7z a -ttar -so -an ./dist/* | 7z a -si ./nargo-x86_64-unknown-linux-gnu.tar.gz - - - name: Upload artifact - uses: actions/upload-artifact@v3 - with: - name: nargo - path: ./dist/* - retention-days: 3 - - build-acvm-js: - runs-on: ubuntu-latest - steps: - - name: Checkout sources - uses: actions/checkout@v4 - - - name: Setup Nix - uses: ./.github/actions/nix - with: - github-token: ${{ secrets.GITHUB_TOKEN }} - nix-cache-name: "noir" - cachix-auth-token: ${{ secrets.CACHIXAUTHTOKEN }} - - - name: Build acvm-js - run: | - nix build -L .#acvm_js - - - name: Dereference symlink - run: echo "UPLOAD_PATH=$(readlink -f result/acvm_js)" >> $GITHUB_ENV - - - name: Upload artifact - uses: actions/upload-artifact@v3 - with: - name: acvm-js - path: ${{ env.UPLOAD_PATH }} - retention-days: 3 - - build-wasm: - runs-on: ubuntu-latest - - steps: - - name: Checkout sources - uses: actions/checkout@v4 - - - name: Setup Nix - uses: ./.github/actions/nix - with: - github-token: ${{ secrets.GITHUB_TOKEN }} - nix-cache-name: "noir" - cachix-auth-token: ${{ secrets.CACHIXAUTHTOKEN }} - - - name: Build wasm package - run: | - nix build -L .#noir_wasm - - - name: Dereference symlink - run: echo "UPLOAD_PATH=$(readlink -f ./result/noir_wasm)" >> $GITHUB_ENV - - - name: Upload artifact - uses: actions/upload-artifact@v3 - with: - name: noir_wasm - path: ${{ env.UPLOAD_PATH }} - retention-days: 3 - - build-noirc: - runs-on: ubuntu-latest - - steps: - - name: Checkout sources - uses: actions/checkout@v3 - - - name: Setup Nix - uses: ./.github/actions/nix - with: - github-token: ${{ secrets.GITHUB_TOKEN }} - nix-cache-name: "noir" - cachix-auth-token: ${{ secrets.CACHIXAUTHTOKEN }} - - - name: Build noirc_abi_wasm - run: | - nix build -L .#noirc_abi_wasm - cp -r ./result/noirc_abi_wasm/nodejs ./tooling/noirc_abi_wasm - cp -r ./result/noirc_abi_wasm/web 
./tooling/noirc_abi_wasm - - - name: Dereference symlink - run: echo "UPLOAD_PATH=$(readlink -f ./result/noirc_abi_wasm)" >> $GITHUB_ENV - - - name: Upload artifact - uses: actions/upload-artifact@v3 - with: - name: noirc_abi_wasm - path: ${{ env.UPLOAD_PATH }} - retention-days: 10 - - test-solidity-verifier: - runs-on: ubuntu-latest - needs: [build-acvm-js, build-wasm, build-nargo, build-noirc] - env: - CACHED_PATH: /tmp/nix-cache - - steps: - - name: Checkout - uses: actions/checkout@v4 - - - name: Download nargo binary - uses: actions/download-artifact@v3 - with: - name: nargo - path: ./nargo - - - name: Download acvm_js package artifact - uses: actions/download-artifact@v3 - with: - name: acvm-js - path: ./acvm-repo/acvm_js - - - name: Download noir_wasm package artifact - uses: actions/download-artifact@v3 - with: - name: noir_wasm - path: ./compiler/wasm - - - name: Download noirc_abi package artifact - uses: actions/download-artifact@v3 - with: - name: noirc_abi_wasm - path: ./tooling/noirc_abi_wasm - - - name: Set nargo on PATH - run: | - nargo_binary="${{ github.workspace }}/nargo/nargo" - chmod +x $nargo_binary - echo "$(dirname $nargo_binary)" >> $GITHUB_PATH - export PATH="$PATH:$(dirname $nargo_binary)" - nargo -V - - - name: Install Yarn dependencies - uses: ./.github/actions/setup - - - name: Install Playwright - uses: ./.github/actions/install-playwright - - - name: Setup `integration-tests` - run: | - yarn workspace @noir-lang/source-resolver build - yarn workspace @noir-lang/types build - yarn workspace @noir-lang/backend_barretenberg build - yarn workspace @noir-lang/noir_js build - - - name: Run `integration-tests` - run: | - yarn test:integration - - - name: Alert on nightly test failure - uses: JasonEtco/create-an-issue@v2 - if: ${{ failure() && github.event_name == 'schedule' }} - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - WORKFLOW_NAME: ${{ github.workflow }} - WORKFLOW_URL: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }} - with: - update_existing: true - filename: .github/NIGHTLY_TEST_FAILURE.md diff --git a/.github/workflows/test-js-packages.yml b/.github/workflows/test-js-packages.yml new file mode 100644 index 00000000000..addc9ce3d83 --- /dev/null +++ b/.github/workflows/test-js-packages.yml @@ -0,0 +1,462 @@ +name: Javascript Tests + +on: + pull_request: + merge_group: + push: + branches: + - master + +# This will cancel previous runs when a branch or PR is updated +concurrency: + group: ${{ github.workflow }}-${{ github.head_ref || github.ref || github.run_id }} + cancel-in-progress: true + +jobs: + build-nargo: + runs-on: ubuntu-22.04 + timeout-minutes: 30 + + steps: + - name: Checkout Noir repo + uses: actions/checkout@v4 + + - name: Setup toolchain + uses: dtolnay/rust-toolchain@1.71.1 + + - uses: Swatinem/rust-cache@v2 + with: + key: x86_64-unknown-linux-gnu + cache-on-failure: true + save-if: ${{ github.event_name != 'merge_group' }} + + - name: Build Nargo + run: cargo build --package nargo_cli --release + + - name: Package artifacts + run: | + mkdir dist + cp ./target/release/nargo ./dist/nargo + 7z a -ttar -so -an ./dist/* | 7z a -si ./nargo-x86_64-unknown-linux-gnu.tar.gz + + - name: Upload artifact + uses: actions/upload-artifact@v3 + with: + name: nargo + path: ./dist/* + retention-days: 3 + + build-noir-wasm: + needs: [build-noirc-abi] + runs-on: ubuntu-latest + timeout-minutes: 30 + + steps: + - name: Checkout sources + uses: actions/checkout@v4 + + - name: Setup toolchain + uses: 
dtolnay/rust-toolchain@1.71.1 + + - uses: Swatinem/rust-cache@v2 + with: + key: noir-wasm + cache-on-failure: true + save-if: ${{ github.event_name != 'merge_group' }} + + - name: Download noirc_abi_wasm package artifact + uses: actions/download-artifact@v3 + with: + name: noirc_abi_wasm + path: ./tooling/noirc_abi_wasm + + - name: Install Yarn dependencies + uses: ./.github/actions/setup + + - name: Build noir_wasm + run: yarn workspace @noir-lang/noir_wasm build + + - name: Upload artifact + uses: actions/upload-artifact@v3 + with: + name: noir_wasm + path: | + ./compiler/wasm/dist + ./compiler/wasm/build + retention-days: 3 + + build-acvm-js: + runs-on: ubuntu-latest + timeout-minutes: 30 + + steps: + - name: Checkout sources + uses: actions/checkout@v4 + + - name: Setup Nix + uses: ./.github/actions/nix + with: + github-token: ${{ secrets.GITHUB_TOKEN }} + nix-cache-name: "noir" + cachix-auth-token: ${{ secrets.CACHIXAUTHTOKEN }} + + - name: Build acvm-js + run: | + nix build -L .#acvm_js + + - name: Dereference symlink + run: echo "UPLOAD_PATH=$(readlink -f result/acvm_js)" >> $GITHUB_ENV + + - name: Upload artifact + uses: actions/upload-artifact@v3 + with: + name: acvm-js + path: ${{ env.UPLOAD_PATH }} + retention-days: 3 + + build-noirc-abi: + runs-on: ubuntu-latest + timeout-minutes: 30 + + steps: + - name: Checkout sources + uses: actions/checkout@v3 + + - name: Setup Nix + uses: ./.github/actions/nix + with: + github-token: ${{ secrets.GITHUB_TOKEN }} + nix-cache-name: "noir" + cachix-auth-token: ${{ secrets.CACHIXAUTHTOKEN }} + + - name: Build noirc_abi_wasm + run: | + nix build -L .#noirc_abi_wasm + + - name: Dereference symlink + run: echo "UPLOAD_PATH=$(readlink -f ./result/noirc_abi_wasm)" >> $GITHUB_ENV + + - name: Upload artifact + uses: actions/upload-artifact@v3 + with: + name: noirc_abi_wasm + path: ${{ env.UPLOAD_PATH }} + retention-days: 10 + + test-acvm_js-node: + needs: [build-acvm-js] + name: ACVM JS (Node.js) + runs-on: ubuntu-latest + timeout-minutes: 30 + + steps: + - name: Checkout sources + uses: actions/checkout@v4 + + - name: Download artifact + uses: actions/download-artifact@v3 + with: + name: acvm-js + path: ./acvm-repo/acvm_js + + - name: Set up test environment + uses: ./.github/actions/setup + + - name: Run node tests + run: yarn workspace @noir-lang/acvm_js test + + test-acvm_js-browser: + needs: [build-acvm-js] + name: ACVM JS (Browser) + runs-on: ubuntu-latest + timeout-minutes: 30 + + steps: + - name: Checkout sources + uses: actions/checkout@v4 + + - name: Download artifact + uses: actions/download-artifact@v3 + with: + name: acvm-js + path: ./acvm-repo/acvm_js + + - name: Set up test environment + uses: ./.github/actions/setup + + - name: Install playwright deps + run: | + npx playwright install + npx playwright install-deps + + - name: Run browser tests + run: yarn workspace @noir-lang/acvm_js test:browser + + test-noirc-abi: + needs: [build-noirc-abi] + name: noirc_abi + runs-on: ubuntu-latest + timeout-minutes: 30 + + steps: + - name: Checkout sources + uses: actions/checkout@v3 + + - name: Download wasm package artifact + uses: actions/download-artifact@v3 + with: + name: noirc_abi_wasm + path: ./tooling/noirc_abi_wasm + + - name: Install Yarn dependencies + uses: ./.github/actions/setup + + - name: Run node tests + run: yarn workspace @noir-lang/noirc_abi test + + - name: Install Playwright + uses: ./.github/actions/install-playwright + + - name: Run browser tests + run: yarn workspace @noir-lang/noirc_abi test:browser + + 
test-noir-js-backend-barretenberg: + needs: [build-noirc-abi] + name: noir-js-backend-barretenberg + runs-on: ubuntu-latest + timeout-minutes: 30 + + steps: + - name: Checkout + uses: actions/checkout@v4 + + - name: Download wasm package artifact + uses: actions/download-artifact@v3 + with: + name: noirc_abi_wasm + path: ./tooling/noirc_abi_wasm + + - name: Install Yarn dependencies + uses: ./.github/actions/setup + + - name: Build noir_js_types + run: yarn workspace @noir-lang/types build + + - name: Run barretenberg wrapper tests + run: | + yarn workspace @noir-lang/backend_barretenberg test + + test-noir-js: + needs: [build-acvm-js, build-noirc-abi] + name: Noir JS + runs-on: ubuntu-latest + timeout-minutes: 30 + + steps: + - name: Checkout + uses: actions/checkout@v4 + + - name: Download artifact + uses: actions/download-artifact@v3 + with: + name: acvm-js + path: ./acvm-repo/acvm_js + + - name: Download wasm package artifact + uses: actions/download-artifact@v3 + with: + name: noirc_abi_wasm + path: ./tooling/noirc_abi_wasm + + - name: Install Yarn dependencies + uses: ./.github/actions/setup + + - name: Build noir_js_types + run: yarn workspace @noir-lang/types build + + - name: Build barretenberg wrapper + run: yarn workspace @noir-lang/backend_barretenberg build + + - name: Run noir_js tests + run: | + yarn workspace @noir-lang/noir_js build + yarn workspace @noir-lang/noir_js test + + test-noir-wasm: + needs: [build-noir-wasm, build-nargo] + name: noir_wasm + runs-on: ubuntu-latest + timeout-minutes: 30 + + steps: + - name: Checkout sources + uses: actions/checkout@v4 + + - name: Download wasm package artifact + uses: actions/download-artifact@v3 + with: + name: noir_wasm + path: ./compiler/wasm + + - name: Install Yarn dependencies + uses: ./.github/actions/setup + + - name: Download nargo binary + uses: actions/download-artifact@v3 + with: + name: nargo + path: ./nargo + + - name: Set nargo on PATH + run: | + nargo_binary="${{ github.workspace }}/nargo/nargo" + chmod +x $nargo_binary + echo "$(dirname $nargo_binary)" >> $GITHUB_PATH + export PATH="$PATH:$(dirname $nargo_binary)" + + - name: Build fixtures + run: yarn workspace @noir-lang/noir_wasm test:build_fixtures + + - name: Install Playwright + uses: ./.github/actions/install-playwright + + - name: Run node tests + run: yarn workspace @noir-lang/noir_wasm test:node + + - name: Run browser tests + run: yarn workspace @noir-lang/noir_wasm test:browser + + test-noir-codegen: + needs: [build-acvm-js, build-noirc-abi, build-nargo] + name: noir_codegen + runs-on: ubuntu-latest + timeout-minutes: 30 + + steps: + - name: Checkout + uses: actions/checkout@v4 + + - name: Download nargo binary + uses: actions/download-artifact@v3 + with: + name: nargo + path: ./nargo + + - name: Download acvm_js package artifact + uses: actions/download-artifact@v3 + with: + name: acvm-js + path: ./acvm-repo/acvm_js + + - name: Download noirc_abi package artifact + uses: actions/download-artifact@v3 + with: + name: noirc_abi_wasm + path: ./tooling/noirc_abi_wasm + + - name: Set nargo on PATH + run: | + nargo_binary="${{ github.workspace }}/nargo/nargo" + chmod +x $nargo_binary + echo "$(dirname $nargo_binary)" >> $GITHUB_PATH + export PATH="$PATH:$(dirname $nargo_binary)" + nargo -V + + - name: Install Yarn dependencies + uses: ./.github/actions/setup + + - name: Build noir_js_types + run: yarn workspace @noir-lang/types build + + - name: Build noir_js + run: yarn workspace @noir-lang/noir_js build + + - name: Run noir_codegen tests + run: yarn 
workspace @noir-lang/noir_codegen test + + test-integration: + name: Integration Tests + runs-on: ubuntu-latest + needs: [build-acvm-js, build-noir-wasm, build-nargo, build-noirc-abi] + timeout-minutes: 30 + + steps: + - name: Checkout + uses: actions/checkout@v4 + + - name: Download nargo binary + uses: actions/download-artifact@v3 + with: + name: nargo + path: ./nargo + + - name: Download acvm_js package artifact + uses: actions/download-artifact@v3 + with: + name: acvm-js + path: ./acvm-repo/acvm_js + + - name: Download noir_wasm package artifact + uses: actions/download-artifact@v3 + with: + name: noir_wasm + path: ./compiler/wasm + + - name: Download noirc_abi package artifact + uses: actions/download-artifact@v3 + with: + name: noirc_abi_wasm + path: ./tooling/noirc_abi_wasm + + - name: Set nargo on PATH + run: | + nargo_binary="${{ github.workspace }}/nargo/nargo" + chmod +x $nargo_binary + echo "$(dirname $nargo_binary)" >> $GITHUB_PATH + export PATH="$PATH:$(dirname $nargo_binary)" + nargo -V + + - name: Install Yarn dependencies + uses: ./.github/actions/setup + + - name: Install Playwright + uses: ./.github/actions/install-playwright + + - name: Setup `integration-tests` + run: | + # Note the lack of spaces between package names. + PACKAGES_TO_BUILD="@noir-lang/types,@noir-lang/backend_barretenberg,@noir-lang/noir_js" + yarn workspaces foreach -vtp --from "{$PACKAGES_TO_BUILD}" run build + + - name: Run `integration-tests` + run: | + yarn test:integration + + # This is a job which depends on all test jobs and reports the overall status. + # This allows us to add/remove test jobs without having to update the required workflows. + tests-end: + name: End + runs-on: ubuntu-latest + # We want this job to always run (even if the dependant jobs fail) as we want this job to fail rather than skipping. + if: ${{ always() }} + needs: + - test-acvm_js-node + - test-acvm_js-browser + - test-noirc-abi + - test-noir-js-backend-barretenberg + - test-noir-js + - test-noir-wasm + - test-noir-codegen + - test-integration + + steps: + - name: Report overall success + run: | + if [[ $FAIL == true ]]; then + exit 1 + else + exit 0 + fi + env: + # We treat any skipped or failing jobs as a failure for the workflow as a whole. 
+ FAIL: ${{ contains(needs.*.result, 'failure') || contains(needs.*.result, 'skipped') }} diff --git a/.github/workflows/test-noir-js.yml b/.github/workflows/test-noir-js.yml deleted file mode 100644 index e74f2ee6015..00000000000 --- a/.github/workflows/test-noir-js.yml +++ /dev/null @@ -1,61 +0,0 @@ -name: Test Noir Js - -on: - pull_request: - merge_group: - push: - branches: - - master - -jobs: - test-noir-js: - name: Test Noir JS - runs-on: ubuntu-latest - timeout-minutes: 30 - - steps: - - name: Checkout - uses: actions/checkout@v4 - - - name: Install Yarn dependencies - uses: ./.github/actions/setup - - - name: Setup toolchain - uses: dtolnay/rust-toolchain@1.71.1 - with: - targets: wasm32-unknown-unknown - - - uses: Swatinem/rust-cache@v2 - with: - key: wasm32-unknown-unknown-noir-js - cache-on-failure: true - save-if: ${{ github.event_name != 'merge_group' }} - - - name: Install jq - run: sudo apt-get install jq - - - name: Install wasm-bindgen-cli - uses: taiki-e/install-action@v2 - with: - tool: wasm-bindgen-cli@0.2.86 - - - name: Install wasm-opt - run: | - npm i wasm-opt -g - - - name: Build acvm_js - run: yarn workspace @noir-lang/acvm_js build - - - name: Build noirc_abi - run: yarn workspace @noir-lang/noirc_abi build - - - name: Build noir_js_types - run: yarn workspace @noir-lang/types build - - - name: Build barretenberg wrapper - run: yarn workspace @noir-lang/backend_barretenberg build - - - name: Run noir_js tests - run: | - yarn workspace @noir-lang/noir_js build - yarn workspace @noir-lang/noir_js test diff --git a/.github/workflows/test-noir_wasm.yml b/.github/workflows/test-noir_wasm.yml deleted file mode 100644 index 2175e20acd7..00000000000 --- a/.github/workflows/test-noir_wasm.yml +++ /dev/null @@ -1,125 +0,0 @@ -name: Wasm - -on: - pull_request: - merge_group: - push: - branches: - - master - -concurrency: - group: ${{ github.workflow }}-${{ github.head_ref || github.ref || github.run_id }} - cancel-in-progress: true - -jobs: - build-nargo: - runs-on: ubuntu-22.04 - strategy: - matrix: - target: [x86_64-unknown-linux-gnu] - - steps: - - name: Checkout Noir repo - uses: actions/checkout@v4 - - - name: Setup toolchain - uses: dtolnay/rust-toolchain@1.71.1 - - - uses: Swatinem/rust-cache@v2 - with: - key: ${{ matrix.target }} - cache-on-failure: true - save-if: ${{ github.event_name != 'merge_group' }} - - - name: Build Nargo - run: cargo build --package nargo_cli --release - - - name: Package artifacts - run: | - mkdir dist - cp ./target/release/nargo ./dist/nargo - 7z a -ttar -so -an ./dist/* | 7z a -si ./nargo-x86_64-unknown-linux-gnu.tar.gz - - - name: Upload artifact - uses: actions/upload-artifact@v3 - with: - name: nargo - path: ./dist/* - retention-days: 3 - - build-wasm: - runs-on: ubuntu-latest - - steps: - - name: Checkout sources - uses: actions/checkout@v4 - - - name: Setup Nix - uses: ./.github/actions/nix - with: - github-token: ${{ secrets.GITHUB_TOKEN }} - nix-cache-name: "noir" - cachix-auth-token: ${{ secrets.CACHIXAUTHTOKEN }} - - - name: Build wasm package - run: | - nix build -L .#noir_wasm - - - name: Dereference symlink - run: echo "UPLOAD_PATH=$(readlink -f ./result/noir_wasm)" >> $GITHUB_ENV - - - name: Upload artifact - uses: actions/upload-artifact@v3 - with: - name: noir_wasm - path: ${{ env.UPLOAD_PATH }} - retention-days: 3 - - test: - needs: [build-wasm, build-nargo] - name: Test noir_wasm - runs-on: ubuntu-latest - steps: - - name: Checkout noir-lang/noir - uses: actions/checkout@v4 - - - name: Download wasm package artifact - 
uses: actions/download-artifact@v3 - with: - name: noir_wasm - path: ./compiler/wasm/downloaded - - - name: Download nargo binary - uses: actions/download-artifact@v3 - with: - name: nargo - path: ./nargo - - - name: Compile fixtures with Nargo CLI - working-directory: ./compiler/wasm/fixtures - run: | - nargo_binary=${{ github.workspace }}/nargo/nargo - chmod +x $nargo_binary - for dir in $(ls -d */); do - pushd $dir/noir-script - $nargo_binary compile - popd - done - - - name: Install Yarn dependencies - uses: ./.github/actions/setup - - - name: Install Playwright - uses: ./.github/actions/install-playwright - - - name: Install dependencies - run: | - cp -r ./compiler/wasm/downloaded/nodejs ./compiler/wasm - cp -r ./compiler/wasm/downloaded/web ./compiler/wasm - yarn workspace @noir-lang/source-resolver build - - - name: Run node tests - run: yarn workspace @noir-lang/noir_wasm test:node - - - name: Run browser tests - run: yarn workspace @noir-lang/noir_wasm test:browser diff --git a/.github/workflows/test-source-resolver.yml b/.github/workflows/test-source-resolver.yml deleted file mode 100644 index 5dafe4c5fd9..00000000000 --- a/.github/workflows/test-source-resolver.yml +++ /dev/null @@ -1,26 +0,0 @@ -name: Test Source Resolver - -on: - push: - paths: - - "compiler/source-resolver/**" - pull_request: - paths: - - "compiler/source-resolver/**" - -jobs: - test: - runs-on: ubuntu-latest - - steps: - - name: Checkout - uses: actions/checkout@v3 - - - name: Install Yarn dependencies - uses: ./.github/actions/setup - - - name: Build @noir-lang/source-resolver - run: yarn workspace @noir-lang/source-resolver build - - - name: Run tests - run: yarn workspace @noir-lang/source-resolver test diff --git a/.gitignore b/.gitignore index 169353af2b6..8d02d34d463 100644 --- a/.gitignore +++ b/.gitignore @@ -14,11 +14,6 @@ pkg/ !.yarn/sdks !.yarn/versions -# Source resolver -compiler/source-resolver/node_modules -compiler/source-resolver/lib -compiler/source-resolver/lib-node - # Noir.js tooling/noir_js/lib @@ -36,9 +31,10 @@ result *.pk *.vk **/Verifier.toml +**/contract **/target -!tooling/nargo_cli/tests/acir_artifacts/*/target -!tooling/nargo_cli/tests/acir_artifacts/*/target/witness.gz +!test_programs/acir_artifacts/*/target +!test_programs/acir_artifacts/*/target/witness.gz !compiler/wasm/noir-script/target gates_report.json @@ -53,3 +49,9 @@ compiler/wasm/web tooling/noirc_abi_wasm/nodejs tooling/noirc_abi_wasm/web tooling/noir_js/lib + +**/package.tgz +packages + +# docs autogen build +/docs/docs/noir_js/reference/ diff --git a/.prettierrc b/.prettierrc index ef937f9697a..052c0657b3b 100644 --- a/.prettierrc +++ b/.prettierrc @@ -3,4 +3,4 @@ "printWidth": 120, "singleQuote": true, "trailingComma": "all" -} \ No newline at end of file +} diff --git a/.release-please-manifest.json b/.release-please-manifest.json index ea8ab2395df..01f6fb140b1 100644 --- a/.release-please-manifest.json +++ b/.release-please-manifest.json @@ -1,4 +1,4 @@ { - ".": "0.19.2", - "acvm-repo": "0.33.0" + ".": "0.22.0", + "acvm-repo": "0.38.0" } \ No newline at end of file diff --git a/CHANGELOG.md b/CHANGELOG.md index cacf629d818..3fc044076a0 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,388 @@ # Changelog +## [0.22.0](https://github.com/noir-lang/noir/compare/v0.21.0...v0.22.0) (2023-12-18) + + +### ⚠ BREAKING CHANGES + +* Remove unused methods on ACIR opcodes ([#3841](https://github.com/noir-lang/noir/issues/3841)) +* Remove backend field from artifacts 
([#3819](https://github.com/noir-lang/noir/issues/3819)) +* Remove partial backend feature ([#3805](https://github.com/noir-lang/noir/issues/3805)) + +### Features + +* Add context-centric based API for noir_wasm ([#3798](https://github.com/noir-lang/noir/issues/3798)) ([19155d0](https://github.com/noir-lang/noir/commit/19155d02a1248c85e94f14a2a0bb383a4edeb16f)) + + +### Miscellaneous Chores + +* Remove backend field from artifacts ([#3819](https://github.com/noir-lang/noir/issues/3819)) ([fa1cf5f](https://github.com/noir-lang/noir/commit/fa1cf5f03aa21b001c31ebb9ce405e3c2859bb57)) +* Remove partial backend feature ([#3805](https://github.com/noir-lang/noir/issues/3805)) ([0383100](https://github.com/noir-lang/noir/commit/0383100853a80a5b28b797cdfeae0d271f1b7805)) +* Remove unused methods on ACIR opcodes ([#3841](https://github.com/noir-lang/noir/issues/3841)) ([9e5d0e8](https://github.com/noir-lang/noir/commit/9e5d0e813d61a0bfb5ee68174ed287c5a20f1579)) + +## [0.21.0](https://github.com/noir-lang/noir/compare/v0.20.0...v0.21.0) (2023-12-15) + + +### ⚠ BREAKING CHANGES + +* remove unused `source-resolver` package ([#3791](https://github.com/noir-lang/noir/issues/3791)) +* Make file manager read-only to the compiler ([#3760](https://github.com/noir-lang/noir/issues/3760)) + +### Features + +* Add `prelude.nr` ([#3693](https://github.com/noir-lang/noir/issues/3693)) ([5f0f81f](https://github.com/noir-lang/noir/commit/5f0f81f7f49b021880e0bff648aa6c6d0fede46c)) +* Add some traits to the stdlib ([#3796](https://github.com/noir-lang/noir/issues/3796)) ([8e11352](https://github.com/noir-lang/noir/commit/8e113526a2d78d27ed4e489f16d5604a2aaa18ea)) +* Add support for writing tracing debug info to file ([#3790](https://github.com/noir-lang/noir/issues/3790)) ([98a5004](https://github.com/noir-lang/noir/commit/98a500436a68652a367ccbf77e32f8544aff73bc)) +* Allow passing custom foreign call handlers when creating proofs in NoirJS ([#3764](https://github.com/noir-lang/noir/issues/3764)) ([6076e08](https://github.com/noir-lang/noir/commit/6076e08a0814bb6f3836af3c65a7b40c066b9494)) +* Allow underscores in integer literals ([#3746](https://github.com/noir-lang/noir/issues/3746)) ([2c06a64](https://github.com/noir-lang/noir/commit/2c06a64e502bac6839375c5636d39a172a609a5f)) +* Avoid overflow checks on boolean multiplication ([#3745](https://github.com/noir-lang/noir/issues/3745)) ([9b5b686](https://github.com/noir-lang/noir/commit/9b5b6861c3aa0e154e17598ac9994d3970f0e752)) +* Aztec-packages ([#3754](https://github.com/noir-lang/noir/issues/3754)) ([c043265](https://github.com/noir-lang/noir/commit/c043265e550b59bd4296504826fe15d3ce3e9ad2)) +* Dockerfile to test cargo and JS packages ([#3684](https://github.com/noir-lang/noir/issues/3684)) ([513d619](https://github.com/noir-lang/noir/commit/513d6196a0766082a3c88a4050498bae2cfa7e13)) +* Docs landing page with a playground ([#3667](https://github.com/noir-lang/noir/issues/3667)) ([9a95fbe](https://github.com/noir-lang/noir/commit/9a95fbeefb2ecd5a898006530a1e054cd345bfe8)) +* Enhance test information output ([#3696](https://github.com/noir-lang/noir/issues/3696)) ([468fbbc](https://github.com/noir-lang/noir/commit/468fbbca43e33b23bc662bf1d36dcb79830a291c)) +* Implement print without newline ([#3650](https://github.com/noir-lang/noir/issues/3650)) ([9827dfe](https://github.com/noir-lang/noir/commit/9827dfe51118ba55da6da51ab8bf45cffd2ca756)) +* **lsp:** Add goto definition for locals ([#3705](https://github.com/noir-lang/noir/issues/3705)) 
([9dd465c](https://github.com/noir-lang/noir/commit/9dd465c23e286481fa9a35632d133901f86d5883)) +* **lsp:** Add goto definition for structs ([#3718](https://github.com/noir-lang/noir/issues/3718)) ([a576c5b](https://github.com/noir-lang/noir/commit/a576c5bba6ab92eb4798715a43475808ac954fba)) +* Optimize out unnecessary truncation instructions ([#3717](https://github.com/noir-lang/noir/issues/3717)) ([c9c72ae](https://github.com/noir-lang/noir/commit/c9c72ae7b80aa9504a082dd083b19d4b80d954c5)) +* Remove experimental feature warning for traits ([#3783](https://github.com/noir-lang/noir/issues/3783)) ([cb52242](https://github.com/noir-lang/noir/commit/cb522429592477c2b0544f3b3026a1a946b0e5b1)) +* Reorganizing docs to fit diataxis framework ([#3711](https://github.com/noir-lang/noir/issues/3711)) ([54a1ed5](https://github.com/noir-lang/noir/commit/54a1ed58c991eefa7ac9304b894c7046c294487b)) +* Simplify explicit equality assertions to assert equality directly ([#3708](https://github.com/noir-lang/noir/issues/3708)) ([2fc46e2](https://github.com/noir-lang/noir/commit/2fc46e2269bba8d9ad6ae5fcea10e64dce9b3745)) +* Speed up transformation of debug messages ([#3815](https://github.com/noir-lang/noir/issues/3815)) ([2a8af1e](https://github.com/noir-lang/noir/commit/2a8af1e4141ffff61547ee1c2837a6392bd5db48)) + + +### Bug Fixes + +* `try_unify` no longer binds types on failure ([#3697](https://github.com/noir-lang/noir/issues/3697)) ([f03e581](https://github.com/noir-lang/noir/commit/f03e5812439bdf9d1aedc69debdc50ba5dba2049)) +* Add missing assertion to test ([#3765](https://github.com/noir-lang/noir/issues/3765)) ([bcbe116](https://github.com/noir-lang/noir/commit/bcbe11613b7205476a49ad0d588b868b4fc43ba1)) +* Add negative integer literals ([#3690](https://github.com/noir-lang/noir/issues/3690)) ([8b3a68f](https://github.com/noir-lang/noir/commit/8b3a68f5286c09e1f612dbcfff3fe41023ab7109)) +* Allow trait method references from the trait name ([#3774](https://github.com/noir-lang/noir/issues/3774)) ([cfa34d4](https://github.com/noir-lang/noir/commit/cfa34d4d913dbd35f8329430e0d58830e069d6ff)) +* Deserialize odd length hex literals ([#3747](https://github.com/noir-lang/noir/issues/3747)) ([4000fb2](https://github.com/noir-lang/noir/commit/4000fb279221eb07187d657bfaa7f1c7b311abf2)) +* **docs:** Trigger `update-docs` workflow when the `release-please` PR gets merged and not on every merge to master ([#3677](https://github.com/noir-lang/noir/issues/3677)) ([9a3d1d2](https://github.com/noir-lang/noir/commit/9a3d1d2cf647cd583344f8da122fed1acbca9397)) +* Initialize strings as u8 array ([#3682](https://github.com/noir-lang/noir/issues/3682)) ([8da40b7](https://github.com/noir-lang/noir/commit/8da40b75a36ebac51d5377311db3c55fa339dcac)) +* **lsp:** Package resolution on save ([#3794](https://github.com/noir-lang/noir/issues/3794)) ([14f2fff](https://github.com/noir-lang/noir/commit/14f2fffeb3de5f653c11694ee3c5e5d62aaa34ec)) +* Parse negative integer literals ([#3698](https://github.com/noir-lang/noir/issues/3698)) ([463ab06](https://github.com/noir-lang/noir/commit/463ab060075db1915127c3f6cef11bfed9d40109)) +* Pub is required on return for entry points ([#3616](https://github.com/noir-lang/noir/issues/3616)) ([7f1d796](https://github.com/noir-lang/noir/commit/7f1d7968368734e02b152e2e907dc7af9e1604c8)) +* Remove `noirc_driver/aztec` feature flag in docker ([#3784](https://github.com/noir-lang/noir/issues/3784)) ([a48d562](https://github.com/noir-lang/noir/commit/a48d562b59aa2009a9c9b65dd71e11cdd8d06cf0)) +* Remove 
include-keys option ([#3692](https://github.com/noir-lang/noir/issues/3692)) ([95d7ce2](https://github.com/noir-lang/noir/commit/95d7ce21016e3603bf279efb970536ad32d89a3a)) +* Revert change to modify version in workspace file for acvm dependencies ([#3673](https://github.com/noir-lang/noir/issues/3673)) ([0696f75](https://github.com/noir-lang/noir/commit/0696f755364293bcc7ebc7a0def0dcafede2e543)) +* Sequence update-lockfile workflow so it gets modified after the ACVM version in the root has been changed ([#3676](https://github.com/noir-lang/noir/issues/3676)) ([c00cd85](https://github.com/noir-lang/noir/commit/c00cd8537836f8e4d8559b01d16dfdd1b5cad519)) +* **ssa:** Handle array arguments to side effectual constrain statements ([#3740](https://github.com/noir-lang/noir/issues/3740)) ([028d65e](https://github.com/noir-lang/noir/commit/028d65ea71f9c11e69784d06e0f9768668455f83)) +* Stop cloning Traits! ([#3736](https://github.com/noir-lang/noir/issues/3736)) ([fcff412](https://github.com/noir-lang/noir/commit/fcff412bb39a04a5c88506ae5a5ee2fbdefd93ef)) +* Stop issuing unused variable warnings for variables in trait definitions ([#3797](https://github.com/noir-lang/noir/issues/3797)) ([0bb44c3](https://github.com/noir-lang/noir/commit/0bb44c3bbc63d385d77d93da6abd07214bcfd700)) +* Unsigned integers cannot be negated ([#3688](https://github.com/noir-lang/noir/issues/3688)) ([f904ae1](https://github.com/noir-lang/noir/commit/f904ae1065af74652b2111ea17b72f994de37472)) + + +### Miscellaneous Chores + +* Make file manager read-only to the compiler ([#3760](https://github.com/noir-lang/noir/issues/3760)) ([e3dcc21](https://github.com/noir-lang/noir/commit/e3dcc21cb2c0fef7f28f50b018747c4f09609b11)) +* Remove unused `source-resolver` package ([#3791](https://github.com/noir-lang/noir/issues/3791)) ([57d2505](https://github.com/noir-lang/noir/commit/57d2505d53e2233becd1e2a7de882c4acb518eff)) + +## [0.20.0](https://github.com/noir-lang/noir/compare/v0.19.5...v0.20.0) (2023-12-01) + + +### ⚠ BREAKING CHANGES + +* avoid integer overflows ([#2713](https://github.com/noir-lang/noir/issues/2713)) +* return Pedersen structure in stdlib ([#3190](https://github.com/noir-lang/noir/issues/3190)) +* noir-wasm outputs debug symbols ([#3317](https://github.com/noir-lang/noir/issues/3317)) +* move mimc to hash submodule ([#3361](https://github.com/noir-lang/noir/issues/3361)) +* bump MSRV to 1.71.1 ([#3353](https://github.com/noir-lang/noir/issues/3353)) +* Add semver checks for the compiler version in Nargo.toml ([#3336](https://github.com/noir-lang/noir/issues/3336)) +* Move circuit serialization circuit into acir ([#3345](https://github.com/noir-lang/noir/issues/3345)) +* change stdlib function `pedersen` to `pedersen_commitment` ([#3341](https://github.com/noir-lang/noir/issues/3341)) +* expose pedersen hash in acir and bb solver ([#3269](https://github.com/noir-lang/noir/issues/3269)) +* Switch to new pedersen implementation ([#3151](https://github.com/noir-lang/noir/issues/3151)) +* Pass ACIR to ACVM by reference rather than passing ownership ([#2872](https://github.com/noir-lang/noir/issues/2872)) +* Make for loops a statement ([#2975](https://github.com/noir-lang/noir/issues/2975)) +* **traits:** trait functions with a default implementation must not be followed by a semicolon ([#2987](https://github.com/noir-lang/noir/issues/2987)) +* **wasm:** improve and simplify wasm compiler interface ([#2976](https://github.com/noir-lang/noir/issues/2976)) +* **wasm:** update wasm artifacts to match cli artifacts 
([#2973](https://github.com/noir-lang/noir/issues/2973)) +* Maintain shape of foreign call arguments ([#2935](https://github.com/noir-lang/noir/issues/2935)) +* update to `bb` version 0.7.3 ([#2729](https://github.com/noir-lang/noir/issues/2729)) +* **noir_js:** Rename inner and outer proof methods ([#2845](https://github.com/noir-lang/noir/issues/2845)) +* `generateWitness` now returns a serialized witness file ([#2842](https://github.com/noir-lang/noir/issues/2842)) +* Issue an error when a module is declared twice & fix module search path ([#2801](https://github.com/noir-lang/noir/issues/2801)) +* Default integers to u64 ([#2764](https://github.com/noir-lang/noir/issues/2764)) + +### Features + +* `compute_note_hash_and_nullifier` check ([#3216](https://github.com/noir-lang/noir/issues/3216)) ([4963c6c](https://github.com/noir-lang/noir/commit/4963c6c024bba710dd8907147f631f5c26094f0a)) +* **abi:** Throw errors rather than returning string from `noirc_abi_wasm` ([#2817](https://github.com/noir-lang/noir/issues/2817)) ([df7b42c](https://github.com/noir-lang/noir/commit/df7b42cd253d1b908a42c367b116813f9999d93b)) +* **abi:** Tuples as inputs/outputs to main ([#2899](https://github.com/noir-lang/noir/issues/2899)) ([d8bd78f](https://github.com/noir-lang/noir/commit/d8bd78f60c447bb8488a844d779e8aaf4150afe7)) +* **acir:** Enable dynamic indices on non-homogenous arrays ([#2703](https://github.com/noir-lang/noir/issues/2703)) ([622d2e4](https://github.com/noir-lang/noir/commit/622d2e436992c23e6d0885b591bd1072ca57b307)) +* **acir:** Handle dynamic array operations for nested slices ([#3187](https://github.com/noir-lang/noir/issues/3187)) ([e026319](https://github.com/noir-lang/noir/commit/e026319fc25763d30781b90e6a4454ddb5d3bc7b)) +* **acir:** Set dynamic array values ([#3054](https://github.com/noir-lang/noir/issues/3054)) ([e871866](https://github.com/noir-lang/noir/commit/e871866d2203f0f0f49f3b273d99d385b950b65f)) +* **acvm_js:** Export black box solver functions ([#2812](https://github.com/noir-lang/noir/issues/2812)) ([da8a98e](https://github.com/noir-lang/noir/commit/da8a98ed312fe69cb0bdb8f9d0a70ee7a981398f)) +* **acvm:** Separate ACVM optimizations and transformations ([#2979](https://github.com/noir-lang/noir/issues/2979)) ([5865d1a](https://github.com/noir-lang/noir/commit/5865d1a1bca16e1853663c71f893ff81fa3f7185)) +* Add --check option to nargo fmt for dry-run formatting verification ([#3530](https://github.com/noir-lang/noir/issues/3530)) ([4469707](https://github.com/noir-lang/noir/commit/4469707d97085fab0f7ade8d015dc827c56156ee)) +* Add `destroy` method to `Noir` ([#3105](https://github.com/noir-lang/noir/issues/3105)) ([7e40274](https://github.com/noir-lang/noir/commit/7e402744a7d64ffcac6db026cec1631230204f0f)) +* Add `execute` method to `Noir` class ([#3081](https://github.com/noir-lang/noir/issues/3081)) ([17bdd7e](https://github.com/noir-lang/noir/commit/17bdd7e3909f0ddd195e5cb7095cd0d30758ed43)) +* Add `FieldElement::from<usize>` implementation ([#3647](https://github.com/noir-lang/noir/issues/3647)) ([8b7c5aa](https://github.com/noir-lang/noir/commit/8b7c5aa5311f4e6811438f67bd552b641b13fc9a)) +* Add `noir_codegen` package ([#3392](https://github.com/noir-lang/noir/issues/3392)) ([6c4cd4d](https://github.com/noir-lang/noir/commit/6c4cd4d37e4af38dccf899bcbd3950d1e236b35d)) +* Add ACIR serializer C++ codegen ([#2961](https://github.com/noir-lang/noir/issues/2961)) ([7556982](https://github.com/noir-lang/noir/commit/7556982dbebe25eaa17240abbe270b771b55de45)) +* Add an options 
object to `BarretenbergBackend` constructor ([#3105](https://github.com/noir-lang/noir/issues/3105)) ([7e40274](https://github.com/noir-lang/noir/commit/7e402744a7d64ffcac6db026cec1631230204f0f)) +* Add aztec selectors for event structs ([#2983](https://github.com/noir-lang/noir/issues/2983)) ([982380e](https://github.com/noir-lang/noir/commit/982380e54bb4d696688522c540f1234734ae2e80)) +* Add bb interface implementation ([#2902](https://github.com/noir-lang/noir/issues/2902)) ([fe92dc0](https://github.com/noir-lang/noir/commit/fe92dc0df57b2cbc0e7b8cd1f3a91cba6b0f3049)) +* Add check for overlapping generic traits ([#3307](https://github.com/noir-lang/noir/issues/3307)) ([8cf81b6](https://github.com/noir-lang/noir/commit/8cf81b659bed9522aede29c1ebb4a4ed2bfa1205)) +* Add conditional compilation of methods based on the underlying field being used ([#3045](https://github.com/noir-lang/noir/issues/3045)) ([2e008e2](https://github.com/noir-lang/noir/commit/2e008e2438795bbc41b0641e830378b76bf2e194)) +* Add crate for pub modifier ([#3271](https://github.com/noir-lang/noir/issues/3271)) ([e7a1a1a](https://github.com/noir-lang/noir/commit/e7a1a1a4b42b6b72c16f2204e33af80dbabba6b5)) +* Add debugger commands to introspect (and modify) the current state ([#3391](https://github.com/noir-lang/noir/issues/3391)) ([9e1ad85](https://github.com/noir-lang/noir/commit/9e1ad858cf8a1d9aba0137abe6a749267498bfaf)) +* Add experimental REPL-based debugger ([#2995](https://github.com/noir-lang/noir/issues/2995)) ([281c696](https://github.com/noir-lang/noir/commit/281c696da61c64b42b9525b8756ffc195f70d775)) +* Add exports of JS black box solvers to noirJS ([#3295](https://github.com/noir-lang/noir/issues/3295)) ([8369871](https://github.com/noir-lang/noir/commit/836987150f82354d3dfc01cfaad69f70240ca80c)) +* Add generic count check for trait methods ([#3382](https://github.com/noir-lang/noir/issues/3382)) ([a9f9717](https://github.com/noir-lang/noir/commit/a9f9717ba69c0dd8e4fc7045fe6aea2077b84c95)) +* Add JS types for ABI and input maps ([#3023](https://github.com/noir-lang/noir/issues/3023)) ([599e7a1](https://github.com/noir-lang/noir/commit/599e7a1d6bae5d93273e9ef1265024eac909660d)) +* Add LSP command to profile opcodes in vscode ([#3496](https://github.com/noir-lang/noir/issues/3496)) ([6fbf77a](https://github.com/noir-lang/noir/commit/6fbf77ae2b87a55db92344f5066a82ccaf6c2086)) +* Add lsp formatting ([#3433](https://github.com/noir-lang/noir/issues/3433)) ([286c876](https://github.com/noir-lang/noir/commit/286c87694fda185f25b05cec5504142643bc207f)) +* Add noir types package ([#2893](https://github.com/noir-lang/noir/issues/2893)) ([e8fc868](https://github.com/noir-lang/noir/commit/e8fc8687e6dd89295fd023201443f1197963a243)) +* Add package version to Nargo.toml metadata ([#3427](https://github.com/noir-lang/noir/issues/3427)) ([9e1717c](https://github.com/noir-lang/noir/commit/9e1717c2d96a0b9e394e5cb2fb9e1d09b5259ca0)) +* Add profile info print out ([#3425](https://github.com/noir-lang/noir/issues/3425)) ([a8b5fa8](https://github.com/noir-lang/noir/commit/a8b5fa8e30dc27e64666381b7451569f350967d1)) +* Add semver checks for the compiler version in Nargo.toml ([#3336](https://github.com/noir-lang/noir/issues/3336)) ([0e530cf](https://github.com/noir-lang/noir/commit/0e530cfe86f87a532be30a02f4353d010e47e458)) +* Add special case for boolean AND in acir-gen ([#3615](https://github.com/noir-lang/noir/issues/3615)) ([824039b](https://github.com/noir-lang/noir/commit/824039bcc0a3275f333ea94aecc701d129f99fe5)) +* Add support for 
tuple values in `noir_codegen` ([#3592](https://github.com/noir-lang/noir/issues/3592)) ([346d75f](https://github.com/noir-lang/noir/commit/346d75f9dd9261996d4d7bb80eb7e4118e8f8ce2)) +* Allow a trait to be implemented multiple times for the same struct ([#3292](https://github.com/noir-lang/noir/issues/3292)) ([51831df](https://github.com/noir-lang/noir/commit/51831df68bc20460c6d05d55469002db06113925)) +* Allow providing custom foreign call executors to `execute_circuit` ([#3506](https://github.com/noir-lang/noir/issues/3506)) ([d27db33](https://github.com/noir-lang/noir/commit/d27db332f8c320ffd9b5520bebbd83ae09e31de7)) +* Allow traits to have generic functions ([#3365](https://github.com/noir-lang/noir/issues/3365)) ([0f9af65](https://github.com/noir-lang/noir/commit/0f9af652efc6b7628784a397a9df674eaa30de61)) +* Avoid integer overflows ([#2713](https://github.com/noir-lang/noir/issues/2713)) ([7d7d632](https://github.com/noir-lang/noir/commit/7d7d63291d712137f97e6d44a774acdf2bd20512)) +* Aztec-packages ([#3599](https://github.com/noir-lang/noir/issues/3599)) ([2cd6dc3](https://github.com/noir-lang/noir/commit/2cd6dc39e3a956aa5dff721d47aaf1921f98fded)) +* Aztec-packages ([#3626](https://github.com/noir-lang/noir/issues/3626)) ([e0a96ea](https://github.com/noir-lang/noir/commit/e0a96ea70b17c8c898dd72ac929f3969a4cec1d3)) +* Cache debug artifacts ([#3133](https://github.com/noir-lang/noir/issues/3133)) ([c5a6229](https://github.com/noir-lang/noir/commit/c5a622983e4049d82589f185be5e96c63ed6066d)) +* Check where clauses when searching for trait impls ([#3407](https://github.com/noir-lang/noir/issues/3407)) ([84c6604](https://github.com/noir-lang/noir/commit/84c6604397f262c09ba4bac157fead38b8280313)) +* Codegen typed interfaces for functions in `noir_codegen` ([#3533](https://github.com/noir-lang/noir/issues/3533)) ([290c463](https://github.com/noir-lang/noir/commit/290c463622a93a34293f73b5bf2aea7ade30a11c)) +* Compile without a backend ([#3437](https://github.com/noir-lang/noir/issues/3437)) ([d69cf5d](https://github.com/noir-lang/noir/commit/d69cf5debcc430bb019b6cc95774aac084776dda)) +* Complex slice inputs for dynamic slice builtins ([#3617](https://github.com/noir-lang/noir/issues/3617)) ([8b23b34](https://github.com/noir-lang/noir/commit/8b23b349ae15afa48f6cbe8962586bbe79e79890)) +* Contract events in artifacts ([#2873](https://github.com/noir-lang/noir/issues/2873)) ([4765c82](https://github.com/noir-lang/noir/commit/4765c8288c583a61a81ff97eea1ef49df13eeca0)) +* Copy on write optimization for brillig ([#3522](https://github.com/noir-lang/noir/issues/3522)) ([da29c02](https://github.com/noir-lang/noir/commit/da29c02327acb2f46f5a7a25c7404dfa44c82616)) +* Data bus ([#3508](https://github.com/noir-lang/noir/issues/3508)) ([6b0bdbc](https://github.com/noir-lang/noir/commit/6b0bdbced1bfe2c92e90dd7b70ca8f9e5ccb7c0d)) +* **debugger:** Highlight current src code loc ([#3174](https://github.com/noir-lang/noir/issues/3174)) ([6b87582](https://github.com/noir-lang/noir/commit/6b87582dfe872ad6c248cf9995d76b0ef1580625)) +* **debugger:** Print limited source code context ([#3217](https://github.com/noir-lang/noir/issues/3217)) ([dcda1c7](https://github.com/noir-lang/noir/commit/dcda1c7aed69ae8f55cd3f680e3cc1ece9de7541)) +* Default integers to u64 ([#2764](https://github.com/noir-lang/noir/issues/2764)) ([01cb041](https://github.com/noir-lang/noir/commit/01cb041a92ef6043dd5a160e0a56a63400801980)) +* Dynamic indexing of non-homogenous slices ([#2883](https://github.com/noir-lang/noir/issues/2883)) 
([72c3661](https://github.com/noir-lang/noir/commit/72c3661c86712b99236eafaac99f76f13d42b9d9)) +* Enable the `fmt` command in the help menu ([#3328](https://github.com/noir-lang/noir/issues/3328)) ([63d414c](https://github.com/noir-lang/noir/commit/63d414c06a399525601e3db11dc48b180e93c2d8)) +* Expand trait impl overlap check to cover generic types ([#3320](https://github.com/noir-lang/noir/issues/3320)) ([a01549b](https://github.com/noir-lang/noir/commit/a01549b78a2c5de3e64ce5d3e5c4bb73a8c8b4fb)) +* Export `CompiledCircuit` from codegened TS ([#3589](https://github.com/noir-lang/noir/issues/3589)) ([e06c675](https://github.com/noir-lang/noir/commit/e06c67500da11518caffe0e98bdb9cd7f5f89049)) +* Expose pedersen hash in acir and bb solver ([#3269](https://github.com/noir-lang/noir/issues/3269)) ([0108b6c](https://github.com/noir-lang/noir/commit/0108b6c1e8dc0dfc766ab3c4944deae9354dec36)) +* Extract Brillig VM to allow step debugging ([#3259](https://github.com/noir-lang/noir/issues/3259)) ([f6431f9](https://github.com/noir-lang/noir/commit/f6431f99711f15a96a4f7fed2f413daece94b5e1)) +* Format infix expressions ([#3001](https://github.com/noir-lang/noir/issues/3001)) ([7926ada](https://github.com/noir-lang/noir/commit/7926ada88ed08ac9d874604834533d900fbb16b0)) +* **formatter:** Add formatter support for array literals ([#3061](https://github.com/noir-lang/noir/issues/3061)) ([a535321](https://github.com/noir-lang/noir/commit/a5353217a1f49b83daf11d5fa55e0bcccebf0271)) +* Handle constant index operations on simple slices ([#3464](https://github.com/noir-lang/noir/issues/3464)) ([7ae12f8](https://github.com/noir-lang/noir/commit/7ae12f8c5243d31b2f410c246ed6b9e2fcea5d4c)) +* Handle warnings in evaluator ([#3205](https://github.com/noir-lang/noir/issues/3205)) ([5cfd156](https://github.com/noir-lang/noir/commit/5cfd156ca2035038a226bdd81a51636d3de3c34e)) +* Implement `bound_constraint_with_offset` in terms of `AcirVar`s ([#3233](https://github.com/noir-lang/noir/issues/3233)) ([8d89cb5](https://github.com/noir-lang/noir/commit/8d89cb59fe710859a96eaed4f988952bd727fb7d)) +* Implement automatic dereferencing for index expressions ([#3082](https://github.com/noir-lang/noir/issues/3082)) ([8221bfd](https://github.com/noir-lang/noir/commit/8221bfd2ffde7d1dbf71a72d95257acf76ecca74)) +* Implement automatic dereferencing for indexing lvalues ([#3083](https://github.com/noir-lang/noir/issues/3083)) ([6e2b70a](https://github.com/noir-lang/noir/commit/6e2b70ae90b686158957ea29ef1b2a5f0ed38e5f)) +* Implement euclidean division and signed division in terms of `AcirVar`s ([#3230](https://github.com/noir-lang/noir/issues/3230)) ([b8b7782](https://github.com/noir-lang/noir/commit/b8b77825410c0e1f95549259a51e2c40de1ec342)) +* Implement impl specialization ([#3087](https://github.com/noir-lang/noir/issues/3087)) ([44716fa](https://github.com/noir-lang/noir/commit/44716fae0bae0f78ceee76f7231af49c4abeace1)) +* Implement integer printing ([#3577](https://github.com/noir-lang/noir/issues/3577)) ([6601408](https://github.com/noir-lang/noir/commit/6601408e378b2afe4fdfd8e04482c39311ccc7e9)) +* Implement raw string literals ([#3556](https://github.com/noir-lang/noir/issues/3556)) ([87a302f](https://github.com/noir-lang/noir/commit/87a302fbc26d5fd42694953935d95a7ccb3d50a0)) +* Implement string escape sequences ([#2803](https://github.com/noir-lang/noir/issues/2803)) ([f7529b8](https://github.com/noir-lang/noir/commit/f7529b80f0958fd47a525f25a123f16438bbb892)) +* Implement where clauses on impls 
([#3324](https://github.com/noir-lang/noir/issues/3324)) ([4c3d1de](https://github.com/noir-lang/noir/commit/4c3d1dea27726133335538443df3dbbd2c8f2d58)) +* **lsp:** Add "info" codelens ([#2982](https://github.com/noir-lang/noir/issues/2982)) ([80770d9](https://github.com/noir-lang/noir/commit/80770d9fae7c42e69a62cf01babfc69449600ac5)) +* **lsp:** Add goto definition for functions ([#3656](https://github.com/noir-lang/noir/issues/3656)) ([7bb7356](https://github.com/noir-lang/noir/commit/7bb735662c86e6533ac58f448ad748e34f02edb7)) +* Maintain shape of foreign call arguments ([#2935](https://github.com/noir-lang/noir/issues/2935)) ([f7869e6](https://github.com/noir-lang/noir/commit/f7869e6fb492b617e776e538ac4babfa56261d26)) +* Make generic impls callable ([#3297](https://github.com/noir-lang/noir/issues/3297)) ([8d9b738](https://github.com/noir-lang/noir/commit/8d9b738ea958320a16946ebe6fcfb6bbf2dda42e)) +* Manage breakpoints and allow restarting a debugging session ([#3325](https://github.com/noir-lang/noir/issues/3325)) ([f502108](https://github.com/noir-lang/noir/commit/f502108a59e2141f4898b0a25e84d7ed7d2bff58)) +* Nargo test runtime callstacks and assert messages without string matching ([#2953](https://github.com/noir-lang/noir/issues/2953)) ([1b6a4e6](https://github.com/noir-lang/noir/commit/1b6a4e6021929c23a1bca5dff02c004422cc71f8)) +* **noir_js:** Allow providing foreign call handlers in noirJS ([#3294](https://github.com/noir-lang/noir/issues/3294)) ([c76b0f8](https://github.com/noir-lang/noir/commit/c76b0f81b89ae02c39c13c3bd400b5d13f083759)) +* Noir-wasm outputs debug symbols ([#3317](https://github.com/noir-lang/noir/issues/3317)) ([f9933fa](https://github.com/noir-lang/noir/commit/f9933fa50c42aade8ddc13d29deef7645ddcf586)) +* Noir-wasm takes dependency graph ([#3213](https://github.com/noir-lang/noir/issues/3213)) ([a2c8ebd](https://github.com/noir-lang/noir/commit/a2c8ebd4a800d7ef042ac9cbe5ee6a837c715634)) +* Old docs issues ([#3195](https://github.com/noir-lang/noir/issues/3195)) ([26746c5](https://github.com/noir-lang/noir/commit/26746c59e12a60f3869a5b885b05926c94f01215)) +* Optimize euclidean division acir-gen ([#3121](https://github.com/noir-lang/noir/issues/3121)) ([2c175c0](https://github.com/noir-lang/noir/commit/2c175c0d886eea390ef97ada1c2a5b0e1bef15e8)) +* Oracle mocker for nargo test ([#2928](https://github.com/noir-lang/noir/issues/2928)) ([0dd1e77](https://github.com/noir-lang/noir/commit/0dd1e77c0e625805e15fa56b4738c93ebae19b6d)) +* Pass ACIR to ACVM by reference rather than passing ownership ([#2872](https://github.com/noir-lang/noir/issues/2872)) ([b3a9c34](https://github.com/noir-lang/noir/commit/b3a9c343993ce3207de62106bda6cb2b2ef3de50)) +* Pass brillig bytecode to VM by reference ([#3030](https://github.com/noir-lang/noir/issues/3030)) ([4ee290b](https://github.com/noir-lang/noir/commit/4ee290b8b6f75bc1974a5750248570eeca8d244e)) +* Perform compile-time euclidean division on constants ([#3231](https://github.com/noir-lang/noir/issues/3231)) ([3866d7e](https://github.com/noir-lang/noir/commit/3866d7ea32dcdabbdb5ba85478d97fd51a6850f3)) +* Prevent unnecessary witness creation in euclidean division ([#2980](https://github.com/noir-lang/noir/issues/2980)) ([c6f660e](https://github.com/noir-lang/noir/commit/c6f660e86d40a106930483f1d6161814e3c0de10)) +* Properly track equivalence of witnesses generated for black box functions ([#3428](https://github.com/noir-lang/noir/issues/3428)) 
([20b70c2](https://github.com/noir-lang/noir/commit/20b70c29b4bd323d67ef56ab1933341a7747d3cb)) +* Provide formatting subcommand ([#2640](https://github.com/noir-lang/noir/issues/2640)) ([a38b15f](https://github.com/noir-lang/noir/commit/a38b15f5d8e69faff125d363f2fd1f2f90ae6830)) +* Publish aztec build of noir_wasm ([#3049](https://github.com/noir-lang/noir/issues/3049)) ([3b51f4d](https://github.com/noir-lang/noir/commit/3b51f4df7e808233f6987baec93f4b5de7e5b304)) +* Refactor debugger and separate core from UI ([#3308](https://github.com/noir-lang/noir/issues/3308)) ([8466810](https://github.com/noir-lang/noir/commit/846681079ab7295b201480a5c8baebc45e858c6f)) +* Remove redundant predicate from brillig quotients ([#2784](https://github.com/noir-lang/noir/issues/2784)) ([a8f18c5](https://github.com/noir-lang/noir/commit/a8f18c55b35f47c6fa3ebfebcd827aeb55e5c850)) +* Remove type arrays for flat slices ([#3466](https://github.com/noir-lang/noir/issues/3466)) ([8225b2b](https://github.com/noir-lang/noir/commit/8225b2b379ddf145f9418f8517478704f9aac350)) +* Remove unnecessary truncation of boolean multiplication ([#3122](https://github.com/noir-lang/noir/issues/3122)) ([39dbcf1](https://github.com/noir-lang/noir/commit/39dbcf1ab80d2bb472d08db4de15d4e0c1f2eb52)) +* Replace boolean range constraints with arithmetic opcodes ([#3234](https://github.com/noir-lang/noir/issues/3234)) ([949222c](https://github.com/noir-lang/noir/commit/949222c20d9e65152e3814d02da1c4c41ffc23a5)) +* Return compilation errors from noir_wasm ([#3091](https://github.com/noir-lang/noir/issues/3091)) ([55f63c9](https://github.com/noir-lang/noir/commit/55f63c935cec62fbba63eed421812a4372c1aa4d)) +* Return Pedersen structure in stdlib ([#3190](https://github.com/noir-lang/noir/issues/3190)) ([be30d59](https://github.com/noir-lang/noir/commit/be30d59e61a9dff6ab94ffb97365c1282c331643)) +* Reuse witnesses more when interacting with memory ([#3658](https://github.com/noir-lang/noir/issues/3658)) ([5a4a73d](https://github.com/noir-lang/noir/commit/5a4a73d24ddd7d2bb46c85714397ee6e031c97a6)) +* Reuse witnesses which have been assigned constant values during ACIR gen ([#3137](https://github.com/noir-lang/noir/issues/3137)) ([9eb43e2](https://github.com/noir-lang/noir/commit/9eb43e2a4665397295e74a593f73d19fa9fa5d27)) +* Save Brillig execution state in ACVM ([#3026](https://github.com/noir-lang/noir/issues/3026)) ([88682da](https://github.com/noir-lang/noir/commit/88682da87ffc9e26da5c9e4b5a4d8e62a6ee43c6)) +* Send and receive unflattened public inputs to backend ([#3543](https://github.com/noir-lang/noir/issues/3543)) ([a7bdc67](https://github.com/noir-lang/noir/commit/a7bdc67ef3ec2037bffc4f1f472907cad786c319)) +* Solve `fixed_base_scalar_mul` black box functions in rust ([#3153](https://github.com/noir-lang/noir/issues/3153)) ([1c1afbc](https://github.com/noir-lang/noir/commit/1c1afbcddf0b5fdb39f00ad28ae90caf699d1265)) +* **ssa:** Multiple slice mergers ([#2753](https://github.com/noir-lang/noir/issues/2753)) ([8f76fe5](https://github.com/noir-lang/noir/commit/8f76fe5819e95ed111587090e15add48a2b4e859)) +* **stdlib:** Optimize constraint counts in sha256/sha512 ([#3253](https://github.com/noir-lang/noir/issues/3253)) ([d3be552](https://github.com/noir-lang/noir/commit/d3be552149ab375b24b509603fcd7237b374ca5a)) +* Switch to new pedersen implementation ([#3151](https://github.com/noir-lang/noir/issues/3151)) ([35fb3f7](https://github.com/noir-lang/noir/commit/35fb3f7076d52db7ca3bef0a70a3dbccaf82f58d)) +* **traits:** Add impl Trait as function 
return type [#2397](https://github.com/noir-lang/noir/issues/2397) ([#3176](https://github.com/noir-lang/noir/issues/3176)) ([4cb2024](https://github.com/noir-lang/noir/commit/4cb20244abba0abc49be0376611979a786563565)) +* **traits:** Add trait impl for buildin types ([#2964](https://github.com/noir-lang/noir/issues/2964)) ([2c87b27](https://github.com/noir-lang/noir/commit/2c87b273dfdf033dd8c79b78f006a0e9813559d7)) +* **traits:** Added checks for duplicated trait associated items (types, consts, functions) ([#2927](https://github.com/noir-lang/noir/issues/2927)) ([d49492c](https://github.com/noir-lang/noir/commit/d49492cd80d04ee6acc01247b06b088deefcd0c6)) +* **traits:** Allow multiple traits to share the same associated function name and to be implemented for the same type ([#3126](https://github.com/noir-lang/noir/issues/3126)) ([004f8dd](https://github.com/noir-lang/noir/commit/004f8dd733cb23da4ed57b160f6b86d53bc0b5f1)) +* **traits:** Implement trait bounds typechecker + monomorphizer passes ([#2717](https://github.com/noir-lang/noir/issues/2717)) ([5ca99b1](https://github.com/noir-lang/noir/commit/5ca99b128e9991b5272c00292208d85415e70edf)) +* **traits:** Improve support for traits static method resolution ([#2958](https://github.com/noir-lang/noir/issues/2958)) ([0d0d8f7](https://github.com/noir-lang/noir/commit/0d0d8f7d2b401eb6b534dbb175dfd4b26d2a5f7d)) +* **traits:** Multi module support for traits ([#2844](https://github.com/noir-lang/noir/issues/2844)) ([4deb07f](https://github.com/noir-lang/noir/commit/4deb07f80ce110187b66a46dd5624af3b8df3dbd)) +* Use ranges instead of a vector for input witness ([#3314](https://github.com/noir-lang/noir/issues/3314)) ([b12b7ec](https://github.com/noir-lang/noir/commit/b12b7ecb995988c731be5f1f2f67fda952f1a228)) +* **wasm:** Improve and simplify wasm compiler interface ([#2976](https://github.com/noir-lang/noir/issues/2976)) ([1b5124b](https://github.com/noir-lang/noir/commit/1b5124bc74f7ac5360db04b34d1b7b2284061fd3)) +* **wasm:** Update wasm artifacts to match cli artifacts ([#2973](https://github.com/noir-lang/noir/issues/2973)) ([ce16c0b](https://github.com/noir-lang/noir/commit/ce16c0b14565cfe1bc2c9f09ae71643d2657440b)) + + +### Bug Fixes + +* "Missing trait impl" error in trait dispatch ([#3440](https://github.com/noir-lang/noir/issues/3440)) ([52daaec](https://github.com/noir-lang/noir/commit/52daaec504101fe3c0caa30441c17f30a34af475)) +* `compute_note_hash_and_nullifier` compiler check ([#3351](https://github.com/noir-lang/noir/issues/3351)) ([4e2d35f](https://github.com/noir-lang/noir/commit/4e2d35f256bea2fee3a6cbd7af0c0c15a37c0a2e)) +* **3275:** Activate brillig modulo test with negative integers ([#3318](https://github.com/noir-lang/noir/issues/3318)) ([31c493c](https://github.com/noir-lang/noir/commit/31c493ce2082a571d147f707837beb4f3ed2ca64)) +* **3300:** Cache warnings into debug artefacts ([#3313](https://github.com/noir-lang/noir/issues/3313)) ([cb5a15b](https://github.com/noir-lang/noir/commit/cb5a15b9dbcfdaac5d656a122c73dca23855307d)) +* ACIR optimizer should update assertion messages ([#3010](https://github.com/noir-lang/noir/issues/3010)) ([758b6b6](https://github.com/noir-lang/noir/commit/758b6b62918907c1a39f3090a77419003551745e)) +* **acvm:** Return false rather than panicking on invalid ECDSA signatures ([#2783](https://github.com/noir-lang/noir/issues/2783)) ([155abc0](https://github.com/noir-lang/noir/commit/155abc0d99fff41c79163c16bf297d41e5dff0fa)) +* Add `pub` modifier to grumpkin functions 
([#3036](https://github.com/noir-lang/noir/issues/3036)) ([f8990d7](https://github.com/noir-lang/noir/commit/f8990d75b948ce0a6968db659370f7ece7f5db08)) +* Add compiler error message for invalid input types ([#3220](https://github.com/noir-lang/noir/issues/3220)) ([989e80d](https://github.com/noir-lang/noir/commit/989e80d4ea62e68cfab69a1cd16d481cbccc6c02)) +* Add size checks to integer literals ([#3236](https://github.com/noir-lang/noir/issues/3236)) ([7f8fe8c](https://github.com/noir-lang/noir/commit/7f8fe8c88eb2d26ae3a93e2f74430fadc74b4836)) +* Adding proving key initialization ([#3322](https://github.com/noir-lang/noir/issues/3322)) ([3383740](https://github.com/noir-lang/noir/commit/3383740f9a0004f2ee77c9686f81baed6cd1917c)) +* Allow `where` clause on all functions and improve error message ([#3465](https://github.com/noir-lang/noir/issues/3465)) ([1647e33](https://github.com/noir-lang/noir/commit/1647e33564bf56ab8721a365f5fc6bcb38901412)) +* Allow constructors in parentheses in `if` conditions and `for` ranges ([#3219](https://github.com/noir-lang/noir/issues/3219)) ([ad192d1](https://github.com/noir-lang/noir/commit/ad192d1b7492f6ecd5fc98bb88201d6c442dc052)) +* Allow two `TypeVariable::Constant(N)` to unify even if their constants are not equal ([#3225](https://github.com/noir-lang/noir/issues/3225)) ([cc4ca4b](https://github.com/noir-lang/noir/commit/cc4ca4bb5f4fed5f531a2040501fcc6ed53a9ab4)) +* Apply predicate to over/underflow checks ([#3494](https://github.com/noir-lang/noir/issues/3494)) ([fc3edf7](https://github.com/noir-lang/noir/commit/fc3edf7aa5da9074614fa900bbcb57e512e3d56b)) +* **aztec_nr:** Serialise arrays of structs ([#3401](https://github.com/noir-lang/noir/issues/3401)) ([e979a58](https://github.com/noir-lang/noir/commit/e979a587e755d4b715ebacba715d778938026ac0)) +* Change non-constant argument errors from `to_be_radix` from ICE to proper error ([#3048](https://github.com/noir-lang/noir/issues/3048)) ([19ce286](https://github.com/noir-lang/noir/commit/19ce28638fe3ea42ab4984cb99e3898cd17fa8d9)) +* Check for overflow with hexadecimal inputs ([#3004](https://github.com/noir-lang/noir/issues/3004)) ([db1e736](https://github.com/noir-lang/noir/commit/db1e736240c0b74f6f59504db5a50de1c749d395)) +* Compiler version error message ([#3558](https://github.com/noir-lang/noir/issues/3558)) ([026a358](https://github.com/noir-lang/noir/commit/026a3587b01ddc8f444ff588a7b3f3fd1a0bb386)) +* Complete debug metadata ([#3228](https://github.com/noir-lang/noir/issues/3228)) ([2f6509d](https://github.com/noir-lang/noir/commit/2f6509d2acdee5014d65efaca9e6a9e0df3ca160)) +* Conditionally run the "Create or Update PR" step in acir artifacts rebuild workflow ([#2849](https://github.com/noir-lang/noir/issues/2849)) ([63da875](https://github.com/noir-lang/noir/commit/63da875a85a2ad4ad3038443ba52eb28ea44ad10)) +* Corrected the formatting of error message parameters in index out of bounds error ([#3630](https://github.com/noir-lang/noir/issues/3630)) ([3bba386](https://github.com/noir-lang/noir/commit/3bba3862dc8703410681300be894bfd1ebca7336)) +* **debugger:** Step through foreign calls and breakpoints inside Brillig blocks ([#3511](https://github.com/noir-lang/noir/issues/3511)) ([5d77d7a](https://github.com/noir-lang/noir/commit/5d77d7ac82a4df6995ca151b2c8070044cb1fe9d)) +* Determinism of fallback transformer ([#3100](https://github.com/noir-lang/noir/issues/3100)) ([12daad1](https://github.com/noir-lang/noir/commit/12daad19c902caf5ee9e2eb4b6847bde5a924353)) +* Disable modulo for fields 
([#3009](https://github.com/noir-lang/noir/issues/3009)) ([7e68976](https://github.com/noir-lang/noir/commit/7e689768f4af1188e01a1a300a0d2fa152cea504)) +* Disallow returning constant values ([#2978](https://github.com/noir-lang/noir/issues/2978)) ([79c2e88](https://github.com/noir-lang/noir/commit/79c2e88ebefe71ebc0fe457347570df31b24ac36)) +* Do not perform dead instruction elimination on mod,div unless rhs is constant ([#3141](https://github.com/noir-lang/noir/issues/3141)) ([af3d771](https://github.com/noir-lang/noir/commit/af3d77182054845303fa59de92d783453079a048)) +* Do not simply divisions ([#3664](https://github.com/noir-lang/noir/issues/3664)) ([e5b981b](https://github.com/noir-lang/noir/commit/e5b981b08c2b345f00426acafe47b76d5262254d)) +* Docker builds ([#3620](https://github.com/noir-lang/noir/issues/3620)) ([f3eac52](https://github.com/noir-lang/noir/commit/f3eac5282860c1954ea2cee6a21633df5b1865fd)) +* **docs:** Update `editUrl` path for docusaurus ([#3184](https://github.com/noir-lang/noir/issues/3184)) ([4646a93](https://github.com/noir-lang/noir/commit/4646a93f5e95604b5710353764b2c4295efaef6b)) +* Download expected `bb` version if installed backend has version mismatch ([#3150](https://github.com/noir-lang/noir/issues/3150)) ([3f03435](https://github.com/noir-lang/noir/commit/3f03435552fe75b5c7a49bfc8d63d06573381220)) +* Error message for assigning the wrong type is backwards [#2804](https://github.com/noir-lang/noir/issues/2804) ([#2805](https://github.com/noir-lang/noir/issues/2805)) ([b2d62bf](https://github.com/noir-lang/noir/commit/b2d62bff3b7958b3ed62c285a7ebd45045ac2e05)) +* Finer bit size in bound constrain ([#2869](https://github.com/noir-lang/noir/issues/2869)) ([68385e2](https://github.com/noir-lang/noir/commit/68385e294a1501b19b28f3f5510e973283ed0821)) +* Fix aztec library after nargo fmt ([#3014](https://github.com/noir-lang/noir/issues/3014)) ([f43083c](https://github.com/noir-lang/noir/commit/f43083c744ff13aefa4d294a090c9445a9b70aac)) +* Fix crash when using undeclared traits ([#3509](https://github.com/noir-lang/noir/issues/3509)) ([8bb095a](https://github.com/noir-lang/noir/commit/8bb095af77d3b4043855841f1ae5799d75ed94f0)) +* Fix lexer error formatting ([#3274](https://github.com/noir-lang/noir/issues/3274)) ([74bd517](https://github.com/noir-lang/noir/commit/74bd517fe7839465ff086ffe622462bed5159006)) +* Fix method `program_counter`, change method signature ([#3012](https://github.com/noir-lang/noir/issues/3012)) ([5ea522b](https://github.com/noir-lang/noir/commit/5ea522b840ca0f6f90d02ca00f0de32f515d450f)) +* Fix panic in some cases when calling a private function ([#2799](https://github.com/noir-lang/noir/issues/2799)) ([078d5df](https://github.com/noir-lang/noir/commit/078d5df691d4ea48e83c9530cd40b64917eba0a7)) +* Fix panic when using repeated arrays which define variables ([#3221](https://github.com/noir-lang/noir/issues/3221)) ([c4faf3a](https://github.com/noir-lang/noir/commit/c4faf3a0a40eea1ee02e11dfe08b48c6b4438bbf)) +* Fix should_fail_with ([#2940](https://github.com/noir-lang/noir/issues/2940)) ([4f07b84](https://github.com/noir-lang/noir/commit/4f07b84458dba97530d8179a3b9b19101b472616)) +* Fix subtract with underflow in flattening pass ([#2796](https://github.com/noir-lang/noir/issues/2796)) ([f2ed505](https://github.com/noir-lang/noir/commit/f2ed5054b0b0335dd3ecb17369b0d2e6eafb1171)) +* Fixing versioning workflow ([#3296](https://github.com/noir-lang/noir/issues/3296)) 
([3d5e43a](https://github.com/noir-lang/noir/commit/3d5e43a4b8cd9d2bb67d44a2eff93374c3603e42)) +* Flatten public inputs according to their index in numerical rather than ascii order ([#3605](https://github.com/noir-lang/noir/issues/3605)) ([a1f6343](https://github.com/noir-lang/noir/commit/a1f6343b7df1b166b1be4db09527694a3df2738a)) +* Follow dependencies when looking for a struct ([#3405](https://github.com/noir-lang/noir/issues/3405)) ([561b1b8](https://github.com/noir-lang/noir/commit/561b1b8f0b22d8b1800cb3552942a442a27c2a2c)) +* Force recompilation when `output_debug` flag is set. ([#2898](https://github.com/noir-lang/noir/issues/2898)) ([9854416](https://github.com/noir-lang/noir/commit/9854416f5ac03c9da6538edc6a0a540ccccb4b61)) +* **frontend:** Error on unsupported integer annotation ([#2778](https://github.com/noir-lang/noir/issues/2778)) ([90c3d8b](https://github.com/noir-lang/noir/commit/90c3d8baa3b7ae10bc99f6a767121f556ff75967)) +* Impl methods are no longer placed in contracts ([#3255](https://github.com/noir-lang/noir/issues/3255)) ([b673b07](https://github.com/noir-lang/noir/commit/b673b071663d9756d6346954fce7d4ec6e1577dd)) +* Improve error message when multiplying unit values ([#2950](https://github.com/noir-lang/noir/issues/2950)) ([57b7c55](https://github.com/noir-lang/noir/commit/57b7c55e7005876dc2e070c64e1b8115ca8a4242)) +* Include .nr and .sol files in builds ([#3039](https://github.com/noir-lang/noir/issues/3039)) ([ae8d0e9](https://github.com/noir-lang/noir/commit/ae8d0e9013f26b52e8f0bdc9f84866ffec50872d)) +* Issue an error when a module is declared twice & fix module search path ([#2801](https://github.com/noir-lang/noir/issues/2801)) ([7f76910](https://github.com/noir-lang/noir/commit/7f76910ebbd20e3d7a1db7541f2b7f43cd9b546d)) +* Lack of cjs package version ([#2848](https://github.com/noir-lang/noir/issues/2848)) ([adc2d59](https://github.com/noir-lang/noir/commit/adc2d597536b52c690dceb14ea5f8e30a493452c)) +* Make for loops a statement ([#2975](https://github.com/noir-lang/noir/issues/2975)) ([0e266eb](https://github.com/noir-lang/noir/commit/0e266ebc7328866b0b10554e37c9d9012a7b501c)) +* Match rust behaviour for left-shift overflow ([#3518](https://github.com/noir-lang/noir/issues/3518)) ([2d7ceb1](https://github.com/noir-lang/noir/commit/2d7ceb17edda1d9e70901cfd13f45cdc0df0d28d)) +* Minor problems with `aztec` publishing ([#3095](https://github.com/noir-lang/noir/issues/3095)) ([0fc8f20](https://github.com/noir-lang/noir/commit/0fc8f20b8b87d033d27ce18db039399c17f81837)) +* Move mimc to hash submodule ([#3361](https://github.com/noir-lang/noir/issues/3361)) ([3ec29f1](https://github.com/noir-lang/noir/commit/3ec29f17464703716978daacfa9f00c4f5013551)) +* Overflow checks for constant folding ([#3420](https://github.com/noir-lang/noir/issues/3420)) ([b7a6383](https://github.com/noir-lang/noir/commit/b7a6383cf9dc3bc4a71b9644352340c1e9339c81)) +* Parse parenthesized lvalues ([#3058](https://github.com/noir-lang/noir/issues/3058)) ([50ca58c](https://github.com/noir-lang/noir/commit/50ca58c7b133f8b21091dfd304379429284b0d60)) +* Prevent duplicated assert message transformation ([#3038](https://github.com/noir-lang/noir/issues/3038)) ([082a6d0](https://github.com/noir-lang/noir/commit/082a6d02dad67a25692bed15c340a16a848a320e)) +* Prevent mutating immutable bindings to mutable types ([#3075](https://github.com/noir-lang/noir/issues/3075)) ([d5ee20e](https://github.com/noir-lang/noir/commit/d5ee20ea43ccf1130f7d34231562f13e98ea636b)) +* **println:** Enable printing of 
arrays/strings >2 in fmt strings ([#2947](https://github.com/noir-lang/noir/issues/2947)) ([309fa70](https://github.com/noir-lang/noir/commit/309fa70823535c5340f986a17f4ddddcb8723bb8)) +* Recompile artefacts from a different noir version ([#3248](https://github.com/noir-lang/noir/issues/3248)) ([7347b27](https://github.com/noir-lang/noir/commit/7347b2742a5ad38d3d252e657810d061bab83e24)) +* Remove cast for field comparisons in brillig ([#2874](https://github.com/noir-lang/noir/issues/2874)) ([1fc1fdb](https://github.com/noir-lang/noir/commit/1fc1fdb4e15d2ce625ea79d458c5346fab418e49)) +* Remove duplication of code to load stdlib files ([#2868](https://github.com/noir-lang/noir/issues/2868)) ([b694aab](https://github.com/noir-lang/noir/commit/b694aab87c4665a3a89715c9d4096eeb3efb9944)) +* Remove quotes from println output ([#3574](https://github.com/noir-lang/noir/issues/3574)) ([127b6aa](https://github.com/noir-lang/noir/commit/127b6aa1ec8893275fdfa7795db7c52c4fc1d4dd)) +* Remove sha2_block test ([#3360](https://github.com/noir-lang/noir/issues/3360)) ([a48c03b](https://github.com/noir-lang/noir/commit/a48c03bec786d1fb85eef46eeddeccf29e81fe76)) +* Restrict fill_internal_slices pass to acir functions ([#3634](https://github.com/noir-lang/noir/issues/3634)) ([0cad9aa](https://github.com/noir-lang/noir/commit/0cad9aa9c19091b3679bdc6e7fe044194c5db7e0)) +* Return error rather than panicking on unreadable circuits ([#3179](https://github.com/noir-lang/noir/issues/3179)) ([d4f61d3](https://github.com/noir-lang/noir/commit/d4f61d3d51d515e40a5fd02d35315889f841bf53)) +* Show println output before an error occurs in `nargo execute` ([#3211](https://github.com/noir-lang/noir/issues/3211)) ([2f0b80d](https://github.com/noir-lang/noir/commit/2f0b80dda8401ce8962c857dbcd9548e7fdde4aa)) +* Silence unused variable warnings in stdlib ([#2795](https://github.com/noir-lang/noir/issues/2795)) ([5747bfe](https://github.com/noir-lang/noir/commit/5747bfed256f9179321ec0bd1e02f5f82723a4c7)) +* Somewhat reduce mem2reg memory usage ([#3572](https://github.com/noir-lang/noir/issues/3572)) ([9b9ed89](https://github.com/noir-lang/noir/commit/9b9ed890e68b6c7f0671b05919bdc86f593c5df5)) +* Split conditional_regression tests ([#2774](https://github.com/noir-lang/noir/issues/2774)) ([8ed8832](https://github.com/noir-lang/noir/commit/8ed8832c7b475cd28ae697a09f1ad07c539736db)) +* **ssa:** Do not replace previously constrained values ([#2647](https://github.com/noir-lang/noir/issues/2647)) ([d528844](https://github.com/noir-lang/noir/commit/d5288449a10d162a0340818a6beab54dd985a11a)) +* **traits:** Trait functions with a default implementation must not be followed by a semicolon ([#2987](https://github.com/noir-lang/noir/issues/2987)) ([a3593c0](https://github.com/noir-lang/noir/commit/a3593c042163d89bd012b7f901f3b18446209e82)) +* Transform hir before type checks ([#2994](https://github.com/noir-lang/noir/issues/2994)) ([a29b568](https://github.com/noir-lang/noir/commit/a29b568295e40e19dd354bbe47e31f922e08d8c9)) +* Update link to recursion example ([#3224](https://github.com/noir-lang/noir/issues/3224)) ([10eae15](https://github.com/noir-lang/noir/commit/10eae15c6992442876e184c7d2bd36a34f639ea1)) +* Use 128 bits for constant bit shift ([#3586](https://github.com/noir-lang/noir/issues/3586)) ([2ca9b05](https://github.com/noir-lang/noir/commit/2ca9b059317f0513ea21153ebdb468c4f6633de5)) +* Use pedersen_hash for merkle tree ([#3357](https://github.com/noir-lang/noir/issues/3357)) 
([6b74d31](https://github.com/noir-lang/noir/commit/6b74d316fec3b379dd7b51064f1acb1a0e6a15cc)) +* Verify impls arising from function calls exist ([#3472](https://github.com/noir-lang/noir/issues/3472)) ([d7f919d](https://github.com/noir-lang/noir/commit/d7f919dcc001080ed24616ebbc37426ef7ac7638)) + + +### Miscellaneous Chores + +* `generateWitness` now returns a serialized witness file ([#2842](https://github.com/noir-lang/noir/issues/2842)) ([57d3f37](https://github.com/noir-lang/noir/commit/57d3f376d9ceadb75caf37a2bfc0e9394f76bfe6)) +* Bump MSRV to 1.71.1 ([#3353](https://github.com/noir-lang/noir/issues/3353)) ([78f2127](https://github.com/noir-lang/noir/commit/78f2127dd12e36e831e63fd670d9f9d870818af7)) +* Change stdlib function `pedersen` to `pedersen_commitment` ([#3341](https://github.com/noir-lang/noir/issues/3341)) ([964b777](https://github.com/noir-lang/noir/commit/964b7771506bdf8408d8917ab32bf51db8ce09d2)) +* Move circuit serialization circuit into acir ([#3345](https://github.com/noir-lang/noir/issues/3345)) ([122119b](https://github.com/noir-lang/noir/commit/122119b7377cec1b7c42c586c64b69b3bdf4d539)) +* **noir_js:** Rename inner and outer proof methods ([#2845](https://github.com/noir-lang/noir/issues/2845)) ([71dbbb8](https://github.com/noir-lang/noir/commit/71dbbb863a6f262da4804c17965ace627bf3a278)) +* Update to `bb` version 0.7.3 ([#2729](https://github.com/noir-lang/noir/issues/2729)) ([fce68d1](https://github.com/noir-lang/noir/commit/fce68d1404ae66bd7a71417d791dd70545bf24f2)) + +## [0.19.5](https://github.com/noir-lang/noir/compare/v0.19.4...v0.19.5) (2023-12-01) + + +### Features + +* Add `FieldElement::from<usize>` implementation ([#3647](https://github.com/noir-lang/noir/issues/3647)) ([8b7c5aa](https://github.com/noir-lang/noir/commit/8b7c5aa5311f4e6811438f67bd552b641b13fc9a)) +* Add package version to Nargo.toml metadata ([#3427](https://github.com/noir-lang/noir/issues/3427)) ([9e1717c](https://github.com/noir-lang/noir/commit/9e1717c2d96a0b9e394e5cb2fb9e1d09b5259ca0)) +* Add special case for boolean AND in acir-gen ([#3615](https://github.com/noir-lang/noir/issues/3615)) ([824039b](https://github.com/noir-lang/noir/commit/824039bcc0a3275f333ea94aecc701d129f99fe5)) +* Aztec-packages ([#3599](https://github.com/noir-lang/noir/issues/3599)) ([2cd6dc3](https://github.com/noir-lang/noir/commit/2cd6dc39e3a956aa5dff721d47aaf1921f98fded)) +* Aztec-packages ([#3626](https://github.com/noir-lang/noir/issues/3626)) ([e0a96ea](https://github.com/noir-lang/noir/commit/e0a96ea70b17c8c898dd72ac929f3969a4cec1d3)) +* Complex slice inputs for dynamic slice builtins ([#3617](https://github.com/noir-lang/noir/issues/3617)) ([8b23b34](https://github.com/noir-lang/noir/commit/8b23b349ae15afa48f6cbe8962586bbe79e79890)) +* Copy on write optimization for brillig ([#3522](https://github.com/noir-lang/noir/issues/3522)) ([da29c02](https://github.com/noir-lang/noir/commit/da29c02327acb2f46f5a7a25c7404dfa44c82616)) +* Data bus ([#3508](https://github.com/noir-lang/noir/issues/3508)) ([6b0bdbc](https://github.com/noir-lang/noir/commit/6b0bdbced1bfe2c92e90dd7b70ca8f9e5ccb7c0d)) +* Implement integer printing ([#3577](https://github.com/noir-lang/noir/issues/3577)) ([6601408](https://github.com/noir-lang/noir/commit/6601408e378b2afe4fdfd8e04482c39311ccc7e9)) +* Implement raw string literals ([#3556](https://github.com/noir-lang/noir/issues/3556)) ([87a302f](https://github.com/noir-lang/noir/commit/87a302fbc26d5fd42694953935d95a7ccb3d50a0)) +* **lsp:** Add goto definition for functions 
([#3656](https://github.com/noir-lang/noir/issues/3656)) ([7bb7356](https://github.com/noir-lang/noir/commit/7bb735662c86e6533ac58f448ad748e34f02edb7)) +* Reuse witnesses more when interacting with memory ([#3658](https://github.com/noir-lang/noir/issues/3658)) ([5a4a73d](https://github.com/noir-lang/noir/commit/5a4a73d24ddd7d2bb46c85714397ee6e031c97a6)) + + +### Bug Fixes + +* Corrected the formatting of error message parameters in index out of bounds error ([#3630](https://github.com/noir-lang/noir/issues/3630)) ([3bba386](https://github.com/noir-lang/noir/commit/3bba3862dc8703410681300be894bfd1ebca7336)) +* Do not simplify divisions ([#3664](https://github.com/noir-lang/noir/issues/3664)) ([e5b981b](https://github.com/noir-lang/noir/commit/e5b981b08c2b345f00426acafe47b76d5262254d)) +* Docker builds ([#3620](https://github.com/noir-lang/noir/issues/3620)) ([f3eac52](https://github.com/noir-lang/noir/commit/f3eac5282860c1954ea2cee6a21633df5b1865fd)) +* Flatten public inputs according to their index in numerical rather than ascii order ([#3605](https://github.com/noir-lang/noir/issues/3605)) ([a1f6343](https://github.com/noir-lang/noir/commit/a1f6343b7df1b166b1be4db09527694a3df2738a)) +* Restrict fill_internal_slices pass to acir functions ([#3634](https://github.com/noir-lang/noir/issues/3634)) ([0cad9aa](https://github.com/noir-lang/noir/commit/0cad9aa9c19091b3679bdc6e7fe044194c5db7e0)) + +## [0.19.4](https://github.com/noir-lang/noir/compare/v0.19.3...v0.19.4) (2023-11-28) + + +### Features + +* Add --check option to nargo fmt for dry-run formatting verification ([#3530](https://github.com/noir-lang/noir/issues/3530)) ([4469707](https://github.com/noir-lang/noir/commit/4469707d97085fab0f7ade8d015dc827c56156ee)) +* Add support for tuple values in `noir_codegen` ([#3592](https://github.com/noir-lang/noir/issues/3592)) ([346d75f](https://github.com/noir-lang/noir/commit/346d75f9dd9261996d4d7bb80eb7e4118e8f8ce2)) +* Codegen typed interfaces for functions in `noir_codegen` ([#3533](https://github.com/noir-lang/noir/issues/3533)) ([290c463](https://github.com/noir-lang/noir/commit/290c463622a93a34293f73b5bf2aea7ade30a11c)) +* Export `CompiledCircuit` from codegened TS ([#3589](https://github.com/noir-lang/noir/issues/3589)) ([e06c675](https://github.com/noir-lang/noir/commit/e06c67500da11518caffe0e98bdb9cd7f5f89049)) +* Remove type arrays for flat slices ([#3466](https://github.com/noir-lang/noir/issues/3466)) ([8225b2b](https://github.com/noir-lang/noir/commit/8225b2b379ddf145f9418f8517478704f9aac350)) +* Send and receive unflattened public inputs to backend ([#3543](https://github.com/noir-lang/noir/issues/3543)) ([a7bdc67](https://github.com/noir-lang/noir/commit/a7bdc67ef3ec2037bffc4f1f472907cad786c319)) + + +### Bug Fixes + +* Compiler version error message ([#3558](https://github.com/noir-lang/noir/issues/3558)) ([026a358](https://github.com/noir-lang/noir/commit/026a3587b01ddc8f444ff588a7b3f3fd1a0bb386)) +* Remove quotes from println output ([#3574](https://github.com/noir-lang/noir/issues/3574)) ([127b6aa](https://github.com/noir-lang/noir/commit/127b6aa1ec8893275fdfa7795db7c52c4fc1d4dd)) +* Somewhat reduce mem2reg memory usage ([#3572](https://github.com/noir-lang/noir/issues/3572)) ([9b9ed89](https://github.com/noir-lang/noir/commit/9b9ed890e68b6c7f0671b05919bdc86f593c5df5)) +* Use 128 bits for constant bit shift ([#3586](https://github.com/noir-lang/noir/issues/3586)) ([2ca9b05](https://github.com/noir-lang/noir/commit/2ca9b059317f0513ea21153ebdb468c4f6633de5)) + +## 
[0.19.3](https://github.com/noir-lang/noir/compare/v0.19.2...v0.19.3) (2023-11-22) + + +### Features + +* Add debugger commands to introspect (and modify) the current state ([#3391](https://github.com/noir-lang/noir/issues/3391)) ([9e1ad85](https://github.com/noir-lang/noir/commit/9e1ad858cf8a1d9aba0137abe6a749267498bfaf)) +* Add LSP command to profile opcodes in vscode ([#3496](https://github.com/noir-lang/noir/issues/3496)) ([6fbf77a](https://github.com/noir-lang/noir/commit/6fbf77ae2b87a55db92344f5066a82ccaf6c2086)) +* Add lsp formatting ([#3433](https://github.com/noir-lang/noir/issues/3433)) ([286c876](https://github.com/noir-lang/noir/commit/286c87694fda185f25b05cec5504142643bc207f)) +* Allow providing custom foreign call executors to `execute_circuit` ([#3506](https://github.com/noir-lang/noir/issues/3506)) ([d27db33](https://github.com/noir-lang/noir/commit/d27db332f8c320ffd9b5520bebbd83ae09e31de7)) +* Compile without a backend ([#3437](https://github.com/noir-lang/noir/issues/3437)) ([d69cf5d](https://github.com/noir-lang/noir/commit/d69cf5debcc430bb019b6cc95774aac084776dda)) +* Enable the `fmt` command in the help menu ([#3328](https://github.com/noir-lang/noir/issues/3328)) ([63d414c](https://github.com/noir-lang/noir/commit/63d414c06a399525601e3db11dc48b180e93c2d8)) +* Handle constant index operations on simple slices ([#3464](https://github.com/noir-lang/noir/issues/3464)) ([7ae12f8](https://github.com/noir-lang/noir/commit/7ae12f8c5243d31b2f410c246ed6b9e2fcea5d4c)) + + +### Bug Fixes + +* "Missing trait impl" error in trait dispatch ([#3440](https://github.com/noir-lang/noir/issues/3440)) ([52daaec](https://github.com/noir-lang/noir/commit/52daaec504101fe3c0caa30441c17f30a34af475)) +* Adding proving key initialization ([#3322](https://github.com/noir-lang/noir/issues/3322)) ([3383740](https://github.com/noir-lang/noir/commit/3383740f9a0004f2ee77c9686f81baed6cd1917c)) +* Allow `where` clause on all functions and improve error message ([#3465](https://github.com/noir-lang/noir/issues/3465)) ([1647e33](https://github.com/noir-lang/noir/commit/1647e33564bf56ab8721a365f5fc6bcb38901412)) +* Apply predicate to over/underflow checks ([#3494](https://github.com/noir-lang/noir/issues/3494)) ([fc3edf7](https://github.com/noir-lang/noir/commit/fc3edf7aa5da9074614fa900bbcb57e512e3d56b)) +* **debugger:** Step through foreign calls and breakpoints inside Brillig blocks ([#3511](https://github.com/noir-lang/noir/issues/3511)) ([5d77d7a](https://github.com/noir-lang/noir/commit/5d77d7ac82a4df6995ca151b2c8070044cb1fe9d)) +* Fix crash when using undeclared traits ([#3509](https://github.com/noir-lang/noir/issues/3509)) ([8bb095a](https://github.com/noir-lang/noir/commit/8bb095af77d3b4043855841f1ae5799d75ed94f0)) +* Match rust behaviour for left-shift overflow ([#3518](https://github.com/noir-lang/noir/issues/3518)) ([2d7ceb1](https://github.com/noir-lang/noir/commit/2d7ceb17edda1d9e70901cfd13f45cdc0df0d28d)) +* Verify impls arising from function calls exist ([#3472](https://github.com/noir-lang/noir/issues/3472)) ([d7f919d](https://github.com/noir-lang/noir/commit/d7f919dcc001080ed24616ebbc37426ef7ac7638)) + ## [0.19.2](https://github.com/noir-lang/noir/compare/v0.19.1...v0.19.2) (2023-11-07) @@ -1075,7 +1458,7 @@ * **acvm:** Update to acvm 0.4.1 ([#779](https://github.com/noir-lang/noir/issues/779)) ([6f57e86](https://github.com/noir-lang/noir/commit/6f57e86c3d51191aa516a3b9315337b925810433)) * **ci:** Add concurrency group for rust workflow 
([#806](https://github.com/noir-lang/noir/issues/806)) ([1b80f55](https://github.com/noir-lang/noir/commit/1b80f559599c2a7d7b8697f42f63db8e59d318c5)) -* **ci:** Add concurreny group for rust workflow ([1b80f55](https://github.com/noir-lang/noir/commit/1b80f559599c2a7d7b8697f42f63db8e59d318c5)) +* **ci:** Add concurrency group for rust workflow ([1b80f55](https://github.com/noir-lang/noir/commit/1b80f559599c2a7d7b8697f42f63db8e59d318c5)) * **ci:** Build binaries when a release is made ([#773](https://github.com/noir-lang/noir/issues/773)) ([a0c0c2c](https://github.com/noir-lang/noir/commit/a0c0c2c354b50c80eba425ba2f8c235015696c35)) * Impls with generics ([#798](https://github.com/noir-lang/noir/issues/798)) ([bea735d](https://github.com/noir-lang/noir/commit/bea735d98e162f42df5957781638101c1e6c75f6)) * **nargo:** add flag to verify created proofs ([#737](https://github.com/noir-lang/noir/issues/737)) ([e981c7c](https://github.com/noir-lang/noir/commit/e981c7ca0ab23073339869a7d45c04ae10fe1adf)) diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 9cbbeeb677f..d2553b003f8 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -92,6 +92,10 @@ This strategy avoids scenarios where pull requests grow too large/out-of-scope a The easiest way to do this is to have multiple Conventional Commits while you work and then you can cherry-pick the smaller changes into separate branches for pull requesting. +### Typos and other small changes + +Significant changes, like new features or important bug fixes, typically have a more pronounced impact on the project’s overall development. For smaller fixes, such as typos, we encourage you to report them instead of opening PRs. This approach helps us manage our resources effectively and ensures that every change contributes meaningfully to the project. PRs involving such smaller fixes will likely be closed and incorporated in PRs authored by the core team. + ### Reviews For any repository in the noir-lang organization, we require code review & approval by __one__ Noir team member before the changes are merged, as enforced by GitHub branch protection. Non-breaking pull requests may be merged at any time. Breaking pull requests should only be merged when the team has general agreement of the changes and is preparing a breaking release. 
diff --git a/Cargo.lock b/Cargo.lock index 41b94be6ac3..6bdce772fe4 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -4,10 +4,10 @@ version = 3 [[package]] name = "acir" -version = "0.33.0" +version = "0.38.0" dependencies = [ "acir_field", - "base64", + "base64 0.21.2", "bincode", "brillig", "flate2", @@ -23,12 +23,12 @@ dependencies = [ [[package]] name = "acir_field" -version = "0.33.0" +version = "0.38.0" dependencies = [ "ark-bls12-381", "ark-bn254", "ark-ff", - "cfg-if", + "cfg-if 1.0.0", "hex", "num-bigint", "num-traits", @@ -37,28 +37,30 @@ dependencies = [ [[package]] name = "acvm" -version = "0.33.0" +version = "0.38.0" dependencies = [ "acir", "acvm_blackbox_solver", - "acvm_stdlib", "brillig_vm", "indexmap 1.9.3", "num-bigint", "num-traits", "paste", "proptest", - "rand", + "rand 0.8.5", "thiserror", + "tracing", ] [[package]] name = "acvm_blackbox_solver" -version = "0.33.0" +version = "0.38.0" dependencies = [ "acir", "blake2", + "blake3", "k256", + "keccak", "p256", "sha2", "sha3", @@ -67,30 +69,23 @@ dependencies = [ [[package]] name = "acvm_js" -version = "0.33.0" +version = "0.38.0" dependencies = [ "acvm", - "barretenberg_blackbox_solver", + "bn254_blackbox_solver", "build-data", - "cfg-if", + "cfg-if 1.0.0", "console_error_panic_hook", "const-str", "gloo-utils", "js-sys", - "log", "pkg-config", "serde", + "tracing-subscriber", + "tracing-web", "wasm-bindgen", "wasm-bindgen-futures", "wasm-bindgen-test", - "wasm-logger", -] - -[[package]] -name = "acvm_stdlib" -version = "0.33.0" -dependencies = [ - "acir", ] [[package]] @@ -114,7 +109,7 @@ version = "0.7.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5a824f2aa7e75a0c98c5a504fceb80649e9c35265d44525b5f94de4771a395cd" dependencies = [ - "getrandom", + "getrandom 0.2.10", "once_cell", "version_check", ] @@ -125,8 +120,8 @@ version = "0.8.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2c99f64d1e06488f620f932677e24bc6e2897582980441ae90a671415bd7ec2f" dependencies = [ - "cfg-if", - "getrandom", + "cfg-if 1.0.0", + "getrandom 0.2.10", "once_cell", "version_check", ] @@ -217,7 +212,7 @@ checksum = "a4668cab20f66d8d020e1fbc0ebe47217433c1b6c8f2040faf858554e394ace6" [[package]] name = "arena" -version = "0.19.2" +version = "0.22.0" dependencies = [ "generational-arena", ] @@ -348,9 +343,15 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "94893f1e0c6eeab764ade8dc4c0db24caf4fe7cbbaafc0eba0a9030f447b5185" dependencies = [ "num-traits", - "rand", + "rand 0.8.5", ] +[[package]] +name = "arrayref" +version = "0.3.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6b4930d2cb77ce62f89ee5d5289b4ac049559b1c45539271f5ed4fdc7db34545" + [[package]] name = "arrayvec" version = "0.7.4" @@ -394,7 +395,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "10da8f3146014722c89e7859e1d7bb97873125d7346d10ca642ffab794355828" dependencies = [ "async-lock", - "cfg-if", + "cfg-if 1.0.0", "concurrent-queue", "futures-io", "futures-lite", @@ -423,7 +424,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "138985dd8aefbefeaa66b01b7f5b2b6b4c333fcef1cc5f32c63a2aabe37d6de3" dependencies = [ "async-io", - "futures", + "futures 0.3.28", "lsp-types 0.94.1", "pin-project-lite", "rustix", @@ -443,6 +444,14 @@ version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa" +[[package]] +name = 
"aztec_macros" +version = "0.22.0" +dependencies = [ + "iter-extended", + "noirc_frontend", +] + [[package]] name = "backend-interface" version = "0.11.0" @@ -460,6 +469,7 @@ dependencies = [ "tempfile", "test-binary", "thiserror", + "tracing", ] [[package]] @@ -470,41 +480,25 @@ checksum = "4319208da049c43661739c5fade2ba182f09d1dc2299b32298d3a31692b17e12" dependencies = [ "addr2line", "cc", - "cfg-if", + "cfg-if 1.0.0", "libc", "miniz_oxide", "object", "rustc-demangle", ] -[[package]] -name = "barretenberg_blackbox_solver" -version = "0.33.0" -dependencies = [ - "acir", - "acvm_blackbox_solver", - "ark-ec", - "ark-ff", - "flate2", - "getrandom", - "grumpkin", - "js-sys", - "num-bigint", - "pkg-config", - "reqwest", - "rust-embed", - "tar", - "thiserror", - "wasm-bindgen-futures", - "wasmer", -] - [[package]] name = "base16ct" version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "349a06037c7bf932dd7e7d1f653678b2038b9ad46a74102f1fc7bd7872678cce" +[[package]] +name = "base64" +version = "0.13.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9e1b586273c5702936fe7b7d6896644d8be71e6314cfe09d3167c95f712589e8" + [[package]] name = "base64" version = "0.21.2" @@ -592,6 +586,19 @@ dependencies = [ "digest", ] +[[package]] +name = "blake3" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0231f06152bf547e9c2b5194f247cd97aacf6dcd8b15d8e5ec0663f64580da87" +dependencies = [ + "arrayref", + "arrayvec", + "cc", + "cfg-if 1.0.0", + "constant_time_eq", +] + [[package]] name = "block-buffer" version = "0.10.4" @@ -601,9 +608,31 @@ dependencies = [ "generic-array", ] +[[package]] +name = "bn254_blackbox_solver" +version = "0.38.0" +dependencies = [ + "acir", + "acvm_blackbox_solver", + "ark-ec", + "ark-ff", + "flate2", + "getrandom 0.2.10", + "grumpkin", + "js-sys", + "num-bigint", + "pkg-config", + "reqwest", + "rust-embed", + "tar", + "thiserror", + "wasm-bindgen-futures", + "wasmer", +] + [[package]] name = "brillig" -version = "0.33.0" +version = "0.38.0" dependencies = [ "acir_field", "serde", @@ -611,7 +640,7 @@ dependencies = [ [[package]] name = "brillig_vm" -version = "0.33.0" +version = "0.38.0" dependencies = [ "acir", "acvm_blackbox_solver", @@ -754,6 +783,12 @@ version = "1.0.79" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "50d30906286121d95be3d479533b458f87493b30a4b5f79a607db8f5d11aa91f" +[[package]] +name = "cfg-if" +version = "0.1.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4785bdd1c96b2a846b2bd7cc02e86b6b3dbf14e7e53446c4f54c92a361040822" + [[package]] name = "cfg-if" version = "1.0.0" @@ -879,7 +914,7 @@ checksum = "fc4159b76af02757139baf42c0c971c6dc155330999fbfd8eddb29b97fb2db68" dependencies = [ "codespan-reporting", "lsp-types 0.88.0", - "url", + "url 2.4.0", ] [[package]] @@ -926,6 +961,12 @@ version = "1.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "acbf1af155f9b9ef647e42cdc158db4b64a1b61f743629225fde6f3e0be2a7c7" +[[package]] +name = "comma" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "55b672471b4e9f9e95499ea597ff64941a309b2cdbffcc46f2cc5e2d971fd335" + [[package]] name = "concurrent-queue" version = "2.3.0" @@ -953,7 +994,7 @@ version = "0.1.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a06aeb73f470f66dcdbf7223caeebb85984942f22f1adb2a088cf9668146bbbc" dependencies = [ - 
"cfg-if", + "cfg-if 1.0.0", "wasm-bindgen", ] @@ -989,6 +1030,18 @@ dependencies = [ "unicode-xid", ] +[[package]] +name = "constant_time_eq" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f7144d30dcf0fafbce74250a3963025d8d52177934239851c917d29f1df280c2" + +[[package]] +name = "convert_case" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6245d59a3e82a7fc217c5828a6692dbc6dfb63a0c8c90495621f7b9d79704a0e" + [[package]] name = "core-foundation-sys" version = "0.8.4" @@ -1002,7 +1055,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "80128832c58ea9cbd041d2a759ec449224487b2c1e400453d99d244eead87a8e" dependencies = [ "autocfg", - "cfg-if", + "cfg-if 1.0.0", "libc", "scopeguard", "windows-sys 0.33.0", @@ -1014,7 +1067,7 @@ version = "0.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ee34052ee3d93d6d8f3e6f81d85c47921f6653a19a7b70e939e3e602d893a674" dependencies = [ - "cfg-if", + "cfg-if 1.0.0", ] [[package]] @@ -1115,7 +1168,7 @@ version = "1.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b540bd8bc810d3885c6ea91e2018302f68baba2129ab3e88f32389ee9370880d" dependencies = [ - "cfg-if", + "cfg-if 1.0.0", ] [[package]] @@ -1154,13 +1207,23 @@ dependencies = [ "itertools", ] +[[package]] +name = "crossbeam-channel" +version = "0.5.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "14c3242926edf34aec4ac3a77108ad4854bffaa2e4ddc1824124ce59231302d5" +dependencies = [ + "cfg-if 1.0.0", + "crossbeam-utils", +] + [[package]] name = "crossbeam-deque" version = "0.8.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ce6fd6f855243022dcecf8702fef0c297d4338e226845fe067f6341ad9fa0cef" dependencies = [ - "cfg-if", + "cfg-if 1.0.0", "crossbeam-epoch", "crossbeam-utils", ] @@ -1172,7 +1235,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ae211234986c545741a7dc064309f67ee1e5ad243d0e48335adc0484d960bcc7" dependencies = [ "autocfg", - "cfg-if", + "cfg-if 1.0.0", "crossbeam-utils", "memoffset 0.9.0", "scopeguard", @@ -1184,17 +1247,17 @@ version = "0.3.8" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d1cfb3ea8a53f37c40dea2c7bedcbd88bdfae54f5e2175d6ecaff1c988353add" dependencies = [ - "cfg-if", + "cfg-if 1.0.0", "crossbeam-utils", ] [[package]] name = "crossbeam-utils" -version = "0.8.16" +version = "0.8.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5a22b2d63d4d1dc0b7f1b6b2747dd0088008a9be28b6ddf0b1e7d335e3037294" +checksum = "c06d96137f14f244c37f989d9fff8f95e6c18b918e71f36638f8c49112e4c78f" dependencies = [ - "cfg-if", + "cfg-if 1.0.0", ] [[package]] @@ -1204,7 +1267,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ef2b4b23cddf68b89b8f8069890e8c270d54e2d5fe1b143820234805e4cb17ef" dependencies = [ "generic-array", - "rand_core", + "rand_core 0.6.4", "subtle", "zeroize", ] @@ -1240,6 +1303,17 @@ dependencies = [ "memchr", ] +[[package]] +name = "dap" +version = "0.4.1-alpha1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "35c7fc89d334ab745ba679f94c7314c9b17ecdcd923c111df6206e9fd7729fa9" +dependencies = [ + "serde", + "serde_json", + "thiserror", +] + [[package]] name = "darling" version = "0.20.3" @@ -1281,11 +1355,11 @@ version = "5.5.3" source = "registry+https://github.com/rust-lang/crates.io-index" 
checksum = "978747c1d849a7d2ee5e8adc0159961c48fb7e5db2f06af6723b80123bb53856" dependencies = [ - "cfg-if", + "cfg-if 1.0.0", "hashbrown 0.14.0", "lock_api", "once_cell", - "parking_lot_core", + "parking_lot_core 0.9.8", ] [[package]] @@ -1327,6 +1401,19 @@ dependencies = [ "syn 1.0.109", ] +[[package]] +name = "derive_more" +version = "0.99.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4fb810d30a7c1953f91334de7244731fc3f3c10d7fe163338a35b9f640960321" +dependencies = [ + "convert_case", + "proc-macro2", + "quote", + "rustc_version", + "syn 1.0.109", +] + [[package]] name = "difflib" version = "0.4.0" @@ -1359,7 +1446,7 @@ version = "2.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b98cf8ebf19c3d1b223e151f99a4f9f0690dca41414773390fc824184ac833e1" dependencies = [ - "cfg-if", + "cfg-if 1.0.0", "dirs-sys-next", ] @@ -1438,7 +1525,7 @@ dependencies = [ "generic-array", "group", "pkcs8", - "rand_core", + "rand_core 0.6.4", "sec1", "subtle", "zeroize", @@ -1462,7 +1549,7 @@ version = "0.8.32" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "071a31f4ee85403370b58aca746f01041ede6f0da2730960ad001edc2b71b394" dependencies = [ - "cfg-if", + "cfg-if 1.0.0", ] [[package]] @@ -1572,7 +1659,7 @@ version = "3.0.13" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ef033ed5e9bad94e55838ca0ca906db0e043f517adda0c8b79c7a8c66c93c1b5" dependencies = [ - "cfg-if", + "cfg-if 1.0.0", "rustix", "windows-sys 0.48.0", ] @@ -1583,7 +1670,7 @@ version = "0.12.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d013fc25338cc558c5c2cfbad646908fb23591e2404481826742b651c9af7160" dependencies = [ - "rand_core", + "rand_core 0.6.4", "subtle", ] @@ -1602,7 +1689,7 @@ version = "0.2.22" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d4029edd3e734da6fe05b6cd7bd2960760a616bd2ddd0d59a0124746d6272af0" dependencies = [ - "cfg-if", + "cfg-if 1.0.0", "libc", "redox_syscall 0.3.5", "windows-sys 0.48.0", @@ -1641,11 +1728,10 @@ dependencies = [ [[package]] name = "fm" -version = "0.19.2" +version = "0.22.0" dependencies = [ "codespan-reporting", "iter-extended", - "rust-embed", "serde", "tempfile", ] @@ -1662,7 +1748,7 @@ version = "1.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a62bc1cf6f830c2ec14a513a9fb124d0a213a629668a4186f329db21fe045652" dependencies = [ - "percent-encoding", + "percent-encoding 2.3.0", ] [[package]] @@ -1671,6 +1757,12 @@ version = "2.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e6d5a32815ae3f33302d95fdcb2ce17862f8c65363dcfd29360480ba1001fc9c" +[[package]] +name = "futures" +version = "0.1.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3a471a38ef8ed83cd6e40aa59c1ffe17db6855c18e3604d9c4ed8c08ebc28678" + [[package]] name = "futures" version = "0.3.28" @@ -1711,6 +1803,7 @@ dependencies = [ "futures-core", "futures-task", "futures-util", + "num_cpus", ] [[package]] @@ -1758,6 +1851,7 @@ version = "0.3.28" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "26b01e40b772d54cf6c6d721c1d1abd0647a0106a12ecaa1c186273392a69533" dependencies = [ + "futures 0.1.31", "futures-channel", "futures-core", "futures-io", @@ -1785,7 +1879,7 @@ version = "0.2.9" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "877e94aff08e743b651baaea359664321055749b398adff8740a7399af7796e7" dependencies = [ 
- "cfg-if", + "cfg-if 1.0.0", ] [[package]] @@ -1798,16 +1892,27 @@ dependencies = [ "version_check", ] +[[package]] +name = "getrandom" +version = "0.1.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8fc3cb4d91f53b50155bdcfd23f6a4c39ae1969c2ae85982b135750cccaf5fce" +dependencies = [ + "cfg-if 1.0.0", + "libc", + "wasi 0.9.0+wasi-snapshot-preview1", +] + [[package]] name = "getrandom" version = "0.2.10" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "be4136b2a15dd319360be1c07d9933517ccf0be8f16bf62a3bee4f0d618df427" dependencies = [ - "cfg-if", + "cfg-if 1.0.0", "js-sys", "libc", - "wasi", + "wasi 0.11.0+wasi-snapshot-preview1", "wasm-bindgen", ] @@ -1878,7 +1983,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5dfbfb3a6cfbd390d5c9564ab283a0349b9b9fcd46a706c1eb10e0db70bfbac7" dependencies = [ "ff", - "rand_core", + "rand_core 0.6.4", "subtle", ] @@ -1908,7 +2013,7 @@ dependencies = [ "indexmap 1.9.3", "slab", "tokio", - "tokio-util", + "tokio-util 0.7.8", "tracing", ] @@ -2038,7 +2143,7 @@ dependencies = [ "httpdate", "itoa", "pin-project-lite", - "socket2", + "socket2 0.4.9", "tokio", "tower-service", "tracing", @@ -2094,6 +2199,17 @@ version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b9e0384b61958566e926dc50660321d12159025e767c18e043daf26b70104c39" +[[package]] +name = "idna" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "38f09e0f0b1fb55fdee1f17470ad800da77af5186a1a76c026b679358b7e844e" +dependencies = [ + "matches", + "unicode-bidi", + "unicode-normalization", +] + [[package]] name = "idna" version = "0.4.0" @@ -2128,7 +2244,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d0acd33ff0285af998aaf9b57342af478078f53492322fafc47450e09397e0e9" dependencies = [ "bitmaps", - "rand_core", + "rand_core 0.6.4", "rand_xoshiro", "serde", "sized-chunks", @@ -2206,6 +2322,15 @@ dependencies = [ "str_stack", ] +[[package]] +name = "instant" +version = "0.1.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7a5bbe824c507c5da5956355e86a746d82e0e1464f65d862cc5e71da70e94b2c" +dependencies = [ + "cfg-if 1.0.0", +] + [[package]] name = "ipnet" version = "2.8.0" @@ -2225,7 +2350,7 @@ dependencies = [ [[package]] name = "iter-extended" -version = "0.19.2" +version = "0.22.0" [[package]] name = "itertools" @@ -2251,13 +2376,127 @@ dependencies = [ "wasm-bindgen", ] +[[package]] +name = "jsonrpc" +version = "0.16.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "34efde8d2422fb79ed56db1d3aea8fa5b583351d15a26770cdee2f88813dd702" +dependencies = [ + "base64 0.13.1", + "minreq", + "serde", + "serde_json", +] + +[[package]] +name = "jsonrpc-client-transports" +version = "18.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d2b99d4207e2a04fb4581746903c2bb7eb376f88de9c699d0f3e10feeac0cd3a" +dependencies = [ + "derive_more", + "futures 0.3.28", + "jsonrpc-core", + "jsonrpc-pubsub", + "log", + "serde", + "serde_json", + "url 1.7.2", +] + +[[package]] +name = "jsonrpc-core" +version = "18.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "14f7f76aef2d054868398427f6c54943cf3d1caa9a7ec7d0c38d69df97a965eb" +dependencies = [ + "futures 0.3.28", + "futures-executor", + "futures-util", + "log", + "serde", + "serde_derive", + "serde_json", +] + +[[package]] +name = 
"jsonrpc-core-client" +version = "18.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b51da17abecbdab3e3d4f26b01c5ec075e88d3abe3ab3b05dc9aa69392764ec0" +dependencies = [ + "futures 0.3.28", + "jsonrpc-client-transports", +] + +[[package]] +name = "jsonrpc-derive" +version = "18.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5b939a78fa820cdfcb7ee7484466746a7377760970f6f9c6fe19f9edcc8a38d2" +dependencies = [ + "proc-macro-crate", + "proc-macro2", + "quote", + "syn 1.0.109", +] + +[[package]] +name = "jsonrpc-http-server" +version = "18.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e1dea6e07251d9ce6a552abfb5d7ad6bc290a4596c8dcc3d795fae2bbdc1f3ff" +dependencies = [ + "futures 0.3.28", + "hyper", + "jsonrpc-core", + "jsonrpc-server-utils", + "log", + "net2", + "parking_lot 0.11.2", + "unicase", +] + +[[package]] +name = "jsonrpc-pubsub" +version = "18.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "240f87695e6c6f62fb37f05c02c04953cf68d6408b8c1c89de85c7a0125b1011" +dependencies = [ + "futures 0.3.28", + "jsonrpc-core", + "lazy_static", + "log", + "parking_lot 0.11.2", + "rand 0.7.3", + "serde", +] + +[[package]] +name = "jsonrpc-server-utils" +version = "18.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fa4fdea130485b572c39a460d50888beb00afb3e35de23ccd7fad8ff19f0e0d4" +dependencies = [ + "bytes", + "futures 0.3.28", + "globset", + "jsonrpc-core", + "lazy_static", + "log", + "tokio", + "tokio-stream", + "tokio-util 0.6.10", + "unicase", +] + [[package]] name = "k256" version = "0.11.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "72c1e0b51e7ec0a97369623508396067a486bd0cbed95a2659a4b863d28cfc8b" dependencies = [ - "cfg-if", + "cfg-if 1.0.0", "ecdsa", "elliptic-curve", "sha2", @@ -2286,9 +2525,9 @@ checksum = "884e2677b40cc8c339eaefcb701c32ef1fd2493d71118dc0ca4b6a736c93bd67" [[package]] name = "libc" -version = "0.2.147" +version = "0.2.151" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b4668fb0ea861c1df094127ac5f1da3409a82116a4ba74fca2e58ef927159bb3" +checksum = "302d7ab3130588088d277783b1e2d2e10c9e9e4a16dd9050e6ec93fb3e7048f4" [[package]] name = "libm" @@ -2337,7 +2576,7 @@ dependencies = [ "serde", "serde_json", "serde_repr", - "url", + "url 2.4.0", ] [[package]] @@ -2350,7 +2589,7 @@ dependencies = [ "serde", "serde_json", "serde_repr", - "url", + "url 2.4.0", ] [[package]] @@ -2362,6 +2601,21 @@ dependencies = [ "libc", ] +[[package]] +name = "matchers" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8263075bb86c5a1b1427b5ae862e8889656f126e9f77c484496e8b47cf5c5558" +dependencies = [ + "regex-automata 0.1.10", +] + +[[package]] +name = "matches" +version = "0.1.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2532096657941c2fea9c289d370a250971c689d4f143798ff67113ec042024a5" + [[package]] name = "memchr" version = "2.5.0" @@ -2395,15 +2649,6 @@ dependencies = [ "autocfg", ] -[[package]] -name = "memoffset" -version = "0.8.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d61c719bcfbcf5d62b3a09efa6088de8c54bc0bfcd3ea7ae39fcc186108b8de1" -dependencies = [ - "autocfg", -] - [[package]] name = "memoffset" version = "0.9.0" @@ -2428,14 +2673,25 @@ dependencies = [ "adler", ] +[[package]] +name = "minreq" +version = "2.11.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "cb3371dfc7b772c540da1380123674a8e20583aca99907087d990ca58cf44203" +dependencies = [ + "log", + "serde", + "serde_json", +] + [[package]] name = "mio" -version = "0.8.8" +version = "0.8.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "927a765cd3fc26206e66b296465fa9d3e5ab003e651c1b3c060e7956d96b19d2" +checksum = "8f3d0b296e374a4e6f3c7b0a1f5a51d748a0d34c85e7dc48fc3fa9a87657fe09" dependencies = [ "libc", - "wasi", + "wasi 0.11.0+wasi-snapshot-preview1", "windows-sys 0.48.0", ] @@ -2447,12 +2703,17 @@ checksum = "7843ec2de400bcbc6a6328c958dc38e5359da6e93e72e37bc5246bf1ae776389" [[package]] name = "nargo" -version = "0.19.2" +version = "0.22.0" dependencies = [ "acvm", "codespan-reporting", "fm", "iter-extended", + "jsonrpc", + "jsonrpc-core", + "jsonrpc-core-client", + "jsonrpc-derive", + "jsonrpc-http-server", "noirc_abi", "noirc_driver", "noirc_errors", @@ -2462,25 +2723,28 @@ dependencies = [ "rayon", "rustc_version", "serde", + "serial_test", + "tempfile", "thiserror", + "tracing", ] [[package]] name = "nargo_cli" -version = "0.19.2" +version = "0.22.0" dependencies = [ "acvm", "assert_cmd", "assert_fs", "async-lsp", "backend-interface", - "barretenberg_blackbox_solver", - "bb_abstraction_leaks", + "bn254_blackbox_solver", "build-data", "clap", "color-eyre", "const_format", "criterion", + "dap", "dirs", "fm", "hex", @@ -2503,31 +2767,34 @@ dependencies = [ "rustc_version", "serde", "serde_json", + "similar-asserts", "tempfile", "termcolor", "test-binary", "thiserror", "tokio", - "tokio-util", - "toml", + "tokio-util 0.7.8", + "toml 0.7.6", "tower", + "tracing-appender", + "tracing-subscriber", ] [[package]] name = "nargo_fmt" -version = "0.19.2" +version = "0.22.0" dependencies = [ "bytecount", "noirc_frontend", "serde", "similar-asserts", "thiserror", - "toml", + "toml 0.7.6", ] [[package]] name = "nargo_toml" -version = "0.19.2" +version = "0.22.0" dependencies = [ "dirs", "fm", @@ -2536,8 +2803,19 @@ dependencies = [ "semver", "serde", "thiserror", - "toml", - "url", + "toml 0.7.6", + "url 2.4.0", +] + +[[package]] +name = "net2" +version = "0.2.39" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b13b648036a2339d06de780866fbdfda0dde886de7b3af2ddeba8b14f4ee34ac" +dependencies = [ + "cfg-if 0.1.10", + "libc", + "winapi", ] [[package]] @@ -2557,49 +2835,70 @@ checksum = "8f3790c00a0150112de0f4cd161e3d7fc4b2d8a5542ffc35f099a2562aecb35c" dependencies = [ "bitflags 1.3.2", "cc", - "cfg-if", + "cfg-if 1.0.0", "libc", "memoffset 0.6.5", ] [[package]] name = "nix" -version = "0.26.2" +version = "0.25.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bfdda3d196821d6af13126e40375cdf7da646a96114af134d5f417a9a1dc8e1a" +checksum = "f346ff70e7dbfd675fe90590b92d59ef2de15a8779ae305ebcbfd3f0caf59be4" dependencies = [ + "autocfg", "bitflags 1.3.2", - "cfg-if", + "cfg-if 1.0.0", + "libc", + "memoffset 0.6.5", + "pin-utils", +] + +[[package]] +name = "nix" +version = "0.26.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "598beaf3cc6fdd9a5dfb1630c2800c7acd31df7aaf0f565796fba2b53ca1af1b" +dependencies = [ + "bitflags 1.3.2", + "cfg-if 1.0.0", "libc", - "static_assertions", ] [[package]] name = "noir_debugger" -version = "0.19.2" +version = "0.22.0" dependencies = [ "acvm", + "assert_cmd", + "build-data", "codespan-reporting", + "dap", "easy-repl", + "fm", "nargo", + "noirc_driver", "noirc_errors", 
"noirc_printable_type", "owo-colors", + "rexpect", + "serde_json", + "tempfile", + "test-binary", "thiserror", ] [[package]] name = "noir_lsp" -version = "0.19.2" +version = "0.22.0" dependencies = [ "acvm", "async-lsp", - "cfg-if", "codespan-lsp", - "codespan-reporting", "fm", "lsp-types 0.94.1", "nargo", + "nargo_fmt", "nargo_toml", "noirc_driver", "noirc_errors", @@ -2607,6 +2906,7 @@ dependencies = [ "serde", "serde_json", "serde_with", + "thiserror", "tokio", "tower", "wasm-bindgen", @@ -2614,41 +2914,41 @@ dependencies = [ [[package]] name = "noir_lsp_wasm" -version = "0.19.2" +version = "0.22.0" dependencies = [ "acvm", "async-io", "async-lsp", - "futures", + "futures 0.3.28", "noir_lsp", "tower", ] [[package]] name = "noir_wasm" -version = "0.19.2" +version = "0.22.0" dependencies = [ "acvm", "build-data", - "cfg-if", "console_error_panic_hook", "fm", - "getrandom", + "getrandom 0.2.10", "gloo-utils", "js-sys", - "log", "nargo", "noirc_driver", "noirc_errors", "noirc_frontend", + "rust-embed", "serde", + "tracing-subscriber", + "tracing-web", "wasm-bindgen", - "wasm-logger", ] [[package]] name = "noirc_abi" -version = "0.19.2" +version = "0.22.0" dependencies = [ "acvm", "iter-extended", @@ -2660,17 +2960,17 @@ dependencies = [ "strum", "strum_macros", "thiserror", - "toml", + "toml 0.7.6", ] [[package]] name = "noirc_abi_wasm" -version = "0.19.2" +version = "0.22.0" dependencies = [ "acvm", "build-data", "console_error_panic_hook", - "getrandom", + "getrandom 0.2.10", "gloo-utils", "iter-extended", "js-sys", @@ -2682,9 +2982,10 @@ dependencies = [ [[package]] name = "noirc_driver" -version = "0.19.2" +version = "0.22.0" dependencies = [ "acvm", + "aztec_macros", "build-data", "clap", "fm", @@ -2694,12 +2995,14 @@ dependencies = [ "noirc_errors", "noirc_evaluator", "noirc_frontend", + "rust-embed", "serde", + "tracing", ] [[package]] name = "noirc_errors" -version = "0.19.2" +version = "0.22.0" dependencies = [ "acvm", "chumsky", @@ -2708,27 +3011,28 @@ dependencies = [ "fm", "serde", "serde_with", + "tracing", ] [[package]] name = "noirc_evaluator" -version = "0.19.2" +version = "0.22.0" dependencies = [ "acvm", "fxhash", "im", "iter-extended", - "noirc_abi", "noirc_errors", "noirc_frontend", "num-bigint", "serde", "thiserror", + "tracing", ] [[package]] name = "noirc_frontend" -version = "0.19.2" +version = "0.22.0" dependencies = [ "acvm", "arena", @@ -2745,15 +3049,18 @@ dependencies = [ "smol_str", "strum", "strum_macros", + "tempfile", "thiserror", + "tracing", ] [[package]] name = "noirc_printable_type" -version = "0.19.2" +version = "0.22.0" dependencies = [ "acvm", "iter-extended", + "jsonrpc", "regex", "serde", "serde_json", @@ -2766,6 +3073,16 @@ version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "61807f77802ff30975e01f4f071c8ba10c022052f98b3294119f3e615d13e5be" +[[package]] +name = "nu-ansi-term" +version = "0.46.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "77a8165726e8236064dbb45459242600304b42a5ea24ee2948e18e023bf7ba84" +dependencies = [ + "overload", + "winapi", +] + [[package]] name = "num-bigint" version = "0.4.3" @@ -2838,6 +3155,12 @@ version = "11.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0ab1bc2a289d34bd04a330323ac98a1b4bc82c9d9fcb1e66b63caa84da26b575" +[[package]] +name = "overload" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"b15813163c1d831bf4a13c3610c05c0d03b39feb07f7e09fa234dac9b15aaf39" + [[package]] name = "owo-colors" version = "3.5.0" @@ -2861,6 +3184,17 @@ version = "2.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bb813b8af86854136c6922af0598d719255ecb2179515e6e7730d468f05c9cae" +[[package]] +name = "parking_lot" +version = "0.11.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7d17b78036a60663b797adeaee46f5c9dfebb86948d1255007a1d6be0271ff99" +dependencies = [ + "instant", + "lock_api", + "parking_lot_core 0.8.6", +] + [[package]] name = "parking_lot" version = "0.12.1" @@ -2868,7 +3202,21 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3742b2c103b9f06bc9fff0a37ff4912935851bee6d36f3c02bcc755bcfec228f" dependencies = [ "lock_api", - "parking_lot_core", + "parking_lot_core 0.9.8", +] + +[[package]] +name = "parking_lot_core" +version = "0.8.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "60a2cfe6f0ad2bfc16aefa463b497d5c7a5ecd44a23efa72aa342d90177356dc" +dependencies = [ + "cfg-if 1.0.0", + "instant", + "libc", + "redox_syscall 0.2.16", + "smallvec", + "winapi", ] [[package]] @@ -2877,7 +3225,7 @@ version = "0.9.8" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "93f00c865fe7cabf650081affecd3871070f26767e7b2070a3ffae14c654b447" dependencies = [ - "cfg-if", + "cfg-if 1.0.0", "libc", "redox_syscall 0.3.5", "smallvec", @@ -2890,6 +3238,12 @@ version = "1.0.14" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "de3145af08024dea9fa9914f381a17b8fc6034dfb00f3a84013f7ff43f29ed4c" +[[package]] +name = "percent-encoding" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "31010dd2e1ac33d5b46a5b413495239882813e0369f8ed8a5e266f173602f831" + [[package]] name = "percent-encoding" version = "2.3.0" @@ -2914,7 +3268,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5d5285893bb5eb82e6aaf5d59ee909a06a16737a8970984dd7746ba9283498d6" dependencies = [ "phf_shared", - "rand", + "rand 0.8.5", ] [[package]] @@ -2942,9 +3296,9 @@ dependencies = [ [[package]] name = "pin-project-lite" -version = "0.2.10" +version = "0.2.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4c40d25201921e5ff0c862a505c6557ea88568a4e3ace775ab55e93f2f4f9d57" +checksum = "8afb450f006bf6385ca15ef45d71d2288452bc3683ce2e2cacc0d18e4be60b58" [[package]] name = "pin-utils" @@ -3002,7 +3356,7 @@ version = "3.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e53b6af1f60f36f8c2ac2aad5459d75a5a9b4be1e8cdd40264f315d78193e531" dependencies = [ - "cfg-if", + "cfg-if 1.0.0", "concurrent-queue", "pin-project-lite", "rustix", @@ -3017,15 +3371,15 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "978385d59daf9269189d052ca8a84c1acfd0715c0599a5d5188d4acc078ca46a" dependencies = [ "backtrace", - "cfg-if", + "cfg-if 1.0.0", "criterion", "findshlibs", "inferno", "libc", "log", - "nix 0.26.2", + "nix 0.26.4", "once_cell", - "parking_lot", + "parking_lot 0.12.1", "smallvec", "symbolic-demangle", "tempfile", @@ -3094,6 +3448,15 @@ dependencies = [ "unicode-width", ] +[[package]] +name = "proc-macro-crate" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1d6ea3c4595b96363c13943497db34af4460fb474a95c43f4446ad341b8c9785" +dependencies = [ + "toml 0.5.11", +] + [[package]] name = 
"proc-macro-error" version = "1.0.4" @@ -3144,10 +3507,10 @@ dependencies = [ "bitflags 2.4.1", "lazy_static", "num-traits", - "rand", - "rand_chacha", + "rand 0.8.5", + "rand_chacha 0.3.1", "rand_xorshift", - "regex-syntax", + "regex-syntax 0.7.4", "rusty-fork", "tempfile", "unarray", @@ -3213,6 +3576,19 @@ dependencies = [ "nibble_vec", ] +[[package]] +name = "rand" +version = "0.7.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6a6b1679d49b24bbfe0c803429aa1874472f50d9b363131f0e89fc356b544d03" +dependencies = [ + "getrandom 0.1.16", + "libc", + "rand_chacha 0.2.2", + "rand_core 0.5.1", + "rand_hc", +] + [[package]] name = "rand" version = "0.8.5" @@ -3220,8 +3596,18 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404" dependencies = [ "libc", - "rand_chacha", - "rand_core", + "rand_chacha 0.3.1", + "rand_core 0.6.4", +] + +[[package]] +name = "rand_chacha" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f4c8ed856279c9737206bf725bf36935d8666ead7aa69b52be55af369d193402" +dependencies = [ + "ppv-lite86", + "rand_core 0.5.1", ] [[package]] @@ -3231,7 +3617,16 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e6c10a63a0fa32252be49d21e7709d4d4baf8d231c2dbce1eaa8141b9b127d88" dependencies = [ "ppv-lite86", - "rand_core", + "rand_core 0.6.4", +] + +[[package]] +name = "rand_core" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "90bde5296fc891b0cef12a6d03ddccc162ce7b2aff54160af9338f8d40df6d19" +dependencies = [ + "getrandom 0.1.16", ] [[package]] @@ -3240,7 +3635,16 @@ version = "0.6.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c" dependencies = [ - "getrandom", + "getrandom 0.2.10", +] + +[[package]] +name = "rand_hc" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ca3129af7b92a17112d59ad498c6f81eaf463253766b90396d39ea7a39d6613c" +dependencies = [ + "rand_core 0.5.1", ] [[package]] @@ -3249,7 +3653,7 @@ version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d25bf25ec5ae4a3f1b92f929810509a2f53d7dca2f50b794ff57e3face536c8f" dependencies = [ - "rand_core", + "rand_core 0.6.4", ] [[package]] @@ -3258,7 +3662,7 @@ version = "0.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6f97cdb2a36ed4183de61b2f824cc45c9f1037f28afe0a322e9fff4c108b5aaa" dependencies = [ - "rand_core", + "rand_core 0.6.4", ] [[package]] @@ -3305,7 +3709,7 @@ version = "0.4.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b033d837a7cf162d7993aded9304e30a83213c648b6e389db233191f891e5c2b" dependencies = [ - "getrandom", + "getrandom 0.2.10", "redox_syscall 0.2.16", "thiserror", ] @@ -3331,7 +3735,7 @@ dependencies = [ "aho-corasick", "memchr", "regex-automata 0.3.3", - "regex-syntax", + "regex-syntax 0.7.4", ] [[package]] @@ -3339,6 +3743,9 @@ name = "regex-automata" version = "0.1.10" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6c230d73fb8d8c1b9c0b3135c5142a8acee3a0558fb8db5cf1cb65f8d7862132" +dependencies = [ + "regex-syntax 0.6.29", +] [[package]] name = "regex-automata" @@ -3348,9 +3755,15 @@ checksum = "39354c10dd07468c2e73926b23bb9c2caca74c5501e38a35da70406f1d923310" dependencies = [ 
"aho-corasick", "memchr", - "regex-syntax", + "regex-syntax 0.7.4", ] +[[package]] +name = "regex-syntax" +version = "0.6.29" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f162c6dd7b008981e4d40210aca20b4bd0f9b60ca9271061b07f78537722f2e1" + [[package]] name = "regex-syntax" version = "0.7.4" @@ -3384,7 +3797,7 @@ version = "0.11.20" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3e9ad3fe7488d7e34558a2033d45a0c90b72d97b4f80705666fea71472e2e6a1" dependencies = [ - "base64", + "base64 0.21.2", "bytes", "encoding_rs", "futures-core", @@ -3399,7 +3812,7 @@ dependencies = [ "log", "mime", "once_cell", - "percent-encoding", + "percent-encoding 2.3.0", "pin-project-lite", "rustls", "rustls-pemfile", @@ -3409,7 +3822,7 @@ dependencies = [ "tokio", "tokio-rustls", "tower-service", - "url", + "url 2.4.0", "wasm-bindgen", "wasm-bindgen-futures", "web-sys", @@ -3417,6 +3830,19 @@ dependencies = [ "winreg", ] +[[package]] +name = "rexpect" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "01ff60778f96fb5a48adbe421d21bf6578ed58c0872d712e7e08593c195adff8" +dependencies = [ + "comma", + "nix 0.25.1", + "regex", + "tempfile", + "thiserror", +] + [[package]] name = "rfc6979" version = "0.3.1" @@ -3569,7 +3995,7 @@ version = "1.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2d3987094b1d07b653b7dfdc3f70ce9a1da9c51ac18c1b06b662e4f9a0e9f4b2" dependencies = [ - "base64", + "base64 0.21.2", ] [[package]] @@ -3607,7 +4033,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "db7826789c0e25614b03e5a54a0717a86f9ff6e6e5247f92b369472869320039" dependencies = [ "bitflags 1.3.2", - "cfg-if", + "cfg-if 1.0.0", "clipboard-win", "dirs-next", "fd-lock", @@ -3864,7 +4290,7 @@ version = "3.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1402f54f9a3b9e2efe71c1cea24e648acce55887983553eeb858cf3115acfd49" dependencies = [ - "base64", + "base64 0.21.2", "chrono", "hex", "indexmap 1.9.3", @@ -3887,13 +4313,38 @@ dependencies = [ "syn 2.0.26", ] +[[package]] +name = "serial_test" +version = "2.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0e56dd856803e253c8f298af3f4d7eb0ae5e23a737252cd90bb4f3b435033b2d" +dependencies = [ + "dashmap", + "futures 0.3.28", + "lazy_static", + "log", + "parking_lot 0.12.1", + "serial_test_derive", +] + +[[package]] +name = "serial_test_derive" +version = "2.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "91d129178576168c589c9ec973feedf7d3126c01ac2bf08795109aa35b69fb8f" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.26", +] + [[package]] name = "sha2" version = "0.10.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "479fb9d862239e610720565ca91403019f2f00410f1864c5aa7479b950a76ed8" dependencies = [ - "cfg-if", + "cfg-if 1.0.0", "cpufeatures", "digest", ] @@ -3949,7 +4400,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "74233d3b3b2f6d4b006dc19dee745e73e2a6bfb6f93607cd3b02bd5b00797d7c" dependencies = [ "digest", - "rand_core", + "rand_core 0.6.4", ] [[package]] @@ -4049,6 +4500,16 @@ dependencies = [ "winapi", ] +[[package]] +name = "socket2" +version = "0.5.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7b5fac59a5cb5dd637972e5fca70daf0523c9067fcdc4842f053dae04a18f8e9" +dependencies = [ + "libc", + "windows-sys 0.48.0", +] + 
[[package]] name = "spin" version = "0.5.2" @@ -4071,12 +4532,6 @@ version = "1.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a8f112729512f8e442d81f95a8a7ddf2b7c6b8a1a6f509a95864142b30cab2d3" -[[package]] -name = "static_assertions" -version = "1.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a2eb9349b6444b326872e140eb1cf5e7c522154d69e7a0ffb0fb81c06b37543f" - [[package]] name = "str-buf" version = "1.0.6" @@ -4194,7 +4649,7 @@ version = "3.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "cb94d2f3cc536af71caac6b6fcebf65860b347e7ce0cc9ebe8f70d3e521054ef" dependencies = [ - "cfg-if", + "cfg-if 1.0.0", "fastrand", "redox_syscall 0.3.5", "rustix", @@ -4287,7 +4742,7 @@ version = "1.1.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3fdd6f064ccff2d6567adcb3873ca630700f00b5ad3f060c25b5dcfd9a4ce152" dependencies = [ - "cfg-if", + "cfg-if 1.0.0", "once_cell", ] @@ -4346,27 +4801,26 @@ checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20" [[package]] name = "tokio" -version = "1.29.1" +version = "1.35.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "532826ff75199d5833b9d2c5fe410f29235e25704ee5f0ef599fb51c21f4a4da" +checksum = "841d45b238a16291a4e1584e61820b8ae57d696cc5015c459c229ccc6990cc1c" dependencies = [ - "autocfg", "backtrace", "bytes", "libc", "mio", "num_cpus", "pin-project-lite", - "socket2", + "socket2 0.5.5", "tokio-macros", "windows-sys 0.48.0", ] [[package]] name = "tokio-macros" -version = "2.1.0" +version = "2.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "630bdcf245f78637c13ec01ffae6187cca34625e8c63150d424b59e55af2675e" +checksum = "5b8a1e28f2deaa14e508979454cb3a223b10b938b45af148bc0986de36f1923b" dependencies = [ "proc-macro2", "quote", @@ -4383,6 +4837,31 @@ dependencies = [ "tokio", ] +[[package]] +name = "tokio-stream" +version = "0.1.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "397c988d37662c7dda6d2208364a706264bf3d6138b11d436cbac0ad38832842" +dependencies = [ + "futures-core", + "pin-project-lite", + "tokio", +] + +[[package]] +name = "tokio-util" +version = "0.6.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "36943ee01a6d67977dd3f84a5a1d2efeb4ada3a1ae771cadfaa535d9d9fc6507" +dependencies = [ + "bytes", + "futures-core", + "futures-sink", + "log", + "pin-project-lite", + "tokio", +] + [[package]] name = "tokio-util" version = "0.7.8" @@ -4398,6 +4877,15 @@ dependencies = [ "tracing", ] +[[package]] +name = "toml" +version = "0.5.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f4f7f0dd8d50a853a531c426359045b1998f04219d88799810762cd4ad314234" +dependencies = [ + "serde", +] + [[package]] name = "toml" version = "0.7.6" @@ -4457,22 +4945,33 @@ checksum = "b6bc1c9ce2b5135ac7f93c72918fc37feb872bdc6a5533a8b85eb4b86bfdae52" [[package]] name = "tracing" -version = "0.1.37" +version = "0.1.40" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8ce8c33a8d48bd45d624a6e523445fd21ec13d3653cd51f681abf67418f54eb8" +checksum = "c3523ab5a71916ccf420eebdf5521fcef02141234bbc0b8a49f2fdc4544364ef" dependencies = [ - "cfg-if", "log", "pin-project-lite", "tracing-attributes", "tracing-core", ] +[[package]] +name = "tracing-appender" +version = "0.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"3566e8ce28cc0a3fe42519fc80e6b4c943cc4c8cef275620eb8dac2d3d4e06cf" +dependencies = [ + "crossbeam-channel", + "thiserror", + "time", + "tracing-subscriber", +] + [[package]] name = "tracing-attributes" -version = "0.1.26" +version = "0.1.27" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5f4f31f56159e98206da9efd823404b79b6ef3143b4a7ab76e67b1751b25a4ab" +checksum = "34704c8d6ebcbc939824180af020566b01a7c01f80641264eba0999f6c2b6be7" dependencies = [ "proc-macro2", "quote", @@ -4481,9 +4980,9 @@ dependencies = [ [[package]] name = "tracing-core" -version = "0.1.31" +version = "0.1.32" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0955b8137a1df6f1a2e9a37d8a6656291ff0297c1a97c24e0d8425fe2312f79a" +checksum = "c06d3da6113f116aaee68e4d601191614c9053067f9ab7f6edbcb161237daa54" dependencies = [ "once_cell", "valuable", @@ -4499,15 +4998,46 @@ dependencies = [ "tracing-subscriber", ] +[[package]] +name = "tracing-log" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ee855f1f400bd0e5c02d150ae5de3840039a3f54b025156404e34c23c03f47c3" +dependencies = [ + "log", + "once_cell", + "tracing-core", +] + [[package]] name = "tracing-subscriber" -version = "0.3.17" +version = "0.3.18" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "30a651bc37f915e81f087d86e62a18eec5f79550c7faff886f7090b4ea757c77" +checksum = "ad0f048c97dbd9faa9b7df56362b8ebcaa52adb06b498c050d2f4e32f90a7a8b" dependencies = [ + "matchers", + "nu-ansi-term", + "once_cell", + "regex", "sharded-slab", + "smallvec", "thread_local", + "tracing", + "tracing-core", + "tracing-log", +] + +[[package]] +name = "tracing-web" +version = "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b9e6a141feebd51f8d91ebfd785af50fca223c570b86852166caa3b141defe7c" +dependencies = [ + "js-sys", "tracing-core", + "tracing-subscriber", + "wasm-bindgen", + "web-sys", ] [[package]] @@ -4537,6 +5067,15 @@ version = "0.1.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "eaea85b334db583fe3274d12b4cd1880032beab409c0d774be044d4480ab9a94" +[[package]] +name = "unicase" +version = "2.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f7d2d4dafb69621809a81864c9c1b864479e1235c0dd4e199924b9742439ed89" +dependencies = [ + "version_check", +] + [[package]] name = "unicode-bidi" version = "0.3.13" @@ -4588,6 +5127,17 @@ version = "0.7.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a156c684c91ea7d62626509bce3cb4e1d9ed5c4d978f7b4352658f96a4c26b4a" +[[package]] +name = "url" +version = "1.7.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dd4e7c0d531266369519a4aa4f399d748bd37043b00bde1e4ff1f60a120b355a" +dependencies = [ + "idna 0.1.5", + "matches", + "percent-encoding 1.0.1", +] + [[package]] name = "url" version = "2.4.0" @@ -4595,8 +5145,8 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "50bff7831e19200a85b17131d085c25d7811bc4e186efdaf54bbd132994a88cb" dependencies = [ "form_urlencoded", - "idna", - "percent-encoding", + "idna 0.4.0", + "percent-encoding 2.3.0", "serde", ] @@ -4668,6 +5218,12 @@ dependencies = [ "try-lock", ] +[[package]] +name = "wasi" +version = "0.9.0+wasi-snapshot-preview1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cccddf32554fecc6acb585f82a32a72e28b48f8c4c1883ddfeeeaa96f7d8e519" + [[package]] name = 
"wasi" version = "0.11.0+wasi-snapshot-preview1" @@ -4680,7 +5236,7 @@ version = "0.2.86" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5bba0e8cb82ba49ff4e229459ff22a191bbe9a1cb3a341610c9c33efc27ddf73" dependencies = [ - "cfg-if", + "cfg-if 1.0.0", "serde", "serde_json", "wasm-bindgen-macro", @@ -4701,36 +5257,13 @@ dependencies = [ "wasm-bindgen-shared", ] -[[package]] -name = "wasm-bindgen-downcast" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5dac026d43bcca6e7ce1c0956ba68f59edf6403e8e930a5d891be72c31a44340" -dependencies = [ - "js-sys", - "once_cell", - "wasm-bindgen", - "wasm-bindgen-downcast-macros", -] - -[[package]] -name = "wasm-bindgen-downcast-macros" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c5020cfa87c7cecefef118055d44e3c1fc122c7ec25701d528ee458a0b45f38f" -dependencies = [ - "proc-macro2", - "quote", - "syn 1.0.109", -] - [[package]] name = "wasm-bindgen-futures" version = "0.4.36" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2d1985d03709c53167ce907ff394f5316aa22cb4e12761295c5dc57dacb6297e" dependencies = [ - "cfg-if", + "cfg-if 1.0.0", "js-sys", "wasm-bindgen", "web-sys", @@ -4798,25 +5331,14 @@ dependencies = [ "leb128", ] -[[package]] -name = "wasm-logger" -version = "0.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "074649a66bb306c8f2068c9016395fa65d8e08d2affcbf95acf3c24c3ab19718" -dependencies = [ - "log", - "wasm-bindgen", - "web-sys", -] - [[package]] name = "wasmer" -version = "4.2.3" +version = "4.2.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "50cb1ae2956aac1fbbcf334c543c1143cdf7d5b0a5fb6c3d23a17bf37dd1f47b" +checksum = "ce45cc009177ca345a6d041f9062305ad467d15e7d41494f5b81ab46d62d7a58" dependencies = [ "bytes", - "cfg-if", + "cfg-if 1.0.0", "derivative", "indexmap 1.9.3", "js-sys", @@ -4828,7 +5350,6 @@ dependencies = [ "target-lexicon", "thiserror", "wasm-bindgen", - "wasm-bindgen-downcast", "wasmer-compiler", "wasmer-compiler-cranelift", "wasmer-derive", @@ -4842,13 +5363,13 @@ dependencies = [ [[package]] name = "wasmer-compiler" -version = "4.2.3" +version = "4.2.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "12fd9aeef339095798d1e04957d5657d97490b1112f145cbf08b98f6393b4a0a" +checksum = "e044f6140c844602b920deb4526aea3cc9c0d7cf23f00730bb9b2034669f522a" dependencies = [ "backtrace", "bytes", - "cfg-if", + "cfg-if 1.0.0", "enum-iterator", "enumset", "lazy_static", @@ -4869,9 +5390,9 @@ dependencies = [ [[package]] name = "wasmer-compiler-cranelift" -version = "4.2.3" +version = "4.2.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "344f5f1186c122756232fe7f156cc8d2e7bf333d5a658e81e25efa3415c26d07" +checksum = "32ce02358eb44a149d791c1d6648fb7f8b2f99cd55e3c4eef0474653ec8cc889" dependencies = [ "cranelift-codegen", "cranelift-entity", @@ -4888,9 +5409,9 @@ dependencies = [ [[package]] name = "wasmer-derive" -version = "4.2.3" +version = "4.2.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2ac8c1f2dc0ed3c7412a5546e468365184a461f8ce7dfe2a707b621724339f91" +checksum = "c782d80401edb08e1eba206733f7859db6c997fc5a7f5fb44edc3ecd801468f6" dependencies = [ "proc-macro-error", "proc-macro2", @@ -4900,9 +5421,9 @@ dependencies = [ [[package]] name = "wasmer-types" -version = "4.2.3" +version = "4.2.4" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "5a57ecbf218c0a9348d4dfbdac0f9d42d9201ae276dffb13e61ea4ff939ecce7" +checksum = "fd09e80d4d74bb9fd0ce6c3c106b1ceba1a050f9948db9d9b78ae53c172d6157" dependencies = [ "bytecheck", "enum-iterator", @@ -4916,13 +5437,13 @@ dependencies = [ [[package]] name = "wasmer-vm" -version = "4.2.3" +version = "4.2.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "60c3513477bc0097250f6e34a640e2a903bb0ee57e6bb518c427f72c06ac7728" +checksum = "bdcd8a4fd36414a7b6a003dbfbd32393bce3e155d715dd877c05c1b7a41d224d" dependencies = [ "backtrace", "cc", - "cfg-if", + "cfg-if 1.0.0", "corosensei", "crossbeam-queue", "dashmap", @@ -4933,7 +5454,7 @@ dependencies = [ "lazy_static", "libc", "mach", - "memoffset 0.8.0", + "memoffset 0.9.0", "more-asserts", "region", "scopeguard", @@ -4955,7 +5476,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f2ea896273ea99b15132414be1da01ab0d8836415083298ecaffbe308eaac87a" dependencies = [ "indexmap 1.9.3", - "url", + "url 2.4.0", ] [[package]] @@ -5225,7 +5746,7 @@ version = "0.50.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "524e57b2c537c0f9b1e69f1965311ec12182b4122e45035b1508cd24d2adadb1" dependencies = [ - "cfg-if", + "cfg-if 1.0.0", "windows-sys 0.48.0", ] diff --git a/Cargo.toml b/Cargo.toml index 7a10f282437..da8f5ccc9b3 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,6 +1,7 @@ [workspace] members = [ + "aztec_macros", "compiler/noirc_evaluator", "compiler/noirc_frontend", "compiler/noirc_errors", @@ -28,18 +29,17 @@ members = [ "acvm-repo/acir", "acvm-repo/acvm", "acvm-repo/acvm_js", - "acvm-repo/stdlib", "acvm-repo/brillig", "acvm-repo/brillig_vm", "acvm-repo/blackbox_solver", - "acvm-repo/barretenberg_blackbox_solver", + "acvm-repo/bn254_blackbox_solver", ] default-members = ["tooling/nargo_cli"] resolver = "2" [workspace.package] # x-release-please-start-version -version = "0.19.2" +version = "0.22.0" # x-release-please-end authors = ["The Noir Team "] edition = "2021" @@ -50,14 +50,14 @@ repository = "https://github.com/noir-lang/noir/" [workspace.dependencies] # ACVM workspace dependencies -acir = { path = "acvm-repo/acir", default-features = false } -acvm = { path = "acvm-repo/acvm" } -acir_field = { path = "acvm-repo/acir_field", default-features = false } -stdlib = { package = "acvm_stdlib", path = "acvm-repo/stdlib", default-features = false } -brillig = { path = "acvm-repo/brillig", default-features = false } -brillig_vm = { path = "acvm-repo/brillig_vm", default-features = false } -acvm_blackbox_solver = { path = "acvm-repo/blackbox_solver", default-features = false } -barretenberg_blackbox_solver = { path = "acvm-repo/barretenberg_blackbox_solver", default-features = false } +acir_field = { version = "0.38.0", path = "acvm-repo/acir_field", default-features = false } +acir = { version = "0.38.0", path = "acvm-repo/acir", default-features = false } +acvm = { version = "0.38.0", path = "acvm-repo/acvm" } +stdlib = { version = "0.37.1", package = "acvm_stdlib", path = "acvm-repo/stdlib", default-features = false } +brillig = { version = "0.38.0", path = "acvm-repo/brillig", default-features = false } +brillig_vm = { version = "0.38.0", path = "acvm-repo/brillig_vm", default-features = false } +acvm_blackbox_solver = { version = "0.38.0", path = "acvm-repo/blackbox_solver", default-features = false } +bn254_blackbox_solver = { version = "0.38.0", path = "acvm-repo/bn254_blackbox_solver", default-features = 
false } # Noir compiler workspace dependencies arena = { path = "compiler/utils/arena" } @@ -95,9 +95,11 @@ gloo-utils = { version = "0.1", features = ["serde"] } js-sys = "0.3.62" getrandom = "0.2" +# Debugger +dap = "0.4.1-alpha1" cfg-if = "1.0.0" -clap = { version = "4.3.19", features = ["derive"] } +clap = { version = "4.3.19", features = ["derive", "env"] } codespan = { version = "0.11.1", features = ["serialization"] } codespan-lsp = "0.11.1" codespan-reporting = "0.11.1" @@ -120,8 +122,29 @@ hex = "0.4.2" const_format = "0.2.30" num-bigint = "0.4" num-traits = "0.2" +similar-asserts = "1.5.0" +tempfile = "3.6.0" +jsonrpc = { version = "0.16.0", features = ["minreq_http"] } + +tracing = "0.1.40" +tracing-web = "0.1.3" +tracing-subscriber = { version = "0.3.18", features = ["env-filter"] } +rust-embed = "6.6.0" [profile.dev] # This is required to be able to run `cargo test` in acvm_js due to the `locals exceeds maximum` error. # See https://ritik-mishra.medium.com/resolving-the-wasm-pack-error-locals-exceed-maximum-ec3a9d96685b -opt-level = 1 \ No newline at end of file +opt-level = 1 + + +[profile.size] +inherits = "release" +lto = true +opt-level = "z" + +[profile.size-aggressive] +inherits = "release" +strip = true +lto = true +panic = "abort" +opt-level = "z" diff --git a/Dockerfile b/Dockerfile new file mode 100644 index 00000000000..000292e0a47 --- /dev/null +++ b/Dockerfile @@ -0,0 +1,12 @@ +FROM rust:bookworm +WORKDIR /usr/src/noir +COPY . . +RUN ./scripts/bootstrap_native.sh + +# When running the container, mount the users home directory to same location. +FROM ubuntu:lunar +# Install Tini as nargo doesn't handle signals properly. +# Install git as nargo needs it to clone. +RUN apt-get update && apt-get install -y git tini && rm -rf /var/lib/apt/lists/* && apt-get clean +COPY --from=0 /usr/src/noir/target/release/nargo /usr/src/noir/target/release/nargo +ENTRYPOINT ["/usr/bin/tini", "--", "/usr/src/noir/target/release/nargo"] \ No newline at end of file diff --git a/Dockerfile.ci b/Dockerfile.ci new file mode 100644 index 00000000000..57dcbe9cfee --- /dev/null +++ b/Dockerfile.ci @@ -0,0 +1,31 @@ +FROM rust:1.71.1-slim-bookworm as base +RUN apt-get update && apt-get upgrade -y && apt-get install build-essential git -y +WORKDIR /usr/src/noir +ENV PATH="${PATH}:/usr/src/noir/target/release" + +FROM base as base-nargo +COPY . . 
+RUN .github/scripts/nargo-build.sh + +FROM base as base-js +RUN apt-get install -y ca-certificates curl gnupg +RUN mkdir -p /etc/apt/keyrings +RUN curl -fsSL https://deb.nodesource.com/gpgkey/nodesource-repo.gpg.key | gpg --dearmor -o /etc/apt/keyrings/nodesource.gpg +RUN echo "deb [signed-by=/etc/apt/keyrings/nodesource.gpg] https://deb.nodesource.com/node_20.x nodistro main" | tee /etc/apt/sources.list.d/nodesource.list +RUN apt-get update && apt-get install nodejs -y +RUN corepack enable +RUN apt-get install -y jq +COPY yarn.lock package.json .yarnrc.yml ./ +COPY .yarn/ ./.yarn/ +COPY ./acvm-repo/acvm_js/package.json ./acvm-repo/acvm_js/ +COPY ./tooling/noirc_abi_wasm/package.json ./tooling/noirc_abi_wasm/ +COPY ./compiler/wasm/package.json ./compiler/wasm/ +COPY ./tooling/noir_js_types/package.json ./tooling/noir_js_types/ +COPY ./tooling/noir_js_backend_barretenberg/package.json ./tooling/noir_js_backend_barretenberg/ +COPY ./tooling/noir_js/package.json ./tooling/noir_js/ +COPY ./tooling/noir_codegen/package.json ./tooling/noir_codegen/ +COPY ./compiler/integration-tests/package.json ./compiler/integration-tests/ +COPY ./release-tests/package.json ./release-tests/ +COPY ./docs/package.json ./docs/ +RUN yarn --immutable +COPY . . diff --git a/Dockerfile.packages b/Dockerfile.packages new file mode 100644 index 00000000000..17eb0bcd648 --- /dev/null +++ b/Dockerfile.packages @@ -0,0 +1,21 @@ +FROM rust:alpine3.17 +RUN apk update \ + && apk upgrade \ + && apk add --no-cache \ + build-base \ + pkgconfig \ + openssl-dev \ + npm \ + yarn \ + bash \ + jq \ + git + +WORKDIR /usr/src/noir +COPY . . +RUN ./scripts/bootstrap_packages.sh + +FROM scratch +COPY --from=0 /usr/src/noir/packages /usr/src/noir/packages +# For some unknown reason, on alpine only, we need this to exist. +COPY --from=0 /usr/src/noir/node_modules/@noir-lang /usr/src/noir/node_modules/@noir-lang \ No newline at end of file diff --git a/README.md b/README.md index c48c9110187..038ce5691cd 100644 --- a/README.md +++ b/README.md @@ -6,7 +6,7 @@ Noir is a Domain Specific Language for SNARK proving systems. It has been design ## Quick Start -Read the installation section [here](https://noir-lang.org/getting_started/nargo_installation). +Read the installation section [here](https://noir-lang.org/docs/dev/getting_started/installation/). Once you have read through the documentation, you can visit [Awesome Noir](https://github.com/noir-lang/awesome-noir) to run some of the examples that others have created. @@ -58,7 +58,7 @@ This crate's minimum supported rustc version is 1.71.1. ## Working on this project -This project uses [Nix](https://nixos.org/) and [direnv](https://direnv.net/) to streamline the development experience. Please follow [our guidelines](https://noir-lang.org/getting_started/nargo_installation/#option-4-compile-from-source) to setup your environment for working on the project. +This project uses [Nix](https://nixos.org/) and [direnv](https://direnv.net/) to streamline the development experience. Please follow [our guidelines](https://noir-lang.org/docs/dev/getting_started/installation/other_install_methods#option-3-compile-from-source) to setup your environment for working on the project. ### Building against a different local/remote version of Barretenberg diff --git a/acvm-repo/CHANGELOG.md b/acvm-repo/CHANGELOG.md index 3eb4e9146c6..d413bd390c4 100644 --- a/acvm-repo/CHANGELOG.md +++ b/acvm-repo/CHANGELOG.md @@ -5,6 +5,239 @@ All notable changes to this project will be documented in this file. 
The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). +## [0.38.0](https://github.com/noir-lang/noir/compare/v0.37.1...v0.38.0) (2023-12-18) + + +### ⚠ BREAKING CHANGES + +* Remove unused methods on ACIR opcodes ([#3841](https://github.com/noir-lang/noir/issues/3841)) +* Remove partial backend feature ([#3805](https://github.com/noir-lang/noir/issues/3805)) + +### Features + +* Aztec-packages ([#3754](https://github.com/noir-lang/noir/issues/3754)) ([c043265](https://github.com/noir-lang/noir/commit/c043265e550b59bd4296504826fe15d3ce3e9ad2)) +* Speed up transformation of debug messages ([#3815](https://github.com/noir-lang/noir/issues/3815)) ([2a8af1e](https://github.com/noir-lang/noir/commit/2a8af1e4141ffff61547ee1c2837a6392bd5db48)) + + +### Bug Fixes + +* Deserialize odd length hex literals ([#3747](https://github.com/noir-lang/noir/issues/3747)) ([4000fb2](https://github.com/noir-lang/noir/commit/4000fb279221eb07187d657bfaa7f1c7b311abf2)) + + +### Miscellaneous Chores + +* Remove partial backend feature ([#3805](https://github.com/noir-lang/noir/issues/3805)) ([0383100](https://github.com/noir-lang/noir/commit/0383100853a80a5b28b797cdfeae0d271f1b7805)) +* Remove unused methods on ACIR opcodes ([#3841](https://github.com/noir-lang/noir/issues/3841)) ([9e5d0e8](https://github.com/noir-lang/noir/commit/9e5d0e813d61a0bfb5ee68174ed287c5a20f1579)) + +## [0.37.1](https://github.com/noir-lang/noir/compare/v0.37.0...v0.37.1) (2023-12-15) + + +### Features + +* Aztec-packages ([#3754](https://github.com/noir-lang/noir/issues/3754)) ([c043265](https://github.com/noir-lang/noir/commit/c043265e550b59bd4296504826fe15d3ce3e9ad2)) +* Speed up transformation of debug messages ([#3815](https://github.com/noir-lang/noir/issues/3815)) ([2a8af1e](https://github.com/noir-lang/noir/commit/2a8af1e4141ffff61547ee1c2837a6392bd5db48)) + + +### Bug Fixes + +* Deserialize odd length hex literals ([#3747](https://github.com/noir-lang/noir/issues/3747)) ([4000fb2](https://github.com/noir-lang/noir/commit/4000fb279221eb07187d657bfaa7f1c7b311abf2)) + +## [0.37.0](https://github.com/noir-lang/noir/compare/v0.36.0...v0.37.0) (2023-12-01) + + +### ⚠ BREAKING CHANGES + +* Move circuit serialization circuit into acir ([#3345](https://github.com/noir-lang/noir/issues/3345)) +* expose pedersen hash in acir and bb solver ([#3269](https://github.com/noir-lang/noir/issues/3269)) +* Switch to new pedersen implementation ([#3151](https://github.com/noir-lang/noir/issues/3151)) +* Pass ACIR to ACVM by reference rather than passing ownership ([#2872](https://github.com/noir-lang/noir/issues/2872)) +* **wasm:** improve and simplify wasm compiler interface ([#2976](https://github.com/noir-lang/noir/issues/2976)) +* Maintain shape of foreign call arguments ([#2935](https://github.com/noir-lang/noir/issues/2935)) + +### Features + +* **acvm_js:** Export black box solver functions ([#2812](https://github.com/noir-lang/noir/issues/2812)) ([da8a98e](https://github.com/noir-lang/noir/commit/da8a98ed312fe69cb0bdb8f9d0a70ee7a981398f)) +* **acvm:** Separate ACVM optimizations and transformations ([#2979](https://github.com/noir-lang/noir/issues/2979)) ([5865d1a](https://github.com/noir-lang/noir/commit/5865d1a1bca16e1853663c71f893ff81fa3f7185)) +* Add `FieldElement::from<usize>` implementation ([#3647](https://github.com/noir-lang/noir/issues/3647)) 
([8b7c5aa](https://github.com/noir-lang/noir/commit/8b7c5aa5311f4e6811438f67bd552b641b13fc9a)) +* Add ACIR serializer C++ codegen ([#2961](https://github.com/noir-lang/noir/issues/2961)) ([7556982](https://github.com/noir-lang/noir/commit/7556982dbebe25eaa17240abbe270b771b55de45)) +* Add conditional compilation of methods based on the underlying field being used ([#3045](https://github.com/noir-lang/noir/issues/3045)) ([2e008e2](https://github.com/noir-lang/noir/commit/2e008e2438795bbc41b0641e830378b76bf2e194)) +* Add debugger commands to introspect (and modify) the current state ([#3391](https://github.com/noir-lang/noir/issues/3391)) ([9e1ad85](https://github.com/noir-lang/noir/commit/9e1ad858cf8a1d9aba0137abe6a749267498bfaf)) +* Aztec-packages ([#3599](https://github.com/noir-lang/noir/issues/3599)) ([2cd6dc3](https://github.com/noir-lang/noir/commit/2cd6dc39e3a956aa5dff721d47aaf1921f98fded)) +* Expose pedersen hash in acir and bb solver ([#3269](https://github.com/noir-lang/noir/issues/3269)) ([0108b6c](https://github.com/noir-lang/noir/commit/0108b6c1e8dc0dfc766ab3c4944deae9354dec36)) +* Extract Brillig VM to allow step debugging ([#3259](https://github.com/noir-lang/noir/issues/3259)) ([f6431f9](https://github.com/noir-lang/noir/commit/f6431f99711f15a96a4f7fed2f413daece94b5e1)) +* Implement euclidean division and signed division in terms of `AcirVar`s ([#3230](https://github.com/noir-lang/noir/issues/3230)) ([b8b7782](https://github.com/noir-lang/noir/commit/b8b77825410c0e1f95549259a51e2c40de1ec342)) +* Maintain shape of foreign call arguments ([#2935](https://github.com/noir-lang/noir/issues/2935)) ([f7869e6](https://github.com/noir-lang/noir/commit/f7869e6fb492b617e776e538ac4babfa56261d26)) +* Pass ACIR to ACVM by reference rather than passing ownership ([#2872](https://github.com/noir-lang/noir/issues/2872)) ([b3a9c34](https://github.com/noir-lang/noir/commit/b3a9c343993ce3207de62106bda6cb2b2ef3de50)) +* Pass brillig bytecode to VM by reference ([#3030](https://github.com/noir-lang/noir/issues/3030)) ([4ee290b](https://github.com/noir-lang/noir/commit/4ee290b8b6f75bc1974a5750248570eeca8d244e)) +* Refactor debugger and separate core from UI ([#3308](https://github.com/noir-lang/noir/issues/3308)) ([8466810](https://github.com/noir-lang/noir/commit/846681079ab7295b201480a5c8baebc45e858c6f)) +* Replace boolean range constraints with arithmetic opcodes ([#3234](https://github.com/noir-lang/noir/issues/3234)) ([949222c](https://github.com/noir-lang/noir/commit/949222c20d9e65152e3814d02da1c4c41ffc23a5)) +* Save Brillig execution state in ACVM ([#3026](https://github.com/noir-lang/noir/issues/3026)) ([88682da](https://github.com/noir-lang/noir/commit/88682da87ffc9e26da5c9e4b5a4d8e62a6ee43c6)) +* Solve `fixed_base_scalar_mul` black box functions in rust ([#3153](https://github.com/noir-lang/noir/issues/3153)) ([1c1afbc](https://github.com/noir-lang/noir/commit/1c1afbcddf0b5fdb39f00ad28ae90caf699d1265)) +* Switch to new pedersen implementation ([#3151](https://github.com/noir-lang/noir/issues/3151)) ([35fb3f7](https://github.com/noir-lang/noir/commit/35fb3f7076d52db7ca3bef0a70a3dbccaf82f58d)) +* **wasm:** Improve and simplify wasm compiler interface ([#2976](https://github.com/noir-lang/noir/issues/2976)) ([1b5124b](https://github.com/noir-lang/noir/commit/1b5124bc74f7ac5360db04b34d1b7b2284061fd3)) + + +### Bug Fixes + +* ACIR optimizer should update assertion messages ([#3010](https://github.com/noir-lang/noir/issues/3010)) 
([758b6b6](https://github.com/noir-lang/noir/commit/758b6b62918907c1a39f3090a77419003551745e)) +* **acvm:** Return false rather than panicking on invalid ECDSA signatures ([#2783](https://github.com/noir-lang/noir/issues/2783)) ([155abc0](https://github.com/noir-lang/noir/commit/155abc0d99fff41c79163c16bf297d41e5dff0fa)) +* Determinism of fallback transformer ([#3100](https://github.com/noir-lang/noir/issues/3100)) ([12daad1](https://github.com/noir-lang/noir/commit/12daad19c902caf5ee9e2eb4b6847bde5a924353)) +* Fix method `program_counter`, change method signature ([#3012](https://github.com/noir-lang/noir/issues/3012)) ([5ea522b](https://github.com/noir-lang/noir/commit/5ea522b840ca0f6f90d02ca00f0de32f515d450f)) +* Minor problems with `aztec` publishing ([#3095](https://github.com/noir-lang/noir/issues/3095)) ([0fc8f20](https://github.com/noir-lang/noir/commit/0fc8f20b8b87d033d27ce18db039399c17f81837)) +* Prevent duplicated assert message transformation ([#3038](https://github.com/noir-lang/noir/issues/3038)) ([082a6d0](https://github.com/noir-lang/noir/commit/082a6d02dad67a25692bed15c340a16a848a320e)) +* Return error rather than panicking on unreadable circuits ([#3179](https://github.com/noir-lang/noir/issues/3179)) ([d4f61d3](https://github.com/noir-lang/noir/commit/d4f61d3d51d515e40a5fd02d35315889f841bf53)) + + +### Miscellaneous Chores + +* Move circuit serialization circuit into acir ([#3345](https://github.com/noir-lang/noir/issues/3345)) ([122119b](https://github.com/noir-lang/noir/commit/122119b7377cec1b7c42c586c64b69b3bdf4d539)) + +## [0.36.0](https://github.com/noir-lang/noir/compare/v0.35.0...v0.36.0) (2023-12-01) + + +### ⚠ BREAKING CHANGES + +* Move circuit serialization circuit into acir ([#3345](https://github.com/noir-lang/noir/issues/3345)) +* expose pedersen hash in acir and bb solver ([#3269](https://github.com/noir-lang/noir/issues/3269)) +* Switch to new pedersen implementation ([#3151](https://github.com/noir-lang/noir/issues/3151)) +* Pass ACIR to ACVM by reference rather than passing ownership ([#2872](https://github.com/noir-lang/noir/issues/2872)) +* **wasm:** improve and simplify wasm compiler interface ([#2976](https://github.com/noir-lang/noir/issues/2976)) +* Maintain shape of foreign call arguments ([#2935](https://github.com/noir-lang/noir/issues/2935)) + +### Features + +* **acvm_js:** Export black box solver functions ([#2812](https://github.com/noir-lang/noir/issues/2812)) ([da8a98e](https://github.com/noir-lang/noir/commit/da8a98ed312fe69cb0bdb8f9d0a70ee7a981398f)) +* **acvm:** Separate ACVM optimizations and transformations ([#2979](https://github.com/noir-lang/noir/issues/2979)) ([5865d1a](https://github.com/noir-lang/noir/commit/5865d1a1bca16e1853663c71f893ff81fa3f7185)) +* Add `FieldElement::from<usize>` implementation ([#3647](https://github.com/noir-lang/noir/issues/3647)) ([8b7c5aa](https://github.com/noir-lang/noir/commit/8b7c5aa5311f4e6811438f67bd552b641b13fc9a)) +* Add ACIR serializer C++ codegen ([#2961](https://github.com/noir-lang/noir/issues/2961)) ([7556982](https://github.com/noir-lang/noir/commit/7556982dbebe25eaa17240abbe270b771b55de45)) +* Add conditional compilation of methods based on the underlying field being used ([#3045](https://github.com/noir-lang/noir/issues/3045)) ([2e008e2](https://github.com/noir-lang/noir/commit/2e008e2438795bbc41b0641e830378b76bf2e194)) +* Add debugger commands to introspect (and modify) the current state ([#3391](https://github.com/noir-lang/noir/issues/3391)) 
([9e1ad85](https://github.com/noir-lang/noir/commit/9e1ad858cf8a1d9aba0137abe6a749267498bfaf)) +* Aztec-packages ([#3599](https://github.com/noir-lang/noir/issues/3599)) ([2cd6dc3](https://github.com/noir-lang/noir/commit/2cd6dc39e3a956aa5dff721d47aaf1921f98fded)) +* Expose pedersen hash in acir and bb solver ([#3269](https://github.com/noir-lang/noir/issues/3269)) ([0108b6c](https://github.com/noir-lang/noir/commit/0108b6c1e8dc0dfc766ab3c4944deae9354dec36)) +* Extract Brillig VM to allow step debugging ([#3259](https://github.com/noir-lang/noir/issues/3259)) ([f6431f9](https://github.com/noir-lang/noir/commit/f6431f99711f15a96a4f7fed2f413daece94b5e1)) +* Implement euclidean division and signed division in terms of `AcirVar`s ([#3230](https://github.com/noir-lang/noir/issues/3230)) ([b8b7782](https://github.com/noir-lang/noir/commit/b8b77825410c0e1f95549259a51e2c40de1ec342)) +* Maintain shape of foreign call arguments ([#2935](https://github.com/noir-lang/noir/issues/2935)) ([f7869e6](https://github.com/noir-lang/noir/commit/f7869e6fb492b617e776e538ac4babfa56261d26)) +* Pass ACIR to ACVM by reference rather than passing ownership ([#2872](https://github.com/noir-lang/noir/issues/2872)) ([b3a9c34](https://github.com/noir-lang/noir/commit/b3a9c343993ce3207de62106bda6cb2b2ef3de50)) +* Pass brillig bytecode to VM by reference ([#3030](https://github.com/noir-lang/noir/issues/3030)) ([4ee290b](https://github.com/noir-lang/noir/commit/4ee290b8b6f75bc1974a5750248570eeca8d244e)) +* Refactor debugger and separate core from UI ([#3308](https://github.com/noir-lang/noir/issues/3308)) ([8466810](https://github.com/noir-lang/noir/commit/846681079ab7295b201480a5c8baebc45e858c6f)) +* Replace boolean range constraints with arithmetic opcodes ([#3234](https://github.com/noir-lang/noir/issues/3234)) ([949222c](https://github.com/noir-lang/noir/commit/949222c20d9e65152e3814d02da1c4c41ffc23a5)) +* Save Brillig execution state in ACVM ([#3026](https://github.com/noir-lang/noir/issues/3026)) ([88682da](https://github.com/noir-lang/noir/commit/88682da87ffc9e26da5c9e4b5a4d8e62a6ee43c6)) +* Solve `fixed_base_scalar_mul` black box functions in rust ([#3153](https://github.com/noir-lang/noir/issues/3153)) ([1c1afbc](https://github.com/noir-lang/noir/commit/1c1afbcddf0b5fdb39f00ad28ae90caf699d1265)) +* Switch to new pedersen implementation ([#3151](https://github.com/noir-lang/noir/issues/3151)) ([35fb3f7](https://github.com/noir-lang/noir/commit/35fb3f7076d52db7ca3bef0a70a3dbccaf82f58d)) +* **wasm:** Improve and simplify wasm compiler interface ([#2976](https://github.com/noir-lang/noir/issues/2976)) ([1b5124b](https://github.com/noir-lang/noir/commit/1b5124bc74f7ac5360db04b34d1b7b2284061fd3)) + + +### Bug Fixes + +* ACIR optimizer should update assertion messages ([#3010](https://github.com/noir-lang/noir/issues/3010)) ([758b6b6](https://github.com/noir-lang/noir/commit/758b6b62918907c1a39f3090a77419003551745e)) +* **acvm:** Return false rather than panicking on invalid ECDSA signatures ([#2783](https://github.com/noir-lang/noir/issues/2783)) ([155abc0](https://github.com/noir-lang/noir/commit/155abc0d99fff41c79163c16bf297d41e5dff0fa)) +* Determinism of fallback transformer ([#3100](https://github.com/noir-lang/noir/issues/3100)) ([12daad1](https://github.com/noir-lang/noir/commit/12daad19c902caf5ee9e2eb4b6847bde5a924353)) +* Fix method `program_counter`, change method signature ([#3012](https://github.com/noir-lang/noir/issues/3012)) 
([5ea522b](https://github.com/noir-lang/noir/commit/5ea522b840ca0f6f90d02ca00f0de32f515d450f)) +* Minor problems with `aztec` publishing ([#3095](https://github.com/noir-lang/noir/issues/3095)) ([0fc8f20](https://github.com/noir-lang/noir/commit/0fc8f20b8b87d033d27ce18db039399c17f81837)) +* Prevent duplicated assert message transformation ([#3038](https://github.com/noir-lang/noir/issues/3038)) ([082a6d0](https://github.com/noir-lang/noir/commit/082a6d02dad67a25692bed15c340a16a848a320e)) +* Return error rather than panicking on unreadable circuits ([#3179](https://github.com/noir-lang/noir/issues/3179)) ([d4f61d3](https://github.com/noir-lang/noir/commit/d4f61d3d51d515e40a5fd02d35315889f841bf53)) + + +### Miscellaneous Chores + +* Move circuit serialization circuit into acir ([#3345](https://github.com/noir-lang/noir/issues/3345)) ([122119b](https://github.com/noir-lang/noir/commit/122119b7377cec1b7c42c586c64b69b3bdf4d539)) + +## [0.35.0](https://github.com/noir-lang/noir/compare/v0.34.0...v0.35.0) (2023-11-28) + + +### ⚠ BREAKING CHANGES + +* Move circuit serialization circuit into acir ([#3345](https://github.com/noir-lang/noir/issues/3345)) +* expose pedersen hash in acir and bb solver ([#3269](https://github.com/noir-lang/noir/issues/3269)) +* Switch to new pedersen implementation ([#3151](https://github.com/noir-lang/noir/issues/3151)) +* Pass ACIR to ACVM by reference rather than passing ownership ([#2872](https://github.com/noir-lang/noir/issues/2872)) +* **wasm:** improve and simplify wasm compiler interface ([#2976](https://github.com/noir-lang/noir/issues/2976)) +* Maintain shape of foreign call arguments ([#2935](https://github.com/noir-lang/noir/issues/2935)) + +### Features + +* **acvm_js:** Export black box solver functions ([#2812](https://github.com/noir-lang/noir/issues/2812)) ([da8a98e](https://github.com/noir-lang/noir/commit/da8a98ed312fe69cb0bdb8f9d0a70ee7a981398f)) +* **acvm:** Separate ACVM optimizations and transformations ([#2979](https://github.com/noir-lang/noir/issues/2979)) ([5865d1a](https://github.com/noir-lang/noir/commit/5865d1a1bca16e1853663c71f893ff81fa3f7185)) +* Add ACIR serializer C++ codegen ([#2961](https://github.com/noir-lang/noir/issues/2961)) ([7556982](https://github.com/noir-lang/noir/commit/7556982dbebe25eaa17240abbe270b771b55de45)) +* Add conditional compilation of methods based on the underlying field being used ([#3045](https://github.com/noir-lang/noir/issues/3045)) ([2e008e2](https://github.com/noir-lang/noir/commit/2e008e2438795bbc41b0641e830378b76bf2e194)) +* Add debugger commands to introspect (and modify) the current state ([#3391](https://github.com/noir-lang/noir/issues/3391)) ([9e1ad85](https://github.com/noir-lang/noir/commit/9e1ad858cf8a1d9aba0137abe6a749267498bfaf)) +* Expose pedersen hash in acir and bb solver ([#3269](https://github.com/noir-lang/noir/issues/3269)) ([0108b6c](https://github.com/noir-lang/noir/commit/0108b6c1e8dc0dfc766ab3c4944deae9354dec36)) +* Extract Brillig VM to allow step debugging ([#3259](https://github.com/noir-lang/noir/issues/3259)) ([f6431f9](https://github.com/noir-lang/noir/commit/f6431f99711f15a96a4f7fed2f413daece94b5e1)) +* Implement euclidean division and signed division in terms of `AcirVar`s ([#3230](https://github.com/noir-lang/noir/issues/3230)) ([b8b7782](https://github.com/noir-lang/noir/commit/b8b77825410c0e1f95549259a51e2c40de1ec342)) +* Maintain shape of foreign call arguments ([#2935](https://github.com/noir-lang/noir/issues/2935)) 
([f7869e6](https://github.com/noir-lang/noir/commit/f7869e6fb492b617e776e538ac4babfa56261d26)) +* Pass ACIR to ACVM by reference rather than passing ownership ([#2872](https://github.com/noir-lang/noir/issues/2872)) ([b3a9c34](https://github.com/noir-lang/noir/commit/b3a9c343993ce3207de62106bda6cb2b2ef3de50)) +* Pass brillig bytecode to VM by reference ([#3030](https://github.com/noir-lang/noir/issues/3030)) ([4ee290b](https://github.com/noir-lang/noir/commit/4ee290b8b6f75bc1974a5750248570eeca8d244e)) +* Refactor debugger and separate core from UI ([#3308](https://github.com/noir-lang/noir/issues/3308)) ([8466810](https://github.com/noir-lang/noir/commit/846681079ab7295b201480a5c8baebc45e858c6f)) +* Replace boolean range constraints with arithmetic opcodes ([#3234](https://github.com/noir-lang/noir/issues/3234)) ([949222c](https://github.com/noir-lang/noir/commit/949222c20d9e65152e3814d02da1c4c41ffc23a5)) +* Save Brillig execution state in ACVM ([#3026](https://github.com/noir-lang/noir/issues/3026)) ([88682da](https://github.com/noir-lang/noir/commit/88682da87ffc9e26da5c9e4b5a4d8e62a6ee43c6)) +* Solve `fixed_base_scalar_mul` black box functions in rust ([#3153](https://github.com/noir-lang/noir/issues/3153)) ([1c1afbc](https://github.com/noir-lang/noir/commit/1c1afbcddf0b5fdb39f00ad28ae90caf699d1265)) +* Switch to new pedersen implementation ([#3151](https://github.com/noir-lang/noir/issues/3151)) ([35fb3f7](https://github.com/noir-lang/noir/commit/35fb3f7076d52db7ca3bef0a70a3dbccaf82f58d)) +* **wasm:** Improve and simplify wasm compiler interface ([#2976](https://github.com/noir-lang/noir/issues/2976)) ([1b5124b](https://github.com/noir-lang/noir/commit/1b5124bc74f7ac5360db04b34d1b7b2284061fd3)) + + +### Bug Fixes + +* ACIR optimizer should update assertion messages ([#3010](https://github.com/noir-lang/noir/issues/3010)) ([758b6b6](https://github.com/noir-lang/noir/commit/758b6b62918907c1a39f3090a77419003551745e)) +* **acvm:** Return false rather than panicking on invalid ECDSA signatures ([#2783](https://github.com/noir-lang/noir/issues/2783)) ([155abc0](https://github.com/noir-lang/noir/commit/155abc0d99fff41c79163c16bf297d41e5dff0fa)) +* Determinism of fallback transformer ([#3100](https://github.com/noir-lang/noir/issues/3100)) ([12daad1](https://github.com/noir-lang/noir/commit/12daad19c902caf5ee9e2eb4b6847bde5a924353)) +* Fix method `program_counter`, change method signature ([#3012](https://github.com/noir-lang/noir/issues/3012)) ([5ea522b](https://github.com/noir-lang/noir/commit/5ea522b840ca0f6f90d02ca00f0de32f515d450f)) +* Minor problems with `aztec` publishing ([#3095](https://github.com/noir-lang/noir/issues/3095)) ([0fc8f20](https://github.com/noir-lang/noir/commit/0fc8f20b8b87d033d27ce18db039399c17f81837)) +* Prevent duplicated assert message transformation ([#3038](https://github.com/noir-lang/noir/issues/3038)) ([082a6d0](https://github.com/noir-lang/noir/commit/082a6d02dad67a25692bed15c340a16a848a320e)) +* Return error rather than panicking on unreadable circuits ([#3179](https://github.com/noir-lang/noir/issues/3179)) ([d4f61d3](https://github.com/noir-lang/noir/commit/d4f61d3d51d515e40a5fd02d35315889f841bf53)) + + +### Miscellaneous Chores + +* Move circuit serialization circuit into acir ([#3345](https://github.com/noir-lang/noir/issues/3345)) ([122119b](https://github.com/noir-lang/noir/commit/122119b7377cec1b7c42c586c64b69b3bdf4d539)) + +## [0.34.0](https://github.com/noir-lang/noir/compare/v0.33.0...v0.34.0) (2023-11-22) + + +### ⚠ BREAKING CHANGES + +* Move 
circuit serialization circuit into acir ([#3345](https://github.com/noir-lang/noir/issues/3345)) +* expose pedersen hash in acir and bb solver ([#3269](https://github.com/noir-lang/noir/issues/3269)) +* Switch to new pedersen implementation ([#3151](https://github.com/noir-lang/noir/issues/3151)) +* Pass ACIR to ACVM by reference rather than passing ownership ([#2872](https://github.com/noir-lang/noir/issues/2872)) +* **wasm:** improve and simplify wasm compiler interface ([#2976](https://github.com/noir-lang/noir/issues/2976)) +* Maintain shape of foreign call arguments ([#2935](https://github.com/noir-lang/noir/issues/2935)) + +### Features + +* **acvm_js:** Export black box solver functions ([#2812](https://github.com/noir-lang/noir/issues/2812)) ([da8a98e](https://github.com/noir-lang/noir/commit/da8a98ed312fe69cb0bdb8f9d0a70ee7a981398f)) +* **acvm:** Separate ACVM optimizations and transformations ([#2979](https://github.com/noir-lang/noir/issues/2979)) ([5865d1a](https://github.com/noir-lang/noir/commit/5865d1a1bca16e1853663c71f893ff81fa3f7185)) +* Add ACIR serializer C++ codegen ([#2961](https://github.com/noir-lang/noir/issues/2961)) ([7556982](https://github.com/noir-lang/noir/commit/7556982dbebe25eaa17240abbe270b771b55de45)) +* Add conditional compilation of methods based on the underlying field being used ([#3045](https://github.com/noir-lang/noir/issues/3045)) ([2e008e2](https://github.com/noir-lang/noir/commit/2e008e2438795bbc41b0641e830378b76bf2e194)) +* Add debugger commands to introspect (and modify) the current state ([#3391](https://github.com/noir-lang/noir/issues/3391)) ([9e1ad85](https://github.com/noir-lang/noir/commit/9e1ad858cf8a1d9aba0137abe6a749267498bfaf)) +* Expose pedersen hash in acir and bb solver ([#3269](https://github.com/noir-lang/noir/issues/3269)) ([0108b6c](https://github.com/noir-lang/noir/commit/0108b6c1e8dc0dfc766ab3c4944deae9354dec36)) +* Extract Brillig VM to allow step debugging ([#3259](https://github.com/noir-lang/noir/issues/3259)) ([f6431f9](https://github.com/noir-lang/noir/commit/f6431f99711f15a96a4f7fed2f413daece94b5e1)) +* Implement euclidean division and signed division in terms of `AcirVar`s ([#3230](https://github.com/noir-lang/noir/issues/3230)) ([b8b7782](https://github.com/noir-lang/noir/commit/b8b77825410c0e1f95549259a51e2c40de1ec342)) +* Maintain shape of foreign call arguments ([#2935](https://github.com/noir-lang/noir/issues/2935)) ([f7869e6](https://github.com/noir-lang/noir/commit/f7869e6fb492b617e776e538ac4babfa56261d26)) +* Pass ACIR to ACVM by reference rather than passing ownership ([#2872](https://github.com/noir-lang/noir/issues/2872)) ([b3a9c34](https://github.com/noir-lang/noir/commit/b3a9c343993ce3207de62106bda6cb2b2ef3de50)) +* Pass brillig bytecode to VM by reference ([#3030](https://github.com/noir-lang/noir/issues/3030)) ([4ee290b](https://github.com/noir-lang/noir/commit/4ee290b8b6f75bc1974a5750248570eeca8d244e)) +* Refactor debugger and separate core from UI ([#3308](https://github.com/noir-lang/noir/issues/3308)) ([8466810](https://github.com/noir-lang/noir/commit/846681079ab7295b201480a5c8baebc45e858c6f)) +* Replace boolean range constraints with arithmetic opcodes ([#3234](https://github.com/noir-lang/noir/issues/3234)) ([949222c](https://github.com/noir-lang/noir/commit/949222c20d9e65152e3814d02da1c4c41ffc23a5)) +* Save Brillig execution state in ACVM ([#3026](https://github.com/noir-lang/noir/issues/3026)) ([88682da](https://github.com/noir-lang/noir/commit/88682da87ffc9e26da5c9e4b5a4d8e62a6ee43c6)) +* 
Solve `fixed_base_scalar_mul` black box functions in rust ([#3153](https://github.com/noir-lang/noir/issues/3153)) ([1c1afbc](https://github.com/noir-lang/noir/commit/1c1afbcddf0b5fdb39f00ad28ae90caf699d1265)) +* Switch to new pedersen implementation ([#3151](https://github.com/noir-lang/noir/issues/3151)) ([35fb3f7](https://github.com/noir-lang/noir/commit/35fb3f7076d52db7ca3bef0a70a3dbccaf82f58d)) +* **wasm:** Improve and simplify wasm compiler interface ([#2976](https://github.com/noir-lang/noir/issues/2976)) ([1b5124b](https://github.com/noir-lang/noir/commit/1b5124bc74f7ac5360db04b34d1b7b2284061fd3)) + + +### Bug Fixes + +* ACIR optimizer should update assertion messages ([#3010](https://github.com/noir-lang/noir/issues/3010)) ([758b6b6](https://github.com/noir-lang/noir/commit/758b6b62918907c1a39f3090a77419003551745e)) +* **acvm:** Return false rather than panicking on invalid ECDSA signatures ([#2783](https://github.com/noir-lang/noir/issues/2783)) ([155abc0](https://github.com/noir-lang/noir/commit/155abc0d99fff41c79163c16bf297d41e5dff0fa)) +* Determinism of fallback transformer ([#3100](https://github.com/noir-lang/noir/issues/3100)) ([12daad1](https://github.com/noir-lang/noir/commit/12daad19c902caf5ee9e2eb4b6847bde5a924353)) +* Fix method `program_counter`, change method signature ([#3012](https://github.com/noir-lang/noir/issues/3012)) ([5ea522b](https://github.com/noir-lang/noir/commit/5ea522b840ca0f6f90d02ca00f0de32f515d450f)) +* Minor problems with `aztec` publishing ([#3095](https://github.com/noir-lang/noir/issues/3095)) ([0fc8f20](https://github.com/noir-lang/noir/commit/0fc8f20b8b87d033d27ce18db039399c17f81837)) +* Prevent duplicated assert message transformation ([#3038](https://github.com/noir-lang/noir/issues/3038)) ([082a6d0](https://github.com/noir-lang/noir/commit/082a6d02dad67a25692bed15c340a16a848a320e)) +* Return error rather than panicking on unreadable circuits ([#3179](https://github.com/noir-lang/noir/issues/3179)) ([d4f61d3](https://github.com/noir-lang/noir/commit/d4f61d3d51d515e40a5fd02d35315889f841bf53)) + + +### Miscellaneous Chores + +* Move circuit serialization circuit into acir ([#3345](https://github.com/noir-lang/noir/issues/3345)) ([122119b](https://github.com/noir-lang/noir/commit/122119b7377cec1b7c42c586c64b69b3bdf4d539)) + ## [0.33.0](https://github.com/noir-lang/noir/compare/v0.32.0...v0.33.0) (2023-11-07) @@ -415,7 +648,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 ### Features -* add optimisations to fallback black box functions on booleans ([#446](https://github.com/noir-lang/acvm/issues/446)) ([2cfb2a8](https://github.com/noir-lang/acvm/commit/2cfb2a8cf911a81eedbd9da13ab2c616abd67f83)) +* add optimizations to fallback black box functions on booleans ([#446](https://github.com/noir-lang/acvm/issues/446)) ([2cfb2a8](https://github.com/noir-lang/acvm/commit/2cfb2a8cf911a81eedbd9da13ab2c616abd67f83)) * **stdlib:** Add fallback implementation of `Keccak256` black box function ([#445](https://github.com/noir-lang/acvm/issues/445)) ([f7ebb03](https://github.com/noir-lang/acvm/commit/f7ebb03653c971f119700ff8126d9eb5ff01be0f)) ## [0.20.0](https://github.com/noir-lang/acvm/compare/root-v0.19.1...root-v0.20.0) (2023-07-20) @@ -485,7 +718,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 ### Miscellaneous Chores -* **acvm:** Remove `CircuitSimplifer` ([#421](https://github.com/noir-lang/acvm/issues/421)) 
([e07a56d](https://github.com/noir-lang/acvm/commit/e07a56d9c542a7f03ce156761054cd403de0bd23)) +* **acvm:** Remove `CircuitSimplifier` ([#421](https://github.com/noir-lang/acvm/issues/421)) ([e07a56d](https://github.com/noir-lang/acvm/commit/e07a56d9c542a7f03ce156761054cd403de0bd23)) ## [0.17.0](https://github.com/noir-lang/acvm/compare/root-v0.16.0...root-v0.17.0) (2023-07-07) @@ -744,7 +977,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 * require `Backend` to implement `Default` trait ([#223](https://github.com/noir-lang/acvm/issues/223)) * Make GeneralOptimizer crate visible ([#220](https://github.com/noir-lang/acvm/issues/220)) * return `PartialWitnessGeneratorStatus` from `PartialWitnessGenerator.solve` ([#213](https://github.com/noir-lang/acvm/issues/213)) -* organise operator implementations for Expression ([#190](https://github.com/noir-lang/acvm/issues/190)) +* organize operator implementations for Expression ([#190](https://github.com/noir-lang/acvm/issues/190)) ### Features @@ -767,7 +1000,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 ### Miscellaneous Chores * Make GeneralOptimizer crate visible ([#220](https://github.com/noir-lang/acvm/issues/220)) ([64bb346](https://github.com/noir-lang/acvm/commit/64bb346524428a0ce196826ea1e5ccde08ad6201)) -* organise operator implementations for Expression ([#190](https://github.com/noir-lang/acvm/issues/190)) ([a619df6](https://github.com/noir-lang/acvm/commit/a619df614bbb9b2518b788b42a7553b069823a0f)) +* organize operator implementations for Expression ([#190](https://github.com/noir-lang/acvm/issues/190)) ([a619df6](https://github.com/noir-lang/acvm/commit/a619df614bbb9b2518b788b42a7553b069823a0f)) ## [0.9.0](https://github.com/noir-lang/acvm/compare/root-v0.8.1...root-v0.9.0) (2023-04-07) @@ -881,7 +1114,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 * **acir:** make PublicInputs use a BTreeSet rather than Vec ([#99](https://github.com/noir-lang/acvm/issues/99)) * refactor ToRadix to ToRadixLe and ToRadixBe ([#58](https://github.com/noir-lang/acvm/issues/58)) * **acir:** Add keccak256 Opcode ([#91](https://github.com/noir-lang/acvm/issues/91)) -* reorganise compiler in terms of optimisers and transformers ([#88](https://github.com/noir-lang/acvm/issues/88)) +* reorganize compiler in terms of optimizers and transformers ([#88](https://github.com/noir-lang/acvm/issues/88)) ### Features @@ -905,7 +1138,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 ### Miscellaneous Chores * refactor ToRadix to ToRadixLe and ToRadixBe ([#58](https://github.com/noir-lang/acvm/issues/58)) ([2427a27](https://github.com/noir-lang/acvm/commit/2427a275048e598c6d651cce8348a4c55148f235)) -* reorganise compiler in terms of optimisers and transformers ([#88](https://github.com/noir-lang/acvm/issues/88)) ([9329307](https://github.com/noir-lang/acvm/commit/9329307e054de202cfc55207162ad952b70d515e)) +* reorganize compiler in terms of optimizers and transformers ([#88](https://github.com/noir-lang/acvm/issues/88)) ([9329307](https://github.com/noir-lang/acvm/commit/9329307e054de202cfc55207162ad952b70d515e)) ## [0.4.1] - 2023-02-08 diff --git a/acvm-repo/acir/CHANGELOG.md b/acvm-repo/acir/CHANGELOG.md index e31ee66379a..661980a87c9 100644 --- a/acvm-repo/acir/CHANGELOG.md +++ b/acvm-repo/acir/CHANGELOG.md @@ -404,7 +404,7 @@ * replace `MerkleMembership` opcode with `ComputeMerkleRoot` 
([#233](https://github.com/noir-lang/acvm/issues/233)) * return `PartialWitnessGeneratorStatus` from `PartialWitnessGenerator.solve` ([#213](https://github.com/noir-lang/acvm/issues/213)) -* organise operator implementations for Expression ([#190](https://github.com/noir-lang/acvm/issues/190)) +* organize operator implementations for Expression ([#190](https://github.com/noir-lang/acvm/issues/190)) ### Features @@ -415,7 +415,7 @@ ### Miscellaneous Chores -* organise operator implementations for Expression ([#190](https://github.com/noir-lang/acvm/issues/190)) ([a619df6](https://github.com/noir-lang/acvm/commit/a619df614bbb9b2518b788b42a7553b069823a0f)) +* organize operator implementations for Expression ([#190](https://github.com/noir-lang/acvm/issues/190)) ([a619df6](https://github.com/noir-lang/acvm/commit/a619df614bbb9b2518b788b42a7553b069823a0f)) ## [0.9.0](https://github.com/noir-lang/acvm/compare/acir-v0.8.1...acir-v0.9.0) (2023-04-07) @@ -507,7 +507,7 @@ * **acir:** make PublicInputs use a BTreeSet rather than Vec ([#99](https://github.com/noir-lang/acvm/issues/99)) * refactor ToRadix to ToRadixLe and ToRadixBe ([#58](https://github.com/noir-lang/acvm/issues/58)) * **acir:** Add keccak256 Opcode ([#91](https://github.com/noir-lang/acvm/issues/91)) -* reorganise compiler in terms of optimisers and transformers ([#88](https://github.com/noir-lang/acvm/issues/88)) +* reorganize compiler in terms of optimizers and transformers ([#88](https://github.com/noir-lang/acvm/issues/88)) ### Features @@ -518,4 +518,4 @@ ### Miscellaneous Chores * refactor ToRadix to ToRadixLe and ToRadixBe ([#58](https://github.com/noir-lang/acvm/issues/58)) ([2427a27](https://github.com/noir-lang/acvm/commit/2427a275048e598c6d651cce8348a4c55148f235)) -* reorganise compiler in terms of optimisers and transformers ([#88](https://github.com/noir-lang/acvm/issues/88)) ([9329307](https://github.com/noir-lang/acvm/commit/9329307e054de202cfc55207162ad952b70d515e)) +* reorganize compiler in terms of optimizers and transformers ([#88](https://github.com/noir-lang/acvm/issues/88)) ([9329307](https://github.com/noir-lang/acvm/commit/9329307e054de202cfc55207162ad952b70d515e)) diff --git a/acvm-repo/acir/Cargo.toml b/acvm-repo/acir/Cargo.toml index 25d770e1f17..a0877120a58 100644 --- a/acvm-repo/acir/Cargo.toml +++ b/acvm-repo/acir/Cargo.toml @@ -2,7 +2,7 @@ name = "acir" description = "ACIR is the IR that the VM processes, it is analogous to LLVM IR" # x-release-please-start-version -version = "0.33.0" +version = "0.38.0" # x-release-please-end authors.workspace = true edition.workspace = true diff --git a/acvm-repo/acir/acir_docs.md b/acvm-repo/acir/acir_docs.md new file mode 100644 index 00000000000..801aeac1140 --- /dev/null +++ b/acvm-repo/acir/acir_docs.md @@ -0,0 +1,206 @@ +# ACIR documentation (draft) + +## Abstract +This document describes the purpose of ACIR, what it is and how ACIR programs can be used by compilers and proving systems. It is intended to be a reference documentation for ACIR. + +## Introduction +The purpose of ACIR is to make the link between a generic proving system, such as Aztec's Barretenberg, and a frontend, such as Noir, which describes user-specific computations. + +More precisely, Noir is a programming language for zero-knowledge proofs (ZKP) which allows users to write programs in an intuitive way using a high-level language close to Rust syntax. Noir is able to generate a proof of execution of a Noir program, using an external proving system. 
However, proving systems use specific low-level constraint-based languages. Similarly, frontends have their own internal representations for user programs.
+
+The goal of ACIR is to provide a generic open-source intermediate representation that is close to proving system 'languages' but agnostic to any specific proving system, so that it can be used both by proving systems and as a compilation target for frontends. In short, an ACIR program is simply another representation of a program, one dedicated to proving systems.
+
+## Abstract Circuit Intermediate Representation
+ACIR stands for abstract circuit intermediate representation:
+- **abstract circuit**: circuits are a simple computation model where basic computation units, named gates, are connected with wires. Data flows through the wires while gates compute output wires based on their inputs. More formally, they are directed acyclic graphs (DAGs) where the vertices are the gates and the edges are the wires. Because the wires are immutable (their values do not change during an execution), they are well suited for describing computations for ZKPs. Furthermore, we do not lose any expressiveness by using a circuit, as it is well known that any bounded computation can be translated into an arithmetic circuit (i.e. a circuit with only addition and multiplication gates).
+The term abstract here simply means that we do not refer to an actual physical circuit (such as an electronic circuit). Furthermore, we will not use the circuit model exactly, but another model even better suited to ZKPs: the constraint model (see below).
+- **intermediate representation**: The ACIR representation is intermediate because it lies between a frontend and its proving system. ACIR bytecode links the Noir compiler's output to the proving system backend's input.
+
+## The constraint model
+The first step in generating a proof that a specific program was executed is to execute this program. Since the proving system is going to handle ACIR programs, we in fact need to execute an ACIR program, using the user-supplied inputs.
+
+In ACIR terminology, the gates are called opcodes and the wires are called partial witnesses. However, instead of connecting the opcodes together through wires, we create constraints: an opcode constrains a set of wires together.
+This constraint model trivially supersedes the circuit model. For instance, an addition gate output_wire = input_wire_1 + input_wire_2 can be expressed with the following arithmetic constraint: output_wire - (input_wire_1 + input_wire_2) = 0
+
+## Solving
+Because of these constraints, executing an ACIR program is called solving the witnesses. Starting from the witnesses representing the inputs of the program, whose values are supplied by the user, we find out what the other witnesses should be by executing/solving the constraints one by one in the order they were defined.
+
+For instance, if the values of input_wire_1 and input_wire_2 are supplied as 3 and 8, then we can solve the opcode output_wire - (input_wire_1 + input_wire_2) = 0 by saying that output_wire is 11.
+
+In summary, the workflow is the following:
+1. user program -> (compilation) ACIR, a list of opcodes which constrain (partial) witnesses
+2. user inputs + ACIR -> (execution/solving) assign values to all the (partial) witnesses
+3.
witness assignment + ACIR -> (proving system) proof + + +Although the ordering of opcode does not matter in theory, since a system of equations is not dependent on its ordering, in practice it matters a lot for the solving (i.e the performance of the execution). ACIR opcodes **must be ordered** so that each opcode can be resolved one after the other. + + +The values of the witnesses lie in the scalar field of the proving system. We will refer to it as FieldElement or ACIR field. The proving system needs the values of all the partial witnesses and all the constraints in order to generate a proof. + + +*Remark*: The value of a partial witness is unique and fixed throughout a program execution, although in some rare cases, multiple values are possible for a same execution and witness (when there are several valid solutions to the constraints). Having multiple possible values for a witness may indicate that the circuit is not safe. + +*Remark*: Why do we use the term partial witnesses? It is because the proving system may create other constraints and witnesses (especially with BlackBoxFuncCall, see below). A proof refers to a full witness assignments and their constraints. ACIR opcodes and their partial witnesses are still an intermediate representation before getting the full list of constraints and witnesses. For the sake of simplicity, we will refer to witness instead of partial witness from now on. + + +## ACIR Reference +We assume here that the proving system is Barretenberg. Some parameters may slightly change with another proving system, in particular the bit size of FieldElement, which is 254 for Barretenberg. + +Some opcodes have inputs and outputs, which means that the output is constrained to be the result of the opcode computation from the inputs. The solver expects that all inputs are known when solving such opcodes. + +Some opcodes are not constrained, which means they will not be used by the proving system and are only used by the solver. + +Finally, some opcodes will have a predicate, whose value is 0 or 1. Its purpose is to nullify the opcode when the value is 0, so that it has no effect. Note that removing the opcode is not a solution because this modifies the circuit (the circuit being mainly the list of the opcodes). + +*Remark*: Opcodes operate on witnesses, but we will see that some opcode work on expressions of witnesses. We call an expression a linear combination of witnesses and/or products of two witnesses (and also a constant term). A single witness is a (simple) expression, and conversely, an expression can be turned into a single witness using an assert-zero opcode (see below). So basically, using witnesses or expressions is equivalent, but the latter can avoid the creation of witness in some cases. + +### AssertZero opcode +An AssertZero opcode adds the constraint that P(w) = 0, where w=(w_1,..w_n) is a tuple of n witnesses, and P is a multi-variate polynomial of total degree at most 2. +The coefficients ${q_M}_{i,j}, q_i,q_c$ of the polynomial are known values which define the opcode. +A general expression of assert-zero opcode is the following: $\sum_{i,j} {q_M}_{i,j}w_iw_j + \sum_i q_iw_i +q_c = 0$ + +An assert-zero opcode can be used to: +- **express a constraint** on witnesses; for instance to express that a witness $w$ is a boolean, you can add the opcode: $w*w-w=0$ +- or, to **compute the value** of an arithmetic operation of some inputs. 
For instance, to multiply two witnesses $x$ and $y$, you would use the opcode $z-x*y=0$, which constrains $z$ to be $x*y$.
+
+
+The solver expects that at most one witness is not known when executing the opcode.
+
+### BlackBoxFuncCall opcode
+These opcodes represent a specific computation. Even if any computation can be done using only assert-zero opcodes, it is not always efficient. Some proving systems, and in particular the proving system from Aztec, can implement several computations more efficiently using, for instance, look-up tables. The BlackBoxFuncCall opcode is used to ask the proving system to handle the computation by itself.
+All black box functions take as input a tuple (witness, num_bits), where num_bits is a constant representing the bit size of the input witness, and they have one or several witnesses as output.
+Some more advanced computations assume that the proving system has an 'embedded curve'. It is a curve that cycles with the main curve of the proving system, i.e. the scalar field of the embedded curve is the base field of the main one, and vice-versa. The curves used depend on the proving system (and/or its configuration). Aztec's Barretenberg uses BN254 as the main curve and Grumpkin as the embedded curve.
+
+The black box functions supported by ACIR are:
+
+**AND**: performs the bitwise AND of lhs and rhs. bit_size must be the same for both inputs.
+- lhs: (witness, bit_size)
+- rhs: (witness, bit_size)
+- output: a witness whose value is constrained to be lhs AND rhs, as bit_size bit integers
+
+**XOR**: performs the bitwise XOR of lhs and rhs. bit_size must be the same for both inputs.
+- lhs: (witness, bit_size)
+- rhs: (witness, bit_size)
+- output: a witness whose value is constrained to be lhs XOR rhs, as bit_size bit integers
+
+**RANGE**: constrains the input to be of the provided bit size
+input: (witness, bit_size)
+
+**SHA256**: computes sha256 of the inputs
+- inputs are a byte array, i.e. a vector of (FieldElement, 8)
+- output is a byte array of length 32, i.e. a vector of 32 (FieldElement, 8), constrained to be the sha256 of the inputs.
+
+**Blake2s**: computes the Blake2s hash of the inputs, as specified in https://tools.ietf.org/html/rfc7693
+- inputs are a byte array, i.e. a vector of (FieldElement, 8)
+- output is a byte array of length 32, i.e. a vector of 32 (FieldElement, 8), constrained to be the blake2s of the inputs.
+
+
+**SchnorrVerify**: Verifies a Schnorr signature over the embedded curve
+- inputs are:
+ - Public key as 2 (FieldElement, 254)
+ - signature as a vector of 64 bytes (FieldElement, 8)
+ - message as a vector of (FieldElement, 8)
+- output: A witness representing the result of the signature verification; 0 for failure and 1 for success.
+
+Since the scalar field of the embedded curve is NOT the ACIR field, the (r,s) signature is represented as a 64-byte array for the two field elements. On the other hand, the public key coordinates are ACIR fields.
+The proving system decides how the message is to be hashed. Barretenberg uses Blake2s.
+
+
+**PedersenCommitment**: Computes a Pedersen commitment of the inputs using generators of the embedded curve
+- input: vector of (FieldElement, 254)
+- output: 2 witnesses representing the x,y coordinates of the resulting Grumpkin point
+- domain separator: a constant public value (a field element) that you can use so that the commitment also depends on the domain separator. Noir uses 0 as the domain separator.
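+
+To make this reference more concrete, the following is a rough sketch (illustrative only; exact module paths may differ slightly) of how an assert-zero constraint $z-x*y=0$ and a Pedersen commitment call are built with the Rust types of the `acir` crate in this repository:
+
+```rust
+use acir::circuit::opcodes::{BlackBoxFuncCall, FunctionInput, Opcode};
+use acir::native_types::{Expression, Witness};
+use acir::FieldElement;
+
+fn example_opcodes() -> Vec<Opcode> {
+    // Assert-zero opcode encoding z - x*y = 0: witness 3 (z) is constrained to be
+    // the product of witnesses 1 (x) and 2 (y).
+    let multiplication = Opcode::AssertZero(Expression {
+        mul_terms: vec![(-FieldElement::one(), Witness(1), Witness(2))],
+        linear_combinations: vec![(FieldElement::one(), Witness(3))],
+        q_c: FieldElement::from(0u128),
+    });
+
+    // Pedersen commitment of witness 1, taken as a full field element (254 bits for
+    // Barretenberg); the two outputs are the x and y coordinates of the resulting point.
+    let commitment = Opcode::BlackBoxFuncCall(BlackBoxFuncCall::PedersenCommitment {
+        inputs: vec![FunctionInput { witness: Witness(1), num_bits: 254 }],
+        domain_separator: 0,
+        outputs: (Witness(4), Witness(5)),
+    });
+
+    vec![multiplication, commitment]
+}
+```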
+
+The backend should handle proper conversion between the inputs being ACIR field elements and the scalar field of the embedded curve. In the case of Aztec's Barretenberg, the latter is bigger than the ACIR field so it is straightforward. The Pedersen generators are managed by the proving system.
+
+
+**PedersenHash**: Computes a Pedersen commitment of the inputs and their number, using generators of the embedded curve
+- input: vector of (FieldElement, 254)
+- output: the x-coordinate of the Pedersen commitment of the 'prepended input' (see below)
+- domain separator: a constant public value (a field element) that you can use so that the hash also depends on the domain separator. Noir uses 0 as the domain separator.
+
+In Barretenberg, PedersenHash does the same as PedersenCommitment, except that it prepends the inputs with their length.
+
+
+**HashToField128Security**: This opcode is deprecated and will be removed.
+
+**EcdsaSecp256k1**: Verifies an ECDSA signature over Secp256k1
+- inputs:
+ - x coordinate of public key as 32 bytes
+ - y coordinate of public key as 32 bytes
+ - the signature, as a 64-byte array
+ - the hash of the message, as a vector of bytes
+- output: 0 for failure and 1 for success
+
+Inputs and outputs are similar to SchnorrVerify, except that because we use a different curve (secp256k1), the field elements involved in the signature and the public key are defined as an array of 32 bytes. Another difference is that we assume the message is already hashed.
+
+**EcdsaSecp256r1**: Same as EcdsaSecp256k1, but done over another curve.
+
+**FixedBaseScalarMul**: scalar multiplication with a fixed generator of the embedded curve
+- input: low and high are 2 (FieldElement, 254), representing the low and high parts of the input. For Barretenberg, they must both be less than 128 bits.
+- output: x and y coordinates of $low*G+high*2^{128}*G$, where G is a fixed generator
+
+Because the Grumpkin scalar field is bigger than the ACIR field, we provide 2 ACIR fields representing the low and high parts of the Grumpkin scalar $a$:
+$a=low+high*2^{128},$ with $low, high < 2^{128}$
+
+**Keccak256**: Computes the Keccak-256 (Ethereum version) of the inputs.
+- inputs: Vector of bytes (FieldElement, 8)
+- outputs: Vector of 32 bytes (FieldElement, 8)
+
+
+**Keccak256VariableLength**: Computes the Keccak-256 (Ethereum version) of the inputs, restricted to the given length.
+- inputs: Vector of bytes (FieldElement, 8)
+- var_message_size: number of inputs to hash; it must be less than or equal to the length of the inputs
+- outputs: a vector of 32 bytes (FieldElement, 8)
+
+
+
+**RecursiveAggregation**: Verifies a proof inside the circuit.
+**Warning: this opcode is subject to change.**
+- verification_key: Vector of (FieldElement, 254) representing the verification key of the circuit being verified
+- public_inputs: Vector of (FieldElement, 254) representing the public inputs corresponding to the proof being verified
+- key_hash: one (FieldElement, 254). It should be the hash of the verification key. Barretenberg expects the Pedersen hash of the verification key
+- input_aggregation_object: an optional vector of (FieldElement, 254). It is a blob of data specific to the proving system.
+- output_aggregation_object: Some witnesses returned by the function, representing some data internal to the proving system.
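+
+The notes below explain that this opcode mainly binds the verification key to its hash and prepares, rather than completes, proof verification. As a minimal conceptual sketch of that key-hash binding (with `Field` and `pedersen_hash` as illustrative placeholders, not types or functions exported by this repository):
+
+```rust
+// Conceptual sketch only: in-circuit, RecursiveAggregation binds the (private)
+// verification key to its (public) key_hash; the remaining verification work is
+// deferred to the final verifier outside the circuit.
+type Field = u128; // placeholder field type for illustration
+
+fn key_binding_holds(
+    pedersen_hash: impl Fn(&[Field]) -> Field, // backend-specific hash, supplied by the caller
+    verification_key: &[Field],
+    key_hash: Field,
+) -> bool {
+    pedersen_hash(verification_key) == key_hash
+}
+```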
+
+This black box function does not fully verify a proof; what it does is verify that the key_hash is indeed a hash of verification_key, allowing the user to provide the verification key as private inputs and expose only the key_hash as a public input, which is more performant.
+It also prepares the verification of the proof. In order to fully verify a proof, some operations may still need to be performed by the final verifier. This is why this black box function does not say whether the verification passes or not.
+If you have several proofs to verify in one ACIR program, you would call RecursiveAggregation() multiple times and pass the output_aggregation_object as the input_aggregation_object to the next RecursiveAggregation() call, except for the first call, where you do not have any input_aggregation_object.
+If one of the proofs you verify with the black box function does not verify, then the verification of the proof of the main ACIR program will ultimately fail.
+
+
+### Brillig
+This opcode is used as a hint for the solver when executing (solving) the circuit. The opcode does not generate any constraint and is usually the result of the compilation of an unconstrained Noir function.
+- inputs: inputs to the opcode, as 'arithmetic expressions'.
+- outputs: opcode outputs, as witnesses
+- bytecode: assembly code representing the computation to perform within this opcode. The Noir assembly specification is not part of this document.
+- predicate: an arithmetic expression that disables the opcode when it is null.
+
+Let's see an example with Euclidean division.
+The normal way to compute a/b, where a and b are 8-bit integers, is to implement Euclid's algorithm, which computes remainders of the form 'a mod b' in a loop (or recursively). Doing this computation requires a lot of steps to be properly implemented in ACIR, especially the loop with a condition.
However, euclidean division can be easily constrained with one assert-zero opcode: a = bq+r, assuming q is 8 bits and r); }; + struct Blake3 { + std::vector inputs; + std::vector outputs; + + friend bool operator==(const Blake3&, const Blake3&); + std::vector bincodeSerialize() const; + static Blake3 bincodeDeserialize(std::vector); + }; + struct SchnorrVerify { Circuit::FunctionInput public_key_x; Circuit::FunctionInput public_key_y; @@ -102,15 +111,6 @@ namespace Circuit { static PedersenHash bincodeDeserialize(std::vector); }; - struct HashToField128Security { - std::vector inputs; - Circuit::Witness output; - - friend bool operator==(const HashToField128Security&, const HashToField128Security&); - std::vector bincodeSerialize() const; - static HashToField128Security bincodeDeserialize(std::vector); - }; - struct EcdsaSecp256k1 { std::vector public_key_x; std::vector public_key_y; @@ -164,20 +164,27 @@ namespace Circuit { static Keccak256VariableLength bincodeDeserialize(std::vector); }; + struct Keccakf1600 { + std::vector inputs; + std::vector outputs; + + friend bool operator==(const Keccakf1600&, const Keccakf1600&); + std::vector bincodeSerialize() const; + static Keccakf1600 bincodeDeserialize(std::vector); + }; + struct RecursiveAggregation { std::vector verification_key; std::vector proof; std::vector public_inputs; Circuit::FunctionInput key_hash; - std::optional> input_aggregation_object; - std::vector output_aggregation_object; friend bool operator==(const RecursiveAggregation&, const RecursiveAggregation&); std::vector bincodeSerialize() const; static RecursiveAggregation bincodeDeserialize(std::vector); }; - std::variant value; + std::variant value; friend bool operator==(const BlackBoxFuncCall&, const BlackBoxFuncCall&); std::vector bincodeSerialize() const; @@ -408,15 +415,6 @@ namespace Circuit { static Keccak256 bincodeDeserialize(std::vector); }; - struct HashToField128Security { - Circuit::HeapVector message; - Circuit::RegisterIndex output; - - friend bool operator==(const HashToField128Security&, const HashToField128Security&); - std::vector bincodeSerialize() const; - static HashToField128Security bincodeDeserialize(std::vector); - }; - struct EcdsaSecp256k1 { Circuit::HeapVector hashed_msg; Circuit::HeapArray public_key_x; @@ -483,7 +481,7 @@ namespace Circuit { static FixedBaseScalarMul bincodeDeserialize(std::vector); }; - std::variant value; + std::variant value; friend bool operator==(const BlackBoxOp&, const BlackBoxOp&); std::vector bincodeSerialize() const; @@ -767,12 +765,12 @@ namespace Circuit { struct Opcode { - struct Arithmetic { + struct AssertZero { Circuit::Expression value; - friend bool operator==(const Arithmetic&, const Arithmetic&); + friend bool operator==(const AssertZero&, const AssertZero&); std::vector bincodeSerialize() const; - static Arithmetic bincodeDeserialize(std::vector); + static AssertZero bincodeDeserialize(std::vector); }; struct BlackBoxFuncCall { @@ -818,7 +816,7 @@ namespace Circuit { static MemoryInit bincodeDeserialize(std::vector); }; - std::variant value; + std::variant value; friend bool operator==(const Opcode&, const Opcode&); std::vector bincodeSerialize() const; @@ -1839,6 +1837,47 @@ Circuit::BlackBoxFuncCall::Blake2s serde::Deserializable BlackBoxFuncCall::Blake3::bincodeSerialize() const { + auto serializer = serde::BincodeSerializer(); + serde::Serializable::serialize(*this, serializer); + return std::move(serializer).bytes(); + } + + inline BlackBoxFuncCall::Blake3 
BlackBoxFuncCall::Blake3::bincodeDeserialize(std::vector input) { + auto deserializer = serde::BincodeDeserializer(input); + auto value = serde::Deserializable::deserialize(deserializer); + if (deserializer.get_buffer_offset() < input.size()) { + throw serde::deserialization_error("Some input bytes were not read"); + } + return value; + } + +} // end of namespace Circuit + +template <> +template +void serde::Serializable::serialize(const Circuit::BlackBoxFuncCall::Blake3 &obj, Serializer &serializer) { + serde::Serializable::serialize(obj.inputs, serializer); + serde::Serializable::serialize(obj.outputs, serializer); +} + +template <> +template +Circuit::BlackBoxFuncCall::Blake3 serde::Deserializable::deserialize(Deserializer &deserializer) { + Circuit::BlackBoxFuncCall::Blake3 obj; + obj.inputs = serde::Deserializable::deserialize(deserializer); + obj.outputs = serde::Deserializable::deserialize(deserializer); + return obj; +} + namespace Circuit { inline bool operator==(const BlackBoxFuncCall::SchnorrVerify &lhs, const BlackBoxFuncCall::SchnorrVerify &rhs) { @@ -1977,47 +2016,6 @@ Circuit::BlackBoxFuncCall::PedersenHash serde::Deserializable BlackBoxFuncCall::HashToField128Security::bincodeSerialize() const { - auto serializer = serde::BincodeSerializer(); - serde::Serializable::serialize(*this, serializer); - return std::move(serializer).bytes(); - } - - inline BlackBoxFuncCall::HashToField128Security BlackBoxFuncCall::HashToField128Security::bincodeDeserialize(std::vector input) { - auto deserializer = serde::BincodeDeserializer(input); - auto value = serde::Deserializable::deserialize(deserializer); - if (deserializer.get_buffer_offset() < input.size()) { - throw serde::deserialization_error("Some input bytes were not read"); - } - return value; - } - -} // end of namespace Circuit - -template <> -template -void serde::Serializable::serialize(const Circuit::BlackBoxFuncCall::HashToField128Security &obj, Serializer &serializer) { - serde::Serializable::serialize(obj.inputs, serializer); - serde::Serializable::serialize(obj.output, serializer); -} - -template <> -template -Circuit::BlackBoxFuncCall::HashToField128Security serde::Deserializable::deserialize(Deserializer &deserializer) { - Circuit::BlackBoxFuncCall::HashToField128Security obj; - obj.inputs = serde::Deserializable::deserialize(deserializer); - obj.output = serde::Deserializable::deserialize(deserializer); - return obj; -} - namespace Circuit { inline bool operator==(const BlackBoxFuncCall::EcdsaSecp256k1 &lhs, const BlackBoxFuncCall::EcdsaSecp256k1 &rhs) { @@ -2247,6 +2245,47 @@ Circuit::BlackBoxFuncCall::Keccak256VariableLength serde::Deserializable BlackBoxFuncCall::Keccakf1600::bincodeSerialize() const { + auto serializer = serde::BincodeSerializer(); + serde::Serializable::serialize(*this, serializer); + return std::move(serializer).bytes(); + } + + inline BlackBoxFuncCall::Keccakf1600 BlackBoxFuncCall::Keccakf1600::bincodeDeserialize(std::vector input) { + auto deserializer = serde::BincodeDeserializer(input); + auto value = serde::Deserializable::deserialize(deserializer); + if (deserializer.get_buffer_offset() < input.size()) { + throw serde::deserialization_error("Some input bytes were not read"); + } + return value; + } + +} // end of namespace Circuit + +template <> +template +void serde::Serializable::serialize(const Circuit::BlackBoxFuncCall::Keccakf1600 &obj, Serializer &serializer) { + serde::Serializable::serialize(obj.inputs, serializer); + serde::Serializable::serialize(obj.outputs, serializer); +} + 
+template <> +template +Circuit::BlackBoxFuncCall::Keccakf1600 serde::Deserializable::deserialize(Deserializer &deserializer) { + Circuit::BlackBoxFuncCall::Keccakf1600 obj; + obj.inputs = serde::Deserializable::deserialize(deserializer); + obj.outputs = serde::Deserializable::deserialize(deserializer); + return obj; +} + namespace Circuit { inline bool operator==(const BlackBoxFuncCall::RecursiveAggregation &lhs, const BlackBoxFuncCall::RecursiveAggregation &rhs) { @@ -2254,8 +2293,6 @@ namespace Circuit { if (!(lhs.proof == rhs.proof)) { return false; } if (!(lhs.public_inputs == rhs.public_inputs)) { return false; } if (!(lhs.key_hash == rhs.key_hash)) { return false; } - if (!(lhs.input_aggregation_object == rhs.input_aggregation_object)) { return false; } - if (!(lhs.output_aggregation_object == rhs.output_aggregation_object)) { return false; } return true; } @@ -2283,8 +2320,6 @@ void serde::Serializable::seria serde::Serializable::serialize(obj.proof, serializer); serde::Serializable::serialize(obj.public_inputs, serializer); serde::Serializable::serialize(obj.key_hash, serializer); - serde::Serializable::serialize(obj.input_aggregation_object, serializer); - serde::Serializable::serialize(obj.output_aggregation_object, serializer); } template <> @@ -2295,8 +2330,6 @@ Circuit::BlackBoxFuncCall::RecursiveAggregation serde::Deserializable::deserialize(deserializer); obj.public_inputs = serde::Deserializable::deserialize(deserializer); obj.key_hash = serde::Deserializable::deserialize(deserializer); - obj.input_aggregation_object = serde::Deserializable::deserialize(deserializer); - obj.output_aggregation_object = serde::Deserializable::deserialize(deserializer); return obj; } @@ -2465,47 +2498,6 @@ Circuit::BlackBoxOp::Keccak256 serde::Deserializable BlackBoxOp::HashToField128Security::bincodeSerialize() const { - auto serializer = serde::BincodeSerializer(); - serde::Serializable::serialize(*this, serializer); - return std::move(serializer).bytes(); - } - - inline BlackBoxOp::HashToField128Security BlackBoxOp::HashToField128Security::bincodeDeserialize(std::vector input) { - auto deserializer = serde::BincodeDeserializer(input); - auto value = serde::Deserializable::deserialize(deserializer); - if (deserializer.get_buffer_offset() < input.size()) { - throw serde::deserialization_error("Some input bytes were not read"); - } - return value; - } - -} // end of namespace Circuit - -template <> -template -void serde::Serializable::serialize(const Circuit::BlackBoxOp::HashToField128Security &obj, Serializer &serializer) { - serde::Serializable::serialize(obj.message, serializer); - serde::Serializable::serialize(obj.output, serializer); -} - -template <> -template -Circuit::BlackBoxOp::HashToField128Security serde::Deserializable::deserialize(Deserializer &deserializer) { - Circuit::BlackBoxOp::HashToField128Security obj; - obj.message = serde::Deserializable::deserialize(deserializer); - obj.output = serde::Deserializable::deserialize(deserializer); - return obj; -} - namespace Circuit { inline bool operator==(const BlackBoxOp::EcdsaSecp256k1 &lhs, const BlackBoxOp::EcdsaSecp256k1 &rhs) { @@ -4268,20 +4260,20 @@ Circuit::Opcode serde::Deserializable::deserialize(Deserializer namespace Circuit { - inline bool operator==(const Opcode::Arithmetic &lhs, const Opcode::Arithmetic &rhs) { + inline bool operator==(const Opcode::AssertZero &lhs, const Opcode::AssertZero &rhs) { if (!(lhs.value == rhs.value)) { return false; } return true; } - inline std::vector 
Opcode::Arithmetic::bincodeSerialize() const { + inline std::vector Opcode::AssertZero::bincodeSerialize() const { auto serializer = serde::BincodeSerializer(); - serde::Serializable::serialize(*this, serializer); + serde::Serializable::serialize(*this, serializer); return std::move(serializer).bytes(); } - inline Opcode::Arithmetic Opcode::Arithmetic::bincodeDeserialize(std::vector input) { + inline Opcode::AssertZero Opcode::AssertZero::bincodeDeserialize(std::vector input) { auto deserializer = serde::BincodeDeserializer(input); - auto value = serde::Deserializable::deserialize(deserializer); + auto value = serde::Deserializable::deserialize(deserializer); if (deserializer.get_buffer_offset() < input.size()) { throw serde::deserialization_error("Some input bytes were not read"); } @@ -4292,14 +4284,14 @@ namespace Circuit { template <> template -void serde::Serializable::serialize(const Circuit::Opcode::Arithmetic &obj, Serializer &serializer) { +void serde::Serializable::serialize(const Circuit::Opcode::AssertZero &obj, Serializer &serializer) { serde::Serializable::serialize(obj.value, serializer); } template <> template -Circuit::Opcode::Arithmetic serde::Deserializable::deserialize(Deserializer &deserializer) { - Circuit::Opcode::Arithmetic obj; +Circuit::Opcode::AssertZero serde::Deserializable::deserialize(Deserializer &deserializer) { + Circuit::Opcode::AssertZero obj; obj.value = serde::Deserializable::deserialize(deserializer); return obj; } diff --git a/acvm-repo/acir/src/circuit/black_box_functions.rs b/acvm-repo/acir/src/circuit/black_box_functions.rs index 9129f44008c..445da50ac81 100644 --- a/acvm-repo/acir/src/circuit/black_box_functions.rs +++ b/acvm-repo/acir/src/circuit/black_box_functions.rs @@ -1,8 +1,5 @@ //! Black box functions are ACIR opcodes which rely on backends implementing support for specialized constraints. //! This makes certain zk-snark unfriendly computations cheaper than if they were implemented in more basic constraints. -//! -//! It is possible to fallback to less efficient implementations written in ACIR in some cases. -//! These are implemented inside the ACVM stdlib. use serde::{Deserialize, Serialize}; #[cfg(test)] @@ -22,6 +19,8 @@ pub enum BlackBoxFunc { SHA256, /// Calculates the Blake2s hash of the inputs. Blake2s, + /// Calculates the Blake3 hash of the inputs. + Blake3, /// Verifies a Schnorr signature over a curve which is "pairing friendly" with the curve on which the ACIR circuit is defined. /// /// The exact curve which this signature uses will vary based on the curve being used by ACIR. @@ -33,12 +32,6 @@ pub enum BlackBoxFunc { PedersenCommitment, /// Calculates a Pedersen hash to the inputs. PedersenHash, - /// Hashes a set of inputs and applies the field modulus to the result - /// to return a value which can be represented as a [`FieldElement`][acir_field::FieldElement] - /// - /// This is implemented using the `Blake2s` hash function. - /// The "128" in the name specifies that this function should have 128 bits of security. - HashToField128Security, /// Verifies a ECDSA signature over the secp256k1 curve. EcdsaSecp256k1, /// Verifies a ECDSA signature over the secp256r1 curve. @@ -47,6 +40,8 @@ pub enum BlackBoxFunc { FixedBaseScalarMul, /// Calculates the Keccak256 hash of the inputs. Keccak256, + /// Keccak Permutation function of 1600 width + Keccakf1600, /// Compute a recursive aggregation object when verifying a proof inside another circuit. 
/// This outputted aggregation object will then be either checked in a top-level verifier or aggregated upon again. RecursiveAggregation, @@ -64,15 +59,16 @@ impl BlackBoxFunc { BlackBoxFunc::SHA256 => "sha256", BlackBoxFunc::SchnorrVerify => "schnorr_verify", BlackBoxFunc::Blake2s => "blake2s", - BlackBoxFunc::PedersenCommitment => "pedersen", + BlackBoxFunc::Blake3 => "blake3", + BlackBoxFunc::PedersenCommitment => "pedersen_commitment", BlackBoxFunc::PedersenHash => "pedersen_hash", - BlackBoxFunc::HashToField128Security => "hash_to_field_128_security", BlackBoxFunc::EcdsaSecp256k1 => "ecdsa_secp256k1", BlackBoxFunc::FixedBaseScalarMul => "fixed_base_scalar_mul", BlackBoxFunc::AND => "and", BlackBoxFunc::XOR => "xor", BlackBoxFunc::RANGE => "range", BlackBoxFunc::Keccak256 => "keccak256", + BlackBoxFunc::Keccakf1600 => "keccakf1600", BlackBoxFunc::RecursiveAggregation => "recursive_aggregation", BlackBoxFunc::EcdsaSecp256r1 => "ecdsa_secp256r1", } @@ -82,9 +78,9 @@ impl BlackBoxFunc { "sha256" => Some(BlackBoxFunc::SHA256), "schnorr_verify" => Some(BlackBoxFunc::SchnorrVerify), "blake2s" => Some(BlackBoxFunc::Blake2s), - "pedersen" => Some(BlackBoxFunc::PedersenCommitment), + "blake3" => Some(BlackBoxFunc::Blake3), + "pedersen_commitment" => Some(BlackBoxFunc::PedersenCommitment), "pedersen_hash" => Some(BlackBoxFunc::PedersenHash), - "hash_to_field_128_security" => Some(BlackBoxFunc::HashToField128Security), "ecdsa_secp256k1" => Some(BlackBoxFunc::EcdsaSecp256k1), "ecdsa_secp256r1" => Some(BlackBoxFunc::EcdsaSecp256r1), "fixed_base_scalar_mul" => Some(BlackBoxFunc::FixedBaseScalarMul), @@ -92,6 +88,7 @@ impl BlackBoxFunc { "xor" => Some(BlackBoxFunc::XOR), "range" => Some(BlackBoxFunc::RANGE), "keccak256" => Some(BlackBoxFunc::Keccak256), + "keccakf1600" => Some(BlackBoxFunc::Keccakf1600), "recursive_aggregation" => Some(BlackBoxFunc::RecursiveAggregation), _ => None, } diff --git a/acvm-repo/acir/src/circuit/directives.rs b/acvm-repo/acir/src/circuit/directives.rs index a86eb525c1f..c3a5b055f19 100644 --- a/acvm-repo/acir/src/circuit/directives.rs +++ b/acvm-repo/acir/src/circuit/directives.rs @@ -34,13 +34,3 @@ pub enum Directive { sort_by: Vec, // specify primary index to sort by, then the secondary,... For instance, if tuple is 2 and sort_by is [1,0], then a=[(a0,b0),..] is sorted by bi and then ai. }, } - -impl Directive { - pub fn name(&self) -> &str { - match self { - Directive::Quotient(_) => "quotient", - Directive::ToLeRadix { .. } => "to_le_radix", - Directive::PermutationSort { .. 
} => "permutation_sort", - } - } -} diff --git a/acvm-repo/acir/src/circuit/mod.rs b/acvm-repo/acir/src/circuit/mod.rs index 99ab389e31e..b248b30b1d9 100644 --- a/acvm-repo/acir/src/circuit/mod.rs +++ b/acvm-repo/acir/src/circuit/mod.rs @@ -99,7 +99,7 @@ impl FromStr for OpcodeLocation { let brillig_index = parts[1].parse()?; Ok(OpcodeLocation::Brillig { acir_index, brillig_index }) } - _ => unreachable!(), + _ => unreachable!("`OpcodeLocation` has too many components"), } } @@ -250,6 +250,64 @@ mod tests { input: FunctionInput { witness: Witness(1), num_bits: 8 }, }) } + fn keccakf1600_opcode() -> Opcode { + Opcode::BlackBoxFuncCall(BlackBoxFuncCall::Keccakf1600 { + inputs: vec![ + FunctionInput { witness: Witness(1), num_bits: 64 }, + FunctionInput { witness: Witness(2), num_bits: 64 }, + FunctionInput { witness: Witness(3), num_bits: 64 }, + FunctionInput { witness: Witness(4), num_bits: 64 }, + FunctionInput { witness: Witness(5), num_bits: 64 }, + FunctionInput { witness: Witness(6), num_bits: 64 }, + FunctionInput { witness: Witness(7), num_bits: 64 }, + FunctionInput { witness: Witness(8), num_bits: 64 }, + FunctionInput { witness: Witness(9), num_bits: 64 }, + FunctionInput { witness: Witness(10), num_bits: 64 }, + FunctionInput { witness: Witness(11), num_bits: 64 }, + FunctionInput { witness: Witness(12), num_bits: 64 }, + FunctionInput { witness: Witness(13), num_bits: 64 }, + FunctionInput { witness: Witness(14), num_bits: 64 }, + FunctionInput { witness: Witness(15), num_bits: 64 }, + FunctionInput { witness: Witness(16), num_bits: 64 }, + FunctionInput { witness: Witness(17), num_bits: 64 }, + FunctionInput { witness: Witness(18), num_bits: 64 }, + FunctionInput { witness: Witness(19), num_bits: 64 }, + FunctionInput { witness: Witness(20), num_bits: 64 }, + FunctionInput { witness: Witness(21), num_bits: 64 }, + FunctionInput { witness: Witness(22), num_bits: 64 }, + FunctionInput { witness: Witness(23), num_bits: 64 }, + FunctionInput { witness: Witness(24), num_bits: 64 }, + FunctionInput { witness: Witness(25), num_bits: 64 }, + ], + outputs: vec![ + Witness(26), + Witness(27), + Witness(28), + Witness(29), + Witness(30), + Witness(31), + Witness(32), + Witness(33), + Witness(34), + Witness(35), + Witness(36), + Witness(37), + Witness(38), + Witness(39), + Witness(40), + Witness(41), + Witness(42), + Witness(43), + Witness(44), + Witness(45), + Witness(46), + Witness(47), + Witness(48), + Witness(49), + Witness(50), + ], + }) + } #[test] fn serialization_roundtrip() { @@ -277,13 +335,14 @@ mod tests { let circuit = Circuit { current_witness_index: 0, opcodes: vec![ - Opcode::Arithmetic(crate::native_types::Expression { + Opcode::AssertZero(crate::native_types::Expression { mul_terms: vec![], linear_combinations: vec![], q_c: FieldElement::from(8u128), }), range_opcode(), and_opcode(), + keccakf1600_opcode(), ], private_parameters: BTreeSet::new(), public_parameters: PublicInputs(BTreeSet::from_iter(vec![Witness(2)])), diff --git a/acvm-repo/acir/src/circuit/opcodes.rs b/acvm-repo/acir/src/circuit/opcodes.rs index dc7f73b47e5..ac5ea0b8a69 100644 --- a/acvm-repo/acir/src/circuit/opcodes.rs +++ b/acvm-repo/acir/src/circuit/opcodes.rs @@ -13,7 +13,7 @@ pub use memory_operation::{BlockId, MemOp}; #[derive(Clone, PartialEq, Eq, Serialize, Deserialize)] pub enum Opcode { - Arithmetic(Expression), + AssertZero(Expression), /// Calls to "gadgets" which rely on backends implementing support for specialized constraints. 
/// /// Often used for exposing more efficient implementations of SNARK-unfriendly computations. @@ -33,62 +33,10 @@ pub enum Opcode { }, } -#[derive(Clone, PartialEq, Eq, Debug)] -pub enum UnsupportedMemoryOpcode { - MemoryOp, - MemoryInit, -} - -impl std::fmt::Display for UnsupportedMemoryOpcode { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - match self { - UnsupportedMemoryOpcode::MemoryOp => write!(f, "MemoryOp"), - UnsupportedMemoryOpcode::MemoryInit => write!(f, "MemoryInit"), - } - } -} - -impl Opcode { - // TODO We can add a domain separator by doing something like: - // TODO concat!("directive:", directive.name) - pub fn name(&self) -> &str { - match self { - Opcode::Arithmetic(_) => "arithmetic", - Opcode::Directive(directive) => directive.name(), - Opcode::BlackBoxFuncCall(g) => g.name(), - Opcode::Brillig(_) => "brillig", - Opcode::MemoryOp { .. } => "mem", - Opcode::MemoryInit { .. } => "init memory block", - } - } - - pub fn unsupported_opcode(&self) -> UnsupportedMemoryOpcode { - match self { - Opcode::MemoryOp { .. } => UnsupportedMemoryOpcode::MemoryOp, - Opcode::MemoryInit { .. } => UnsupportedMemoryOpcode::MemoryInit, - Opcode::BlackBoxFuncCall(_) => { - unreachable!("Unsupported Blackbox function should not be reported here") - } - _ => unreachable!("Opcode is supported"), - } - } - - pub fn is_arithmetic(&self) -> bool { - matches!(self, Opcode::Arithmetic(_)) - } - - pub fn arithmetic(self) -> Option { - match self { - Opcode::Arithmetic(expr) => Some(expr), - _ => None, - } - } -} - impl std::fmt::Display for Opcode { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { match self { - Opcode::Arithmetic(expr) => { + Opcode::AssertZero(expr) => { write!(f, "EXPR [ ")?; for i in &expr.mul_terms { write!(f, "({}, _{}, _{}) ", i.0, i.1.witness_index(), i.2.witness_index())?; diff --git a/acvm-repo/acir/src/circuit/opcodes/black_box_function_call.rs b/acvm-repo/acir/src/circuit/opcodes/black_box_function_call.rs index 22278bdc635..6e4d7fdd660 100644 --- a/acvm-repo/acir/src/circuit/opcodes/black_box_function_call.rs +++ b/acvm-repo/acir/src/circuit/opcodes/black_box_function_call.rs @@ -39,6 +39,10 @@ pub enum BlackBoxFuncCall { inputs: Vec, outputs: Vec, }, + Blake3 { + inputs: Vec, + outputs: Vec, + }, SchnorrVerify { public_key_x: FunctionInput, public_key_y: FunctionInput, @@ -56,12 +60,6 @@ pub enum BlackBoxFuncCall { domain_separator: u32, output: Witness, }, - // 128 here specifies that this function - // should have 128 bits of security - HashToField128Security { - inputs: Vec, - output: Witness, - }, EcdsaSecp256k1 { public_key_x: Vec, public_key_y: Vec, @@ -94,6 +92,10 @@ pub enum BlackBoxFuncCall { var_message_size: FunctionInput, outputs: Vec, }, + Keccakf1600 { + inputs: Vec, + outputs: Vec, + }, RecursiveAggregation { verification_key: Vec, proof: Vec, @@ -105,90 +107,10 @@ pub enum BlackBoxFuncCall { /// The circuit implementing this opcode can use this hash to ensure that the /// key provided to the circuit matches the key produced by the circuit creator key_hash: FunctionInput, - /// An aggregation object is blob of data that the top-level verifier must run some proof system specific - /// algorithm on to complete verification. The size is proof system specific and will be set by the backend integrating this opcode. - /// The input aggregation object is only not `None` when we are verifying a previous recursive aggregation in - /// the current circuit. 
If this is the first recursive aggregation there is no input aggregation object. - /// It is left to the backend to determine how to handle when there is no input aggregation object. - input_aggregation_object: Option>, - /// This is the result of a recursive aggregation and is what will be fed into the next verifier. - /// The next verifier can either perform a final verification (returning true or false) - /// or perform another recursive aggregation where this output aggregation object - /// will be the input aggregation object of the next recursive aggregation. - output_aggregation_object: Vec, }, } impl BlackBoxFuncCall { - #[deprecated = "BlackBoxFuncCall::dummy() is unnecessary and will be removed in ACVM 0.24.0"] - pub fn dummy(bb_func: BlackBoxFunc) -> Self { - match bb_func { - BlackBoxFunc::AND => BlackBoxFuncCall::AND { - lhs: FunctionInput::dummy(), - rhs: FunctionInput::dummy(), - output: Witness(0), - }, - BlackBoxFunc::XOR => BlackBoxFuncCall::XOR { - lhs: FunctionInput::dummy(), - rhs: FunctionInput::dummy(), - output: Witness(0), - }, - BlackBoxFunc::RANGE => BlackBoxFuncCall::RANGE { input: FunctionInput::dummy() }, - BlackBoxFunc::SHA256 => BlackBoxFuncCall::SHA256 { inputs: vec![], outputs: vec![] }, - BlackBoxFunc::Blake2s => BlackBoxFuncCall::Blake2s { inputs: vec![], outputs: vec![] }, - BlackBoxFunc::SchnorrVerify => BlackBoxFuncCall::SchnorrVerify { - public_key_x: FunctionInput::dummy(), - public_key_y: FunctionInput::dummy(), - signature: vec![], - message: vec![], - output: Witness(0), - }, - BlackBoxFunc::PedersenCommitment => BlackBoxFuncCall::PedersenCommitment { - inputs: vec![], - domain_separator: 0, - outputs: (Witness(0), Witness(0)), - }, - BlackBoxFunc::PedersenHash => BlackBoxFuncCall::PedersenHash { - inputs: vec![], - domain_separator: 0, - output: Witness(0), - }, - BlackBoxFunc::HashToField128Security => { - BlackBoxFuncCall::HashToField128Security { inputs: vec![], output: Witness(0) } - } - BlackBoxFunc::EcdsaSecp256k1 => BlackBoxFuncCall::EcdsaSecp256k1 { - public_key_x: vec![], - public_key_y: vec![], - signature: vec![], - hashed_message: vec![], - output: Witness(0), - }, - BlackBoxFunc::EcdsaSecp256r1 => BlackBoxFuncCall::EcdsaSecp256r1 { - public_key_x: vec![], - public_key_y: vec![], - signature: vec![], - hashed_message: vec![], - output: Witness(0), - }, - BlackBoxFunc::FixedBaseScalarMul => BlackBoxFuncCall::FixedBaseScalarMul { - low: FunctionInput::dummy(), - high: FunctionInput::dummy(), - outputs: (Witness(0), Witness(0)), - }, - BlackBoxFunc::Keccak256 => { - BlackBoxFuncCall::Keccak256 { inputs: vec![], outputs: vec![] } - } - BlackBoxFunc::RecursiveAggregation => BlackBoxFuncCall::RecursiveAggregation { - verification_key: vec![], - proof: vec![], - public_inputs: vec![], - key_hash: FunctionInput::dummy(), - input_aggregation_object: None, - output_aggregation_object: vec![], - }, - } - } - pub fn get_black_box_func(&self) -> BlackBoxFunc { match self { BlackBoxFuncCall::AND { .. } => BlackBoxFunc::AND, @@ -196,15 +118,16 @@ impl BlackBoxFuncCall { BlackBoxFuncCall::RANGE { .. } => BlackBoxFunc::RANGE, BlackBoxFuncCall::SHA256 { .. } => BlackBoxFunc::SHA256, BlackBoxFuncCall::Blake2s { .. } => BlackBoxFunc::Blake2s, + BlackBoxFuncCall::Blake3 { .. } => BlackBoxFunc::Blake3, BlackBoxFuncCall::SchnorrVerify { .. } => BlackBoxFunc::SchnorrVerify, BlackBoxFuncCall::PedersenCommitment { .. } => BlackBoxFunc::PedersenCommitment, BlackBoxFuncCall::PedersenHash { .. 
} => BlackBoxFunc::PedersenHash, - BlackBoxFuncCall::HashToField128Security { .. } => BlackBoxFunc::HashToField128Security, BlackBoxFuncCall::EcdsaSecp256k1 { .. } => BlackBoxFunc::EcdsaSecp256k1, BlackBoxFuncCall::EcdsaSecp256r1 { .. } => BlackBoxFunc::EcdsaSecp256r1, BlackBoxFuncCall::FixedBaseScalarMul { .. } => BlackBoxFunc::FixedBaseScalarMul, BlackBoxFuncCall::Keccak256 { .. } => BlackBoxFunc::Keccak256, BlackBoxFuncCall::Keccak256VariableLength { .. } => BlackBoxFunc::Keccak256, + BlackBoxFuncCall::Keccakf1600 { .. } => BlackBoxFunc::Keccakf1600, BlackBoxFuncCall::RecursiveAggregation { .. } => BlackBoxFunc::RecursiveAggregation, } } @@ -217,10 +140,11 @@ impl BlackBoxFuncCall { match self { BlackBoxFuncCall::SHA256 { inputs, .. } | BlackBoxFuncCall::Blake2s { inputs, .. } + | BlackBoxFuncCall::Blake3 { inputs, .. } | BlackBoxFuncCall::Keccak256 { inputs, .. } + | BlackBoxFuncCall::Keccakf1600 { inputs, .. } | BlackBoxFuncCall::PedersenCommitment { inputs, .. } - | BlackBoxFuncCall::PedersenHash { inputs, .. } - | BlackBoxFuncCall::HashToField128Security { inputs, .. } => inputs.to_vec(), + | BlackBoxFuncCall::PedersenHash { inputs, .. } => inputs.to_vec(), BlackBoxFuncCall::AND { lhs, rhs, .. } | BlackBoxFuncCall::XOR { lhs, rhs, .. } => { vec![*lhs, *rhs] } @@ -288,16 +212,12 @@ impl BlackBoxFuncCall { proof, public_inputs, key_hash, - .. } => { let mut inputs = Vec::new(); inputs.extend(key.iter().copied()); inputs.extend(proof.iter().copied()); inputs.extend(public_inputs.iter().copied()); inputs.push(*key_hash); - // NOTE: we do not return an input aggregation object as it will either be non-existent for the first recursive aggregation - // or the output aggregation object of a previous recursive aggregation. We do not simulate recursive aggregation - // thus the input aggregation object will always be unassigned until proving inputs } } @@ -307,20 +227,20 @@ impl BlackBoxFuncCall { match self { BlackBoxFuncCall::SHA256 { outputs, .. } | BlackBoxFuncCall::Blake2s { outputs, .. } + | BlackBoxFuncCall::Blake3 { outputs, .. } | BlackBoxFuncCall::Keccak256 { outputs, .. } - | BlackBoxFuncCall::RecursiveAggregation { - output_aggregation_object: outputs, .. - } => outputs.to_vec(), + | BlackBoxFuncCall::Keccakf1600 { outputs, .. } => outputs.to_vec(), BlackBoxFuncCall::AND { output, .. } | BlackBoxFuncCall::XOR { output, .. } - | BlackBoxFuncCall::HashToField128Security { output, .. } | BlackBoxFuncCall::SchnorrVerify { output, .. } | BlackBoxFuncCall::EcdsaSecp256k1 { output, .. } | BlackBoxFuncCall::PedersenHash { output, .. } | BlackBoxFuncCall::EcdsaSecp256r1 { output, .. } => vec![*output], BlackBoxFuncCall::FixedBaseScalarMul { outputs, .. } | BlackBoxFuncCall::PedersenCommitment { outputs, .. } => vec![outputs.0, outputs.1], - BlackBoxFuncCall::RANGE { .. } => vec![], + BlackBoxFuncCall::RANGE { .. } | BlackBoxFuncCall::RecursiveAggregation { .. } => { + vec![] + } BlackBoxFuncCall::Keccak256VariableLength { outputs, .. 
} => outputs.to_vec(), } } diff --git a/acvm-repo/acir/src/native_types/expression/mod.rs b/acvm-repo/acir/src/native_types/expression/mod.rs index fe729720663..402aa3eb3a6 100644 --- a/acvm-repo/acir/src/native_types/expression/mod.rs +++ b/acvm-repo/acir/src/native_types/expression/mod.rs @@ -8,7 +8,7 @@ mod ordering; // In the addition polynomial // We can have arbitrary fan-in/out, so we need more than wL,wR and wO -// When looking at the arithmetic opcode for the quotient polynomial in standard plonk +// When looking at the assert-zero opcode for the quotient polynomial in standard plonk // You can think of it as fan-in 2 and fan out-1 , or you can think of it as fan-in 1 and fan-out 2 // // In the multiplication polynomial @@ -16,7 +16,7 @@ mod ordering; #[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize, Hash)] pub struct Expression { // To avoid having to create intermediate variables pre-optimization - // We collect all of the multiplication terms in the arithmetic opcode + // We collect all of the multiplication terms in the assert-zero opcode // A multiplication term if of the form q_M * wL * wR // Hence this vector represents the following sum: q_M1 * wL1 * wR1 + q_M2 * wL2 * wR2 + .. + pub mul_terms: Vec<(FieldElement, Witness, Witness)>, @@ -42,7 +42,7 @@ impl std::fmt::Display for Expression { if let Some(witness) = self.to_witness() { write!(f, "x{}", witness.witness_index()) } else { - write!(f, "%{:?}%", crate::circuit::opcodes::Opcode::Arithmetic(self.clone())) + write!(f, "%{:?}%", crate::circuit::opcodes::Opcode::AssertZero(self.clone())) } } } @@ -178,7 +178,13 @@ impl Expression { self.linear_combinations.sort_by(|a, b| a.1.cmp(&b.1)); } - /// Checks if this polynomial can fit into one arithmetic identity + /// Checks if this expression can fit into one arithmetic identity + /// TODO: This needs to be reworded, arithmetic identity only makes sense in the context + /// TODO of PLONK, whereas we want expressions to be generic. + /// TODO: We just need to reword it to say exactly what its doing and + /// TODO then reference the fact that this is what plonk will accept. + /// TODO alternatively, we can define arithmetic identity in the context of expressions + /// TODO and then reference that. 
pub fn fits_in_one_identity(&self, width: usize) -> bool { // A Polynomial with more than one mul term cannot fit into one opcode if self.mul_terms.len() > 1 { diff --git a/acvm-repo/acir/src/native_types/expression/operators.rs b/acvm-repo/acir/src/native_types/expression/operators.rs index 35a548a2e3f..29cdc6967bb 100644 --- a/acvm-repo/acir/src/native_types/expression/operators.rs +++ b/acvm-repo/acir/src/native_types/expression/operators.rs @@ -230,7 +230,7 @@ fn single_mul(w: Witness, b: &Expression) -> Expression { } #[test] -fn add_smoketest() { +fn add_smoke_test() { let a = Expression { mul_terms: vec![], linear_combinations: vec![(FieldElement::from(2u128), Witness(2))], @@ -260,7 +260,7 @@ fn add_smoketest() { } #[test] -fn mul_smoketest() { +fn mul_smoke_test() { let a = Expression { mul_terms: vec![], linear_combinations: vec![(FieldElement::from(2u128), Witness(2))], diff --git a/acvm-repo/acir/tests/test_program_serialization.rs b/acvm-repo/acir/tests/test_program_serialization.rs index ff69ba34437..7d3b7b32d35 100644 --- a/acvm-repo/acir/tests/test_program_serialization.rs +++ b/acvm-repo/acir/tests/test_program_serialization.rs @@ -24,7 +24,7 @@ use brillig::{HeapArray, RegisterIndex, RegisterOrMemory}; #[test] fn addition_circuit() { - let addition = Opcode::Arithmetic(Expression { + let addition = Opcode::AssertZero(Expression { mul_terms: Vec::new(), linear_combinations: vec![ (FieldElement::one(), Witness(1)), @@ -102,9 +102,9 @@ fn pedersen_circuit() { let bytes = Circuit::serialize_circuit(&circuit); let expected_serialization: Vec = vec![ - 31, 139, 8, 0, 0, 0, 0, 0, 0, 255, 93, 138, 9, 10, 0, 64, 8, 2, 103, 15, 250, 255, 139, - 163, 162, 130, 72, 16, 149, 241, 3, 135, 84, 164, 172, 173, 213, 175, 251, 45, 198, 96, - 243, 211, 50, 152, 67, 220, 211, 92, 0, 0, 0, + 31, 139, 8, 0, 0, 0, 0, 0, 0, 255, 93, 138, 9, 10, 0, 64, 8, 2, 103, 15, 232, 255, 31, 142, + 138, 10, 34, 65, 84, 198, 15, 28, 82, 145, 178, 182, 86, 191, 238, 183, 24, 131, 205, 79, + 203, 0, 166, 242, 158, 93, 92, 0, 0, 0, ]; assert_eq!(bytes, expected_serialization) @@ -145,7 +145,7 @@ fn schnorr_verify_circuit() { let expected_serialization: Vec = vec![ 31, 139, 8, 0, 0, 0, 0, 0, 0, 255, 77, 210, 87, 78, 2, 1, 20, 134, 209, 177, 247, 222, 123, 71, 68, 68, 68, 68, 68, 68, 68, 68, 68, 221, 133, 251, 95, 130, 145, 27, 206, 36, 78, 50, - 57, 16, 94, 200, 253, 191, 159, 36, 73, 134, 146, 193, 19, 142, 241, 183, 255, 14, 179, + 57, 16, 94, 200, 253, 191, 159, 36, 73, 134, 146, 193, 19, 142, 243, 183, 255, 14, 179, 233, 247, 145, 254, 59, 217, 127, 71, 57, 198, 113, 78, 48, 125, 167, 56, 205, 25, 206, 114, 142, 243, 92, 224, 34, 151, 184, 204, 21, 174, 114, 141, 235, 220, 224, 38, 183, 184, 205, 29, 238, 114, 143, 251, 60, 224, 33, 143, 120, 204, 19, 158, 242, 140, 25, 158, 51, @@ -158,7 +158,7 @@ fn schnorr_verify_circuit() { 91, 159, 218, 56, 99, 219, 172, 77, 115, 182, 204, 219, 176, 96, 187, 162, 205, 74, 182, 42, 219, 168, 98, 155, 170, 77, 106, 182, 168, 219, 160, 225, 246, 77, 55, 111, 185, 113, 219, 109, 59, 110, 218, 117, 203, 158, 27, 166, 55, 75, 239, 150, 184, 101, 250, 252, 1, - 19, 89, 159, 101, 220, 3, 0, 0, + 55, 204, 92, 74, 220, 3, 0, 0, ]; assert_eq!(bytes, expected_serialization) diff --git a/acvm-repo/acir_field/Cargo.toml b/acvm-repo/acir_field/Cargo.toml index 2cd2801db10..cedfc66e734 100644 --- a/acvm-repo/acir_field/Cargo.toml +++ b/acvm-repo/acir_field/Cargo.toml @@ -2,7 +2,7 @@ name = "acir_field" description = "The field implementation being used by ACIR." 
# x-release-please-start-version -version = "0.33.0" +version = "0.38.0" # x-release-please-end authors.workspace = true edition.workspace = true diff --git a/acvm-repo/acir_field/src/generic_ark.rs b/acvm-repo/acir_field/src/generic_ark.rs index 0f4be21ad54..542e291982b 100644 --- a/acvm-repo/acir_field/src/generic_ark.rs +++ b/acvm-repo/acir_field/src/generic_ark.rs @@ -2,6 +2,7 @@ use ark_ff::PrimeField; use ark_ff::Zero; use num_bigint::BigUint; use serde::{Deserialize, Serialize}; +use std::borrow::Cow; // XXX: Switch out for a trait and proper implementations // This implementation is in-efficient, can definitely remove hex usage and Iterator instances for trivial functionality @@ -125,8 +126,8 @@ impl<'de, T: ark_ff::PrimeField> Deserialize<'de> for FieldElement { where D: serde::Deserializer<'de>, { - let s = <&str>::deserialize(deserializer)?; - match Self::from_hex(s) { + let s: Cow<'de, str> = Deserialize::deserialize(deserializer)?; + match Self::from_hex(&s) { Some(value) => Ok(value), None => Err(serde::de::Error::custom(format!("Invalid hex for FieldElement: {s}",))), } @@ -143,6 +144,12 @@ impl From for FieldElement { } } +impl From for FieldElement { + fn from(a: usize) -> FieldElement { + FieldElement::from(a as u128) + } +} + impl From for FieldElement { fn from(boolean: bool) -> FieldElement { if boolean { @@ -266,7 +273,10 @@ impl FieldElement { } pub fn from_hex(hex_str: &str) -> Option> { let value = hex_str.strip_prefix("0x").unwrap_or(hex_str); - let hex_as_bytes = hex::decode(value).ok()?; + // Values of odd length require an additional "0" prefix + let sanitized_value = + if value.len() % 2 == 0 { value.to_string() } else { format!("0{}", value) }; + let hex_as_bytes = hex::decode(sanitized_value).ok()?; Some(FieldElement::from_be_bytes_reduce(&hex_as_bytes)) } @@ -440,6 +450,25 @@ mod tests { assert_eq!(minus_i_field_element.to_hex(), string); } } + + #[test] + fn deserialize_even_and_odd_length_hex() { + // Test cases of (odd, even) length hex strings + let hex_strings = + vec![("0x0", "0x00"), ("0x1", "0x01"), ("0x002", "0x0002"), ("0x00003", "0x000003")]; + for (i, case) in hex_strings.into_iter().enumerate() { + let i_field_element = + crate::generic_ark::FieldElement::::from(i as i128); + let odd_field_element = + crate::generic_ark::FieldElement::::from_hex(case.0).unwrap(); + let even_field_element = + crate::generic_ark::FieldElement::::from_hex(case.1).unwrap(); + + assert_eq!(i_field_element, odd_field_element); + assert_eq!(odd_field_element, even_field_element); + } + } + #[test] fn max_num_bits_smoke() { let max_num_bits_bn254 = crate::generic_ark::FieldElement::::max_num_bits(); diff --git a/acvm-repo/acvm/CHANGELOG.md b/acvm-repo/acvm/CHANGELOG.md index 29a4aa93adc..48d8317a80e 100644 --- a/acvm-repo/acvm/CHANGELOG.md +++ b/acvm-repo/acvm/CHANGELOG.md @@ -285,7 +285,7 @@ ### ⚠ BREAKING CHANGES * add backend-solvable blackboxes to brillig & unify implementations ([#422](https://github.com/noir-lang/acvm/issues/422)) -* **acvm:** Remove `CircuitSimplifer` ([#421](https://github.com/noir-lang/acvm/issues/421)) +* **acvm:** Remove `CircuitSimplifier` ([#421](https://github.com/noir-lang/acvm/issues/421)) * **acvm:** Add `circuit: &Circuit` to `eth_contract_from_vk` function signature ([#420](https://github.com/noir-lang/acvm/issues/420)) * Returns index of failing opcode and transformation mapping ([#412](https://github.com/noir-lang/acvm/issues/412)) @@ -299,7 +299,7 @@ ### Miscellaneous Chores -* **acvm:** Remove `CircuitSimplifer` 
([#421](https://github.com/noir-lang/acvm/issues/421)) ([e07a56d](https://github.com/noir-lang/acvm/commit/e07a56d9c542a7f03ce156761054cd403de0bd23)) +* **acvm:** Remove `CircuitSimplifier` ([#421](https://github.com/noir-lang/acvm/issues/421)) ([e07a56d](https://github.com/noir-lang/acvm/commit/e07a56d9c542a7f03ce156761054cd403de0bd23)) ### Dependencies @@ -537,7 +537,7 @@ * require `Backend` to implement `Default` trait ([#223](https://github.com/noir-lang/acvm/issues/223)) * Make GeneralOptimizer crate visible ([#220](https://github.com/noir-lang/acvm/issues/220)) * return `PartialWitnessGeneratorStatus` from `PartialWitnessGenerator.solve` ([#213](https://github.com/noir-lang/acvm/issues/213)) -* organise operator implementations for Expression ([#190](https://github.com/noir-lang/acvm/issues/190)) +* organize operator implementations for Expression ([#190](https://github.com/noir-lang/acvm/issues/190)) ### Features @@ -557,7 +557,7 @@ ### Miscellaneous Chores * Make GeneralOptimizer crate visible ([#220](https://github.com/noir-lang/acvm/issues/220)) ([64bb346](https://github.com/noir-lang/acvm/commit/64bb346524428a0ce196826ea1e5ccde08ad6201)) -* organise operator implementations for Expression ([#190](https://github.com/noir-lang/acvm/issues/190)) ([a619df6](https://github.com/noir-lang/acvm/commit/a619df614bbb9b2518b788b42a7553b069823a0f)) +* organize operator implementations for Expression ([#190](https://github.com/noir-lang/acvm/issues/190)) ([a619df6](https://github.com/noir-lang/acvm/commit/a619df614bbb9b2518b788b42a7553b069823a0f)) ## [0.9.0](https://github.com/noir-lang/acvm/compare/acvm-v0.8.1...acvm-v0.9.0) (2023-04-07) @@ -641,7 +641,7 @@ * update `ProofSystemCompiler` to not take ownership of keys ([#111](https://github.com/noir-lang/acvm/issues/111)) * update `ProofSystemCompiler` methods to take `&Circuit` ([#108](https://github.com/noir-lang/acvm/issues/108)) * refactor ToRadix to ToRadixLe and ToRadixBe ([#58](https://github.com/noir-lang/acvm/issues/58)) -* reorganise compiler in terms of optimisers and transformers ([#88](https://github.com/noir-lang/acvm/issues/88)) +* reorganize compiler in terms of optimizers and transformers ([#88](https://github.com/noir-lang/acvm/issues/88)) ### Features @@ -659,4 +659,4 @@ ### Miscellaneous Chores * refactor ToRadix to ToRadixLe and ToRadixBe ([#58](https://github.com/noir-lang/acvm/issues/58)) ([2427a27](https://github.com/noir-lang/acvm/commit/2427a275048e598c6d651cce8348a4c55148f235)) -* reorganise compiler in terms of optimisers and transformers ([#88](https://github.com/noir-lang/acvm/issues/88)) ([9329307](https://github.com/noir-lang/acvm/commit/9329307e054de202cfc55207162ad952b70d515e)) +* reorganize compiler in terms of optimizers and transformers ([#88](https://github.com/noir-lang/acvm/issues/88)) ([9329307](https://github.com/noir-lang/acvm/commit/9329307e054de202cfc55207162ad952b70d515e)) diff --git a/acvm-repo/acvm/Cargo.toml b/acvm-repo/acvm/Cargo.toml index 032d93230da..f5819d6fa34 100644 --- a/acvm-repo/acvm/Cargo.toml +++ b/acvm-repo/acvm/Cargo.toml @@ -2,7 +2,7 @@ name = "acvm" description = "The virtual machine that processes ACIR given a backend/proof system." 
# x-release-please-start-version -version = "0.33.0" +version = "0.38.0" # x-release-please-end authors.workspace = true edition.workspace = true @@ -16,30 +16,26 @@ repository.workspace = true num-bigint.workspace = true num-traits.workspace = true thiserror.workspace = true +tracing.workspace = true acir.workspace = true -stdlib.workspace = true brillig_vm.workspace = true acvm_blackbox_solver.workspace = true indexmap = "1.7.0" [features] -default = ["bn254", "testing"] +default = ["bn254"] bn254 = [ "acir/bn254", - "stdlib/bn254", "brillig_vm/bn254", "acvm_blackbox_solver/bn254", ] bls12_381 = [ "acir/bls12_381", - "stdlib/bls12_381", "brillig_vm/bls12_381", "acvm_blackbox_solver/bls12_381", ] -testing = ["stdlib/testing", "unstable-fallbacks"] -unstable-fallbacks = [] [dev-dependencies] rand = "0.8.5" diff --git a/acvm-repo/acvm/src/compiler/mod.rs b/acvm-repo/acvm/src/compiler/mod.rs index 4abf94a2e78..ccb043914d6 100644 --- a/acvm-repo/acvm/src/compiler/mod.rs +++ b/acvm-repo/acvm/src/compiler/mod.rs @@ -1,10 +1,8 @@ -use acir::{ - circuit::{opcodes::UnsupportedMemoryOpcode, Circuit, Opcode, OpcodeLocation}, - BlackBoxFunc, -}; -use thiserror::Error; +use std::collections::HashMap; -use crate::Language; +use acir::circuit::{Circuit, OpcodeLocation}; + +use crate::ExpressionWidth; // The various passes that we can use over ACIR mod optimizers; @@ -15,24 +13,26 @@ use optimizers::optimize_internal; pub use transformers::transform; use transformers::transform_internal; -#[derive(PartialEq, Eq, Debug, Error)] -pub enum CompileError { - #[error("The blackbox function {0} is not supported by the backend and acvm does not have a fallback implementation")] - UnsupportedBlackBox(BlackBoxFunc), - #[error("The opcode {0} is not supported by the backend and acvm does not have a fallback implementation")] - UnsupportedMemoryOpcode(UnsupportedMemoryOpcode), -} - /// This module moves and decomposes acir opcodes. The transformation map allows consumers of this module to map /// metadata they had about the opcodes to the new opcode structure generated after the transformation. #[derive(Debug)] pub struct AcirTransformationMap { - /// This is a vector of pointers to the old acir opcodes. The index of the vector is the new opcode index. - /// The value of the vector is the old opcode index pointed. - acir_opcode_positions: Vec, + /// Maps the old acir indices to the new acir indices + old_indices_to_new_indices: HashMap>, } impl AcirTransformationMap { + /// Builds a map from a vector of pointers to the old acir opcodes. + /// The index of the vector is the new opcode index. + /// The value of the vector is the old opcode index pointed. + fn new(acir_opcode_positions: Vec) -> Self { + let mut old_indices_to_new_indices = HashMap::with_capacity(acir_opcode_positions.len()); + for (new_index, old_index) in acir_opcode_positions.into_iter().enumerate() { + old_indices_to_new_indices.entry(old_index).or_insert_with(Vec::new).push(new_index); + } + AcirTransformationMap { old_indices_to_new_indices } + } + pub fn new_locations( &self, old_location: OpcodeLocation, @@ -42,16 +42,16 @@ impl AcirTransformationMap { OpcodeLocation::Brillig { acir_index, .. } => acir_index, }; - self.acir_opcode_positions - .iter() - .enumerate() - .filter(move |(_, &old_index)| old_index == old_acir_index) - .map(move |(new_index, _)| match old_location { - OpcodeLocation::Acir(_) => OpcodeLocation::Acir(new_index), - OpcodeLocation::Brillig { brillig_index, .. 
} => { - OpcodeLocation::Brillig { acir_index: new_index, brillig_index } - } - }) + self.old_indices_to_new_indices.get(&old_acir_index).into_iter().flat_map( + move |new_indices| { + new_indices.iter().map(move |new_index| match old_location { + OpcodeLocation::Acir(_) => OpcodeLocation::Acir(*new_index), + OpcodeLocation::Brillig { brillig_index, .. } => { + OpcodeLocation::Brillig { acir_index: *new_index, brillig_index } + } + }) + }, + ) } } @@ -71,15 +71,16 @@ fn transform_assert_messages( /// Applies [`ProofSystemCompiler`][crate::ProofSystemCompiler] specific optimizations to a [`Circuit`]. pub fn compile( acir: Circuit, - np_language: Language, - is_opcode_supported: impl Fn(&Opcode) -> bool, -) -> Result<(Circuit, AcirTransformationMap), CompileError> { - let (acir, AcirTransformationMap { acir_opcode_positions }) = optimize_internal(acir); + expression_width: ExpressionWidth, +) -> (Circuit, AcirTransformationMap) { + let (acir, acir_opcode_positions) = optimize_internal(acir); + + let (mut acir, acir_opcode_positions) = + transform_internal(acir, expression_width, acir_opcode_positions); - let (mut acir, transformation_map) = - transform_internal(acir, np_language, is_opcode_supported, acir_opcode_positions)?; + let transformation_map = AcirTransformationMap::new(acir_opcode_positions); acir.assert_messages = transform_assert_messages(acir.assert_messages, &transformation_map); - Ok((acir, transformation_map)) + (acir, transformation_map) } diff --git a/acvm-repo/acvm/src/compiler/optimizers/mod.rs b/acvm-repo/acvm/src/compiler/optimizers/mod.rs index 627ddbb4117..923756580b3 100644 --- a/acvm-repo/acvm/src/compiler/optimizers/mod.rs +++ b/acvm-repo/acvm/src/compiler/optimizers/mod.rs @@ -6,6 +6,7 @@ mod unused_memory; pub(crate) use general::GeneralOptimizer; pub(crate) use redundant_range::RangeOptimizer; +use tracing::info; use self::unused_memory::UnusedMemoryOptimizer; @@ -13,7 +14,9 @@ use super::{transform_assert_messages, AcirTransformationMap}; /// Applies [`ProofSystemCompiler`][crate::ProofSystemCompiler] independent optimizations to a [`Circuit`]. pub fn optimize(acir: Circuit) -> (Circuit, AcirTransformationMap) { - let (mut acir, transformation_map) = optimize_internal(acir); + let (mut acir, new_opcode_positions) = optimize_internal(acir); + + let transformation_map = AcirTransformationMap::new(new_opcode_positions); acir.assert_messages = transform_assert_messages(acir.assert_messages, &transformation_map); @@ -21,14 +24,17 @@ pub fn optimize(acir: Circuit) -> (Circuit, AcirTransformationMap) { } /// Applies [`ProofSystemCompiler`][crate::ProofSystemCompiler] independent optimizations to a [`Circuit`]. 
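As a minimal usage sketch of the new transformation map (the helper below is hypothetical and only assumes the `AcirTransformationMap` API shown above, with import paths assumed to be `acir::circuit::OpcodeLocation` and `acvm::compiler::AcirTransformationMap`): if one old opcode is split in two during a pass, `acir_opcode_positions` might be `vec![0, 0, 1]` (indexed by new position), which `AcirTransformationMap::new` folds into `{0 => [0, 1], 1 => [2]}`; `new_locations` then replays that mapping for a given location.

use acir::circuit::OpcodeLocation;
use acvm::compiler::AcirTransformationMap;

fn print_new_locations(map: &AcirTransformationMap) {
    // Ask where the opcode that used to sit at ACIR index 0 ended up after the passes.
    for location in map.new_locations(OpcodeLocation::Acir(0)) {
        match location {
            OpcodeLocation::Acir(new_index) => println!("old 0 -> new {new_index}"),
            OpcodeLocation::Brillig { acir_index, brillig_index } => {
                println!("old 0 -> new {acir_index} (brillig opcode {brillig_index})");
            }
        }
    }
}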
-pub(super) fn optimize_internal(acir: Circuit) -> (Circuit, AcirTransformationMap) { +#[tracing::instrument(level = "trace", name = "optimize_acir", skip(acir))] +pub(super) fn optimize_internal(acir: Circuit) -> (Circuit, Vec<usize>) { + info!("Number of opcodes before: {}", acir.opcodes.len()); + // General optimizer pass let opcodes: Vec<Opcode> = acir .opcodes .into_iter() .map(|opcode| { - if let Opcode::Arithmetic(arith_expr) = opcode { - Opcode::Arithmetic(GeneralOptimizer::optimize(arith_expr)) + if let Opcode::AssertZero(arith_expr) = opcode { + Opcode::AssertZero(GeneralOptimizer::optimize(arith_expr)) } else { opcode } @@ -50,7 +56,7 @@ pub(super) fn optimize_internal(acir: Circuit) -> (Circuit, AcirTransformationMa let (acir, acir_opcode_positions) = range_optimizer.replace_redundant_ranges(acir_opcode_positions); - let transformation_map = AcirTransformationMap { acir_opcode_positions }; + info!("Number of opcodes after: {}", acir.opcodes.len()); - (acir, transformation_map) + (acir, acir_opcode_positions) } diff --git a/acvm-repo/acvm/src/compiler/optimizers/redundant_range.rs b/acvm-repo/acvm/src/compiler/optimizers/redundant_range.rs index 766d3674113..5d19f9629ba 100644 --- a/acvm-repo/acvm/src/compiler/optimizers/redundant_range.rs +++ b/acvm-repo/acvm/src/compiler/optimizers/redundant_range.rs @@ -48,25 +48,51 @@ impl RangeOptimizer { /// only store the fact that we have constrained it to /// be 16 bits. fn collect_ranges(circuit: &Circuit) -> BTreeMap<Witness, u32> { - let mut witness_to_bit_sizes = BTreeMap::new(); + let mut witness_to_bit_sizes: BTreeMap<Witness, u32> = BTreeMap::new(); for opcode in &circuit.opcodes { - // Extract the witness index and number of bits, - // if it is a range constraint - let (witness, num_bits) = match extract_range_opcode(opcode) { - Some(func_inputs) => func_inputs, - None => continue, + let Some((witness, num_bits)) = (match opcode { + Opcode::AssertZero(expr) => { + // If the opcode is constraining a witness to be equal to a value then it can be considered + // as a range opcode for the number of bits required to hold that value. + if expr.is_degree_one_univariate() { + let (k, witness) = expr.linear_combinations[0]; + let constant = expr.q_c; + let witness_value = -constant / k; + + if witness_value.is_zero() { + Some((witness, 0)) + } else { + // We subtract off 1 bit from the implied witness value to give the weakest range constraint + // which would be stricter than the constraint imposed by this opcode. + let implied_range_constraint_bits = witness_value.num_bits() - 1; + Some((witness, implied_range_constraint_bits)) + } + } else { + None + } + } + + + Opcode::BlackBoxFuncCall(BlackBoxFuncCall::RANGE { + input: FunctionInput { witness, num_bits }, + }) => { + Some((*witness, *num_bits)) + } + + _ => None, + }) else { + continue; }; // Check if the witness has already been recorded and if the witness // size is more than the current one, we replace it - let should_replace = match witness_to_bit_sizes.get(&witness).copied() { - Some(old_range_bits) => old_range_bits > num_bits, - None => true, - }; - if should_replace { - witness_to_bit_sizes.insert(witness, num_bits); - } + witness_to_bit_sizes + .entry(witness) + .and_modify(|old_range_bits| { + *old_range_bits = std::cmp::min(*old_range_bits, num_bits); + }) + .or_insert(num_bits); } witness_to_bit_sizes } @@ -116,23 +142,17 @@ impl RangeOptimizer { /// Extract the range opcode from the `Opcode` enum /// Returns None, if `Opcode` is not the range opcode.
fn extract_range_opcode(opcode: &Opcode) -> Option<(Witness, u32)> { - // Range constraints are blackbox function calls - // so we first extract the function call - let func_call = match opcode { - acir::circuit::Opcode::BlackBoxFuncCall(func_call) => func_call, - _ => return None, - }; - - // Skip if it is not a range constraint - match func_call { - BlackBoxFuncCall::RANGE { input } => Some((input.witness, input.num_bits)), + match opcode { + Opcode::BlackBoxFuncCall(BlackBoxFuncCall::RANGE { input }) => { + Some((input.witness, input.num_bits)) + } _ => None, } } fn optimized_range_opcode(witness: Witness, num_bits: u32) -> Opcode { if num_bits == 1 { - Opcode::Arithmetic(Expression { + Opcode::AssertZero(Expression { mul_terms: vec![(FieldElement::one(), witness, witness)], linear_combinations: vec![(-FieldElement::one(), witness)], q_c: FieldElement::zero(), @@ -234,16 +254,29 @@ mod tests { #[test] fn non_range_opcodes() { // The optimizer should not remove or change non-range opcodes - // The four Arithmetic opcodes should remain unchanged. + // The four AssertZero opcodes should remain unchanged. let mut circuit = test_circuit(vec![(Witness(1), 16), (Witness(1), 16)]); - circuit.opcodes.push(Opcode::Arithmetic(Expression::default())); - circuit.opcodes.push(Opcode::Arithmetic(Expression::default())); - circuit.opcodes.push(Opcode::Arithmetic(Expression::default())); - circuit.opcodes.push(Opcode::Arithmetic(Expression::default())); + circuit.opcodes.push(Opcode::AssertZero(Expression::default())); + circuit.opcodes.push(Opcode::AssertZero(Expression::default())); + circuit.opcodes.push(Opcode::AssertZero(Expression::default())); + circuit.opcodes.push(Opcode::AssertZero(Expression::default())); let acir_opcode_positions = circuit.opcodes.iter().enumerate().map(|(i, _)| i).collect(); let optimizer = RangeOptimizer::new(circuit); let (optimized_circuit, _) = optimizer.replace_redundant_ranges(acir_opcode_positions); assert_eq!(optimized_circuit.opcodes.len(), 5); } + + #[test] + fn constant_implied_ranges() { + // The optimizer should use knowledge about constant witness assignments to remove range opcodes. + let mut circuit = test_circuit(vec![(Witness(1), 16)]); + + circuit.opcodes.push(Opcode::AssertZero(Witness(1).into())); + let acir_opcode_positions = circuit.opcodes.iter().enumerate().map(|(i, _)| i).collect(); + let optimizer = RangeOptimizer::new(circuit); + let (optimized_circuit, _) = optimizer.replace_redundant_ranges(acir_opcode_positions); + assert_eq!(optimized_circuit.opcodes.len(), 1); + assert_eq!(optimized_circuit.opcodes[0], Opcode::AssertZero(Witness(1).into())); + } } diff --git a/acvm-repo/acvm/src/compiler/transformers/csat.rs b/acvm-repo/acvm/src/compiler/transformers/csat.rs index 9f89ac4671a..9e2e3091c74 100644 --- a/acvm-repo/acvm/src/compiler/transformers/csat.rs +++ b/acvm-repo/acvm/src/compiler/transformers/csat.rs @@ -9,7 +9,7 @@ use indexmap::IndexMap; /// A transformer which processes any [`Expression`]s to break them up such that they /// fit within the [`ProofSystemCompiler`][crate::ProofSystemCompiler]'s width. /// -/// This transformer is only used when targetting the [`PLONKCSat`][crate::Language::PLONKCSat] language. +/// This transformer is only used when targeting the [`Bounded`][crate::ExpressionWidth::Bounded] configuration. /// /// This is done by creating intermediate variables to hold partial calculations and then combining them /// to calculate the original expression. 
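As a worked illustration of the intermediate-variable idea described above (a sketch assuming a bounded width of 3, not taken from the code itself): the assert-zero expression

    q1*w1 + q2*w2 + q3*w3 + q4*w4 + qc = 0

touches four witnesses, so it cannot fit in a single width-3 opcode. The transformer introduces an intermediate witness t holding a partial calculation and emits two opcodes that each touch at most three witnesses:

    q1*w1 + q2*w2 - t = 0
    t + q3*w3 + q4*w4 + qc = 0

The transformation map then records that both new opcodes originate from the single original opcode.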
@@ -62,7 +62,7 @@ impl CSatTransformer { } // Still missing dead witness optimization. - // To do this, we will need the whole set of arithmetic opcodes + // To do this, we will need the whole set of assert-zero opcodes // I think it can also be done before the local optimization seen here, as dead variables will come from the user pub(crate) fn transform( &mut self, @@ -84,7 +84,7 @@ impl CSatTransformer { opcode } - // This optimization will search for combinations of terms which can be represented in a single arithmetic opcode + // This optimization will search for combinations of terms which can be represented in a single assert-zero opcode // Case 1 : qM * wL * wR + qL * wL + qR * wR + qO * wO + qC // This polynomial does not require any further optimizations, it can be safely represented in one opcode // ie a polynomial with 1 mul(bi-variate) term and 3 (univariate) terms where 2 of those terms match the bivariate term @@ -93,13 +93,13 @@ impl CSatTransformer { // // // Case 2: qM * wL * wR + qL * wL + qR * wR + qO * wO + qC + qM2 * wL2 * wR2 + qL * wL2 + qR * wR2 + qO * wO2 + qC2 - // This polynomial cannot be represented using one arithmetic opcode. + // This polynomial cannot be represented using one assert-zero opcode. // // This algorithm will first extract the first full opcode(if possible): // t = qM * wL * wR + qL * wL + qR * wR + qO * wO + qC // // The polynomial now looks like so t + qM2 * wL2 * wR2 + qL * wL2 + qR * wR2 + qO * wO2 + qC2 - // This polynomial cannot be represented using one arithmetic opcode. + // This polynomial cannot be represented using one assert-zero opcode. // // This algorithm will then extract the second full opcode(if possible): // t2 = qM2 * wL2 * wR2 + qL * wL2 + qR * wR2 + qO * wO2 + qC2 @@ -121,7 +121,7 @@ impl CSatTransformer { // If the opcode only has one mul term, then this algorithm cannot optimize it any further // Either it can be represented in a single arithmetic equation or it's fan-in is too large and we need intermediate variables for those // large-fan-in optimization is not this algorithms purpose. - // If the opcode has 0 mul terms, then it is an add opcode and similarly it can either fit into a single arithmetic opcode or it has a large fan-in + // If the opcode has 0 mul terms, then it is an add opcode and similarly it can either fit into a single assert-zero opcode or it has a large fan-in if opcode.mul_terms.len() <= 1 { return opcode; } @@ -194,7 +194,7 @@ impl CSatTransformer { } } - // Now we have used up 2 spaces in our arithmetic opcode. The width now dictates, how many more we can add + // Now we have used up 2 spaces in our assert-zero opcode. 
The width now dictates, how many more we can add let mut remaining_space = self.width - 2 - 1; // We minus 1 because we need an extra space to contain the intermediate variable // Keep adding terms until we have no more left, or we reach the width let mut remaining_linear_terms = @@ -325,7 +325,7 @@ impl CSatTransformer { // Then use intermediate variables again to squash the fan-in, so that it can fit into the appropriate width // First check if this polynomial actually needs a partial opcode optimization - // There is the chance that it fits perfectly within the arithmetic opcode + // There is the chance that it fits perfectly within the assert-zero opcode if opcode.fits_in_one_identity(self.width) { return opcode; } diff --git a/acvm-repo/acvm/src/compiler/transformers/fallback.rs b/acvm-repo/acvm/src/compiler/transformers/fallback.rs deleted file mode 100644 index 06dfc84a798..00000000000 --- a/acvm-repo/acvm/src/compiler/transformers/fallback.rs +++ /dev/null @@ -1,158 +0,0 @@ -use super::super::CompileError; -use acir::{ - circuit::{opcodes::BlackBoxFuncCall, Circuit, Opcode}, - native_types::Expression, -}; - -/// The initial transformer to act on a [`Circuit`]. This replaces any unsupported opcodes with -/// fallback implementations consisting of well supported opcodes. -pub(crate) struct FallbackTransformer; - -impl FallbackTransformer { - //ACIR pass which replace unsupported opcodes using arithmetic fallback - pub(crate) fn transform( - acir: Circuit, - is_supported: impl Fn(&Opcode) -> bool, - opcode_positions: Vec, - ) -> Result<(Circuit, Vec), CompileError> { - let mut acir_supported_opcodes = Vec::with_capacity(acir.opcodes.len()); - let mut new_opcode_positions = Vec::with_capacity(opcode_positions.len()); - let mut witness_idx = acir.current_witness_index + 1; - - for (idx, opcode) in acir.opcodes.into_iter().enumerate() { - match &opcode { - Opcode::Arithmetic(_) | Opcode::Directive(_) | Opcode::Brillig(_) => { - // directive, arithmetic expression or blocks are handled by acvm - new_opcode_positions.push(opcode_positions[idx]); - acir_supported_opcodes.push(opcode); - continue; - } - Opcode::MemoryInit { .. } | Opcode::MemoryOp { .. } => { - if !is_supported(&opcode) { - return Err(CompileError::UnsupportedMemoryOpcode( - opcode.unsupported_opcode(), - )); - } - new_opcode_positions.push(opcode_positions[idx]); - acir_supported_opcodes.push(opcode); - } - Opcode::BlackBoxFuncCall(bb_func_call) => { - // We know it is an black box function. Now check if it is - // supported by the backend. 
If it is supported, then we can simply - // collect the opcode - if is_supported(&opcode) { - new_opcode_positions.push(opcode_positions[idx]); - acir_supported_opcodes.push(opcode); - continue; - } else { - // If we get here then we know that this black box function is not supported - // so we need to replace it with a version of the opcode which only uses arithmetic - // expressions - let (updated_witness_index, opcodes_fallback) = - Self::opcode_fallback(bb_func_call, witness_idx)?; - witness_idx = updated_witness_index; - new_opcode_positions - .extend(vec![opcode_positions[idx]; opcodes_fallback.len()]); - acir_supported_opcodes.extend(opcodes_fallback); - } - } - } - } - - Ok(( - Circuit { - current_witness_index: witness_idx - 1, - opcodes: acir_supported_opcodes, - ..acir - }, - new_opcode_positions, - )) - } - - fn opcode_fallback( - gc: &BlackBoxFuncCall, - current_witness_idx: u32, - ) -> Result<(u32, Vec), CompileError> { - let (updated_witness_index, opcodes_fallback) = match gc { - BlackBoxFuncCall::AND { lhs, rhs, output } => { - assert_eq!( - lhs.num_bits, rhs.num_bits, - "number of bits specified for each input must be the same" - ); - stdlib::blackbox_fallbacks::and( - Expression::from(lhs.witness), - Expression::from(rhs.witness), - *output, - lhs.num_bits, - current_witness_idx, - ) - } - BlackBoxFuncCall::XOR { lhs, rhs, output } => { - assert_eq!( - lhs.num_bits, rhs.num_bits, - "number of bits specified for each input must be the same" - ); - stdlib::blackbox_fallbacks::xor( - Expression::from(lhs.witness), - Expression::from(rhs.witness), - *output, - lhs.num_bits, - current_witness_idx, - ) - } - BlackBoxFuncCall::RANGE { input } => { - // Note there are no outputs because range produces no outputs - stdlib::blackbox_fallbacks::range( - Expression::from(input.witness), - input.num_bits, - current_witness_idx, - ) - } - #[cfg(feature = "unstable-fallbacks")] - BlackBoxFuncCall::SHA256 { inputs, outputs } => { - let sha256_inputs = - inputs.iter().map(|input| (input.witness.into(), input.num_bits)).collect(); - stdlib::blackbox_fallbacks::sha256( - sha256_inputs, - outputs.to_vec(), - current_witness_idx, - ) - } - #[cfg(feature = "unstable-fallbacks")] - BlackBoxFuncCall::Blake2s { inputs, outputs } => { - let blake2s_inputs = - inputs.iter().map(|input| (input.witness.into(), input.num_bits)).collect(); - stdlib::blackbox_fallbacks::blake2s( - blake2s_inputs, - outputs.to_vec(), - current_witness_idx, - ) - } - #[cfg(feature = "unstable-fallbacks")] - BlackBoxFuncCall::HashToField128Security { inputs, output } => { - let hash_to_field_inputs = - inputs.iter().map(|input| (input.witness.into(), input.num_bits)).collect(); - stdlib::blackbox_fallbacks::hash_to_field( - hash_to_field_inputs, - *output, - current_witness_idx, - ) - } - #[cfg(feature = "unstable-fallbacks")] - BlackBoxFuncCall::Keccak256 { inputs, outputs } => { - let keccak_inputs = - inputs.iter().map(|input| (input.witness.into(), input.num_bits)).collect(); - stdlib::blackbox_fallbacks::keccak256( - keccak_inputs, - outputs.to_vec(), - current_witness_idx, - ) - } - _ => { - return Err(CompileError::UnsupportedBlackBox(gc.get_black_box_func())); - } - }; - - Ok((updated_witness_index, opcodes_fallback)) - } -} diff --git a/acvm-repo/acvm/src/compiler/transformers/mod.rs b/acvm-repo/acvm/src/compiler/transformers/mod.rs index d827b759666..d26b3f8bbf3 100644 --- a/acvm-repo/acvm/src/compiler/transformers/mod.rs +++ b/acvm-repo/acvm/src/compiler/transformers/mod.rs @@ -5,56 +5,50 @@ use acir::{ }; use 
indexmap::IndexMap; -use crate::Language; +use crate::ExpressionWidth; mod csat; -mod fallback; mod r1cs; pub(crate) use csat::CSatTransformer; -pub(crate) use fallback::FallbackTransformer; pub(crate) use r1cs::R1CSTransformer; -use super::{transform_assert_messages, AcirTransformationMap, CompileError}; +use super::{transform_assert_messages, AcirTransformationMap}; /// Applies [`ProofSystemCompiler`][crate::ProofSystemCompiler] specific optimizations to a [`Circuit`]. pub fn transform( acir: Circuit, - np_language: Language, - is_opcode_supported: impl Fn(&Opcode) -> bool, -) -> Result<(Circuit, AcirTransformationMap), CompileError> { + expression_width: ExpressionWidth, +) -> (Circuit, AcirTransformationMap) { // Track original acir opcode positions throughout the transformation passes of the compilation // by applying the modifications done to the circuit opcodes and also to the opcode_positions (delete and insert) let acir_opcode_positions = acir.opcodes.iter().enumerate().map(|(i, _)| i).collect(); - let (mut acir, transformation_map) = - transform_internal(acir, np_language, is_opcode_supported, acir_opcode_positions)?; + let (mut acir, acir_opcode_positions) = + transform_internal(acir, expression_width, acir_opcode_positions); + + let transformation_map = AcirTransformationMap::new(acir_opcode_positions); acir.assert_messages = transform_assert_messages(acir.assert_messages, &transformation_map); - Ok((acir, transformation_map)) + (acir, transformation_map) } /// Applies [`ProofSystemCompiler`][crate::ProofSystemCompiler] specific optimizations to a [`Circuit`]. /// /// Accepts an injected `acir_opcode_positions` to allow transformations to be applied directly after optimizations. +#[tracing::instrument(level = "trace", name = "transform_acir", skip(acir, acir_opcode_positions))] pub(super) fn transform_internal( acir: Circuit, - np_language: Language, - is_opcode_supported: impl Fn(&Opcode) -> bool, + expression_width: ExpressionWidth, acir_opcode_positions: Vec<usize>, -) -> Result<(Circuit, AcirTransformationMap), CompileError> { - // Fallback transformer pass - let (acir, acir_opcode_positions) = - FallbackTransformer::transform(acir, is_opcode_supported, acir_opcode_positions)?; - - let mut transformer = match &np_language { - crate::Language::R1CS => { - let transformation_map = AcirTransformationMap { acir_opcode_positions }; +) -> (Circuit, Vec<usize>) { + let mut transformer = match &expression_width { + crate::ExpressionWidth::Unbounded => { let transformer = R1CSTransformer::new(acir); - return Ok((transformer.transform(), transformation_map)); + return (transformer.transform(), acir_opcode_positions); } - crate::Language::PLONKCSat { width } => { + crate::ExpressionWidth::Bounded { width } => { let mut csat = CSatTransformer::new(*width); for value in acir.circuit_arguments() { csat.mark_solvable(value); @@ -68,7 +62,7 @@ pub(super) fn transform_internal( // TODO or at the very least, we could put all of it inside of CSatOptimizer pass let mut new_acir_opcode_positions: Vec<usize> = Vec::with_capacity(acir_opcode_positions.len()); - // Optimize the arithmetic gates by reducing them into the correct width and + // Optimize the assert-zero gates by reducing them into the correct width and // creating intermediate variables when necessary let mut transformed_opcodes = Vec::new(); @@ -78,7 +72,7 @@ pub(super) fn transform_internal( let mut intermediate_variables: IndexMap<Expression, Witness> = IndexMap::new(); for (index, opcode) in acir.opcodes.into_iter().enumerate() { match opcode { -
Opcode::Arithmetic(arith_expr) => { + Opcode::AssertZero(arith_expr) => { let len = intermediate_variables.len(); let arith_expr = transformer.transform( @@ -101,7 +95,7 @@ pub(super) fn transform_internal( new_opcodes.push(arith_expr); for opcode in new_opcodes { new_acir_opcode_positions.push(acir_opcode_positions[index]); - transformed_opcodes.push(Opcode::Arithmetic(opcode)); + transformed_opcodes.push(Opcode::AssertZero(opcode)); } } Opcode::BlackBoxFuncCall(ref func) => { @@ -110,18 +104,17 @@ pub(super) fn transform_internal( | acir::circuit::opcodes::BlackBoxFuncCall::XOR { output, .. } => { transformer.mark_solvable(*output); } - acir::circuit::opcodes::BlackBoxFuncCall::RANGE { .. } => (), + acir::circuit::opcodes::BlackBoxFuncCall::RANGE { .. } + | acir::circuit::opcodes::BlackBoxFuncCall::RecursiveAggregation { .. } => (), acir::circuit::opcodes::BlackBoxFuncCall::SHA256 { outputs, .. } | acir::circuit::opcodes::BlackBoxFuncCall::Keccak256 { outputs, .. } | acir::circuit::opcodes::BlackBoxFuncCall::Keccak256VariableLength { outputs, .. } - | acir::circuit::opcodes::BlackBoxFuncCall::RecursiveAggregation { - output_aggregation_object: outputs, - .. - } - | acir::circuit::opcodes::BlackBoxFuncCall::Blake2s { outputs, .. } => { + | acir::circuit::opcodes::BlackBoxFuncCall::Keccakf1600 { outputs, .. } + | acir::circuit::opcodes::BlackBoxFuncCall::Blake2s { outputs, .. } + | acir::circuit::opcodes::BlackBoxFuncCall::Blake3 { outputs, .. } => { for witness in outputs { transformer.mark_solvable(*witness); } @@ -137,11 +130,7 @@ pub(super) fn transform_internal( transformer.mark_solvable(outputs.0); transformer.mark_solvable(outputs.1); } - acir::circuit::opcodes::BlackBoxFuncCall::HashToField128Security { - output, - .. - } - | acir::circuit::opcodes::BlackBoxFuncCall::EcdsaSecp256k1 { output, .. } + acir::circuit::opcodes::BlackBoxFuncCall::EcdsaSecp256k1 { output, .. } | acir::circuit::opcodes::BlackBoxFuncCall::EcdsaSecp256r1 { output, .. } | acir::circuit::opcodes::BlackBoxFuncCall::SchnorrVerify { output, .. } | acir::circuit::opcodes::BlackBoxFuncCall::PedersenHash { output, .. } => { @@ -214,8 +203,5 @@ pub(super) fn transform_internal( ..acir }; - let transformation_map = - AcirTransformationMap { acir_opcode_positions: new_acir_opcode_positions }; - - Ok((acir, transformation_map)) + (acir, new_acir_opcode_positions) } diff --git a/acvm-repo/acvm/src/lib.rs b/acvm-repo/acvm/src/lib.rs index 0ab037a2e4b..626bb2c9b91 100644 --- a/acvm-repo/acvm/src/lib.rs +++ b/acvm-repo/acvm/src/lib.rs @@ -18,10 +18,16 @@ pub use brillig_vm; // re-export blackbox solver pub use acvm_blackbox_solver as blackbox_solver; -/// Supported NP complete languages -/// This might need to be in ACIR instead +/// Specifies the maximum width of the expressions which will be constrained. +/// +/// Unbounded Expressions are useful if you are eventually going to pass the ACIR +/// into a proving system which supports R1CS. +/// +/// Bounded Expressions are useful if you are eventually going to pass the ACIR +/// into a proving system which supports PLONK, where arithmetic expressions have a +/// finite fan-in. 
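A minimal sketch of how a consumer might choose between the two variants and feed the result to the reworked `compile` entry point (the `prepare_for_backend` helper is hypothetical; the `acvm::ExpressionWidth` and `acvm::compiler::compile` paths and the width of 4 are assumptions based on the surrounding changes):

use acir::circuit::Circuit;
use acvm::{compiler::compile, ExpressionWidth};

fn prepare_for_backend(acir: Circuit, supports_unbounded_expressions: bool) -> Circuit {
    // R1CS-style backends can take expressions of any width; PLONK-style backends bound
    // the fan-in of each expression, here to a width of 4 wires.
    let expression_width = if supports_unbounded_expressions {
        ExpressionWidth::Unbounded
    } else {
        ExpressionWidth::Bounded { width: 4 }
    };

    let (optimized_acir, _transformation_map) = compile(acir, expression_width);
    optimized_acir
}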
#[derive(Debug, Clone, Copy)] -pub enum Language { - R1CS, - PLONKCSat { width: usize }, +pub enum ExpressionWidth { + Unbounded, + Bounded { width: usize }, } diff --git a/acvm-repo/acvm/src/pwg/arithmetic.rs b/acvm-repo/acvm/src/pwg/arithmetic.rs index 93a39fb249c..81462ea495e 100644 --- a/acvm-repo/acvm/src/pwg/arithmetic.rs +++ b/acvm-repo/acvm/src/pwg/arithmetic.rs @@ -5,9 +5,9 @@ use acir::{ use super::{insert_value, ErrorLocation, OpcodeNotSolvable, OpcodeResolutionError}; -/// An Arithmetic solver will take a Circuit's arithmetic opcodes with witness assignments +/// An Expression solver will take a Circuit's assert-zero opcodes with witness assignments /// and create the other witness variables -pub(super) struct ArithmeticSolver; +pub(super) struct ExpressionSolver; #[allow(clippy::enum_variant_names)] pub(super) enum OpcodeStatus { @@ -22,17 +22,17 @@ pub(crate) enum MulTerm { Solved(FieldElement), } -impl ArithmeticSolver { +impl ExpressionSolver { /// Derives the rest of the witness based on the initial low level variables pub(super) fn solve( initial_witness: &mut WitnessMap, opcode: &Expression, ) -> Result<(), OpcodeResolutionError> { - let opcode = &ArithmeticSolver::evaluate(opcode, initial_witness); + let opcode = &ExpressionSolver::evaluate(opcode, initial_witness); // Evaluate multiplication term - let mul_result = ArithmeticSolver::solve_mul_term(opcode, initial_witness); + let mul_result = ExpressionSolver::solve_mul_term(opcode, initial_witness); // Evaluate the fan-in terms - let opcode_status = ArithmeticSolver::solve_fan_in_term(opcode, initial_witness); + let opcode_status = ExpressionSolver::solve_fan_in_term(opcode, initial_witness); match (mul_result, opcode_status) { (MulTerm::TooManyUnknowns, _) | (_, OpcodeStatus::OpcodeUnsolvable) => { @@ -126,7 +126,7 @@ impl ArithmeticSolver { } } - /// Returns the evaluation of the multiplication term in the arithmetic opcode + /// Returns the evaluation of the multiplication term in the expression /// If the witness values are not known, then the function returns a None /// XXX: Do we need to account for the case where 5xy + 6x = 0 ? We do not know y, but it can be solved given x . But I believe x can be solved with another opcode /// XXX: What about making a mul opcode = a constant 5xy + 7 = 0 ? This is the same as the above. @@ -135,11 +135,11 @@ impl ArithmeticSolver { // We are assuming it has been optimized. 
match arith_opcode.mul_terms.len() { 0 => MulTerm::Solved(FieldElement::zero()), - 1 => ArithmeticSolver::solve_mul_term_helper( + 1 => ExpressionSolver::solve_mul_term_helper( &arith_opcode.mul_terms[0], witness_assignments, ), - _ => panic!("Mul term in the arithmetic opcode must contain either zero or one term"), + _ => panic!("Mul term in the assert-zero opcode must contain either zero or one term"), } } @@ -186,7 +186,7 @@ impl ArithmeticSolver { let mut result = FieldElement::zero(); for term in arith_opcode.linear_combinations.iter() { - let value = ArithmeticSolver::solve_fan_in_term_helper(term, witness_assignments); + let value = ExpressionSolver::solve_fan_in_term_helper(term, witness_assignments); match value { Some(a) => result += a, None => { @@ -212,7 +212,7 @@ impl ArithmeticSolver { pub(super) fn evaluate(expr: &Expression, initial_witness: &WitnessMap) -> Expression { let mut result = Expression::default(); for &(c, w1, w2) in &expr.mul_terms { - let mul_result = ArithmeticSolver::solve_mul_term_helper(&(c, w1, w2), initial_witness); + let mul_result = ExpressionSolver::solve_mul_term_helper(&(c, w1, w2), initial_witness); match mul_result { MulTerm::OneUnknown(v, w) => { if !v.is_zero() { @@ -228,7 +228,7 @@ impl ArithmeticSolver { } } for &(c, w) in &expr.linear_combinations { - if let Some(f) = ArithmeticSolver::solve_fan_in_term_helper(&(c, w), initial_witness) { + if let Some(f) = ExpressionSolver::solve_fan_in_term_helper(&(c, w), initial_witness) { result.q_c += f; } else if !c.is_zero() { result.linear_combinations.push((c, w)); @@ -240,7 +240,7 @@ impl ArithmeticSolver { } #[test] -fn arithmetic_smoke_test() { +fn expression_solver_smoke_test() { let a = Witness(0); let b = Witness(1); let c = Witness(2); @@ -274,8 +274,8 @@ fn arithmetic_smoke_test() { values.insert(c, FieldElement::from(1_i128)); values.insert(d, FieldElement::from(1_i128)); - assert_eq!(ArithmeticSolver::solve(&mut values, &opcode_a), Ok(())); - assert_eq!(ArithmeticSolver::solve(&mut values, &opcode_b), Ok(())); + assert_eq!(ExpressionSolver::solve(&mut values, &opcode_a), Ok(())); + assert_eq!(ExpressionSolver::solve(&mut values, &opcode_b), Ok(())); assert_eq!(values.get(&a).unwrap(), &FieldElement::from(4_i128)); } diff --git a/acvm-repo/acvm/src/pwg/blackbox/hash.rs b/acvm-repo/acvm/src/pwg/blackbox/hash.rs index 80665a743c4..1ada397fc59 100644 --- a/acvm-repo/acvm/src/pwg/blackbox/hash.rs +++ b/acvm-repo/acvm/src/pwg/blackbox/hash.rs @@ -3,26 +3,11 @@ use acir::{ native_types::{Witness, WitnessMap}, BlackBoxFunc, FieldElement, }; -use acvm_blackbox_solver::{hash_to_field_128_security, BlackBoxResolutionError}; +use acvm_blackbox_solver::BlackBoxResolutionError; use crate::pwg::{insert_value, witness_to_value}; use crate::OpcodeResolutionError; -/// Attempts to solve a `HashToField128Security` opcode -/// If successful, `initial_witness` will be mutated to contain the new witness assignment. -pub(super) fn solve_hash_to_field( - initial_witness: &mut WitnessMap, - inputs: &[FunctionInput], - output: &Witness, -) -> Result<(), OpcodeResolutionError> { - let message_input = get_hash_input(initial_witness, inputs, None)?; - let field = hash_to_field_128_security(&message_input)?; - - insert_value(output, field, initial_witness)?; - - Ok(()) -} - /// Attempts to solve a 256 bit hash function opcode. /// If successful, `initial_witness` will be mutated to contain the new witness assignment. 
pub(super) fn solve_generic_256_hash_opcode( diff --git a/acvm-repo/acvm/src/pwg/blackbox/mod.rs b/acvm-repo/acvm/src/pwg/blackbox/mod.rs index 7e8ab8b948c..c18a97733b8 100644 --- a/acvm-repo/acvm/src/pwg/blackbox/mod.rs +++ b/acvm-repo/acvm/src/pwg/blackbox/mod.rs @@ -3,12 +3,12 @@ use acir::{ native_types::{Witness, WitnessMap}, FieldElement, }; -use acvm_blackbox_solver::{blake2s, keccak256, sha256}; +use acvm_blackbox_solver::{blake2s, blake3, keccak256, keccakf1600, sha256}; use self::pedersen::pedersen_hash; use super::{insert_value, OpcodeNotSolvable, OpcodeResolutionError}; -use crate::BlackBoxFunctionSolver; +use crate::{pwg::witness_to_value, BlackBoxFunctionSolver}; mod fixed_base_scalar_mul; mod hash; @@ -19,7 +19,7 @@ mod signature; use fixed_base_scalar_mul::fixed_base_scalar_mul; // Hash functions should eventually be exposed for external consumers. -use hash::{solve_generic_256_hash_opcode, solve_hash_to_field}; +use hash::solve_generic_256_hash_opcode; use logic::{and, xor}; use pedersen::pedersen; use range::solve_range_opcode; @@ -83,6 +83,14 @@ pub(crate) fn solve( blake2s, bb_func.get_black_box_func(), ), + BlackBoxFuncCall::Blake3 { inputs, outputs } => solve_generic_256_hash_opcode( + initial_witness, + inputs, + None, + outputs, + blake3, + bb_func.get_black_box_func(), + ), BlackBoxFuncCall::Keccak256 { inputs, outputs } => solve_generic_256_hash_opcode( initial_witness, inputs, @@ -101,8 +109,21 @@ pub(crate) fn solve( bb_func.get_black_box_func(), ) } - BlackBoxFuncCall::HashToField128Security { inputs, output } => { - solve_hash_to_field(initial_witness, inputs, output) + BlackBoxFuncCall::Keccakf1600 { inputs, outputs } => { + let mut state = [0; 25]; + for (i, input) in inputs.iter().enumerate() { + let witness = input.witness; + let num_bits = input.num_bits as usize; + assert_eq!(num_bits, 64); + let witness_assignment = witness_to_value(initial_witness, witness)?; + let lane = witness_assignment.try_to_u64(); + state[i] = lane.unwrap(); + } + let state = keccakf1600(state)?; + for (output_witness, value) in outputs.iter().zip(state.into_iter()) { + insert_value(output_witness, FieldElement::from(value as u128), initial_witness)?; + } + Ok(()) } BlackBoxFuncCall::SchnorrVerify { public_key_x, @@ -156,13 +177,7 @@ pub(crate) fn solve( BlackBoxFuncCall::FixedBaseScalarMul { low, high, outputs } => { fixed_base_scalar_mul(backend, initial_witness, *low, *high, *outputs) } - BlackBoxFuncCall::RecursiveAggregation { output_aggregation_object, .. } => { - // Solve the output of the recursive aggregation to zero to prevent missing assignment errors - // The correct value will be computed by the backend - for witness in output_aggregation_object { - insert_value(witness, FieldElement::zero(), initial_witness)?; - } - Ok(()) - } + // Recursive aggregation will be entirely handled by the backend and is not solved by the ACVM + BlackBoxFuncCall::RecursiveAggregation { .. 
} => Ok(()), } } diff --git a/acvm-repo/acvm/src/pwg/brillig.rs b/acvm-repo/acvm/src/pwg/brillig.rs index b0e5ec6dd48..0db38c776e2 100644 --- a/acvm-repo/acvm/src/pwg/brillig.rs +++ b/acvm-repo/acvm/src/pwg/brillig.rs @@ -112,6 +112,22 @@ impl<'b, B: BlackBoxFunctionSolver> BrilligSolver<'b, B> { Ok(Self { vm, acir_index }) } + pub fn get_registers(&self) -> &Registers { + self.vm.get_registers() + } + + pub fn set_register(&mut self, register_index: usize, value: Value) { + self.vm.set_register(RegisterIndex(register_index), value); + } + + pub fn get_memory(&self) -> &[Value] { + self.vm.get_memory() + } + + pub fn write_memory_at(&mut self, ptr: usize, value: Value) { + self.vm.write_memory_at(ptr, value); + } + pub(super) fn solve(&mut self) -> Result { let status = self.vm.process_opcodes(); self.handle_vm_status(status) diff --git a/acvm-repo/acvm/src/pwg/memory_op.rs b/acvm-repo/acvm/src/pwg/memory_op.rs index 42951dfa3c1..c1da2cd95cf 100644 --- a/acvm-repo/acvm/src/pwg/memory_op.rs +++ b/acvm-repo/acvm/src/pwg/memory_op.rs @@ -6,7 +6,7 @@ use acir::{ FieldElement, }; -use super::{arithmetic::ArithmeticSolver, get_value, insert_value, witness_to_value}; +use super::{arithmetic::ExpressionSolver, get_value, insert_value, witness_to_value}; use super::{ErrorLocation, OpcodeResolutionError}; type MemoryIndex = u32; @@ -75,7 +75,7 @@ impl MemoryOpSolver { // // In read operations, this corresponds to the witness index at which the value from memory will be written. // In write operations, this corresponds to the expression which will be written to memory. - let value = ArithmeticSolver::evaluate(&op.value, initial_witness); + let value = ExpressionSolver::evaluate(&op.value, initial_witness); // `operation == 0` implies a read operation. (`operation == 1` implies write operation). let is_read_operation = operation.is_zero(); diff --git a/acvm-repo/acvm/src/pwg/mod.rs b/acvm-repo/acvm/src/pwg/mod.rs index 6016d0d1f3e..41b96572658 100644 --- a/acvm-repo/acvm/src/pwg/mod.rs +++ b/acvm-repo/acvm/src/pwg/mod.rs @@ -10,8 +10,8 @@ use acir::{ }; use acvm_blackbox_solver::BlackBoxResolutionError; -use self::{arithmetic::ArithmeticSolver, directives::solve_directives, memory_op::MemoryOpSolver}; -use crate::{BlackBoxFunctionSolver, Language}; +use self::{arithmetic::ExpressionSolver, directives::solve_directives, memory_op::MemoryOpSolver}; +use crate::BlackBoxFunctionSolver; use thiserror::Error; @@ -69,8 +69,8 @@ pub enum StepResult<'a, B: BlackBoxFunctionSolver> { // The most common being that one of its input has not been // assigned a value. // -// TODO: ExpressionHasTooManyUnknowns is specific for arithmetic expressions -// TODO: we could have a error enum for arithmetic failure cases in that module +// TODO: ExpressionHasTooManyUnknowns is specific for expression solver +// TODO: we could have a error enum for expression solver failure cases in that module // TODO that can be converted into an OpcodeNotSolvable or OpcodeResolutionError enum #[derive(Clone, PartialEq, Eq, Debug, Error)] pub enum OpcodeNotSolvable { @@ -104,8 +104,6 @@ impl std::fmt::Display for ErrorLocation { pub enum OpcodeResolutionError { #[error("Cannot solve opcode: {0}")] OpcodeNotSolvable(#[from] OpcodeNotSolvable), - #[error("Backend does not currently support the {0} opcode. 
ACVM does not currently have a fallback for this opcode.")] - UnsupportedBlackBoxFunc(BlackBoxFunc), #[error("Cannot satisfy constraint")] UnsatisfiedConstrain { opcode_location: ErrorLocation }, #[error("Index out of bounds, array has size {array_size:?}, but index was {index:?}")] @@ -122,9 +120,6 @@ impl From for OpcodeResolutionError { BlackBoxResolutionError::Failed(func, reason) => { OpcodeResolutionError::BlackBoxFunctionFailed(func, reason) } - BlackBoxResolutionError::Unsupported(func) => { - OpcodeResolutionError::UnsupportedBlackBoxFunc(func) - } } } } @@ -168,6 +163,14 @@ impl<'a, B: BlackBoxFunctionSolver> ACVM<'a, B> { &self.witness_map } + pub fn overwrite_witness( + &mut self, + witness: Witness, + value: FieldElement, + ) -> Option { + self.witness_map.insert(witness, value) + } + /// Returns a slice containing the opcodes of the circuit being executed. pub fn opcodes(&self) -> &[Opcode] { self.opcodes @@ -250,7 +253,7 @@ impl<'a, B: BlackBoxFunctionSolver> ACVM<'a, B> { let opcode = &self.opcodes[self.instruction_pointer]; let resolution = match opcode { - Opcode::Arithmetic(expr) => ArithmeticSolver::solve(&mut self.witness_map, expr), + Opcode::AssertZero(expr) => ExpressionSolver::solve(&mut self.witness_map, expr), Opcode::BlackBoxFuncCall(bb_func) => { blackbox::solve(self.backend, &mut self.witness_map, bb_func) } @@ -394,7 +397,7 @@ pub fn get_value( expr: &Expression, initial_witness: &WitnessMap, ) -> Result { - let expr = ArithmeticSolver::evaluate(expr, initial_witness); + let expr = ExpressionSolver::evaluate(expr, initial_witness); match expr.to_const() { Some(value) => Ok(value), None => Err(OpcodeResolutionError::OpcodeNotSolvable( @@ -442,30 +445,3 @@ fn any_witness_from_expression(expr: &Expression) -> Option { Some(expr.linear_combinations[0].1) } } - -#[deprecated( - note = "For backwards compatibility, this method allows you to derive _sensible_ defaults for opcode support based on the np language. \n Backends should simply specify what they support." -)] -// This is set to match the previous functionality that we had -// Where we could deduce what opcodes were supported -// by knowing the np complete language -pub fn default_is_opcode_supported(language: Language) -> fn(&Opcode) -> bool { - // R1CS does not support any of the opcode except Arithmetic by default. - // The compiler will replace those that it can -- ie range, xor, and - fn r1cs_is_supported(opcode: &Opcode) -> bool { - matches!(opcode, Opcode::Arithmetic(_)) - } - - // PLONK supports most of the opcodes by default - // The ones which are not supported, the acvm compiler will - // attempt to transform into supported opcodes. If these are also not available - // then a compiler error will be emitted. - fn plonk_is_supported(_opcode: &Opcode) -> bool { - true - } - - match language { - Language::R1CS => r1cs_is_supported, - Language::PLONKCSat { .. 
} => plonk_is_supported, - } -} diff --git a/acvm-repo/acvm/tests/solver.rs b/acvm-repo/acvm/tests/solver.rs index d578555c5dc..b4011a994a5 100644 --- a/acvm-repo/acvm/tests/solver.rs +++ b/acvm-repo/acvm/tests/solver.rs @@ -111,18 +111,18 @@ fn inversion_brillig_oracle_equivalence() { let opcodes = vec![ Opcode::Brillig(brillig_data), - Opcode::Arithmetic(Expression { + Opcode::AssertZero(Expression { mul_terms: vec![], linear_combinations: vec![(fe_1, w_x), (fe_1, w_y), (-fe_1, w_z)], q_c: fe_0, }), // Opcode::Directive(Directive::Invert { x: w_z, result: w_z_inverse }), - Opcode::Arithmetic(Expression { + Opcode::AssertZero(Expression { mul_terms: vec![(fe_1, w_z, w_z_inverse)], linear_combinations: vec![], q_c: -fe_1, }), - Opcode::Arithmetic(Expression { + Opcode::AssertZero(Expression { mul_terms: vec![], linear_combinations: vec![(-fe_1, w_oracle), (fe_1, w_z_inverse)], q_c: fe_0, @@ -238,18 +238,18 @@ fn double_inversion_brillig_oracle() { let opcodes = vec![ Opcode::Brillig(brillig_data), - Opcode::Arithmetic(Expression { + Opcode::AssertZero(Expression { mul_terms: vec![], linear_combinations: vec![(fe_1, w_x), (fe_1, w_y), (-fe_1, w_z)], q_c: fe_0, }), // Opcode::Directive(Directive::Invert { x: w_z, result: w_z_inverse }), - Opcode::Arithmetic(Expression { + Opcode::AssertZero(Expression { mul_terms: vec![(fe_1, w_z, w_z_inverse)], linear_combinations: vec![], q_c: -fe_1, }), - Opcode::Arithmetic(Expression { + Opcode::AssertZero(Expression { mul_terms: vec![], linear_combinations: vec![(-fe_1, w_oracle), (fe_1, w_z_inverse)], q_c: fe_0, @@ -377,9 +377,9 @@ fn oracle_dependent_execution() { }; let opcodes = vec![ - Opcode::Arithmetic(equality_check), + Opcode::AssertZero(equality_check), Opcode::Brillig(brillig_data), - Opcode::Arithmetic(inverse_equality_check), + Opcode::AssertZero(inverse_equality_check), ]; let witness_assignments = @@ -516,7 +516,7 @@ fn unsatisfied_opcode_resolved() { values.insert(c, FieldElement::from(1_i128)); values.insert(d, FieldElement::from(2_i128)); - let opcodes = vec![Opcode::Arithmetic(opcode_a)]; + let opcodes = vec![Opcode::AssertZero(opcode_a)]; let mut acvm = ACVM::new(&StubbedBackend, &opcodes, values); let solver_status = acvm.solve(); assert_eq!( @@ -595,7 +595,7 @@ fn unsatisfied_opcode_resolved_brillig() { values.insert(w_y, FieldElement::from(1_i128)); values.insert(w_result, FieldElement::from(0_i128)); - let opcodes = vec![brillig_opcode, Opcode::Arithmetic(opcode_a)]; + let opcodes = vec![brillig_opcode, Opcode::AssertZero(opcode_a)]; let mut acvm = ACVM::new(&StubbedBackend, &opcodes, values); let solver_status = acvm.solve(); @@ -630,7 +630,7 @@ fn memory_operations() { predicate: None, }; - let expression = Opcode::Arithmetic(Expression { + let expression = Opcode::AssertZero(Expression { mul_terms: Vec::new(), linear_combinations: vec![ (FieldElement::one(), Witness(7)), diff --git a/acvm-repo/acvm/tests/stdlib.rs b/acvm-repo/acvm/tests/stdlib.rs deleted file mode 100644 index c96c55f9401..00000000000 --- a/acvm-repo/acvm/tests/stdlib.rs +++ /dev/null @@ -1,354 +0,0 @@ -#![cfg(feature = "testing")] -mod solver; -use crate::solver::StubbedBackend; -use acir::{ - circuit::{ - opcodes::{BlackBoxFuncCall, FunctionInput}, - Circuit, Opcode, - }, - native_types::{Expression, Witness}, - FieldElement, -}; -use acvm::{ - compiler::compile, - pwg::{ACVMStatus, ACVM}, - Language, -}; -use acvm_blackbox_solver::{blake2s, hash_to_field_128_security, keccak256, sha256}; -use paste::paste; -use proptest::prelude::*; -use 
std::collections::{BTreeMap, BTreeSet}; -use stdlib::blackbox_fallbacks::{UInt32, UInt64, UInt8}; - -test_uint!(test_uint8, UInt8, u8, 8); -test_uint!(test_uint32, UInt32, u32, 32); -test_uint!(test_uint64, UInt64, u64, 64); - -#[macro_export] -macro_rules! test_uint { - ( - $name:tt, - $uint:ident, - $u:ident, - $size:expr - ) => { - paste! { - test_uint_inner!( - [<$name _rol>], - [<$name _ror>], - [<$name _euclidean_division>], - [<$name _add>], - [<$name _sub>], - [<$name _left_shift>], - [<$name _right_shift>], - [<$name _less_than>], - $uint, - $u, - $size - ); - } - }; -} - -#[macro_export] -macro_rules! test_uint_inner { - ( - $rol:tt, - $ror:tt, - $euclidean_division:tt, - $add:tt, - $sub:tt, - $left_shift:tt, - $right_shift:tt, - $less_than:tt, - $uint: ident, - $u: ident, - $size: expr - ) => { - proptest! { - #[test] - fn $rol(x in 0..$u::MAX, y in 0..32_u32) { - let fe = FieldElement::from(x as u128); - let w = Witness(1); - let result = x.rotate_left(y); - let uint = $uint::new(w); - let (w, extra_opcodes, _) = uint.rol(y, 2); - let witness_assignments = BTreeMap::from([(Witness(1), fe)]).into(); - let mut acvm = ACVM::new(&StubbedBackend, &extra_opcodes, witness_assignments); - let solver_status = acvm.solve(); - - prop_assert_eq!(acvm.witness_map().get(&w.get_inner()).unwrap(), &FieldElement::from(result as u128)); - prop_assert_eq!(solver_status, ACVMStatus::Solved, "should be fully solved"); - } - - #[test] - fn $ror(x in 0..$u::MAX, y in 0..32_u32) { - let fe = FieldElement::from(x as u128); - let w = Witness(1); - let result = x.rotate_right(y); - let uint = $uint::new(w); - let (w, extra_opcodes, _) = uint.ror(y, 2); - let witness_assignments = BTreeMap::from([(Witness(1), fe)]).into(); - let mut acvm = ACVM::new(&StubbedBackend, &extra_opcodes, witness_assignments); - let solver_status = acvm.solve(); - - prop_assert_eq!(acvm.witness_map().get(&w.get_inner()).unwrap(), &FieldElement::from(result as u128)); - prop_assert_eq!(solver_status, ACVMStatus::Solved, "should be fully solved"); - } - - #[test] - fn $euclidean_division(x in 0..$u::MAX, y in 1 - ..$u::MAX) { - let lhs = FieldElement::from(x as u128); - let rhs = FieldElement::from(y as u128); - let w1 = Witness(1); - let w2 = Witness(2); - let q = x.div_euclid(y); - let r = x.rem_euclid(y); - let u32_1 = $uint::new(w1); - let u32_2 = $uint::new(w2); - let (q_w, r_w, extra_opcodes, _) = $uint::euclidean_division(&u32_1, &u32_2, 3); - let witness_assignments = BTreeMap::from([(Witness(1), lhs),(Witness(2), rhs)]).into(); - let mut acvm = ACVM::new(&StubbedBackend, &extra_opcodes, witness_assignments); - let solver_status = acvm.solve(); - - prop_assert_eq!(acvm.witness_map().get(&q_w.get_inner()).unwrap(), &FieldElement::from(q as u128)); - prop_assert_eq!(acvm.witness_map().get(&r_w.get_inner()).unwrap(), &FieldElement::from(r as u128)); - prop_assert_eq!(solver_status, ACVMStatus::Solved, "should be fully solved"); - } - - #[test] - fn $add(x in 0..$u::MAX, y in 0..$u::MAX, z in 0..$u::MAX) { - let lhs = FieldElement::from(x as u128); - let rhs = FieldElement::from(y as u128); - let rhs_z = FieldElement::from(z as u128); - let result = FieldElement::from(((x as u128).wrapping_add(y as u128) % (1_u128 << $size)).wrapping_add(z as u128) % (1_u128 << $size)); - let w1 = Witness(1); - let w2 = Witness(2); - let w3 = Witness(3); - let u32_1 = $uint::new(w1); - let u32_2 = $uint::new(w2); - let u32_3 = $uint::new(w3); - let mut opcodes = Vec::new(); - let (w, extra_opcodes, num_witness) = u32_1.add(&u32_2, 4); - 
opcodes.extend(extra_opcodes); - let (w2, extra_opcodes, _) = w.add(&u32_3, num_witness); - opcodes.extend(extra_opcodes); - let witness_assignments = BTreeMap::from([(Witness(1), lhs), (Witness(2), rhs), (Witness(3), rhs_z)]).into(); - let mut acvm = ACVM::new(&StubbedBackend, &opcodes, witness_assignments); - let solver_status = acvm.solve(); - - prop_assert_eq!(acvm.witness_map().get(&w2.get_inner()).unwrap(), &result); - prop_assert_eq!(solver_status, ACVMStatus::Solved, "should be fully solved"); - } - - #[test] - fn $sub(x in 0..$u::MAX, y in 0..$u::MAX, z in 0..$u::MAX) { - let lhs = FieldElement::from(x as u128); - let rhs = FieldElement::from(y as u128); - let rhs_z = FieldElement::from(z as u128); - let result = FieldElement::from(((x as u128).wrapping_sub(y as u128) % (1_u128 << $size)).wrapping_sub(z as u128) % (1_u128 << $size)); - let w1 = Witness(1); - let w2 = Witness(2); - let w3 = Witness(3); - let u32_1 = $uint::new(w1); - let u32_2 = $uint::new(w2); - let u32_3 = $uint::new(w3); - let mut opcodes = Vec::new(); - let (w, extra_opcodes, num_witness) = u32_1.sub(&u32_2, 4); - opcodes.extend(extra_opcodes); - let (w2, extra_opcodes, _) = w.sub(&u32_3, num_witness); - opcodes.extend(extra_opcodes); - let witness_assignments = BTreeMap::from([(Witness(1), lhs), (Witness(2), rhs), (Witness(3), rhs_z)]).into(); - let mut acvm = ACVM::new(&StubbedBackend, &opcodes, witness_assignments); - let solver_status = acvm.solve(); - - prop_assert_eq!(acvm.witness_map().get(&w2.get_inner()).unwrap(), &result); - prop_assert_eq!(solver_status, ACVMStatus::Solved, "should be fully solved"); - } - - #[test] - fn $left_shift(x in 0..$u::MAX, y in 0..32_u32) { - let lhs = FieldElement::from(x as u128); - let w1 = Witness(1); - let result = x.overflowing_shl(y).0; - let u32_1 = $uint::new(w1); - let (w, extra_opcodes, _) = u32_1.leftshift(y, 2); - let witness_assignments = BTreeMap::from([(Witness(1), lhs)]).into(); - let mut acvm = ACVM::new(&StubbedBackend, &extra_opcodes, witness_assignments); - let solver_status = acvm.solve(); - - prop_assert_eq!(acvm.witness_map().get(&w.get_inner()).unwrap(), &FieldElement::from(result as u128)); - prop_assert_eq!(solver_status, ACVMStatus::Solved, "should be fully solved"); - } - - #[test] - fn $right_shift(x in 0..$u::MAX, y in 0..32_u32) { - let lhs = FieldElement::from(x as u128); - let w1 = Witness(1); - let result = x.overflowing_shr(y).0; - let u32_1 = $uint::new(w1); - let (w, extra_opcodes, _) = u32_1.rightshift(y, 2); - let witness_assignments = BTreeMap::from([(Witness(1), lhs)]).into(); - let mut acvm = ACVM::new(&StubbedBackend, &extra_opcodes, witness_assignments); - let solver_status = acvm.solve(); - - prop_assert_eq!(acvm.witness_map().get(&w.get_inner()).unwrap(), &FieldElement::from(result as u128)); - prop_assert_eq!(solver_status, ACVMStatus::Solved, "should be fully solved"); - } - - #[test] - fn $less_than(x in 0..$u::MAX, y in 0..$u::MAX) { - let lhs = FieldElement::from(x as u128); - let rhs = FieldElement::from(y as u128); - let w1 = Witness(1); - let w2 = Witness(2); - let result = x < y; - let u32_1 = $uint::new(w1); - let u32_2 = $uint::new(w2); - let (w, extra_opcodes, _) = u32_1.less_than_comparison(&u32_2, 3); - let witness_assignments = BTreeMap::from([(Witness(1), lhs), (Witness(2), rhs)]).into(); - let mut acvm = ACVM::new(&StubbedBackend, &extra_opcodes, witness_assignments); - let solver_status = acvm.solve(); - - prop_assert_eq!(acvm.witness_map().get(&w.get_inner()).unwrap(), &FieldElement::from(result as u128)); - 
prop_assert_eq!(solver_status, ACVMStatus::Solved, "should be fully solved"); - } - } - }; -} - -test_hashes!(test_sha256, sha256, SHA256, does_not_support_sha256); -test_hashes!(test_blake2s, blake2s, Blake2s, does_not_support_blake2s); -test_hashes!(test_keccak, keccak256, Keccak256, does_not_support_keccak); - -fn does_not_support_sha256(opcode: &Opcode) -> bool { - !matches!(opcode, Opcode::BlackBoxFuncCall(BlackBoxFuncCall::SHA256 { .. })) -} -fn does_not_support_blake2s(opcode: &Opcode) -> bool { - !matches!(opcode, Opcode::BlackBoxFuncCall(BlackBoxFuncCall::Blake2s { .. })) -} -fn does_not_support_keccak(opcode: &Opcode) -> bool { - !matches!(opcode, Opcode::BlackBoxFuncCall(BlackBoxFuncCall::Keccak256 { .. })) -} - -#[macro_export] -macro_rules! test_hashes { - ( - $name:ident, - $hasher:ident, - $opcode:ident, - $opcode_support: ident - ) => { - proptest! { - #![proptest_config(ProptestConfig::with_cases(3))] - #[test] - fn $name(input_values in proptest::collection::vec(0..u8::MAX, 1..50)) { - let mut opcodes = Vec::new(); - let mut witness_assignments = BTreeMap::new(); - let mut input_witnesses: Vec = Vec::new(); - let mut correct_result_witnesses: Vec = Vec::new(); - let mut output_witnesses: Vec = Vec::new(); - - // prepare test data - let mut counter = 0; - let output = $hasher(&input_values).unwrap(); - for inp_v in input_values { - counter += 1; - let function_input = FunctionInput { witness: Witness(counter), num_bits: 8 }; - input_witnesses.push(function_input); - witness_assignments.insert(Witness(counter), FieldElement::from(inp_v as u128)); - } - - for o_v in output { - counter += 1; - correct_result_witnesses.push(Witness(counter)); - witness_assignments.insert(Witness(counter), FieldElement::from(o_v as u128)); - } - - for _ in 0..32 { - counter += 1; - output_witnesses.push(Witness(counter)); - } - let blackbox = Opcode::BlackBoxFuncCall(BlackBoxFuncCall::$opcode { inputs: input_witnesses, outputs: output_witnesses.clone() }); - opcodes.push(blackbox); - - // constrain the output to be the same as the hasher - for i in 0..correct_result_witnesses.len() { - let mut output_constraint = Expression::from(correct_result_witnesses[i]); - output_constraint.push_addition_term(-FieldElement::one(), output_witnesses[i]); - opcodes.push(Opcode::Arithmetic(output_constraint)); - } - - // compile circuit - let circuit = Circuit { - current_witness_index: witness_assignments.len() as u32 + 32, - opcodes, - private_parameters: BTreeSet::new(), // This is not correct but is unused in this test. - ..Circuit::default() - }; - let circuit = compile(circuit, Language::PLONKCSat{ width: 3 }, $opcode_support).unwrap().0; - - // solve witnesses - let mut acvm = ACVM::new(&StubbedBackend, &circuit.opcodes, witness_assignments.into()); - let solver_status = acvm.solve(); - - prop_assert_eq!(solver_status, ACVMStatus::Solved, "should be fully solved"); - } - } - }; -} - -fn does_not_support_hash_to_field(opcode: &Opcode) -> bool { - !matches!(opcode, Opcode::BlackBoxFuncCall(BlackBoxFuncCall::HashToField128Security { .. })) -} - -proptest! 
{ - #![proptest_config(ProptestConfig::with_cases(3))] - #[test] - fn test_hash_to_field(input_values in proptest::collection::vec(0..u8::MAX, 1..50)) { - let mut opcodes = Vec::new(); - let mut witness_assignments = BTreeMap::new(); - let mut input_witnesses: Vec = Vec::new(); - - // prepare test data - let mut counter = 0; - let output = hash_to_field_128_security(&input_values).unwrap(); - for inp_v in input_values { - counter += 1; - let function_input = FunctionInput { witness: Witness(counter), num_bits: 8 }; - input_witnesses.push(function_input); - witness_assignments.insert(Witness(counter), FieldElement::from(inp_v as u128)); - } - - counter += 1; - let correct_result_witnesses: Witness = Witness(counter); - witness_assignments.insert(Witness(counter), output); - - counter += 1; - let output_witness: Witness = Witness(counter); - - let blackbox = Opcode::BlackBoxFuncCall(BlackBoxFuncCall::HashToField128Security { inputs: input_witnesses, output: output_witness }); - opcodes.push(blackbox); - - // constrain the output to be the same as the hasher - let mut output_constraint = Expression::from(correct_result_witnesses); - output_constraint.push_addition_term(-FieldElement::one(), output_witness); - opcodes.push(Opcode::Arithmetic(output_constraint)); - - // compile circuit - let circuit = Circuit { - current_witness_index: witness_assignments.len() as u32 + 1, - opcodes, - private_parameters: BTreeSet::new(), // This is not correct but is unused in this test. - ..Circuit::default() - }; - let circuit = compile(circuit, Language::PLONKCSat{ width: 3 }, does_not_support_hash_to_field).unwrap().0; - - // solve witnesses - let mut acvm = ACVM::new(&StubbedBackend, &circuit.opcodes, witness_assignments.into()); - let solver_status = acvm.solve(); - - prop_assert_eq!(solver_status, ACVMStatus::Solved, "should be fully solved"); - } -} diff --git a/acvm-repo/acvm_js/Cargo.toml b/acvm-repo/acvm_js/Cargo.toml index e287ac9ed17..e8d46b9717e 100644 --- a/acvm-repo/acvm_js/Cargo.toml +++ b/acvm-repo/acvm_js/Cargo.toml @@ -2,7 +2,7 @@ name = "acvm_js" description = "Typescript wrapper around the ACVM allowing execution of ACIR code" # x-release-please-start-version -version = "0.33.0" +version = "0.38.0" # x-release-please-end authors.workspace = true edition.workspace = true @@ -19,17 +19,17 @@ crate-type = ["cdylib"] cfg-if = "1.0.0" [target.'cfg(target_arch = "wasm32")'.dependencies] -acvm = { path = "../acvm", default-features = false } -barretenberg_blackbox_solver = { path = "../barretenberg_blackbox_solver", default-features = false } +acvm.workspace = true +bn254_blackbox_solver = { workspace = true, optional = true } wasm-bindgen.workspace = true wasm-bindgen-futures.workspace = true console_error_panic_hook.workspace = true gloo-utils.workspace = true -js-sys.workspace = true +js-sys.workspace = true +tracing-subscriber.workspace = true +tracing-web.workspace = true serde = { version = "1.0.136", features = ["derive"] } -log = "0.4.17" -wasm-logger = "0.2.0" const-str = "0.5.5" [build-dependencies] @@ -41,5 +41,5 @@ wasm-bindgen-test.workspace = true [features] default = ["bn254"] -bn254 = ["acvm/bn254", "barretenberg_blackbox_solver/bn254"] -bls12_381 = ["acvm/bls12_381", "barretenberg_blackbox_solver/bls12_381"] +bn254 = ["acvm/bn254", "dep:bn254_blackbox_solver"] +bls12_381 = ["acvm/bls12_381"] diff --git a/acvm-repo/acvm_js/build.sh b/acvm-repo/acvm_js/build.sh index 37f2fd0a5a9..24af149bcea 100755 --- a/acvm-repo/acvm_js/build.sh +++ b/acvm-repo/acvm_js/build.sh @@ -34,7 +34,7 @@ 
export CARGO_TARGET_DIR=$self_path/target rm -rf $self_path/outputs >/dev/null 2>&1 rm -rf $self_path/result >/dev/null 2>&1 -if [ -v out ]; then +if [ -n "$out" ]; then echo "Will install package to $out (defined outside installPhase.sh script)" else export out="$self_path/outputs/out" diff --git a/acvm-repo/acvm_js/package.json b/acvm-repo/acvm_js/package.json index 19090022e25..2d878e961da 100644 --- a/acvm-repo/acvm_js/package.json +++ b/acvm-repo/acvm_js/package.json @@ -1,16 +1,21 @@ { "name": "@noir-lang/acvm_js", - "version": "0.33.0", - "repository": { - "type": "git", - "url": "https://github.com/noir-lang/acvm.git" - }, + "version": "0.38.0", "publishConfig": { "access": "public" }, - "collaborators": [ + "contributors": [ "The Noir Team " ], + "homepage": "https://noir-lang.org/", + "repository": { + "url": "https://github.com/noir-lang/noir.git", + "directory": "acvm_repo/acvm_js", + "type": "git" + }, + "bugs": { + "url": "https://github.com/noir-lang/noir/issues" + }, "license": "MIT", "main": "./nodejs/acvm_js.js", "types": "./web/acvm_js.d.ts", diff --git a/acvm-repo/acvm_js/src/black_box_solvers.rs b/acvm-repo/acvm_js/src/black_box_solvers.rs index cc3edc3de04..fc0e3b28ebf 100644 --- a/acvm-repo/acvm_js/src/black_box_solvers.rs +++ b/acvm-repo/acvm_js/src/black_box_solvers.rs @@ -40,21 +40,6 @@ pub fn keccak256(inputs: &[u8]) -> Vec { acvm::blackbox_solver::keccak256(inputs).unwrap().into() } -/// Calculates the Blake2s256 hash of the input bytes and represents these as a single field element. -// #[wasm_bindgen] -// pub fn hash_to_field_128_security(inputs: Vec) -> JsString { -// let input_bytes: Vec = inputs -// .into_iter() -// .flat_map(|field_string| { -// let field_element = js_value_to_field_element(field_string.into()).unwrap(); -// witness_assignment.fetch_nearest_bytes(FieldElement::max_num_bits()); -// }) -// .collect(); -// field_element_to_js_string( -// &acvm::blackbox_solver::hash_to_field_128_security(&input_bytes).unwrap(), -// ) -// } - /// Verifies a ECDSA signature over the secp256k1 curve. #[wasm_bindgen] pub fn ecdsa_secp256k1_verify( diff --git a/acvm-repo/acvm_js/src/execute.rs b/acvm-repo/acvm_js/src/execute.rs index 81e2a11ed5a..3f691e1abf2 100644 --- a/acvm-repo/acvm_js/src/execute.rs +++ b/acvm-repo/acvm_js/src/execute.rs @@ -2,8 +2,7 @@ use acvm::{ acir::circuit::Circuit, pwg::{ACVMStatus, ErrorLocation, OpcodeResolutionError, ACVM}, }; -#[allow(deprecated)] -use barretenberg_blackbox_solver::BarretenbergSolver; +use bn254_blackbox_solver::Bn254BlackBoxSolver; use js_sys::Error; use wasm_bindgen::prelude::wasm_bindgen; @@ -14,13 +13,11 @@ use crate::{ }; #[wasm_bindgen] -#[allow(deprecated)] -pub struct WasmBlackBoxFunctionSolver(BarretenbergSolver); +pub struct WasmBlackBoxFunctionSolver(Bn254BlackBoxSolver); impl WasmBlackBoxFunctionSolver { async fn initialize() -> WasmBlackBoxFunctionSolver { - #[allow(deprecated)] - WasmBlackBoxFunctionSolver(BarretenbergSolver::initialize().await) + WasmBlackBoxFunctionSolver(Bn254BlackBoxSolver::initialize().await) } } @@ -64,8 +61,8 @@ pub async fn execute_circuit_with_black_box_solver( foreign_call_handler: ForeignCallHandler, ) -> Result { console_error_panic_hook::set_once(); - let circuit: Circuit = - Circuit::deserialize_circuit(&circuit).expect("Failed to deserialize circuit"); + let circuit: Circuit = Circuit::deserialize_circuit(&circuit) + .map_err(|_| JsExecutionError::new("Failed to deserialize circuit. 
This is likely due to differing serialization formats between ACVM_JS and your compiler".to_string(), None))?; let mut acvm = ACVM::new(&solver.0, &circuit.opcodes, initial_witness.into()); diff --git a/acvm-repo/acvm_js/src/lib.rs b/acvm-repo/acvm_js/src/lib.rs index ba2a37bf984..88afd1767c9 100644 --- a/acvm-repo/acvm_js/src/lib.rs +++ b/acvm-repo/acvm_js/src/lib.rs @@ -24,7 +24,7 @@ cfg_if::cfg_if! { pub use compression::{compress_witness, decompress_witness}; pub use execute::{execute_circuit, execute_circuit_with_black_box_solver, create_black_box_solver}; pub use js_witness_map::JsWitnessMap; - pub use logging::{init_log_level, LogLevel}; + pub use logging::init_log_level; pub use public_witness::{get_public_parameters_witness, get_public_witness, get_return_witness}; pub use js_execution_error::JsExecutionError; } diff --git a/acvm-repo/acvm_js/src/logging.rs b/acvm-repo/acvm_js/src/logging.rs index d939c5f8367..f5d71fae067 100644 --- a/acvm-repo/acvm_js/src/logging.rs +++ b/acvm-repo/acvm_js/src/logging.rs @@ -1,31 +1,26 @@ -use js_sys::JsString; -use log::Level; -use std::str::FromStr; +use tracing_subscriber::prelude::*; +use tracing_subscriber::EnvFilter; +use tracing_web::MakeWebConsoleWriter; use wasm_bindgen::prelude::*; -#[wasm_bindgen(typescript_custom_section)] -const LOG_LEVEL: &'static str = r#" -export type LogLevel = "OFF" | "ERROR" | "WARN" | "INFO" | "DEBUG" | "TRACE"; -"#; - -#[wasm_bindgen] -extern "C" { - #[wasm_bindgen(extends = JsString, typescript_type = "LogLevel")] - pub type LogLevel; -} - /// Sets the package's logging level. /// /// @param {LogLevel} level - The maximum level of logging to be emitted. #[wasm_bindgen(js_name = initLogLevel, skip_jsdoc)] -pub fn init_log_level(level: LogLevel) { +pub fn init_log_level(filter: String) { // Set the static variable from Rust use std::sync::Once; - let log_level = level.as_string().unwrap(); - let log_level = Level::from_str(&log_level).unwrap_or(Level::Error); + let filter: EnvFilter = + filter.parse().expect("Could not parse log filter while initializing logger"); + static SET_HOOK: Once = Once::new(); SET_HOOK.call_once(|| { - wasm_logger::init(wasm_logger::Config::new(log_level)); + let fmt_layer = tracing_subscriber::fmt::layer() + .with_ansi(false) + .without_time() + .with_writer(MakeWebConsoleWriter::new()); + + tracing_subscriber::registry().with(fmt_layer.with_filter(filter)).init(); }); } diff --git a/acvm-repo/acvm_js/test/browser/black_box_solvers.test.ts b/acvm-repo/acvm_js/test/browser/black_box_solvers.test.ts index 9aec1403f6c..3c54fe8e38f 100644 --- a/acvm-repo/acvm_js/test/browser/black_box_solvers.test.ts +++ b/acvm-repo/acvm_js/test/browser/black_box_solvers.test.ts @@ -64,18 +64,6 @@ it('successfully calculates the keccak256 hash', async () => { } }); -// it("successfully calculates the hash_to_field_128_security field", async () => { -// const { hash_to_field_128_security_test_cases } = await import( -// "../shared/black_box_solvers" -// ); - -// for (const testCase of hash_to_field_128_security_test_cases) { -// const [preimage, expectedResult] = testCase; -// const hashField = hash_to_field_128_security(preimage); -// expect(hashField).to.be.eq(expectedResult); -// } -// }); - it('successfully verifies secp256k1 ECDSA signatures', async () => { const { ecdsa_secp256k1_test_cases } = await import('../shared/black_box_solvers'); diff --git a/acvm-repo/acvm_js/test/browser/execute_circuit.test.ts b/acvm-repo/acvm_js/test/browser/execute_circuit.test.ts index 925c1a07eb8..259c51ed1c6 100644 
--- a/acvm-repo/acvm_js/test/browser/execute_circuit.test.ts +++ b/acvm-repo/acvm_js/test/browser/execute_circuit.test.ts @@ -53,7 +53,7 @@ it('successfully processes simple brillig foreign call opcodes', async () => { expect(observedInputs).to.be.deep.eq(oracleCallInputs); // If incorrect value is written into circuit then execution should halt due to unsatisfied constraint in - // arithmetic opcode. Nevertheless, check that returned value was inserted correctly. + // assert-zero opcode. Nevertheless, check that returned value was inserted correctly. expect(solved_witness).to.be.deep.eq(expectedWitnessMap); }); @@ -79,7 +79,7 @@ it('successfully processes complex brillig foreign call opcodes', async () => { expect(observedInputs).to.be.deep.eq(oracleCallInputs); // If incorrect value is written into circuit then execution should halt due to unsatisfied constraint in - // arithmetic opcode. Nevertheless, check that returned value was inserted correctly. + // assert-zero opcode. Nevertheless, check that returned value was inserted correctly. expect(solved_witness).to.be.deep.eq(expectedWitnessMap); }); diff --git a/acvm-repo/acvm_js/test/node/execute_circuit.test.ts b/acvm-repo/acvm_js/test/node/execute_circuit.test.ts index b28b9e72591..adee3c15312 100644 --- a/acvm-repo/acvm_js/test/node/execute_circuit.test.ts +++ b/acvm-repo/acvm_js/test/node/execute_circuit.test.ts @@ -46,7 +46,7 @@ it('successfully processes simple brillig foreign call opcodes', async () => { expect(observedInputs).to.be.deep.eq(oracleCallInputs); // If incorrect value is written into circuit then execution should halt due to unsatisfied constraint in - // arithmetic opcode. Nevertheless, check that returned value was inserted correctly. + // assert-zero opcode. Nevertheless, check that returned value was inserted correctly. expect(solved_witness).to.be.deep.eq(expectedWitnessMap); }); @@ -72,7 +72,7 @@ it('successfully processes complex brillig foreign call opcodes', async () => { expect(observedInputs).to.be.deep.eq(oracleCallInputs); // If incorrect value is written into circuit then execution should halt due to unsatisfied constraint in - // arithmetic opcode. Nevertheless, check that returned value was inserted correctly. + // assert-zero opcode. Nevertheless, check that returned value was inserted correctly. expect(solved_witness).to.be.deep.eq(expectedWitnessMap); }); diff --git a/acvm-repo/acvm_js/test/shared/black_box_solvers.ts b/acvm-repo/acvm_js/test/shared/black_box_solvers.ts index a5b6d1ac996..0ab3fc12b72 100644 --- a/acvm-repo/acvm_js/test/shared/black_box_solvers.ts +++ b/acvm-repo/acvm_js/test/shared/black_box_solvers.ts @@ -66,13 +66,6 @@ export const keccak256_test_cases: [Uint8Array, Uint8Array][] = [ ], ]; -// export const hash_to_field_128_security_test_cases: [string[], string][] = [ -// [ -// ["0x0000000000000000000000000000000000000000000000000000000000000001"], -// "0x25cebc29ded2fa515a937e2b5f674e3026c012e5b57f8a48d7dce6b7d274f9d9", -// ], -// ]; - export const ecdsa_secp256k1_test_cases: [[Uint8Array, Uint8Array, Uint8Array, Uint8Array], boolean][] = [ [ [ diff --git a/acvm-repo/acvm_js/test/shared/pedersen.ts b/acvm-repo/acvm_js/test/shared/pedersen.ts index 668ee2b510b..e35893fc355 100644 --- a/acvm-repo/acvm_js/test/shared/pedersen.ts +++ b/acvm-repo/acvm_js/test/shared/pedersen.ts @@ -1,7 +1,7 @@ // See `pedersen_circuit` integration test in `acir/tests/test_program_serialization.rs`. 
export const bytecode = Uint8Array.from([ - 31, 139, 8, 0, 0, 0, 0, 0, 0, 255, 93, 138, 9, 10, 0, 64, 8, 2, 103, 15, 250, 255, 139, 163, 162, 130, 72, 16, 149, - 241, 3, 135, 84, 164, 172, 173, 213, 175, 251, 45, 198, 96, 243, 211, 50, 152, 67, 220, 211, 92, 0, 0, 0, + 31, 139, 8, 0, 0, 0, 0, 0, 0, 255, 93, 138, 9, 10, 0, 64, 8, 2, 103, 15, 232, 255, 31, 142, 138, 10, 34, 65, 84, 198, + 15, 28, 82, 145, 178, 182, 86, 191, 238, 183, 24, 131, 205, 79, 203, 0, 166, 242, 158, 93, 92, 0, 0, 0, ]); export const initialWitnessMap = new Map([[1, '0x0000000000000000000000000000000000000000000000000000000000000001']]); diff --git a/acvm-repo/acvm_js/test/shared/schnorr_verify.ts b/acvm-repo/acvm_js/test/shared/schnorr_verify.ts index f88a70ba4a1..5716cbd30f8 100644 --- a/acvm-repo/acvm_js/test/shared/schnorr_verify.ts +++ b/acvm-repo/acvm_js/test/shared/schnorr_verify.ts @@ -2,7 +2,7 @@ export const bytecode = Uint8Array.from([ 31, 139, 8, 0, 0, 0, 0, 0, 0, 255, 77, 210, 87, 78, 2, 1, 20, 134, 209, 177, 247, 222, 123, 71, 68, 68, 68, 68, 68, 68, 68, 68, 68, 221, 133, 251, 95, 130, 145, 27, 206, 36, 78, 50, 57, 16, 94, 200, 253, 191, 159, 36, 73, 134, 146, - 193, 19, 142, 241, 183, 255, 14, 179, 233, 247, 145, 254, 59, 217, 127, 71, 57, 198, 113, 78, 48, 125, 167, 56, 205, + 193, 19, 142, 243, 183, 255, 14, 179, 233, 247, 145, 254, 59, 217, 127, 71, 57, 198, 113, 78, 48, 125, 167, 56, 205, 25, 206, 114, 142, 243, 92, 224, 34, 151, 184, 204, 21, 174, 114, 141, 235, 220, 224, 38, 183, 184, 205, 29, 238, 114, 143, 251, 60, 224, 33, 143, 120, 204, 19, 158, 242, 140, 25, 158, 51, 203, 11, 230, 120, 201, 60, 175, 88, 224, 53, 139, 188, 97, 137, 183, 44, 243, 142, 21, 222, 179, 202, 7, 214, 248, 200, 58, 159, 216, 224, 51, 155, 124, 97, 235, @@ -11,7 +11,7 @@ export const bytecode = Uint8Array.from([ 162, 149, 232, 36, 26, 137, 62, 162, 141, 232, 34, 154, 136, 30, 162, 133, 232, 32, 26, 136, 253, 99, 251, 195, 100, 176, 121, 236, 29, 91, 159, 218, 56, 99, 219, 172, 77, 115, 182, 204, 219, 176, 96, 187, 162, 205, 74, 182, 42, 219, 168, 98, 155, 170, 77, 106, 182, 168, 219, 160, 225, 246, 77, 55, 111, 185, 113, 219, 109, 59, 110, 218, 117, 203, - 158, 27, 166, 55, 75, 239, 150, 184, 101, 250, 252, 1, 19, 89, 159, 101, 220, 3, 0, 0, + 158, 27, 166, 55, 75, 239, 150, 184, 101, 250, 252, 1, 55, 204, 92, 74, 220, 3, 0, 0, ]); export const initialWitnessMap = new Map([ diff --git a/acvm-repo/barretenberg_blackbox_solver/Cargo.toml b/acvm-repo/barretenberg_blackbox_solver/Cargo.toml deleted file mode 100644 index 6917e7fbf81..00000000000 --- a/acvm-repo/barretenberg_blackbox_solver/Cargo.toml +++ /dev/null @@ -1,57 +0,0 @@ -[package] -name = "barretenberg_blackbox_solver" -description = "A wrapper around a barretenberg WASM binary to execute black box functions for which there is no rust implementation" -# x-release-please-start-version -version = "0.33.0" -# x-release-please-end -authors.workspace = true -edition.workspace = true -license.workspace = true -rust-version.workspace = true -repository.workspace = true - -# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html - -[dependencies] -acir.workspace = true -acvm_blackbox_solver.workspace = true -thiserror.workspace = true - -rust-embed = { version = "6.6.0", features = [ - "debug-embed", - "interpolate-folder-path", - "include-exclude", -] } - -# BN254 fixed base scalar multiplication solver -grumpkin = { git = "https://github.com/noir-lang/grumpkin", rev = "56d99799381f79e42148aaef0de2b0cf9a4b9a5d", features = 
["std"] } -ark-ec = { version = "^0.4.0", default-features = false } -ark-ff = { version = "^0.4.0", default-features = false } -num-bigint.workspace = true - -[target.'cfg(target_arch = "wasm32")'.dependencies] -wasmer = { version = "4.2.3", default-features = false, features = [ - "js-default", -] } - -getrandom = { workspace = true, features = ["js"] } -wasm-bindgen-futures.workspace = true -js-sys.workspace = true - -[target.'cfg(not(target_arch = "wasm32"))'.dependencies] -getrandom.workspace = true -wasmer = "4.2.3" - -[build-dependencies] -pkg-config = "0.3" -tar = "~0.4.15" -flate2 = "~1.0.1" -reqwest = { version = "0.11.20", default-features = false, features = [ - "rustls-tls", - "blocking", -] } - -[features] -default = ["bn254"] -bn254 = ["acir/bn254"] -bls12_381 = ["acir/bls12_381"] diff --git a/acvm-repo/barretenberg_blackbox_solver/src/lib.rs b/acvm-repo/barretenberg_blackbox_solver/src/lib.rs deleted file mode 100644 index 5d2ab834536..00000000000 --- a/acvm-repo/barretenberg_blackbox_solver/src/lib.rs +++ /dev/null @@ -1,95 +0,0 @@ -#![warn(unreachable_pub)] -#![warn(clippy::semicolon_if_nothing_returned)] -#![cfg_attr(not(test), warn(unused_crate_dependencies, unused_extern_crates))] - -use acir::{BlackBoxFunc, FieldElement}; -use acvm_blackbox_solver::{BlackBoxFunctionSolver, BlackBoxResolutionError}; - -mod fixed_base_scalar_mul; -mod wasm; - -pub use fixed_base_scalar_mul::fixed_base_scalar_mul; -use wasm::Barretenberg; - -use self::wasm::{Pedersen, SchnorrSig}; - -#[deprecated = "The `BarretenbergSolver` is a temporary solution and will be removed in future."] -pub struct BarretenbergSolver { - blackbox_vendor: Barretenberg, -} - -#[allow(deprecated)] -impl BarretenbergSolver { - #[cfg(target_arch = "wasm32")] - pub async fn initialize() -> BarretenbergSolver { - let blackbox_vendor = Barretenberg::initialize().await; - BarretenbergSolver { blackbox_vendor } - } - - #[cfg(not(target_arch = "wasm32"))] - pub fn new() -> BarretenbergSolver { - let blackbox_vendor = Barretenberg::new(); - BarretenbergSolver { blackbox_vendor } - } -} - -#[cfg(not(target_arch = "wasm32"))] -#[allow(deprecated)] -impl Default for BarretenbergSolver { - fn default() -> Self { - Self::new() - } -} - -#[allow(deprecated)] -impl BlackBoxFunctionSolver for BarretenbergSolver { - fn schnorr_verify( - &self, - public_key_x: &FieldElement, - public_key_y: &FieldElement, - signature: &[u8], - message: &[u8], - ) -> Result { - let pub_key_bytes: Vec = - public_key_x.to_be_bytes().iter().copied().chain(public_key_y.to_be_bytes()).collect(); - - let pub_key: [u8; 64] = pub_key_bytes.try_into().unwrap(); - let sig_s: [u8; 32] = signature[0..32].try_into().unwrap(); - let sig_e: [u8; 32] = signature[32..64].try_into().unwrap(); - - #[allow(deprecated)] - self.blackbox_vendor.verify_signature(pub_key, sig_s, sig_e, message).map_err(|err| { - BlackBoxResolutionError::Failed(BlackBoxFunc::SchnorrVerify, err.to_string()) - }) - } - - fn pedersen_commitment( - &self, - inputs: &[FieldElement], - domain_separator: u32, - ) -> Result<(FieldElement, FieldElement), BlackBoxResolutionError> { - #[allow(deprecated)] - self.blackbox_vendor.encrypt(inputs.to_vec(), domain_separator).map_err(|err| { - BlackBoxResolutionError::Failed(BlackBoxFunc::PedersenCommitment, err.to_string()) - }) - } - - fn pedersen_hash( - &self, - inputs: &[FieldElement], - domain_separator: u32, - ) -> Result { - #[allow(deprecated)] - self.blackbox_vendor.hash(inputs.to_vec(), domain_separator).map_err(|err| { - 
BlackBoxResolutionError::Failed(BlackBoxFunc::PedersenCommitment, err.to_string()) - }) - } - - fn fixed_base_scalar_mul( - &self, - low: &FieldElement, - high: &FieldElement, - ) -> Result<(FieldElement, FieldElement), BlackBoxResolutionError> { - fixed_base_scalar_mul(low, high) - } -} diff --git a/acvm-repo/blackbox_solver/Cargo.toml b/acvm-repo/blackbox_solver/Cargo.toml index f8b100890ca..749ef8f289a 100644 --- a/acvm-repo/blackbox_solver/Cargo.toml +++ b/acvm-repo/blackbox_solver/Cargo.toml @@ -2,7 +2,7 @@ name = "acvm_blackbox_solver" description = "A solver for the blackbox functions found in ACIR and Brillig" # x-release-please-start-version -version = "0.33.0" +version = "0.38.0" # x-release-please-end authors.workspace = true edition.workspace = true @@ -17,8 +17,10 @@ acir.workspace = true thiserror.workspace = true blake2 = "0.10.6" +blake3 = "1.5.0" sha2 = "0.10.6" sha3 = "0.10.6" +keccak = "0.1.4" k256 = { version = "0.11.0", features = [ "ecdsa", "ecdsa-core", diff --git a/acvm-repo/blackbox_solver/src/lib.rs b/acvm-repo/blackbox_solver/src/lib.rs index 13d0f562415..9518854de8e 100644 --- a/acvm-repo/blackbox_solver/src/lib.rs +++ b/acvm-repo/blackbox_solver/src/lib.rs @@ -16,8 +16,6 @@ use thiserror::Error; #[derive(Clone, PartialEq, Eq, Debug, Error)] pub enum BlackBoxResolutionError { - #[error("unsupported blackbox function: {0}")] - Unsupported(BlackBoxFunc), #[error("failed to solve blackbox function: {0}, reason: {1}")] Failed(BlackBoxFunc, String), } @@ -61,14 +59,22 @@ pub fn blake2s(inputs: &[u8]) -> Result<[u8; 32], BlackBoxResolutionError> { .map_err(|err| BlackBoxResolutionError::Failed(BlackBoxFunc::Blake2s, err)) } +pub fn blake3(inputs: &[u8]) -> Result<[u8; 32], BlackBoxResolutionError> { + Ok(blake3::hash(inputs).into()) +} + pub fn keccak256(inputs: &[u8]) -> Result<[u8; 32], BlackBoxResolutionError> { generic_hash_256::(inputs) .map_err(|err| BlackBoxResolutionError::Failed(BlackBoxFunc::Keccak256, err)) } -pub fn hash_to_field_128_security(inputs: &[u8]) -> Result { - generic_hash_to_field::(inputs) - .map_err(|err| BlackBoxResolutionError::Failed(BlackBoxFunc::HashToField128Security, err)) +const KECCAK_LANES: usize = 25; + +pub fn keccakf1600( + mut state: [u64; KECCAK_LANES], +) -> Result<[u64; KECCAK_LANES], BlackBoxResolutionError> { + keccak::f1600(&mut state); + Ok(state) } pub fn ecdsa_secp256k1_verify( @@ -97,14 +103,6 @@ fn generic_hash_256(message: &[u8]) -> Result<[u8; 32], String> { Ok(output_bytes) } -/// Does a generic hash of the entire inputs converting the resulting hash into a single output field. 
-fn generic_hash_to_field(message: &[u8]) -> Result { - let output_bytes: [u8; 32] = - D::digest(message).as_slice().try_into().map_err(|_| "digest should be 256 bits")?; - - Ok(FieldElement::from_be_bytes_reduce(&output_bytes)) -} - fn verify_secp256k1_ecdsa_signature( hashed_msg: &[u8], public_key_x_bytes: &[u8; 32], @@ -240,6 +238,79 @@ fn verify_secp256r1_ecdsa_signature( } } +#[cfg(test)] +mod keccakf1600_tests { + use crate::keccakf1600; + + #[test] + fn sanity_check() { + // Test vectors are copied from XKCP (eXtended Keccak Code Package) + // https://github.com/XKCP/XKCP/blob/master/tests/TestVectors/KeccakF-1600-IntermediateValues.txt + let zero_state = [0u64; 25]; + + let expected_state_first = [ + 0xF1258F7940E1DDE7, + 0x84D5CCF933C0478A, + 0xD598261EA65AA9EE, + 0xBD1547306F80494D, + 0x8B284E056253D057, + 0xFF97A42D7F8E6FD4, + 0x90FEE5A0A44647C4, + 0x8C5BDA0CD6192E76, + 0xAD30A6F71B19059C, + 0x30935AB7D08FFC64, + 0xEB5AA93F2317D635, + 0xA9A6E6260D712103, + 0x81A57C16DBCF555F, + 0x43B831CD0347C826, + 0x01F22F1A11A5569F, + 0x05E5635A21D9AE61, + 0x64BEFEF28CC970F2, + 0x613670957BC46611, + 0xB87C5A554FD00ECB, + 0x8C3EE88A1CCF32C8, + 0x940C7922AE3A2614, + 0x1841F924A2C509E4, + 0x16F53526E70465C2, + 0x75F644E97F30A13B, + 0xEAF1FF7B5CECA249, + ]; + let expected_state_second = [ + 0x2D5C954DF96ECB3C, + 0x6A332CD07057B56D, + 0x093D8D1270D76B6C, + 0x8A20D9B25569D094, + 0x4F9C4F99E5E7F156, + 0xF957B9A2DA65FB38, + 0x85773DAE1275AF0D, + 0xFAF4F247C3D810F7, + 0x1F1B9EE6F79A8759, + 0xE4FECC0FEE98B425, + 0x68CE61B6B9CE68A1, + 0xDEEA66C4BA8F974F, + 0x33C43D836EAFB1F5, + 0xE00654042719DBD9, + 0x7CF8A9F009831265, + 0xFD5449A6BF174743, + 0x97DDAD33D8994B40, + 0x48EAD5FC5D0BE774, + 0xE3B8C8EE55B7B03C, + 0x91A0226E649E42E9, + 0x900E3129E7BADD7B, + 0x202A9EC5FAA3CCE8, + 0x5B3402464E1C3DB6, + 0x609F4E62A44C1059, + 0x20D06CD26A8FBF5C, + ]; + + let state_first = keccakf1600(zero_state).unwrap(); + let state_second = keccakf1600(state_first).unwrap(); + + assert_eq!(state_first, expected_state_first); + assert_eq!(state_second, expected_state_second); + } +} + #[cfg(test)] mod secp256k1_tests { use super::verify_secp256k1_ecdsa_signature; diff --git a/acvm-repo/barretenberg_blackbox_solver/CHANGELOG.md b/acvm-repo/bn254_blackbox_solver/CHANGELOG.md similarity index 100% rename from acvm-repo/barretenberg_blackbox_solver/CHANGELOG.md rename to acvm-repo/bn254_blackbox_solver/CHANGELOG.md diff --git a/acvm-repo/bn254_blackbox_solver/Cargo.toml b/acvm-repo/bn254_blackbox_solver/Cargo.toml new file mode 100644 index 00000000000..b98bb370f74 --- /dev/null +++ b/acvm-repo/bn254_blackbox_solver/Cargo.toml @@ -0,0 +1,56 @@ +[package] +name = "bn254_blackbox_solver" +description = "Solvers for black box functions which are specific for the bn254 curve" +# x-release-please-start-version +version = "0.38.0" +# x-release-please-end +authors.workspace = true +edition.workspace = true +license.workspace = true +rust-version.workspace = true +repository.workspace = true + +# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html + +[dependencies] +acir.workspace = true +acvm_blackbox_solver.workspace = true +thiserror.workspace = true + +rust-embed = { version = "6.6.0", features = [ + "debug-embed", + "interpolate-folder-path", + "include-exclude", +] } + +# BN254 fixed base scalar multiplication solver +grumpkin = { git = "https://github.com/noir-lang/grumpkin", rev = "56d99799381f79e42148aaef0de2b0cf9a4b9a5d", features = ["std"] } +ark-ec = { version = "^0.4.0", 
default-features = false } +ark-ff = { version = "^0.4.0", default-features = false } +num-bigint.workspace = true + +[target.'cfg(target_arch = "wasm32")'.dependencies] +wasmer = { version = "4.2.3", default-features = false, features = [ + "js-default", +] } + +getrandom = { workspace = true, features = ["js"] } +wasm-bindgen-futures.workspace = true +js-sys.workspace = true + +[target.'cfg(not(target_arch = "wasm32"))'.dependencies] +getrandom.workspace = true +wasmer = "4.2.3" + +[build-dependencies] +pkg-config = "0.3" +tar = "~0.4.15" +flate2 = "~1.0.1" +reqwest = { version = "0.11.20", default-features = false, features = [ + "rustls-tls", + "blocking", +] } + +[features] +default = ["bn254"] +bn254 = ["acir/bn254"] diff --git a/acvm-repo/barretenberg_blackbox_solver/build.rs b/acvm-repo/bn254_blackbox_solver/build.rs similarity index 100% rename from acvm-repo/barretenberg_blackbox_solver/build.rs rename to acvm-repo/bn254_blackbox_solver/build.rs diff --git a/acvm-repo/barretenberg_blackbox_solver/src/acvm_backend.wasm b/acvm-repo/bn254_blackbox_solver/src/acvm_backend.wasm similarity index 100% rename from acvm-repo/barretenberg_blackbox_solver/src/acvm_backend.wasm rename to acvm-repo/bn254_blackbox_solver/src/acvm_backend.wasm diff --git a/acvm-repo/barretenberg_blackbox_solver/src/fixed_base_scalar_mul.rs b/acvm-repo/bn254_blackbox_solver/src/fixed_base_scalar_mul.rs similarity index 100% rename from acvm-repo/barretenberg_blackbox_solver/src/fixed_base_scalar_mul.rs rename to acvm-repo/bn254_blackbox_solver/src/fixed_base_scalar_mul.rs diff --git a/acvm-repo/bn254_blackbox_solver/src/lib.rs b/acvm-repo/bn254_blackbox_solver/src/lib.rs new file mode 100644 index 00000000000..e315c4650be --- /dev/null +++ b/acvm-repo/bn254_blackbox_solver/src/lib.rs @@ -0,0 +1,90 @@ +#![warn(unreachable_pub)] +#![warn(clippy::semicolon_if_nothing_returned)] +#![cfg_attr(not(test), warn(unused_crate_dependencies, unused_extern_crates))] + +use acir::{BlackBoxFunc, FieldElement}; +use acvm_blackbox_solver::{BlackBoxFunctionSolver, BlackBoxResolutionError}; + +mod fixed_base_scalar_mul; +mod wasm; + +pub use fixed_base_scalar_mul::fixed_base_scalar_mul; +use wasm::Barretenberg; + +use self::wasm::{Pedersen, SchnorrSig}; + +pub struct Bn254BlackBoxSolver { + blackbox_vendor: Barretenberg, +} + +impl Bn254BlackBoxSolver { + #[cfg(target_arch = "wasm32")] + pub async fn initialize() -> Bn254BlackBoxSolver { + let blackbox_vendor = Barretenberg::initialize().await; + Bn254BlackBoxSolver { blackbox_vendor } + } + + #[cfg(not(target_arch = "wasm32"))] + pub fn new() -> Bn254BlackBoxSolver { + let blackbox_vendor = Barretenberg::new(); + Bn254BlackBoxSolver { blackbox_vendor } + } +} + +#[cfg(not(target_arch = "wasm32"))] +impl Default for Bn254BlackBoxSolver { + fn default() -> Self { + Self::new() + } +} + +impl BlackBoxFunctionSolver for Bn254BlackBoxSolver { + fn schnorr_verify( + &self, + public_key_x: &FieldElement, + public_key_y: &FieldElement, + signature: &[u8], + message: &[u8], + ) -> Result { + let pub_key_bytes: Vec = + public_key_x.to_be_bytes().iter().copied().chain(public_key_y.to_be_bytes()).collect(); + + let pub_key: [u8; 64] = pub_key_bytes.try_into().unwrap(); + let sig_s: [u8; 32] = signature[0..32].try_into().unwrap(); + let sig_e: [u8; 32] = signature[32..64].try_into().unwrap(); + + self.blackbox_vendor.verify_signature(pub_key, sig_s, sig_e, message).map_err(|err| { + BlackBoxResolutionError::Failed(BlackBoxFunc::SchnorrVerify, err.to_string()) + }) + } + + fn 
pedersen_commitment( + &self, + inputs: &[FieldElement], + domain_separator: u32, + ) -> Result<(FieldElement, FieldElement), BlackBoxResolutionError> { + #[allow(deprecated)] + self.blackbox_vendor.encrypt(inputs.to_vec(), domain_separator).map_err(|err| { + BlackBoxResolutionError::Failed(BlackBoxFunc::PedersenCommitment, err.to_string()) + }) + } + + fn pedersen_hash( + &self, + inputs: &[FieldElement], + domain_separator: u32, + ) -> Result { + #[allow(deprecated)] + self.blackbox_vendor.hash(inputs.to_vec(), domain_separator).map_err(|err| { + BlackBoxResolutionError::Failed(BlackBoxFunc::PedersenCommitment, err.to_string()) + }) + } + + fn fixed_base_scalar_mul( + &self, + low: &FieldElement, + high: &FieldElement, + ) -> Result<(FieldElement, FieldElement), BlackBoxResolutionError> { + fixed_base_scalar_mul(low, high) + } +} diff --git a/acvm-repo/barretenberg_blackbox_solver/src/wasm/barretenberg_structures.rs b/acvm-repo/bn254_blackbox_solver/src/wasm/barretenberg_structures.rs similarity index 100% rename from acvm-repo/barretenberg_blackbox_solver/src/wasm/barretenberg_structures.rs rename to acvm-repo/bn254_blackbox_solver/src/wasm/barretenberg_structures.rs diff --git a/acvm-repo/barretenberg_blackbox_solver/src/wasm/mod.rs b/acvm-repo/bn254_blackbox_solver/src/wasm/mod.rs similarity index 100% rename from acvm-repo/barretenberg_blackbox_solver/src/wasm/mod.rs rename to acvm-repo/bn254_blackbox_solver/src/wasm/mod.rs diff --git a/acvm-repo/barretenberg_blackbox_solver/src/wasm/pedersen.rs b/acvm-repo/bn254_blackbox_solver/src/wasm/pedersen.rs similarity index 100% rename from acvm-repo/barretenberg_blackbox_solver/src/wasm/pedersen.rs rename to acvm-repo/bn254_blackbox_solver/src/wasm/pedersen.rs diff --git a/acvm-repo/barretenberg_blackbox_solver/src/wasm/schnorr.rs b/acvm-repo/bn254_blackbox_solver/src/wasm/schnorr.rs similarity index 100% rename from acvm-repo/barretenberg_blackbox_solver/src/wasm/schnorr.rs rename to acvm-repo/bn254_blackbox_solver/src/wasm/schnorr.rs diff --git a/acvm-repo/brillig/Cargo.toml b/acvm-repo/brillig/Cargo.toml index 542839a5767..ee8651faeec 100644 --- a/acvm-repo/brillig/Cargo.toml +++ b/acvm-repo/brillig/Cargo.toml @@ -2,7 +2,7 @@ name = "brillig" description = "Brillig is the bytecode ACIR uses for non-determinism." # x-release-please-start-version -version = "0.33.0" +version = "0.38.0" # x-release-please-end authors.workspace = true edition.workspace = true diff --git a/acvm-repo/brillig/src/black_box.rs b/acvm-repo/brillig/src/black_box.rs index 75fae0a10f0..41e54ab2705 100644 --- a/acvm-repo/brillig/src/black_box.rs +++ b/acvm-repo/brillig/src/black_box.rs @@ -11,12 +11,6 @@ pub enum BlackBoxOp { Blake2s { message: HeapVector, output: HeapArray }, /// Calculates the Keccak256 hash of the inputs. Keccak256 { message: HeapVector, output: HeapArray }, - /// Hashes a set of inputs and applies the field modulus to the result - /// to return a value which can be represented as a [`FieldElement`][acir_field::FieldElement] - /// - /// This is implemented using the `Blake2s` hash function. - /// The "128" in the name specifies that this function should have 128 bits of security. - HashToField128Security { message: HeapVector, output: RegisterIndex }, /// Verifies a ECDSA signature over the secp256k1 curve. 
EcdsaSecp256k1 { hashed_msg: HeapVector, diff --git a/acvm-repo/brillig/src/opcodes.rs b/acvm-repo/brillig/src/opcodes.rs index 44d90acde47..79295cc6e5d 100644 --- a/acvm-repo/brillig/src/opcodes.rs +++ b/acvm-repo/brillig/src/opcodes.rs @@ -133,28 +133,6 @@ pub enum BrilligOpcode { Stop, } -impl BrilligOpcode { - pub fn name(&self) -> &'static str { - match self { - BrilligOpcode::BinaryFieldOp { .. } => "binary_field_op", - BrilligOpcode::BinaryIntOp { .. } => "binary_int_op", - BrilligOpcode::JumpIfNot { .. } => "jmp_if_not", - BrilligOpcode::JumpIf { .. } => "jmp_if", - BrilligOpcode::Jump { .. } => "jmp", - BrilligOpcode::Call { .. } => "call", - BrilligOpcode::Const { .. } => "const", - BrilligOpcode::Return => "return", - BrilligOpcode::ForeignCall { .. } => "foreign_call", - BrilligOpcode::Mov { .. } => "mov", - BrilligOpcode::Load { .. } => "load", - BrilligOpcode::Store { .. } => "store", - BrilligOpcode::BlackBox(_) => "black_box", - BrilligOpcode::Trap => "trap", - BrilligOpcode::Stop => "stop", - } - } -} - /// Binary fixed-length field expressions #[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)] pub enum BinaryFieldOp { diff --git a/acvm-repo/brillig_vm/Cargo.toml b/acvm-repo/brillig_vm/Cargo.toml index 2d89aa7aa15..91bef2572bb 100644 --- a/acvm-repo/brillig_vm/Cargo.toml +++ b/acvm-repo/brillig_vm/Cargo.toml @@ -2,7 +2,7 @@ name = "brillig_vm" description = "The virtual machine that processes Brillig bytecode, used to introduce non-determinism to the ACVM" # x-release-please-start-version -version = "0.33.0" +version = "0.38.0" # x-release-please-end authors.workspace = true edition.workspace = true diff --git a/acvm-repo/brillig_vm/src/black_box.rs b/acvm-repo/brillig_vm/src/black_box.rs index 66d40c48aec..94feb23e1a6 100644 --- a/acvm-repo/brillig_vm/src/black_box.rs +++ b/acvm-repo/brillig_vm/src/black_box.rs @@ -1,8 +1,8 @@ use acir::brillig::{BlackBoxOp, HeapArray, HeapVector, Value}; use acir::{BlackBoxFunc, FieldElement}; use acvm_blackbox_solver::{ - blake2s, ecdsa_secp256k1_verify, ecdsa_secp256r1_verify, hash_to_field_128_security, keccak256, - sha256, BlackBoxFunctionSolver, BlackBoxResolutionError, + blake2s, ecdsa_secp256k1_verify, ecdsa_secp256r1_verify, keccak256, sha256, + BlackBoxFunctionSolver, BlackBoxResolutionError, }; use crate::{Memory, Registers}; @@ -64,13 +64,6 @@ pub(crate) fn evaluate_black_box( memory.write_slice(registers.get(output.pointer).to_usize(), &to_value_vec(&bytes)); Ok(()) } - BlackBoxOp::HashToField128Security { message, output } => { - let field = hash_to_field_128_security(&to_u8_vec(read_heap_vector( - memory, registers, message, - )))?; - registers.set(*output, field.into()); - Ok(()) - } BlackBoxOp::EcdsaSecp256k1 { hashed_msg, public_key_x, @@ -85,11 +78,7 @@ pub(crate) fn evaluate_black_box( signature, result: result_register, } => { - let bb_func = match op { - BlackBoxOp::EcdsaSecp256k1 { .. } => BlackBoxFunc::EcdsaSecp256k1, - BlackBoxOp::EcdsaSecp256r1 { .. } => BlackBoxFunc::EcdsaSecp256r1, - _ => unreachable!(), - }; + let bb_func = black_box_function_from_op(op); let public_key_x: [u8; 32] = to_u8_vec(read_heap_array( memory, @@ -124,7 +113,7 @@ pub(crate) fn evaluate_black_box( BlackBoxOp::EcdsaSecp256r1 { .. } => { ecdsa_secp256r1_verify(&hashed_msg, &public_key_x, &public_key_y, &signature)? 
} - _ => unreachable!(), + _ => unreachable!("`BlackBoxOp` is guarded against being a non-ecdsa operation"), }; registers.set(*result_register, result.into()); @@ -178,6 +167,20 @@ pub(crate) fn evaluate_black_box( } } +fn black_box_function_from_op(op: &BlackBoxOp) -> BlackBoxFunc { + match op { + BlackBoxOp::Sha256 { .. } => BlackBoxFunc::SHA256, + BlackBoxOp::Blake2s { .. } => BlackBoxFunc::Blake2s, + BlackBoxOp::Keccak256 { .. } => BlackBoxFunc::Keccak256, + BlackBoxOp::EcdsaSecp256k1 { .. } => BlackBoxFunc::EcdsaSecp256k1, + BlackBoxOp::EcdsaSecp256r1 { .. } => BlackBoxFunc::EcdsaSecp256r1, + BlackBoxOp::SchnorrVerify { .. } => BlackBoxFunc::SchnorrVerify, + BlackBoxOp::PedersenCommitment { .. } => BlackBoxFunc::PedersenCommitment, + BlackBoxOp::PedersenHash { .. } => BlackBoxFunc::PedersenHash, + BlackBoxOp::FixedBaseScalarMul { .. } => BlackBoxFunc::FixedBaseScalarMul, + } +} + #[cfg(test)] mod test { use acir::brillig::BlackBoxOp; diff --git a/acvm-repo/brillig_vm/src/lib.rs b/acvm-repo/brillig_vm/src/lib.rs index e0d30f7b2e0..482b9b36d77 100644 --- a/acvm-repo/brillig_vm/src/lib.rs +++ b/acvm-repo/brillig_vm/src/lib.rs @@ -164,10 +164,18 @@ impl<'a, B: BlackBoxFunctionSolver> VM<'a, B> { &self.registers } + pub fn set_register(&mut self, register_index: RegisterIndex, value: Value) { + self.registers.set(register_index, value); + } + pub fn get_memory(&self) -> &Vec { self.memory.values() } + pub fn write_memory_at(&mut self, ptr: usize, value: Value) { + self.memory.write(ptr, value); + } + /// Process a single opcode and modify the program counter. pub fn process_opcode(&mut self) -> VMStatus { let opcode = &self.bytecode[self.program_counter]; diff --git a/acvm-repo/stdlib/CHANGELOG.md b/acvm-repo/stdlib/CHANGELOG.md deleted file mode 100644 index bea80c95d1e..00000000000 --- a/acvm-repo/stdlib/CHANGELOG.md +++ /dev/null @@ -1,350 +0,0 @@ -# Changelog - -## [0.27.0](https://github.com/noir-lang/acvm/compare/acvm_stdlib-v0.26.1...acvm_stdlib-v0.27.0) (2023-09-19) - - -### Miscellaneous Chores - -* **acvm_stdlib:** Synchronize acvm versions - -## [0.26.1](https://github.com/noir-lang/acvm/compare/acvm_stdlib-v0.26.0...acvm_stdlib-v0.26.1) (2023-09-12) - - -### Miscellaneous Chores - -* **acvm_stdlib:** Synchronize acvm versions - -## [0.26.0](https://github.com/noir-lang/acvm/compare/acvm_stdlib-v0.25.0...acvm_stdlib-v0.26.0) (2023-09-07) - - -### Miscellaneous Chores - -* **acvm_stdlib:** Synchronize acvm versions - -## [0.25.0](https://github.com/noir-lang/acvm/compare/acvm_stdlib-v0.24.1...acvm_stdlib-v0.25.0) (2023-09-04) - - -### Miscellaneous Chores - -* **acvm_stdlib:** Synchronize acvm versions - -## [0.24.1](https://github.com/noir-lang/acvm/compare/acvm_stdlib-v0.24.0...acvm_stdlib-v0.24.1) (2023-09-03) - - -### Miscellaneous Chores - -* **acvm_stdlib:** Synchronize acvm versions - -## [0.24.0](https://github.com/noir-lang/acvm/compare/acvm_stdlib-v0.23.0...acvm_stdlib-v0.24.0) (2023-08-31) - - -### Miscellaneous Chores - -* **acvm_stdlib:** Synchronize acvm versions - -## [0.23.0](https://github.com/noir-lang/acvm/compare/acvm_stdlib-v0.22.0...acvm_stdlib-v0.23.0) (2023-08-30) - - -### Miscellaneous Chores - -* **acvm_stdlib:** Synchronize acvm versions - -## [0.22.0](https://github.com/noir-lang/acvm/compare/acvm_stdlib-v0.21.0...acvm_stdlib-v0.22.0) (2023-08-18) - - -### Miscellaneous Chores - -* **acvm_stdlib:** Synchronize acvm versions - -## [0.21.0](https://github.com/noir-lang/acvm/compare/acvm_stdlib-v0.20.1...acvm_stdlib-v0.21.0) (2023-07-26) - - -### 
Miscellaneous Chores - -* **acvm_stdlib:** Synchronize acvm versions - -## [0.20.1](https://github.com/noir-lang/acvm/compare/acvm_stdlib-v0.20.0...acvm_stdlib-v0.20.1) (2023-07-26) - - -### Features - -* add optimisations to fallback black box functions on booleans ([#446](https://github.com/noir-lang/acvm/issues/446)) ([2cfb2a8](https://github.com/noir-lang/acvm/commit/2cfb2a8cf911a81eedbd9da13ab2c616abd67f83)) -* **stdlib:** Add fallback implementation of `Keccak256` black box function ([#445](https://github.com/noir-lang/acvm/issues/445)) ([f7ebb03](https://github.com/noir-lang/acvm/commit/f7ebb03653c971f119700ff8126d9eb5ff01be0f)) - -## [0.20.0](https://github.com/noir-lang/acvm/compare/acvm_stdlib-v0.19.1...acvm_stdlib-v0.20.0) (2023-07-20) - - -### Features - -* **stdlib:** Add fallback implementation of `HashToField128Security` black box function ([#435](https://github.com/noir-lang/acvm/issues/435)) ([ed40f22](https://github.com/noir-lang/acvm/commit/ed40f228529e888d1960bfa70cb92b277e24b37f)) - -## [0.19.1](https://github.com/noir-lang/acvm/compare/acvm_stdlib-v0.19.0...acvm_stdlib-v0.19.1) (2023-07-17) - - -### Miscellaneous Chores - -* **acvm_stdlib:** Synchronize acvm versions - -## [0.19.0](https://github.com/noir-lang/acvm/compare/acvm_stdlib-v0.18.2...acvm_stdlib-v0.19.0) (2023-07-15) - - -### Miscellaneous Chores - -* **acvm_stdlib:** Synchronize acvm versions - -## [0.18.2](https://github.com/noir-lang/acvm/compare/acvm_stdlib-v0.18.1...acvm_stdlib-v0.18.2) (2023-07-12) - - -### Features - -* **stdlib:** Add fallback implementation of `Blake2s` black box function ([#424](https://github.com/noir-lang/acvm/issues/424)) ([982d940](https://github.com/noir-lang/acvm/commit/982d94087d46092ce7a5e94dbd7e732195f58e42)) - -## [0.18.1](https://github.com/noir-lang/acvm/compare/acvm_stdlib-v0.18.0...acvm_stdlib-v0.18.1) (2023-07-12) - - -### Miscellaneous Chores - -* **acvm_stdlib:** Synchronize acvm versions - -## [0.18.0](https://github.com/noir-lang/acvm/compare/acvm_stdlib-v0.17.0...acvm_stdlib-v0.18.0) (2023-07-12) - - -### Features - -* **stdlib:** Add fallback implementation of `SHA256` black box function ([#407](https://github.com/noir-lang/acvm/issues/407)) ([040369a](https://github.com/noir-lang/acvm/commit/040369adc8749fa5ec2edd255ff54c105c3140f5)) - -## [0.17.0](https://github.com/noir-lang/acvm/compare/acvm_stdlib-v0.16.0...acvm_stdlib-v0.17.0) (2023-07-07) - - -### Miscellaneous Chores - -* **acvm_stdlib:** Synchronize acvm versions - -## [0.16.0](https://github.com/noir-lang/acvm/compare/acvm_stdlib-v0.15.1...acvm_stdlib-v0.16.0) (2023-07-06) - - -### Miscellaneous Chores - -* **acvm_stdlib:** Synchronize acvm versions - -## [0.15.1](https://github.com/noir-lang/acvm/compare/acvm_stdlib-v0.15.0...acvm_stdlib-v0.15.1) (2023-06-20) - - -### Miscellaneous Chores - -* **acvm_stdlib:** Synchronize acvm versions - -## [0.15.0](https://github.com/noir-lang/acvm/compare/acvm_stdlib-v0.14.2...acvm_stdlib-v0.15.0) (2023-06-15) - - -### Miscellaneous Chores - -* **acvm_stdlib:** Synchronize acvm versions - -## [0.14.2](https://github.com/noir-lang/acvm/compare/acvm_stdlib-v0.14.1...acvm_stdlib-v0.14.2) (2023-06-08) - - -### Miscellaneous Chores - -* **acvm_stdlib:** Synchronize acvm versions - -## [0.14.1](https://github.com/noir-lang/acvm/compare/acvm_stdlib-v0.14.0...acvm_stdlib-v0.14.1) (2023-06-07) - - -### Miscellaneous Chores - -* **acvm_stdlib:** Synchronize acvm versions - -## [0.14.0](https://github.com/noir-lang/acvm/compare/acvm_stdlib-v0.13.3...acvm_stdlib-v0.14.0) 
(2023-06-06) - - -### Miscellaneous Chores - -* **acvm_stdlib:** Synchronize acvm versions - -## [0.13.3](https://github.com/noir-lang/acvm/compare/acvm_stdlib-v0.13.2...acvm_stdlib-v0.13.3) (2023-06-05) - - -### Bug Fixes - -* Empty commit to trigger release-please ([e8f0748](https://github.com/noir-lang/acvm/commit/e8f0748042ef505d59ab63266d3c36c5358ee30d)) - -## [0.13.2](https://github.com/noir-lang/acvm/compare/acvm_stdlib-v0.13.1...acvm_stdlib-v0.13.2) (2023-06-02) - - -### Miscellaneous Chores - -* **acvm_stdlib:** Synchronize acvm versions - -## [0.13.1](https://github.com/noir-lang/acvm/compare/acvm_stdlib-v0.13.0...acvm_stdlib-v0.13.1) (2023-06-01) - - -### Bug Fixes - -* **ci:** Correct typo to avoid `undefined` in changelogs ([#333](https://github.com/noir-lang/acvm/issues/333)) ([d3424c0](https://github.com/noir-lang/acvm/commit/d3424c04fd303c9cbe25d03118d8b358cbb84b83)) - -## [0.13.0](https://github.com/noir-lang/acvm/compare/acvm_stdlib-v0.12.0...acvm_stdlib-v0.13.0) (2023-06-01) - - -### Miscellaneous Chores - -* **acvm_stdlib:** Synchronize acvm versions - -## [0.12.0](https://github.com/noir-lang/acvm/compare/acvm_stdlib-v0.11.0...acvm_stdlib-v0.12.0) (2023-05-17) - - -### Miscellaneous Chores - -* **acvm_stdlib:** Synchronize acvm versions - -## [0.11.0](https://github.com/noir-lang/acvm/compare/acvm_stdlib-v0.10.3...acvm_stdlib-v0.11.0) (2023-05-04) - - -### Miscellaneous Chores - -* **acvm_stdlib:** Synchronize acvm versions - -## [0.10.3](https://github.com/noir-lang/acvm/compare/acvm_stdlib-v0.10.2...acvm_stdlib-v0.10.3) (2023-04-28) - - -### Miscellaneous Chores - -* **acvm_stdlib:** Synchronize acvm versions - -## [0.10.2](https://github.com/noir-lang/acvm/compare/acvm_stdlib-v0.10.1...acvm_stdlib-v0.10.2) (2023-04-28) - - -### Bug Fixes - -* add default flag to `acvm_stdlib` ([#242](https://github.com/noir-lang/acvm/issues/242)) ([83b6fa8](https://github.com/noir-lang/acvm/commit/83b6fa8302569add7e3ac8481b2fd2a6a1ff3576)) - -## [0.10.1](https://github.com/noir-lang/acvm/compare/acvm_stdlib-v0.10.0...acvm_stdlib-v0.10.1) (2023-04-28) - - -### Miscellaneous Chores - -* **acvm_stdlib:** Synchronize acvm versions - -## [0.10.0](https://github.com/noir-lang/acvm/compare/acvm_stdlib-v0.9.0...acvm_stdlib-v0.10.0) (2023-04-26) - - -### ⚠ BREAKING CHANGES - -* organise operator implementations for Expression ([#190](https://github.com/noir-lang/acvm/issues/190)) - -### Bug Fixes - -* prevent `bn254` feature flag always being enabled ([#225](https://github.com/noir-lang/acvm/issues/225)) ([82eee6a](https://github.com/noir-lang/acvm/commit/82eee6ab08ae480f04904ca8571fd88f4466c000)) - - -### Miscellaneous Chores - -* organise operator implementations for Expression ([#190](https://github.com/noir-lang/acvm/issues/190)) ([a619df6](https://github.com/noir-lang/acvm/commit/a619df614bbb9b2518b788b42a7553b069823a0f)) - -## [0.9.0](https://github.com/noir-lang/acvm/compare/acvm_stdlib-v0.8.1...acvm_stdlib-v0.9.0) (2023-04-07) - - -### Miscellaneous Chores - -* **acvm_stdlib:** Synchronize acvm versions - - -### Dependencies - -* The following workspace dependencies were updated - * dependencies - * acir bumped from 0.8.1 to 0.9.0 - -## [0.8.1](https://github.com/noir-lang/acvm/compare/acvm_stdlib-v0.8.0...acvm_stdlib-v0.8.1) (2023-03-30) - - -### Miscellaneous Chores - -* **acvm_stdlib:** Synchronize acvm versions - - -### Dependencies - -* The following workspace dependencies were updated - * dependencies - * acir bumped from 0.8.0 to 0.8.1 - -## 
[0.8.0](https://github.com/noir-lang/acvm/compare/acvm_stdlib-v0.7.1...acvm_stdlib-v0.8.0) (2023-03-28) - - -### Miscellaneous Chores - -* **acvm_stdlib:** Synchronize acvm versions - - -### Dependencies - -* The following workspace dependencies were updated - * dependencies - * acir bumped from 0.7.1 to 0.8.0 - -## [0.7.1](https://github.com/noir-lang/acvm/compare/acvm_stdlib-v0.7.0...acvm_stdlib-v0.7.1) (2023-03-27) - - -### Miscellaneous Chores - -* **acvm_stdlib:** Synchronize acvm versions - - -### Dependencies - -* The following workspace dependencies were updated - * dependencies - * acir bumped from 0.7.0 to 0.7.1 - -## [0.7.0](https://github.com/noir-lang/acvm/compare/acvm_stdlib-v0.6.0...acvm_stdlib-v0.7.0) (2023-03-23) - - -### Miscellaneous Chores - -* **acvm_stdlib:** Synchronize acvm versions - - -### Dependencies - -* The following workspace dependencies were updated - * dependencies - * acir bumped from 0.6.0 to 0.7.0 - -## [0.6.0](https://github.com/noir-lang/acvm/compare/acvm_stdlib-v0.5.0...acvm_stdlib-v0.6.0) (2023-03-03) - - -### ⚠ BREAKING CHANGES - -* **acir:** rename `term_addition` to `push_addition_term` -* **acir:** rename `term_multiplication` to `push_multiplication_term` ([#122](https://github.com/noir-lang/acvm/issues/122)) - -### Miscellaneous Chores - -* **acir:** rename `term_addition` to `push_addition_term` ([d389385](https://github.com/noir-lang/acvm/commit/d38938542851a97dc01727438391e6a65e44c689)) -* **acir:** rename `term_multiplication` to `push_multiplication_term` ([#122](https://github.com/noir-lang/acvm/issues/122)) ([d389385](https://github.com/noir-lang/acvm/commit/d38938542851a97dc01727438391e6a65e44c689)) - - -### Dependencies - -* The following workspace dependencies were updated - * dependencies - * acir bumped from 0.5.0 to 0.6.0 - -## [0.5.0](https://github.com/noir-lang/acvm/compare/acvm_stdlib-v0.4.1...acvm_stdlib-v0.5.0) (2023-02-22) - - -### ⚠ BREAKING CHANGES - -* refactor ToRadix to ToRadixLe and ToRadixBe ([#58](https://github.com/noir-lang/acvm/issues/58)) - -### Miscellaneous Chores - -* refactor ToRadix to ToRadixLe and ToRadixBe ([#58](https://github.com/noir-lang/acvm/issues/58)) ([2427a27](https://github.com/noir-lang/acvm/commit/2427a275048e598c6d651cce8348a4c55148f235)) - - -### Dependencies - -* The following workspace dependencies were updated - * dependencies - * acir bumped from 0.4.1 to 0.5.0 diff --git a/acvm-repo/stdlib/Cargo.toml b/acvm-repo/stdlib/Cargo.toml deleted file mode 100644 index 63a8b71eb15..00000000000 --- a/acvm-repo/stdlib/Cargo.toml +++ /dev/null @@ -1,22 +0,0 @@ -[package] -name = "acvm_stdlib" -description = "The ACVM standard library." -# x-release-please-start-version -version = "0.33.0" -# x-release-please-end -authors.workspace = true -edition.workspace = true -license.workspace = true -rust-version.workspace = true -repository.workspace = true - -# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html - -[dependencies] -acir.workspace = true - -[features] -default = ["bn254"] -bn254 = ["acir/bn254"] -bls12_381 = ["acir/bls12_381"] -testing = ["bn254"] diff --git a/acvm-repo/stdlib/src/blackbox_fallbacks/blake2s.rs b/acvm-repo/stdlib/src/blackbox_fallbacks/blake2s.rs deleted file mode 100644 index 92bf93d2d56..00000000000 --- a/acvm-repo/stdlib/src/blackbox_fallbacks/blake2s.rs +++ /dev/null @@ -1,468 +0,0 @@ -//! Blake2s fallback function. 
-use super::{ - utils::{byte_decomposition, round_to_nearest_byte}, - UInt32, -}; -use acir::{ - circuit::Opcode, - native_types::{Expression, Witness}, - FieldElement, -}; -use std::vec; - -const BLAKE2S_BLOCKBYTES_USIZE: usize = 64; -const MSG_SCHEDULE_BLAKE2: [[usize; 16]; 10] = [ - [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15], - [14, 10, 4, 8, 9, 15, 13, 6, 1, 12, 0, 2, 11, 7, 5, 3], - [11, 8, 12, 0, 5, 2, 15, 13, 10, 14, 3, 6, 7, 1, 9, 4], - [7, 9, 3, 1, 13, 12, 11, 14, 2, 6, 5, 10, 4, 0, 15, 8], - [9, 0, 5, 7, 2, 4, 10, 15, 14, 1, 11, 12, 6, 8, 3, 13], - [2, 12, 6, 10, 0, 11, 8, 3, 4, 13, 7, 5, 15, 14, 1, 9], - [12, 5, 1, 15, 14, 13, 4, 10, 0, 7, 6, 3, 9, 2, 8, 11], - [13, 11, 7, 14, 12, 1, 3, 9, 5, 0, 15, 4, 8, 6, 2, 10], - [6, 15, 14, 9, 11, 3, 0, 8, 12, 2, 13, 7, 1, 4, 10, 5], - [10, 2, 8, 4, 7, 6, 1, 5, 15, 11, 9, 14, 3, 12, 13, 0], -]; -const INITIAL_H: [u32; 8] = [ - 0x6b08e647, 0xbb67ae85, 0x3c6ef372, 0xa54ff53a, 0x510e527f, 0x9b05688c, 0x1f83d9ab, 0x5be0cd19, -]; -const IV_VALUE: [u32; 8] = [ - 0x6A09E667, 0xBB67AE85, 0x3C6EF372, 0xA54FF53A, 0x510E527F, 0x9B05688C, 0x1F83D9AB, 0x5BE0CD19, -]; - -pub fn blake2s( - inputs: Vec<(Expression, u32)>, - outputs: Vec, - mut num_witness: u32, -) -> (u32, Vec) { - let mut new_opcodes = Vec::new(); - let mut new_inputs = Vec::new(); - - // Decompose the input field elements into bytes and collect the resulting witnesses. - for (witness, num_bits) in inputs { - let num_bytes = round_to_nearest_byte(num_bits); - let (extra_opcodes, extra_inputs, updated_witness_counter) = - byte_decomposition(witness, num_bytes, num_witness); - new_opcodes.extend(extra_opcodes); - new_inputs.extend(extra_inputs); - num_witness = updated_witness_counter; - } - - let (result, num_witness, extra_opcodes) = create_blake2s_constraint(new_inputs, num_witness); - new_opcodes.extend(extra_opcodes); - - // constrain the outputs to be the same as the result of the circuit - for i in 0..outputs.len() { - let mut expr = Expression::from(outputs[i]); - expr.push_addition_term(-FieldElement::one(), result[i]); - new_opcodes.push(Opcode::Arithmetic(expr)); - } - (num_witness, new_opcodes) -} - -pub(crate) fn create_blake2s_constraint( - input: Vec, - num_witness: u32, -) -> (Vec, u32, Vec) { - let mut new_opcodes = Vec::new(); - - // prepare constants - let (mut blake2s_state, extra_opcodes, num_witness) = Blake2sState::init(num_witness); - new_opcodes.extend(extra_opcodes); - let (blake2s_constants, extra_opcodes, num_witness) = - Blake2sConstantsInCircuit::init(num_witness); - new_opcodes.extend(extra_opcodes); - let (blake2s_iv, extra_opcodes, mut num_witness) = Blake2sIV::init(num_witness); - new_opcodes.extend(extra_opcodes); - - let mut offset = 0; - let mut size = input.len(); - - while size > BLAKE2S_BLOCKBYTES_USIZE { - let (extra_opcodes, updated_witness_counter) = blake2s_increment_counter( - &mut blake2s_state, - &blake2s_constants.blake2s_blockbytes_uint32, - num_witness, - ); - new_opcodes.extend(extra_opcodes); - let (extra_opcodes, updated_witness_counter) = blake2s_compress( - &mut blake2s_state, - &blake2s_iv, - input.get(offset..offset + BLAKE2S_BLOCKBYTES_USIZE).unwrap(), - updated_witness_counter, - ); - new_opcodes.extend(extra_opcodes); - offset += BLAKE2S_BLOCKBYTES_USIZE; - size -= BLAKE2S_BLOCKBYTES_USIZE; - num_witness = updated_witness_counter; - } - - let (u32_max, extra_opcodes, mut num_witness) = UInt32::load_constant(u32::MAX, num_witness); - new_opcodes.extend(extra_opcodes); - blake2s_state.f[0] = u32_max; - - // pad final 
block - let mut final_block = input.get(offset..).unwrap().to_vec(); - for _ in 0..BLAKE2S_BLOCKBYTES_USIZE - final_block.len() { - let (pad, extra_opcodes, updated_witness_counter) = - UInt32::load_constant(0_u32, num_witness); - new_opcodes.extend(extra_opcodes); - final_block.push(pad.inner); - num_witness = updated_witness_counter; - } - - let (size_w, extra_opcodes, num_witness) = UInt32::load_constant(size as u32, num_witness); - new_opcodes.extend(extra_opcodes); - let (extra_opcodes, num_witness) = - blake2s_increment_counter(&mut blake2s_state, &size_w, num_witness); - new_opcodes.extend(extra_opcodes); - - let (extra_opcodes, num_witness) = - blake2s_compress(&mut blake2s_state, &blake2s_iv, &final_block, num_witness); - new_opcodes.extend(extra_opcodes); - - // decompose the result bytes in u32 to u8 - let (extra_opcodes, mut byte1, num_witness) = - byte_decomposition(Expression::from(blake2s_state.h[0].inner), 4, num_witness); - new_opcodes.extend(extra_opcodes); - let (extra_opcodes, mut byte2, num_witness) = - byte_decomposition(Expression::from(blake2s_state.h[1].inner), 4, num_witness); - new_opcodes.extend(extra_opcodes); - let (extra_opcodes, mut byte3, num_witness) = - byte_decomposition(Expression::from(blake2s_state.h[2].inner), 4, num_witness); - new_opcodes.extend(extra_opcodes); - let (extra_opcodes, mut byte4, num_witness) = - byte_decomposition(Expression::from(blake2s_state.h[3].inner), 4, num_witness); - new_opcodes.extend(extra_opcodes); - let (extra_opcodes, mut byte5, num_witness) = - byte_decomposition(Expression::from(blake2s_state.h[4].inner), 4, num_witness); - new_opcodes.extend(extra_opcodes); - let (extra_opcodes, mut byte6, num_witness) = - byte_decomposition(Expression::from(blake2s_state.h[5].inner), 4, num_witness); - new_opcodes.extend(extra_opcodes); - let (extra_opcodes, mut byte7, num_witness) = - byte_decomposition(Expression::from(blake2s_state.h[6].inner), 4, num_witness); - new_opcodes.extend(extra_opcodes); - let (extra_opcodes, mut byte8, num_witness) = - byte_decomposition(Expression::from(blake2s_state.h[7].inner), 4, num_witness); - new_opcodes.extend(extra_opcodes); - - byte1.reverse(); - byte2.reverse(); - byte3.reverse(); - byte4.reverse(); - byte5.reverse(); - byte6.reverse(); - byte7.reverse(); - byte8.reverse(); - - let result = vec![byte1, byte2, byte3, byte4, byte5, byte6, byte7, byte8] - .into_iter() - .flatten() - .collect(); - - (result, num_witness, new_opcodes) -} - -fn blake2s_increment_counter( - state: &mut Blake2sState, - inc: &UInt32, - num_witness: u32, -) -> (Vec, u32) { - let mut new_opcodes = Vec::new(); - - // t0 + inc - let (state_t0, extra_opcodes, num_witness) = state.t[0].add(inc, num_witness); - new_opcodes.extend(extra_opcodes); - state.t[0] = state_t0; - - // t1 + (t0 < inc) - let (to_inc, extra_opcodes, num_witness) = state.t[0].less_than_comparison(inc, num_witness); - new_opcodes.extend(extra_opcodes); - let (state_t1, extra_opcodes, num_witness) = state.t[1].add(&to_inc, num_witness); - new_opcodes.extend(extra_opcodes); - state.t[1] = state_t1; - - (new_opcodes, num_witness) -} - -fn blake2s_compress( - state: &mut Blake2sState, - blake2s_iv: &Blake2sIV, - input: &[Witness], - mut num_witness: u32, -) -> (Vec, u32) { - let mut new_opcodes = Vec::new(); - let mut m = Vec::new(); - let mut v = Vec::new(); - - for i in 0..16 { - let mut mi_bytes = input.get(i * 4..i * 4 + 4).unwrap().to_vec(); - mi_bytes.reverse(); - let (mi, extra_opcodes, updated_witness_counter) = - UInt32::from_witnesses(&mi_bytes, 
num_witness); - new_opcodes.extend(extra_opcodes); - m.push(mi[0]); - num_witness = updated_witness_counter; - } - - for i in 0..8 { - v.push(state.h[i]); - } - - v.push(blake2s_iv.iv[0]); - v.push(blake2s_iv.iv[1]); - v.push(blake2s_iv.iv[2]); - v.push(blake2s_iv.iv[3]); - let (v12, extra_opcodes, num_witness) = state.t[0].xor(&blake2s_iv.iv[4], num_witness); - new_opcodes.extend(extra_opcodes); - v.push(v12); - let (v13, extra_opcodes, num_witness) = state.t[1].xor(&blake2s_iv.iv[5], num_witness); - new_opcodes.extend(extra_opcodes); - v.push(v13); - let (v14, extra_opcodes, num_witness) = state.f[0].xor(&blake2s_iv.iv[6], num_witness); - new_opcodes.extend(extra_opcodes); - v.push(v14); - let (v15, extra_opcodes, num_witness) = state.f[1].xor(&blake2s_iv.iv[7], num_witness); - new_opcodes.extend(extra_opcodes); - v.push(v15); - - let (extra_opcodes, num_witness) = blake2s_round(&mut v, &m, 0, num_witness); - new_opcodes.extend(extra_opcodes); - let (extra_opcodes, num_witness) = blake2s_round(&mut v, &m, 1, num_witness); - new_opcodes.extend(extra_opcodes); - let (extra_opcodes, num_witness) = blake2s_round(&mut v, &m, 2, num_witness); - new_opcodes.extend(extra_opcodes); - let (extra_opcodes, num_witness) = blake2s_round(&mut v, &m, 3, num_witness); - new_opcodes.extend(extra_opcodes); - let (extra_opcodes, num_witness) = blake2s_round(&mut v, &m, 4, num_witness); - new_opcodes.extend(extra_opcodes); - let (extra_opcodes, num_witness) = blake2s_round(&mut v, &m, 5, num_witness); - new_opcodes.extend(extra_opcodes); - let (extra_opcodes, num_witness) = blake2s_round(&mut v, &m, 6, num_witness); - new_opcodes.extend(extra_opcodes); - let (extra_opcodes, num_witness) = blake2s_round(&mut v, &m, 7, num_witness); - new_opcodes.extend(extra_opcodes); - let (extra_opcodes, num_witness) = blake2s_round(&mut v, &m, 8, num_witness); - new_opcodes.extend(extra_opcodes); - let (extra_opcodes, mut num_witness) = blake2s_round(&mut v, &m, 9, num_witness); - new_opcodes.extend(extra_opcodes); - - for i in 0..8 { - let (a, extra_opcodes, updated_witness_counter) = state.h[i].xor(&v[i], num_witness); - new_opcodes.extend(extra_opcodes); - let (state_hi, extra_opcodes, updated_witness_counter) = - a.xor(&v[i + 8], updated_witness_counter); - new_opcodes.extend(extra_opcodes); - state.h[i] = state_hi; - num_witness = updated_witness_counter; - } - - (new_opcodes, num_witness) -} - -fn blake2s_round( - state: &mut [UInt32], - msg: &[UInt32], - round: usize, - num_witness: u32, -) -> (Vec, u32) { - let mut new_opcodes = Vec::new(); - let schedule = &MSG_SCHEDULE_BLAKE2[round]; - - // Mix the columns. - let (extra_opcodes, num_witness) = - g(state, 0, 4, 8, 12, msg[schedule[0]], msg[schedule[1]], num_witness); - new_opcodes.extend(extra_opcodes); - let (extra_opcodes, num_witness) = - g(state, 1, 5, 9, 13, msg[schedule[2]], msg[schedule[3]], num_witness); - new_opcodes.extend(extra_opcodes); - let (extra_opcodes, num_witness) = - g(state, 2, 6, 10, 14, msg[schedule[4]], msg[schedule[5]], num_witness); - new_opcodes.extend(extra_opcodes); - let (extra_opcodes, num_witness) = - g(state, 3, 7, 11, 15, msg[schedule[6]], msg[schedule[7]], num_witness); - new_opcodes.extend(extra_opcodes); - - // Mix the rows. 
- let (extra_opcodes, num_witness) = - g(state, 0, 5, 10, 15, msg[schedule[8]], msg[schedule[9]], num_witness); - new_opcodes.extend(extra_opcodes); - let (extra_opcodes, num_witness) = - g(state, 1, 6, 11, 12, msg[schedule[10]], msg[schedule[11]], num_witness); - new_opcodes.extend(extra_opcodes); - let (extra_opcodes, num_witness) = - g(state, 2, 7, 8, 13, msg[schedule[12]], msg[schedule[13]], num_witness); - new_opcodes.extend(extra_opcodes); - let (extra_opcodes, num_witness) = - g(state, 3, 4, 9, 14, msg[schedule[14]], msg[schedule[15]], num_witness); - new_opcodes.extend(extra_opcodes); - - (new_opcodes, num_witness) -} - -#[allow(clippy::too_many_arguments)] -fn g( - state: &mut [UInt32], - a: usize, - b: usize, - c: usize, - d: usize, - x: UInt32, - y: UInt32, - num_witness: u32, -) -> (Vec, u32) { - let mut new_opcodes = Vec::new(); - - // calculate state[a] as `state[a] + state[b] + x` - let (state_a_1, extra_opcodes, num_witness) = state[a].add(&state[b], num_witness); - new_opcodes.extend(extra_opcodes); - let (state_a, extra_opcodes, num_witness) = state_a_1.add(&x, num_witness); - new_opcodes.extend(extra_opcodes); - state[a] = state_a; - - // calculate state[d] as `(state[d] ^ state[a]).ror(16)` - let (state_d_1, extra_opcodes, num_witness) = state[d].xor(&state[a], num_witness); - new_opcodes.extend(extra_opcodes); - let (state_d, extra_opcodes, num_witness) = state_d_1.ror(16, num_witness); - new_opcodes.extend(extra_opcodes); - state[d] = state_d; - - // calculate state[c] as `state[c] + state[d]` - let (state_c, extra_opcodes, num_witness) = state[c].add(&state[d], num_witness); - new_opcodes.extend(extra_opcodes); - state[c] = state_c; - - // caclulate state[b] as `(state[b] ^ state[c]).ror(12)` - let (state_b_1, extra_opcodes, num_witness) = state[b].xor(&state[c], num_witness); - new_opcodes.extend(extra_opcodes); - let (state_b, extra_opcodes, num_witness) = state_b_1.ror(12, num_witness); - new_opcodes.extend(extra_opcodes); - state[b] = state_b; - - // calculate state[a] as `state[a] + state[b] + y` - let (state_a_1, extra_opcodes, num_witness) = state[a].add(&state[b], num_witness); - new_opcodes.extend(extra_opcodes); - let (state_a, extra_opcodes, num_witness) = state_a_1.add(&y, num_witness); - new_opcodes.extend(extra_opcodes); - state[a] = state_a; - - // calculate state[d] as `(state[d] ^ state[a]).ror(8)` - let (state_d_1, extra_opcodes, num_witness) = state[d].xor(&state[a], num_witness); - new_opcodes.extend(extra_opcodes); - let (state_d, extra_opcodes, num_witness) = state_d_1.ror(8, num_witness); - new_opcodes.extend(extra_opcodes); - state[d] = state_d; - - // calculate state[c] as `state[c] + state[d]` - let (state_c, extra_opcodes, num_witness) = state[c].add(&state[d], num_witness); - new_opcodes.extend(extra_opcodes); - state[c] = state_c; - - // caclulate state[b] as `(state[b] ^ state[c]).ror(7)` - let (state_b_1, extra_opcodes, num_witness) = state[b].xor(&state[c], num_witness); - new_opcodes.extend(extra_opcodes); - let (state_b, extra_opcodes, num_witness) = state_b_1.ror(7, num_witness); - new_opcodes.extend(extra_opcodes); - state[b] = state_b; - - (new_opcodes, num_witness) -} - -/// Blake2s state `h` `t` and `f` -#[derive(Debug)] -struct Blake2sState { - h: Vec, - t: Vec, - f: Vec, -} - -impl Blake2sState { - fn new(h: Vec, t: Vec, f: Vec) -> Self { - Blake2sState { h, t, f } - } - - /// Initialize internal state of Blake2s - fn init(mut num_witness: u32) -> (Blake2sState, Vec, u32) { - let mut new_opcodes = Vec::new(); - let mut h = 
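// The same mixing function `g` over plain u32 values, for reference; each
// wrapping_add / rotate_right here corresponds to one add / ror gadget
// emitted by the in-circuit version above.
fn g_plain(v: &mut [u32; 16], a: usize, b: usize, c: usize, d: usize, x: u32, y: u32) {
    v[a] = v[a].wrapping_add(v[b]).wrapping_add(x);
    v[d] = (v[d] ^ v[a]).rotate_right(16);
    v[c] = v[c].wrapping_add(v[d]);
    v[b] = (v[b] ^ v[c]).rotate_right(12);
    v[a] = v[a].wrapping_add(v[b]).wrapping_add(y);
    v[d] = (v[d] ^ v[a]).rotate_right(8);
    v[c] = v[c].wrapping_add(v[d]);
    v[b] = (v[b] ^ v[c]).rotate_right(7);
}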
Vec::new(); - let mut t = Vec::new(); - let mut f = Vec::new(); - - for init_h in INITIAL_H { - let (new_witness, extra_opcodes, updated_witness_counter) = - UInt32::load_constant(init_h, num_witness); - new_opcodes.extend(extra_opcodes); - h.push(new_witness); - num_witness = updated_witness_counter; - } - - for _ in 0..2 { - let (new_witness, extra_opcodes, updated_witness_counter) = - UInt32::load_constant(0_u32, num_witness); - new_opcodes.extend(extra_opcodes); - t.push(new_witness); - num_witness = updated_witness_counter; - } - - for _ in 0..2 { - let (new_witness, extra_opcodes, updated_witness_counter) = - UInt32::load_constant(0_u32, num_witness); - new_opcodes.extend(extra_opcodes); - f.push(new_witness); - num_witness = updated_witness_counter; - } - - let blake2s_state = Blake2sState::new(h, t, f); - - (blake2s_state, new_opcodes, num_witness) - } -} - -/// Blake2s IV (Initialization Vector) -struct Blake2sIV { - iv: Vec, -} - -impl Blake2sIV { - fn new(iv: Vec) -> Self { - Blake2sIV { iv } - } - - /// Initialize IV of Blake2s - fn init(mut num_witness: u32) -> (Blake2sIV, Vec, u32) { - let mut new_opcodes = Vec::new(); - let mut iv = Vec::new(); - - for iv_v in IV_VALUE { - let (new_witness, extra_opcodes, updated_witness_counter) = - UInt32::load_constant(iv_v, num_witness); - new_opcodes.extend(extra_opcodes); - iv.push(new_witness); - num_witness = updated_witness_counter; - } - - let blake2s_iv = Blake2sIV::new(iv); - - (blake2s_iv, new_opcodes, num_witness) - } -} - -struct Blake2sConstantsInCircuit { - blake2s_blockbytes_uint32: UInt32, -} - -impl Blake2sConstantsInCircuit { - fn new(blake2s_blockbytes_uint32: UInt32) -> Self { - Blake2sConstantsInCircuit { blake2s_blockbytes_uint32 } - } - - fn init(num_witness: u32) -> (Blake2sConstantsInCircuit, Vec, u32) { - let mut new_opcodes = Vec::new(); - let (blake2s_blockbytes_uint32, extra_opcodes, num_witness) = - UInt32::load_constant(64_u32, num_witness); - new_opcodes.extend(extra_opcodes); - - (Blake2sConstantsInCircuit::new(blake2s_blockbytes_uint32), new_opcodes, num_witness) - } -} diff --git a/acvm-repo/stdlib/src/blackbox_fallbacks/hash_to_field.rs b/acvm-repo/stdlib/src/blackbox_fallbacks/hash_to_field.rs deleted file mode 100644 index 91a7cdd09e4..00000000000 --- a/acvm-repo/stdlib/src/blackbox_fallbacks/hash_to_field.rs +++ /dev/null @@ -1,168 +0,0 @@ -//! HashToField128Security fallback function. -use super::{ - blake2s::create_blake2s_constraint, - utils::{byte_decomposition, round_to_nearest_byte}, - UInt32, -}; -use crate::helpers::VariableStore; -use acir::{ - brillig::{self, RegisterIndex}, - circuit::{ - brillig::{Brillig, BrilligInputs, BrilligOutputs}, - Opcode, - }, - native_types::{Expression, Witness}, - FieldElement, -}; - -pub fn hash_to_field( - inputs: Vec<(Expression, u32)>, - outputs: Witness, - mut num_witness: u32, -) -> (u32, Vec) { - let mut new_opcodes = Vec::new(); - let mut new_inputs = Vec::new(); - - // Decompose the input field elements into bytes and collect the resulting witnesses. 
- for (witness, num_bits) in inputs { - let num_bytes = round_to_nearest_byte(num_bits); - let (extra_opcodes, extra_inputs, updated_witness_counter) = - byte_decomposition(witness, num_bytes, num_witness); - new_opcodes.extend(extra_opcodes); - new_inputs.extend(extra_inputs); - num_witness = updated_witness_counter; - } - - let (result, num_witness, extra_opcodes) = create_blake2s_constraint(new_inputs, num_witness); - new_opcodes.extend(extra_opcodes); - - // transform bytes to a single field - let (result, extra_opcodes, num_witness) = field_from_be_bytes(&result, num_witness); - new_opcodes.extend(extra_opcodes); - - // constrain the outputs to be the same as the result of the circuit - let mut expr = Expression::from(outputs); - expr.push_addition_term(-FieldElement::one(), result); - new_opcodes.push(Opcode::Arithmetic(expr)); - (num_witness, new_opcodes) -} - -/// Convert bytes represented by [Witness]es to a single [FieldElement] -fn field_from_be_bytes(result: &[Witness], num_witness: u32) -> (Witness, Vec, u32) { - let mut new_opcodes = Vec::new(); - - // Load `0` and `256` using the load constant function from UInt32 - let (new_witness, extra_opcodes, num_witness) = UInt32::load_constant(0, num_witness); - let mut new_witness = new_witness.inner; - new_opcodes.extend(extra_opcodes); - let (const_256, extra_opcodes, mut num_witness) = UInt32::load_constant(256, num_witness); - let const_256 = const_256.inner; - new_opcodes.extend(extra_opcodes); - - // add byte and multiply 256 each round - for r in result.iter().take(result.len() - 1) { - let (updated_witness, extra_opcodes, updated_witness_counter) = - field_addition(&new_witness, r, num_witness); - new_opcodes.extend(extra_opcodes); - let (updated_witness, extra_opcodes, updated_witness_counter) = - field_mul(&updated_witness, &const_256, updated_witness_counter); - new_opcodes.extend(extra_opcodes); - new_witness = updated_witness; - num_witness = updated_witness_counter; - } - - let (new_witness, extra_opcodes, num_witness) = - field_addition(&new_witness, &result[result.len() - 1], num_witness); - new_opcodes.extend(extra_opcodes); - - (new_witness, new_opcodes, num_witness) -} - -/// Caculate and constrain `self` + `rhs` as field -fn field_addition( - lhs: &Witness, - rhs: &Witness, - mut num_witness: u32, -) -> (Witness, Vec, u32) { - let mut new_opcodes = Vec::new(); - let mut variables = VariableStore::new(&mut num_witness); - let new_witness = variables.new_variable(); - - // calculate `self` + `rhs` as field - let brillig_opcode = Opcode::Brillig(Brillig { - inputs: vec![ - BrilligInputs::Single(Expression { - mul_terms: vec![], - linear_combinations: vec![(FieldElement::one(), *lhs)], - q_c: FieldElement::zero(), - }), - BrilligInputs::Single(Expression { - mul_terms: vec![], - linear_combinations: vec![(FieldElement::one(), *rhs)], - q_c: FieldElement::zero(), - }), - ], - outputs: vec![BrilligOutputs::Simple(new_witness)], - bytecode: vec![brillig::Opcode::BinaryFieldOp { - op: brillig::BinaryFieldOp::Add, - lhs: RegisterIndex::from(0), - rhs: RegisterIndex::from(1), - destination: RegisterIndex::from(0), - }], - predicate: None, - }); - new_opcodes.push(brillig_opcode); - let num_witness = variables.finalize(); - - // constrain addition - let mut add_expr = Expression::from(new_witness); - add_expr.push_addition_term(-FieldElement::one(), *lhs); - add_expr.push_addition_term(-FieldElement::one(), *rhs); - new_opcodes.push(Opcode::Arithmetic(add_expr)); - - (new_witness, new_opcodes, num_witness) -} - -/// 
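// field_from_be_bytes folds the hash output bytes into a single field element
// big-endian style: for every byte but the last, acc = (acc + byte) * 256,
// then the last byte is added, which per step is the usual acc * 256 + byte.
// Sketch over u128 purely to stay runnable for short inputs; the in-circuit
// fold happens over the proving field, so it is implicitly reduced modulo the
// field's characteristic.
fn fold_be_bytes(bytes: &[u8]) -> u128 {
    let (last, rest) = bytes.split_last().expect("non-empty input");
    let mut acc: u128 = 0;
    for b in rest {
        acc = (acc + *b as u128) * 256;
    }
    acc + *last as u128
}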
Calculate and constrain `self` * `rhs` as field -pub(crate) fn field_mul( - lhs: &Witness, - rhs: &Witness, - mut num_witness: u32, -) -> (Witness, Vec, u32) { - let mut new_opcodes = Vec::new(); - let mut variables = VariableStore::new(&mut num_witness); - let new_witness = variables.new_variable(); - - // calulate `self` * `rhs` with overflow - let brillig_opcode = Opcode::Brillig(Brillig { - inputs: vec![ - BrilligInputs::Single(Expression { - mul_terms: vec![], - linear_combinations: vec![(FieldElement::one(), *lhs)], - q_c: FieldElement::zero(), - }), - BrilligInputs::Single(Expression { - mul_terms: vec![], - linear_combinations: vec![(FieldElement::one(), *rhs)], - q_c: FieldElement::zero(), - }), - ], - outputs: vec![BrilligOutputs::Simple(new_witness)], - bytecode: vec![brillig::Opcode::BinaryFieldOp { - op: brillig::BinaryFieldOp::Mul, - lhs: RegisterIndex::from(0), - rhs: RegisterIndex::from(1), - destination: RegisterIndex::from(0), - }], - predicate: None, - }); - new_opcodes.push(brillig_opcode); - let num_witness = variables.finalize(); - - // constrain mul - let mut mul_constraint = Expression::from(new_witness); - mul_constraint.push_multiplication_term(-FieldElement::one(), *lhs, *rhs); - new_opcodes.push(Opcode::Arithmetic(mul_constraint)); - - (new_witness, new_opcodes, num_witness) -} diff --git a/acvm-repo/stdlib/src/blackbox_fallbacks/keccak256.rs b/acvm-repo/stdlib/src/blackbox_fallbacks/keccak256.rs deleted file mode 100644 index d91db3dc2c6..00000000000 --- a/acvm-repo/stdlib/src/blackbox_fallbacks/keccak256.rs +++ /dev/null @@ -1,269 +0,0 @@ -//! Keccak256 fallback function. -use super::{ - sha256::pad, - uint8::UInt8, - utils::{byte_decomposition, round_to_nearest_byte}, - UInt64, -}; -use acir::{ - circuit::Opcode, - native_types::{Expression, Witness}, - FieldElement, -}; - -const STATE_NUM_BYTES: usize = 200; -const BITS: usize = 256; -const WORD_SIZE: usize = 8; -const BLOCK_SIZE: usize = (1600 - BITS * 2) / WORD_SIZE; -const ROUND_CONSTANTS: [u64; 24] = [ - 1, - 0x8082, - 0x800000000000808a, - 0x8000000080008000, - 0x808b, - 0x80000001, - 0x8000000080008081, - 0x8000000000008009, - 0x8a, - 0x88, - 0x80008009, - 0x8000000a, - 0x8000808b, - 0x800000000000008b, - 0x8000000000008089, - 0x8000000000008003, - 0x8000000000008002, - 0x8000000000000080, - 0x800a, - 0x800000008000000a, - 0x8000000080008081, - 0x8000000000008080, - 0x80000001, - 0x8000000080008008, -]; -const RHO: [u32; 24] = - [1, 3, 6, 10, 15, 21, 28, 36, 45, 55, 2, 14, 27, 41, 56, 8, 25, 43, 62, 18, 39, 61, 20, 44]; -const PI: [usize; 24] = - [10, 7, 11, 17, 18, 3, 5, 16, 8, 21, 24, 4, 15, 23, 19, 13, 12, 2, 20, 14, 22, 9, 6, 1]; - -pub fn keccak256( - inputs: Vec<(Expression, u32)>, - outputs: Vec, - mut num_witness: u32, -) -> (u32, Vec) { - let mut new_opcodes = Vec::new(); - let mut new_inputs = Vec::new(); - - // Decompose the input field elements into bytes and collect the resulting witnesses. 
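// The sponge parameters used above, spelled out: Keccak-256 keeps a 1600-bit
// state with 2 * 256 = 512 bits of capacity, leaving a rate of 1088 bits,
// i.e. BLOCK_SIZE = (1600 - 512) / 8 = 136 bytes absorbed per permutation.
const KECCAK256_RATE_BYTES: usize = (1600 - 256 * 2) / 8;
const _: () = assert!(KECCAK256_RATE_BYTES == 136);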
- for (witness, num_bits) in inputs { - let num_bytes = round_to_nearest_byte(num_bits); - let (extra_opcodes, extra_inputs, updated_witness_counter) = - byte_decomposition(witness, num_bytes, num_witness); - new_opcodes.extend(extra_opcodes); - new_inputs.extend(extra_inputs); - num_witness = updated_witness_counter; - } - - let (result, num_witness, extra_opcodes) = create_keccak_constraint(new_inputs, num_witness); - new_opcodes.extend(extra_opcodes); - - // constrain the outputs to be the same as the result of the circuit - for i in 0..outputs.len() { - let mut expr = Expression::from(outputs[i]); - expr.push_addition_term(-FieldElement::one(), result[i]); - new_opcodes.push(Opcode::Arithmetic(expr)); - } - (num_witness, new_opcodes) -} - -fn create_keccak_constraint( - input: Vec, - num_witness: u32, -) -> (Vec, u32, Vec) { - let mut new_opcodes = Vec::new(); - let num_blocks = input.len() / BLOCK_SIZE + 1; - - // pad keccak - let (input, extra_opcodes, mut num_witness) = pad_keccak(input, num_blocks, num_witness); - new_opcodes.extend(extra_opcodes); - - // prepare state - let mut state = Vec::with_capacity(200); - for _ in 0..STATE_NUM_BYTES { - let (zero, extra_opcodes, updated_witness_counter) = UInt8::load_constant(0, num_witness); - new_opcodes.extend(extra_opcodes); - state.push(zero); - num_witness = updated_witness_counter; - } - - // process block - for i in 0..num_blocks { - for j in 0..BLOCK_SIZE { - let (new_state, extra_opcodes, updated_witness_counter) = - state[j].xor(&UInt8::new(input[i * BLOCK_SIZE + j]), num_witness); - new_opcodes.extend(extra_opcodes); - state[j] = new_state; - num_witness = updated_witness_counter; - } - let (new_state, extra_opcodes, updated_witness_counter) = keccakf(state, num_witness); - new_opcodes.extend(extra_opcodes); - num_witness = updated_witness_counter; - state = new_state; - } - - let result: Vec = state[..32].iter().map(|x| x.inner).collect(); - (result, num_witness, new_opcodes) -} - -fn keccakf(state: Vec, num_witness: u32) -> (Vec, Vec, u32) { - let mut new_opcodes = Vec::new(); - - // turn state into UInt64 - let mut state_witnesses: Vec = Vec::new(); - for i in 0..state.len() / 8 { - for j in 0..8 { - state_witnesses.push(state[i * 8 + (7 - j)].inner); - } - } - let (mut state_u64, extra_opcodes, mut num_witness) = - UInt64::from_witnesses(&state_witnesses, num_witness); - new_opcodes.extend(extra_opcodes); - - // process round - for round_constant in ROUND_CONSTANTS { - let (new_state_u64, extra_opcodes, updated_witness_counter) = - keccak_round(state_u64, round_constant, num_witness); - state_u64 = new_state_u64; - new_opcodes.extend(extra_opcodes); - num_witness = updated_witness_counter; - } - - // turn state back to UInt8 - let state_u64_witnesses: Vec = state_u64.into_iter().map(|x| x.inner).collect(); - let mut state_u8 = Vec::with_capacity(state_u64_witnesses.len()); - for state_u64_witness in state_u64_witnesses { - let (extra_opcodes, mut u8s, updated_witness_counter) = - byte_decomposition(Expression::from(state_u64_witness), 8, num_witness); - new_opcodes.extend(extra_opcodes); - u8s.reverse(); - state_u8.push(u8s); - num_witness = updated_witness_counter; - } - - let state_u8: Vec = state_u8.into_iter().flatten().map(UInt8::new).collect(); - (state_u8, new_opcodes, num_witness) -} - -fn keccak_round( - mut a: Vec, - round_const: u64, - mut num_witness: u32, -) -> (Vec, Vec, u32) { - let mut new_opcodes = Vec::new(); - - // theta - let mut array = Vec::with_capacity(5); - for _ in 0..5 { - let (zero, extra_opcodes, 
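// keccakf above packs each group of 8 state bytes into a 64-bit lane; the
// byte order is reversed before UInt64::from_witnesses (which is big-endian)
// because Keccak lanes are little-endian. Plain-integer equivalent:
fn bytes_to_lanes(state: &[u8; 200]) -> [u64; 25] {
    let mut lanes = [0u64; 25];
    for (i, chunk) in state.chunks_exact(8).enumerate() {
        lanes[i] = u64::from_le_bytes(chunk.try_into().unwrap());
    }
    lanes
}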
updated_witness_counter) = UInt64::load_constant(0, num_witness); - array.push(zero); - new_opcodes.extend(extra_opcodes); - num_witness = updated_witness_counter; - } - for x in 0..5 { - for y_count in 0..5 { - let y = y_count * 5; - let (new_array_ele, extra_opcodes, updated_witness_counter) = - array[x].xor(&a[x + y], num_witness); - new_opcodes.extend(extra_opcodes); - num_witness = updated_witness_counter; - array[x] = new_array_ele; - } - } - for x in 0..5 { - for y_count in 0..5 { - let y = y_count * 5; - let (a_ele, extra_opcodes, updated_witness_counter) = - array[(x + 1) % 5].rol(1, num_witness); - new_opcodes.extend(extra_opcodes); - let (b_ele, extra_opcodes, updated_witness_counter) = - array[(x + 4) % 5].xor(&a_ele, updated_witness_counter); - new_opcodes.extend(extra_opcodes); - let (new_array_ele, extra_opcodes, updated_witness_counter) = - a[x + y].xor(&b_ele, updated_witness_counter); - new_opcodes.extend(extra_opcodes); - num_witness = updated_witness_counter; - a[x + y] = new_array_ele; - } - } - - // rho and pi - let mut last = a[1]; - for x in 0..24 { - array[0] = a[PI[x]]; - let (a_ele, extra_opcodes, updated_witness_counter) = last.rol(RHO[x], num_witness); - new_opcodes.extend(extra_opcodes); - a[PI[x]] = a_ele; - num_witness = updated_witness_counter; - last = array[0]; - } - - // chi - for y_step in 0..5 { - let y = y_step * 5; - - array[..5].copy_from_slice(&a[y..(5 + y)]); - - for x in 0..5 { - let (a_ele, extra_opcodes, updated_witness_counter) = - array[(x + 1) % 5].not(num_witness); - new_opcodes.extend(extra_opcodes); - let (b_ele, extra_opcodes, updated_witness_counter) = - a_ele.and(&array[(x + 2) % 5], updated_witness_counter); - new_opcodes.extend(extra_opcodes); - let (c_ele, extra_opcodes, updated_witness_counter) = - array[x].xor(&b_ele, updated_witness_counter); - new_opcodes.extend(extra_opcodes); - - a[y + x] = c_ele; - num_witness = updated_witness_counter; - } - } - - // iota - let (rc, extra_opcodes, num_witness) = UInt64::load_constant(round_const, num_witness); - new_opcodes.extend(extra_opcodes); - let (a_ele, extra_opcodes, num_witness) = a[0].xor(&rc, num_witness); - new_opcodes.extend(extra_opcodes); - a[0] = a_ele; - - (a, new_opcodes, num_witness) -} - -fn pad_keccak( - mut input: Vec, - num_blocks: usize, - num_witness: u32, -) -> (Vec, Vec, u32) { - let mut new_opcodes = Vec::new(); - let total_len = BLOCK_SIZE * num_blocks; - - let (mut num_witness, pad_witness, extra_opcodes) = pad(0x01, 8, num_witness); - - new_opcodes.extend(extra_opcodes); - input.push(pad_witness); - for _ in 0..total_len - input.len() { - let (updated_witness_counter, pad_witness, extra_opcodes) = pad(0x00, 8, num_witness); - new_opcodes.extend(extra_opcodes); - input.push(pad_witness); - num_witness = updated_witness_counter; - } - - let (zero_x_80, extra_opcodes, num_witness) = UInt8::load_constant(0x80, num_witness); - new_opcodes.extend(extra_opcodes); - let (final_pad, extra_opcodes, num_witness) = - UInt8::new(input[total_len - 1]).xor(&zero_x_80, num_witness); - new_opcodes.extend(extra_opcodes); - input[total_len - 1] = final_pad.inner; - - (input, new_opcodes, num_witness) -} diff --git a/acvm-repo/stdlib/src/blackbox_fallbacks/logic_fallbacks.rs b/acvm-repo/stdlib/src/blackbox_fallbacks/logic_fallbacks.rs deleted file mode 100644 index fa8c1060a26..00000000000 --- a/acvm-repo/stdlib/src/blackbox_fallbacks/logic_fallbacks.rs +++ /dev/null @@ -1,127 +0,0 @@ -use crate::{blackbox_fallbacks::utils::mul_with_witness, helpers::VariableStore}; - -use 
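// The same Keccak-f[1600] round over plain u64 lanes, using the RHO and PI
// tables from the deleted file above; a[x + 5 * y] is lane (x, y), matching
// the indexing used by keccak_round.
const RHO: [u32; 24] =
    [1, 3, 6, 10, 15, 21, 28, 36, 45, 55, 2, 14, 27, 41, 56, 8, 25, 43, 62, 18, 39, 61, 20, 44];
const PI: [usize; 24] =
    [10, 7, 11, 17, 18, 3, 5, 16, 8, 21, 24, 4, 15, 23, 19, 13, 12, 2, 20, 14, 22, 9, 6, 1];

fn keccak_round_plain(a: &mut [u64; 25], round_constant: u64) {
    // theta
    let mut c = [0u64; 5];
    for x in 0..5 {
        for y in 0..5 {
            c[x] ^= a[x + 5 * y];
        }
    }
    for x in 0..5 {
        let d = c[(x + 4) % 5] ^ c[(x + 1) % 5].rotate_left(1);
        for y in 0..5 {
            a[x + 5 * y] ^= d;
        }
    }
    // rho and pi
    let mut last = a[1];
    for i in 0..24 {
        let previous = a[PI[i]];
        a[PI[i]] = last.rotate_left(RHO[i]);
        last = previous;
    }
    // chi
    for y in 0..5 {
        let mut row = [0u64; 5];
        row.copy_from_slice(&a[5 * y..5 * y + 5]);
        for x in 0..5 {
            a[5 * y + x] = row[x] ^ (!row[(x + 1) % 5] & row[(x + 2) % 5]);
        }
    }
    // iota
    a[0] ^= round_constant;
}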
super::utils::{bit_decomposition, boolean_expr}; -use acir::{ - acir_field::FieldElement, - circuit::Opcode, - native_types::{Expression, Witness}, -}; - -// Range constraint -pub fn range(opcode: Expression, bit_size: u32, mut num_witness: u32) -> (u32, Vec) { - if bit_size == 1 { - let mut variables = VariableStore::new(&mut num_witness); - let bit_constraint = Opcode::Arithmetic(boolean_expr(&opcode, &mut variables)); - return (variables.finalize(), vec![bit_constraint]); - } - - let (new_opcodes, _, updated_witness_counter) = - bit_decomposition(opcode, bit_size, num_witness); - (updated_witness_counter, new_opcodes) -} - -/// Returns a set of opcodes which constrain `a & b == result` -/// -/// `a` and `b` are assumed to be constrained to fit within `bit_size` externally. -pub fn and( - a: Expression, - b: Expression, - result: Witness, - bit_size: u32, - mut num_witness: u32, -) -> (u32, Vec) { - if bit_size == 1 { - let mut variables = VariableStore::new(&mut num_witness); - - let mut and_expr = mul_with_witness(&a, &b, &mut variables); - and_expr.push_addition_term(-FieldElement::one(), result); - - return (variables.finalize(), vec![Opcode::Arithmetic(and_expr)]); - } - // Decompose the operands into bits - // - let (extra_opcodes_a, a_bits, updated_witness_counter) = - bit_decomposition(a, bit_size, num_witness); - - let (extra_opcodes_b, b_bits, updated_witness_counter) = - bit_decomposition(b, bit_size, updated_witness_counter); - - assert_eq!(a_bits.len(), b_bits.len()); - assert_eq!(a_bits.len(), bit_size as usize); - - let mut two_pow = FieldElement::one(); - let two = FieldElement::from(2_i128); - - // Build an expression that Multiplies each bit element-wise - // This gives the same truth table as the AND operation - // Additionally, we multiply by a power of 2 to build up the - // expected output; ie result = \sum 2^i x_i * y_i - let mut and_expr = Expression::default(); - for (a_bit, b_bit) in a_bits.into_iter().zip(b_bits) { - and_expr.push_multiplication_term(two_pow, a_bit, b_bit); - two_pow = two * two_pow; - } - and_expr.push_addition_term(-FieldElement::one(), result); - - and_expr.sort(); - - let mut new_opcodes = Vec::new(); - new_opcodes.extend(extra_opcodes_a); - new_opcodes.extend(extra_opcodes_b); - new_opcodes.push(Opcode::Arithmetic(and_expr)); - - (updated_witness_counter, new_opcodes) -} - -/// Returns a set of opcodes which constrain `a ^ b == result` -/// -/// `a` and `b` are assumed to be constrained to fit within `bit_size` externally. 
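// The AND fallback relies on the identity a & b == sum over i of 2^i * a_i * b_i,
// where a_i and b_i are the bits of a and b; each product term becomes one
// multiplication term in the arithmetic expression above. A plain-u32 check:
fn and_from_bits(a: u32, b: u32, bit_size: u32) -> u32 {
    (0..bit_size).map(|i| (1u32 << i) * ((a >> i) & 1) * ((b >> i) & 1)).sum()
}

#[test]
fn and_identity_holds() {
    assert_eq!(and_from_bits(0xdead_beef, 0x1234_5678, 32), 0xdead_beef & 0x1234_5678);
}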
-pub fn xor( - a: Expression, - b: Expression, - result: Witness, - bit_size: u32, - mut num_witness: u32, -) -> (u32, Vec) { - if bit_size == 1 { - let mut variables = VariableStore::new(&mut num_witness); - - let product = mul_with_witness(&a, &b, &mut variables); - let mut xor_expr = &(&a + &b) - &product; - xor_expr.push_addition_term(-FieldElement::one(), result); - - return (variables.finalize(), vec![Opcode::Arithmetic(xor_expr)]); - } - - // Decompose the operands into bits - // - let (extra_opcodes_a, a_bits, updated_witness_counter) = - bit_decomposition(a, bit_size, num_witness); - let (extra_opcodes_b, b_bits, updated_witness_counter) = - bit_decomposition(b, bit_size, updated_witness_counter); - - assert_eq!(a_bits.len(), b_bits.len()); - assert_eq!(a_bits.len(), bit_size as usize); - - let mut two_pow = FieldElement::one(); - let two = FieldElement::from(2_i128); - - // Build an xor expression - // TODO: check this is the correct arithmetization - let mut xor_expr = Expression::default(); - for (a_bit, b_bit) in a_bits.into_iter().zip(b_bits) { - xor_expr.push_addition_term(two_pow, a_bit); - xor_expr.push_addition_term(two_pow, b_bit); - two_pow = two * two_pow; - xor_expr.push_multiplication_term(-two_pow, a_bit, b_bit); - } - xor_expr.push_addition_term(-FieldElement::one(), result); - - xor_expr.sort(); - let mut new_opcodes = Vec::new(); - new_opcodes.extend(extra_opcodes_a); - new_opcodes.extend(extra_opcodes_b); - new_opcodes.push(Opcode::Arithmetic(xor_expr)); - - (updated_witness_counter, new_opcodes) -} diff --git a/acvm-repo/stdlib/src/blackbox_fallbacks/mod.rs b/acvm-repo/stdlib/src/blackbox_fallbacks/mod.rs deleted file mode 100644 index d2ca3c50fa7..00000000000 --- a/acvm-repo/stdlib/src/blackbox_fallbacks/mod.rs +++ /dev/null @@ -1,19 +0,0 @@ -mod blake2s; -mod hash_to_field; -mod keccak256; -mod logic_fallbacks; -mod sha256; -#[macro_use] -mod uint; -mod uint32; -mod uint64; -mod uint8; -mod utils; -pub use blake2s::blake2s; -pub use hash_to_field::hash_to_field; -pub use keccak256::keccak256; -pub use logic_fallbacks::{and, range, xor}; -pub use sha256::sha256; -pub use uint32::UInt32; -pub use uint64::UInt64; -pub use uint8::UInt8; diff --git a/acvm-repo/stdlib/src/blackbox_fallbacks/sha256.rs b/acvm-repo/stdlib/src/blackbox_fallbacks/sha256.rs deleted file mode 100644 index 1661b030bcc..00000000000 --- a/acvm-repo/stdlib/src/blackbox_fallbacks/sha256.rs +++ /dev/null @@ -1,377 +0,0 @@ -//! Sha256 fallback function. 
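// The XOR fallback uses the per-bit identity a_i ^ b_i == a_i + b_i - 2 * a_i * b_i,
// accumulated with powers of two exactly like the AND case above. In plain u32:
fn xor_from_bits(a: u32, b: u32, bit_size: u32) -> u32 {
    (0..bit_size)
        .map(|i| {
            let (ai, bi) = ((a >> i) & 1, (b >> i) & 1);
            (1u32 << i) * (ai + bi - 2 * ai * bi)
        })
        .sum()
}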
-use super::uint32::UInt32; -use super::utils::{byte_decomposition, round_to_nearest_byte}; -use crate::helpers::VariableStore; -use acir::{ - brillig, - circuit::{ - brillig::{Brillig, BrilligInputs, BrilligOutputs}, - opcodes::{BlackBoxFuncCall, FunctionInput}, - Opcode, - }, - native_types::{Expression, Witness}, - FieldElement, -}; - -const INIT_CONSTANTS: [u32; 8] = [ - 0x6a09e667, 0xbb67ae85, 0x3c6ef372, 0xa54ff53a, 0x510e527f, 0x9b05688c, 0x1f83d9ab, 0x5be0cd19, -]; - -const ROUND_CONSTANTS: [u32; 64] = [ - 0x428a2f98, 0x71374491, 0xb5c0fbcf, 0xe9b5dba5, 0x3956c25b, 0x59f111f1, 0x923f82a4, 0xab1c5ed5, - 0xd807aa98, 0x12835b01, 0x243185be, 0x550c7dc3, 0x72be5d74, 0x80deb1fe, 0x9bdc06a7, 0xc19bf174, - 0xe49b69c1, 0xefbe4786, 0x0fc19dc6, 0x240ca1cc, 0x2de92c6f, 0x4a7484aa, 0x5cb0a9dc, 0x76f988da, - 0x983e5152, 0xa831c66d, 0xb00327c8, 0xbf597fc7, 0xc6e00bf3, 0xd5a79147, 0x06ca6351, 0x14292967, - 0x27b70a85, 0x2e1b2138, 0x4d2c6dfc, 0x53380d13, 0x650a7354, 0x766a0abb, 0x81c2c92e, 0x92722c85, - 0xa2bfe8a1, 0xa81a664b, 0xc24b8b70, 0xc76c51a3, 0xd192e819, 0xd6990624, 0xf40e3585, 0x106aa070, - 0x19a4c116, 0x1e376c08, 0x2748774c, 0x34b0bcb5, 0x391c0cb3, 0x4ed8aa4a, 0x5b9cca4f, 0x682e6ff3, - 0x748f82ee, 0x78a5636f, 0x84c87814, 0x8cc70208, 0x90befffa, 0xa4506ceb, 0xbef9a3f7, 0xc67178f2, -]; - -pub fn sha256( - inputs: Vec<(Expression, u32)>, - outputs: Vec, - mut num_witness: u32, -) -> (u32, Vec) { - let mut new_opcodes = Vec::new(); - let mut new_inputs = Vec::new(); - let mut total_num_bytes = 0; - - // Decompose the input field elements into bytes and collect the resulting witnesses. - for (witness, num_bits) in inputs { - let num_bytes = round_to_nearest_byte(num_bits); - total_num_bytes += num_bytes; - let (extra_opcodes, extra_inputs, updated_witness_counter) = - byte_decomposition(witness, num_bytes, num_witness); - new_opcodes.extend(extra_opcodes); - new_inputs.extend(extra_inputs); - num_witness = updated_witness_counter; - } - - let (result, num_witness, extra_opcodes) = - create_sha256_constraint(new_inputs, total_num_bytes, num_witness); - new_opcodes.extend(extra_opcodes); - - // constrain the outputs to be the same as the result of the circuit - for i in 0..outputs.len() { - let mut expr = Expression::from(outputs[i]); - expr.push_addition_term(-FieldElement::one(), result[i]); - new_opcodes.push(Opcode::Arithmetic(expr)); - } - (num_witness, new_opcodes) -} - -fn create_sha256_constraint( - mut input: Vec, - total_num_bytes: u32, - num_witness: u32, -) -> (Vec, u32, Vec) { - let mut new_opcodes = Vec::new(); - - // pad the bytes according to sha256 padding rules - let message_bits = total_num_bytes * 8; - let (mut num_witness, pad_witness, extra_opcodes) = pad(128, 8, num_witness); - new_opcodes.extend(extra_opcodes); - input.push(pad_witness); - let bytes_per_block = 64; - let num_bytes = (input.len() + 8) as u32; - let num_blocks = num_bytes / bytes_per_block + ((num_bytes % bytes_per_block != 0) as u32); - let num_total_bytes = num_blocks * bytes_per_block; - for _ in num_bytes..num_total_bytes { - let (updated_witness_counter, pad_witness, extra_opcodes) = pad(0, 8, num_witness); - num_witness = updated_witness_counter; - new_opcodes.extend(extra_opcodes); - input.push(pad_witness); - } - let (num_witness, pad_witness, extra_opcodes) = pad(message_bits, 64, num_witness); - new_opcodes.extend(extra_opcodes); - let (extra_opcodes, pad_witness, num_witness) = - byte_decomposition(pad_witness.into(), 8, num_witness); - new_opcodes.extend(extra_opcodes); - 
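// create_sha256_constraint applies standard SHA-256 padding: append 0x80,
// zero-fill until the length is 56 mod 64, then append the message length in
// bits as a big-endian 64-bit integer. The same padding on plain bytes:
fn sha256_pad(mut message: Vec<u8>) -> Vec<u8> {
    let bit_len = (message.len() as u64) * 8;
    message.push(0x80);
    while message.len() % 64 != 56 {
        message.push(0);
    }
    message.extend_from_slice(&bit_len.to_be_bytes());
    message
}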
input.extend(pad_witness); - - // turn witness into u32 and load sha256 state - let (input, extra_opcodes, num_witness) = UInt32::from_witnesses(&input, num_witness); - new_opcodes.extend(extra_opcodes); - let (mut rolling_hash, extra_opcodes, num_witness) = prepare_state_constants(num_witness); - new_opcodes.extend(extra_opcodes); - let (round_constants, extra_opcodes, mut num_witness) = prepare_round_constants(num_witness); - new_opcodes.extend(extra_opcodes); - // split the input into blocks of size 16 - let input: Vec> = input.chunks(16).map(|block| block.to_vec()).collect(); - - // process sha256 blocks - for i in &input { - let (new_rolling_hash, extra_opcodes, updated_witness_counter) = - sha256_block(i, rolling_hash.clone(), round_constants.clone(), num_witness); - new_opcodes.extend(extra_opcodes); - num_witness = updated_witness_counter; - rolling_hash = new_rolling_hash; - } - - // decompose the result bytes in u32 to u8 - let (extra_opcodes, byte1, num_witness) = - byte_decomposition(Expression::from(rolling_hash[0].inner), 4, num_witness); - new_opcodes.extend(extra_opcodes); - let (extra_opcodes, byte2, num_witness) = - byte_decomposition(Expression::from(rolling_hash[1].inner), 4, num_witness); - new_opcodes.extend(extra_opcodes); - let (extra_opcodes, byte3, num_witness) = - byte_decomposition(Expression::from(rolling_hash[2].inner), 4, num_witness); - new_opcodes.extend(extra_opcodes); - let (extra_opcodes, byte4, num_witness) = - byte_decomposition(Expression::from(rolling_hash[3].inner), 4, num_witness); - new_opcodes.extend(extra_opcodes); - let (extra_opcodes, byte5, num_witness) = - byte_decomposition(Expression::from(rolling_hash[4].inner), 4, num_witness); - new_opcodes.extend(extra_opcodes); - let (extra_opcodes, byte6, num_witness) = - byte_decomposition(Expression::from(rolling_hash[5].inner), 4, num_witness); - new_opcodes.extend(extra_opcodes); - let (extra_opcodes, byte7, num_witness) = - byte_decomposition(Expression::from(rolling_hash[6].inner), 4, num_witness); - new_opcodes.extend(extra_opcodes); - let (extra_opcodes, byte8, num_witness) = - byte_decomposition(Expression::from(rolling_hash[7].inner), 4, num_witness); - new_opcodes.extend(extra_opcodes); - - let result = vec![byte1, byte2, byte3, byte4, byte5, byte6, byte7, byte8] - .into_iter() - .flatten() - .collect(); - - (result, num_witness, new_opcodes) -} - -pub(crate) fn pad(number: u32, bit_size: u32, mut num_witness: u32) -> (u32, Witness, Vec) { - let mut new_opcodes = Vec::new(); - let mut variables = VariableStore::new(&mut num_witness); - let pad = variables.new_variable(); - - let brillig_opcode = Opcode::Brillig(Brillig { - inputs: vec![BrilligInputs::Single(Expression { - mul_terms: vec![], - linear_combinations: vec![], - q_c: FieldElement::from(number as u128), - })], - outputs: vec![BrilligOutputs::Simple(pad)], - bytecode: vec![brillig::Opcode::Stop], - predicate: None, - }); - new_opcodes.push(brillig_opcode); - - let range = Opcode::BlackBoxFuncCall(BlackBoxFuncCall::RANGE { - input: FunctionInput { witness: pad, num_bits: bit_size }, - }); - new_opcodes.push(range); - - (num_witness, pad, new_opcodes) -} - -fn sha256_block( - input: &[UInt32], - rolling_hash: Vec, - round_constants: Vec, - mut num_witness: u32, -) -> (Vec, Vec, u32) { - let mut new_opcodes = Vec::new(); - let mut w = Vec::new(); - w.extend(input.to_owned()); - - for i in 16..64 { - // calculate s0 `w[i - 15].ror(7) ^ w[i - 15].ror(18) ^ (w[i - 15] >> 3)` - let (a1, extra_opcodes, updated_witness_counter) = w[i - 
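// The message-schedule extension built above, in plain u32 arithmetic:
//   s0   = w[i-15].ror(7)  ^ w[i-15].ror(18) ^ (w[i-15] >> 3)
//   s1   = w[i-2].ror(17)  ^ w[i-2].ror(19)  ^ (w[i-2]  >> 10)
//   w[i] = w[i-16] + w[i-7] + s0 + s1                 (mod 2^32)
fn extend_schedule(w: &mut Vec<u32>) {
    debug_assert_eq!(w.len(), 16);
    for i in 16..64 {
        let s0 = w[i - 15].rotate_right(7) ^ w[i - 15].rotate_right(18) ^ (w[i - 15] >> 3);
        let s1 = w[i - 2].rotate_right(17) ^ w[i - 2].rotate_right(19) ^ (w[i - 2] >> 10);
        w.push(w[i - 16].wrapping_add(w[i - 7]).wrapping_add(s0).wrapping_add(s1));
    }
}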
15].ror(7, num_witness); - new_opcodes.extend(extra_opcodes); - let (a2, extra_opcodes, updated_witness_counter) = - w[i - 15].ror(18, updated_witness_counter); - new_opcodes.extend(extra_opcodes); - let (a3, extra_opcodes, updated_witness_counter) = - w[i - 15].rightshift(3, updated_witness_counter); - new_opcodes.extend(extra_opcodes); - let (a4, extra_opcodes, updated_witness_counter) = a1.xor(&a2, updated_witness_counter); - new_opcodes.extend(extra_opcodes); - let (s0, extra_opcodes, updated_witness_counter) = a4.xor(&a3, updated_witness_counter); - new_opcodes.extend(extra_opcodes); - - // calculate s1 `w[i - 2].ror(17) ^ w[i - 2].ror(19) ^ (w[i - 2] >> 10)` - let (b1, extra_opcodes, updated_witness_counter) = - w[i - 2].ror(17, updated_witness_counter); - new_opcodes.extend(extra_opcodes); - let (b2, extra_opcodes, updated_witness_counter) = - w[i - 2].ror(19, updated_witness_counter); - new_opcodes.extend(extra_opcodes); - let (b3, extra_opcodes, updated_witness_counter) = - w[i - 2].rightshift(10, updated_witness_counter); - new_opcodes.extend(extra_opcodes); - let (b4, extra_opcodes, updated_witness_counter) = b1.xor(&b2, updated_witness_counter); - new_opcodes.extend(extra_opcodes); - let (s1, extra_opcodes, updated_witness_counter) = b4.xor(&b3, updated_witness_counter); - new_opcodes.extend(extra_opcodes); - - // calculate w[i] `w[i - 16] + w[i - 7] + s0 + s1` - let (c1, extra_opcodes, updated_witness_counter) = - w[i - 16].add(&w[i - 7], updated_witness_counter); - new_opcodes.extend(extra_opcodes); - let (c2, extra_opcodes, updated_witness_counter) = c1.add(&s0, updated_witness_counter); - new_opcodes.extend(extra_opcodes); - let (c3, extra_opcodes, updated_witness_counter) = c2.add(&s1, updated_witness_counter); - new_opcodes.extend(extra_opcodes); - w.push(c3); - num_witness = updated_witness_counter; - } - - let mut a = rolling_hash[0]; - let mut b = rolling_hash[1]; - let mut c = rolling_hash[2]; - let mut d = rolling_hash[3]; - let mut e = rolling_hash[4]; - let mut f = rolling_hash[5]; - let mut g = rolling_hash[6]; - let mut h = rolling_hash[7]; - - #[allow(non_snake_case)] - for i in 0..64 { - // calculate S1 `e.ror(6) ^ e.ror(11) ^ e.ror(25)` - let (a1, extra_opcodes, updated_witness_counter) = e.ror(6, num_witness); - new_opcodes.extend(extra_opcodes); - let (a2, extra_opcodes, updated_witness_counter) = e.ror(11, updated_witness_counter); - new_opcodes.extend(extra_opcodes); - let (a3, extra_opcodes, updated_witness_counter) = e.ror(25, updated_witness_counter); - new_opcodes.extend(extra_opcodes); - let (a4, extra_opcodes, updated_witness_counter) = a1.xor(&a2, updated_witness_counter); - new_opcodes.extend(extra_opcodes); - let (S1, extra_opcodes, updated_witness_counter) = a4.xor(&a3, updated_witness_counter); - new_opcodes.extend(extra_opcodes); - - // calculate ch `(e & f) + (~e & g)` - let (b1, extra_opcodes, updated_witness_counter) = e.and(&f, updated_witness_counter); - new_opcodes.extend(extra_opcodes); - let (b2, extra_opcodes, updated_witness_counter) = e.not(updated_witness_counter); - new_opcodes.extend(extra_opcodes); - let (b3, extra_opcodes, updated_witness_counter) = b2.and(&g, updated_witness_counter); - new_opcodes.extend(extra_opcodes); - let (ch, extra_opcodes, updated_witness_counter) = b1.add(&b3, updated_witness_counter); - new_opcodes.extend(extra_opcodes); - - // caculate temp1 `h + S1 + ch + round_constants[i] + w[i]` - let (c1, extra_opcodes, updated_witness_counter) = h.add(&S1, updated_witness_counter); - 
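// `ch` above is computed as (e & f) + (!e & g); the two terms are masked by e
// and !e respectively, so their bits are disjoint, the addition never carries,
// and the result equals the usual choice function (e & f) ^ (!e & g).
fn ch(e: u32, f: u32, g: u32) -> u32 {
    (e & f).wrapping_add(!e & g)
}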
new_opcodes.extend(extra_opcodes); - let (c2, extra_opcodes, updated_witness_counter) = c1.add(&ch, updated_witness_counter); - new_opcodes.extend(extra_opcodes); - let (c3, extra_opcodes, updated_witness_counter) = - c2.add(&round_constants[i], updated_witness_counter); - new_opcodes.extend(extra_opcodes); - let (temp1, extra_opcodes, updated_witness_counter) = - c3.add(&w[i], updated_witness_counter); - new_opcodes.extend(extra_opcodes); - - // calculate S0 `a.ror(2) ^ a.ror(13) ^ a.ror(22)` - let (d1, extra_opcodes, updated_witness_counter) = a.ror(2, updated_witness_counter); - new_opcodes.extend(extra_opcodes); - let (d2, extra_opcodes, updated_witness_counter) = a.ror(13, updated_witness_counter); - new_opcodes.extend(extra_opcodes); - let (d3, extra_opcodes, updated_witness_counter) = a.ror(22, updated_witness_counter); - new_opcodes.extend(extra_opcodes); - let (d4, extra_opcodes, updated_witness_counter) = d1.xor(&d2, updated_witness_counter); - new_opcodes.extend(extra_opcodes); - let (S0, extra_opcodes, updated_witness_counter) = d4.xor(&d3, updated_witness_counter); - new_opcodes.extend(extra_opcodes); - - // calculate T0 `b & c` - let (T0, extra_opcodes, updated_witness_counter) = b.and(&c, updated_witness_counter); - new_opcodes.extend(extra_opcodes); - - // calculate maj `(a & (b + c - (T0 + T0))) + T0` which is the same as `(a & b) ^ (a & c) ^ (b & c)` - let (e1, extra_opcodes, updated_witness_counter) = T0.add(&T0, updated_witness_counter); - new_opcodes.extend(extra_opcodes); - let (e2, extra_opcodes, updated_witness_counter) = c.sub(&e1, updated_witness_counter); - new_opcodes.extend(extra_opcodes); - let (e3, extra_opcodes, updated_witness_counter) = b.add(&e2, updated_witness_counter); - new_opcodes.extend(extra_opcodes); - let (e4, extra_opcodes, updated_witness_counter) = a.and(&e3, updated_witness_counter); - new_opcodes.extend(extra_opcodes); - let (maj, extra_opcodes, updated_witness_counter) = e4.add(&T0, updated_witness_counter); - new_opcodes.extend(extra_opcodes); - - // calculate temp2 `S0 + maj` - let (temp2, extra_opcodes, updated_witness_counter) = S0.add(&maj, updated_witness_counter); - new_opcodes.extend(extra_opcodes); - - h = g; - g = f; - f = e; - let (new_e, extra_opcodes, updated_witness_counter) = - d.add(&temp1, updated_witness_counter); - new_opcodes.extend(extra_opcodes); - d = c; - c = b; - b = a; - let (new_a, extra_opcodes, updated_witness_counter) = - temp1.add(&temp2, updated_witness_counter); - new_opcodes.extend(extra_opcodes); - num_witness = updated_witness_counter; - a = new_a; - e = new_e; - } - - let mut output = Vec::new(); - let (output0, extra_opcodes, num_witness) = a.add(&rolling_hash[0], num_witness); - new_opcodes.extend(extra_opcodes); - let (output1, extra_opcodes, num_witness) = b.add(&rolling_hash[1], num_witness); - new_opcodes.extend(extra_opcodes); - let (output2, extra_opcodes, num_witness) = c.add(&rolling_hash[2], num_witness); - new_opcodes.extend(extra_opcodes); - let (output3, extra_opcodes, num_witness) = d.add(&rolling_hash[3], num_witness); - new_opcodes.extend(extra_opcodes); - let (output4, extra_opcodes, num_witness) = e.add(&rolling_hash[4], num_witness); - new_opcodes.extend(extra_opcodes); - let (output5, extra_opcodes, num_witness) = f.add(&rolling_hash[5], num_witness); - new_opcodes.extend(extra_opcodes); - let (output6, extra_opcodes, num_witness) = g.add(&rolling_hash[6], num_witness); - new_opcodes.extend(extra_opcodes); - let (output7, extra_opcodes, num_witness) = h.add(&rolling_hash[7], 
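// The `maj` trick above saves an AND: since b + c == (b ^ c) + 2 * (b & c),
// the value b + c - 2 * T0 (with T0 = b & c) is just b ^ c, and
// (a & (b ^ c)) + (b & c) equals (a & b) ^ (a & c) ^ (b & c) because the two
// summands have disjoint bits. A plain-u32 check of the identity:
fn maj_trick(a: u32, b: u32, c: u32) -> u32 {
    let t0 = b & c;
    (a & b.wrapping_add(c).wrapping_sub(t0.wrapping_add(t0))).wrapping_add(t0)
}

#[test]
fn maj_trick_matches_definition() {
    let (a, b, c) = (0x6a09_e667u32, 0xbb67_ae85, 0x3c6e_f372);
    assert_eq!(maj_trick(a, b, c), (a & b) ^ (a & c) ^ (b & c));
}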
num_witness); - new_opcodes.extend(extra_opcodes); - - output.push(output0); - output.push(output1); - output.push(output2); - output.push(output3); - output.push(output4); - output.push(output5); - output.push(output6); - output.push(output7); - - (output, new_opcodes, num_witness) -} - -/// Load initial state constants of Sha256 -pub(crate) fn prepare_state_constants(mut num_witness: u32) -> (Vec, Vec, u32) { - let mut new_opcodes = Vec::new(); - let mut new_witnesses = Vec::new(); - - for i in INIT_CONSTANTS { - let (new_witness, extra_opcodes, updated_witness_counter) = - UInt32::load_constant(i, num_witness); - new_opcodes.extend(extra_opcodes); - new_witnesses.push(new_witness); - num_witness = updated_witness_counter; - } - - (new_witnesses, new_opcodes, num_witness) -} - -/// Load round constants of Sha256 -pub(crate) fn prepare_round_constants(mut num_witness: u32) -> (Vec, Vec, u32) { - let mut new_opcodes = Vec::new(); - let mut new_witnesses = Vec::new(); - - for i in ROUND_CONSTANTS { - let (new_witness, extra_opcodes, updated_witness_counter) = - UInt32::load_constant(i, num_witness); - new_opcodes.extend(extra_opcodes); - new_witnesses.push(new_witness); - num_witness = updated_witness_counter; - } - - (new_witnesses, new_opcodes, num_witness) -} diff --git a/acvm-repo/stdlib/src/blackbox_fallbacks/uint.rs b/acvm-repo/stdlib/src/blackbox_fallbacks/uint.rs deleted file mode 100644 index 6f4039835f7..00000000000 --- a/acvm-repo/stdlib/src/blackbox_fallbacks/uint.rs +++ /dev/null @@ -1,648 +0,0 @@ -#[macro_export] -macro_rules! impl_uint { - ( - $name:ident, - $type:ty, - $size:expr - ) => { - use acir::{ - brillig::{self, RegisterIndex}, - circuit::{ - brillig::{Brillig, BrilligInputs, BrilligOutputs}, - directives::QuotientDirective, - opcodes::{BlackBoxFuncCall, FunctionInput}, - Opcode, - }, - native_types::{Expression, Witness}, - FieldElement, - }; - use $crate::helpers::VariableStore; - - /// UInt contains a witness that points to a field element that represents a u32 integer - /// It has a inner field of type [Witness] that points to the field element and width = 32 - #[derive(Copy, Clone, Debug)] - pub struct $name { - pub(crate) inner: Witness, - width: u32, - } - - impl $name { - #[cfg(any(test, feature = "testing"))] - pub fn get_inner(&self) -> Witness { - self.inner - } - } - - impl $name { - /// Initialize A new [UInt] type with a [Witness] - pub fn new(witness: Witness) -> Self { - $name { inner: witness, width: $size } - } - - /// Get u(n) + 1 - pub(crate) fn get_max_plus_one( - &self, - mut num_witness: u32, - ) -> ($name, Vec, u32) { - let mut new_opcodes = Vec::new(); - let mut variables = VariableStore::new(&mut num_witness); - let new_witness = variables.new_variable(); - - let brillig_opcode = Opcode::Brillig(Brillig { - inputs: vec![BrilligInputs::Single(Expression { - mul_terms: vec![], - linear_combinations: vec![], - q_c: FieldElement::from(2_u128.pow(self.width)), - })], - outputs: vec![BrilligOutputs::Simple(new_witness)], - bytecode: vec![brillig::Opcode::Stop], - predicate: None, - }); - new_opcodes.push(brillig_opcode); - let num_witness = variables.finalize(); - - ($name::new(new_witness), new_opcodes, num_witness) - } - - /// Load a constant into the circuit - pub(crate) fn load_constant( - constant: $type, - mut num_witness: u32, - ) -> ($name, Vec, u32) { - let mut new_opcodes = Vec::new(); - let mut variables = VariableStore::new(&mut num_witness); - let new_witness = variables.new_variable(); - - let brillig_opcode = Opcode::Brillig(Brillig 
{ - inputs: vec![BrilligInputs::Single(Expression { - mul_terms: vec![], - linear_combinations: vec![], - q_c: FieldElement::from(constant as u128), - })], - outputs: vec![BrilligOutputs::Simple(new_witness)], - bytecode: vec![brillig::Opcode::Stop], - predicate: None, - }); - new_opcodes.push(brillig_opcode); - let num_witness = variables.finalize(); - - ($name::new(new_witness), new_opcodes, num_witness) - } - - /// Returns the quotient and remainder such that lhs = rhs * quotient + remainder - // This should be the same as its equivalent in the Noir repo - pub fn euclidean_division( - lhs: &$name, - rhs: &$name, - mut num_witness: u32, - ) -> ($name, $name, Vec, u32) { - let mut new_opcodes = Vec::new(); - let mut variables = VariableStore::new(&mut num_witness); - let q_witness = variables.new_variable(); - let r_witness = variables.new_variable(); - - // compute quotient using directive function - let quotient_opcode = Opcode::Directive( - acir::circuit::directives::Directive::Quotient(QuotientDirective { - a: lhs.inner.into(), - b: rhs.inner.into(), - q: q_witness, - r: r_witness, - predicate: None, - }), - ); - new_opcodes.push(quotient_opcode); - - // make sure r and q are in 32 bit range - let r_range_opcode = Opcode::BlackBoxFuncCall(BlackBoxFuncCall::RANGE { - input: FunctionInput { witness: r_witness, num_bits: lhs.width }, - }); - let q_range_opcode = Opcode::BlackBoxFuncCall(BlackBoxFuncCall::RANGE { - input: FunctionInput { witness: q_witness, num_bits: lhs.width }, - }); - new_opcodes.push(r_range_opcode); - new_opcodes.push(q_range_opcode); - let num_witness = variables.finalize(); - - // constrain r < rhs - let (rhs_sub_r, extra_opcodes, num_witness) = - rhs.sub_no_overflow(&$name::new(r_witness), num_witness); - new_opcodes.extend(extra_opcodes); - let rhs_sub_r_range_opcode = Opcode::BlackBoxFuncCall(BlackBoxFuncCall::RANGE { - input: FunctionInput { witness: rhs_sub_r.inner, num_bits: lhs.width }, - }); - new_opcodes.push(rhs_sub_r_range_opcode); - - // constrain lhs = rhs * quotient + remainder - let rhs_expr = Expression::from(rhs.inner); - let lhs_constraint = Expression::from(lhs.inner); - let rhs_constraint = &rhs_expr * &Expression::from(q_witness); - let rhs_constraint = &rhs_constraint.unwrap() + &Expression::from(r_witness); - let div_euclidean = &lhs_constraint - &rhs_constraint; - new_opcodes.push(Opcode::Arithmetic(div_euclidean)); - - ($name::new(q_witness), $name::new(r_witness), new_opcodes, num_witness) - } - - /// Rotate left `rotation` bits. `(x << rotation) | (x >> (width - rotation))` - // This should be the same as `u32.rotate_left(rotation)` in rust stdlib - pub fn rol(&self, rotation: u32, num_witness: u32) -> ($name, Vec, u32) { - let rotation = rotation % self.width; - let mut new_opcodes = Vec::new(); - let (right_shift, extra_opcodes, num_witness) = - self.rightshift(self.width - rotation, num_witness); - new_opcodes.extend(extra_opcodes); - let (left_shift, extra_opcodes, num_witness) = - self.leftshift(rotation, num_witness); - new_opcodes.extend(extra_opcodes); - let (result, extra_opcodes, num_witness) = left_shift.or(&right_shift, num_witness); - new_opcodes.extend(extra_opcodes); - - (result, new_opcodes, num_witness) - } - - /// Rotate right `rotation` bits. 
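// Plain-integer counterparts of the two gadgets above: euclidean_division
// proves lhs == rhs * q + r with r < rhs and q, r range-checked to the
// integer's width, while rol assembles a rotate-left out of two shifts and an OR.
fn euclidean_division(lhs: u32, rhs: u32) -> (u32, u32) {
    (lhs / rhs, lhs % rhs)
}

fn rol(x: u32, rotation: u32) -> u32 {
    let rotation = rotation % 32; // the gadget reduces the rotation modulo the width first
    (x << rotation) | (x >> ((32 - rotation) % 32)) // == x.rotate_left(rotation)
}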
`(x >> rotation) | (x << (width - rotation))` - // This should be the same as `u32.rotate_right(rotation)` in rust stdlib - pub fn ror(&self, rotation: u32, num_witness: u32) -> ($name, Vec, u32) { - let rotation = rotation % self.width; - let mut new_opcodes = Vec::new(); - let (left_shift, extra_opcodes, num_witness) = - self.leftshift(self.width - rotation, num_witness); - new_opcodes.extend(extra_opcodes); - let (right_shift, extra_opcodes, num_witness) = - self.rightshift(rotation, num_witness); - new_opcodes.extend(extra_opcodes); - let (result, extra_opcodes, num_witness) = left_shift.or(&right_shift, num_witness); - new_opcodes.extend(extra_opcodes); - - (result, new_opcodes, num_witness) - } - - /// left shift by `bits` - pub fn leftshift(&self, bits: u32, num_witness: u32) -> ($name, Vec, u32) { - let bits = bits % self.width; - let mut new_opcodes = Vec::new(); - let two: $type = 2; - let (two_pow_rhs, extra_opcodes, num_witness) = - $name::load_constant(two.pow(bits), num_witness); - new_opcodes.extend(extra_opcodes); - let (left_shift, extra_opcodes, num_witness) = self.mul(&two_pow_rhs, num_witness); - new_opcodes.extend(extra_opcodes); - - (left_shift, new_opcodes, num_witness) - } - - /// right shift by `bits` - pub fn rightshift(&self, bits: u32, num_witness: u32) -> ($name, Vec, u32) { - let bits = bits % self.width; - let mut new_opcodes = Vec::new(); - let two: $type = 2; - let (two_pow_rhs, extra_opcodes, num_witness) = - $name::load_constant(two.pow(bits), num_witness); - new_opcodes.extend(extra_opcodes); - let (right_shift, _, extra_opcodes, num_witness) = - $name::euclidean_division(self, &two_pow_rhs, num_witness); - new_opcodes.extend(extra_opcodes); - - (right_shift, new_opcodes, num_witness) - } - - /// Caculate and constrain `self` + `rhs` - pub fn add(&self, rhs: &$name, mut num_witness: u32) -> ($name, Vec, u32) { - let mut new_opcodes = Vec::new(); - let mut variables = VariableStore::new(&mut num_witness); - let new_witness = variables.new_variable(); - - // calculate `self` + `rhs` with overflow - let brillig_opcode = Opcode::Brillig(Brillig { - inputs: vec![ - BrilligInputs::Single(Expression { - mul_terms: vec![], - linear_combinations: vec![(FieldElement::one(), self.inner)], - q_c: FieldElement::zero(), - }), - BrilligInputs::Single(Expression { - mul_terms: vec![], - linear_combinations: vec![(FieldElement::one(), rhs.inner)], - q_c: FieldElement::zero(), - }), - ], - outputs: vec![BrilligOutputs::Simple(new_witness)], - bytecode: vec![brillig::Opcode::BinaryIntOp { - op: brillig::BinaryIntOp::Add, - bit_size: 127, - lhs: RegisterIndex::from(0), - rhs: RegisterIndex::from(1), - destination: RegisterIndex::from(0), - }], - predicate: None, - }); - new_opcodes.push(brillig_opcode); - let num_witness = variables.finalize(); - - // constrain addition - let mut add_expr = Expression::from(new_witness); - add_expr.push_addition_term(-FieldElement::one(), self.inner); - add_expr.push_addition_term(-FieldElement::one(), rhs.inner); - new_opcodes.push(Opcode::Arithmetic(add_expr)); - - // mod 2^width to get final result as the remainder - let (two_pow_width, extra_opcodes, num_witness) = - self.get_max_plus_one(num_witness); - new_opcodes.extend(extra_opcodes); - let (_, add_mod, extra_opcodes, num_witness) = $name::euclidean_division( - &$name::new(new_witness), - &two_pow_width, - num_witness, - ); - new_opcodes.extend(extra_opcodes); - - (add_mod, new_opcodes, num_witness) - } - - /// Caculate and constrain `self` - `rhs` - pub fn sub(&self, rhs: &$name, 
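// Plain-integer equivalents of the shift and add gadgets above, assuming
// `bits` has already been reduced modulo the width as the gadgets do:
// a left shift is a multiplication by 2^bits taken mod 2^width, a right shift
// is the quotient of a euclidean division by 2^bits, and add reduces the raw
// sum modulo 2^width.
fn leftshift(x: u32, bits: u32) -> u32 {
    x.wrapping_mul(1u32 << bits)
}

fn rightshift(x: u32, bits: u32) -> u32 {
    x / (1u32 << bits)
}

fn add(x: u32, y: u32) -> u32 {
    ((x as u64 + y as u64) % (1u64 << 32)) as u32 // == x.wrapping_add(y)
}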
mut num_witness: u32) -> ($name, Vec, u32) { - let mut new_opcodes = Vec::new(); - let mut variables = VariableStore::new(&mut num_witness); - let new_witness = variables.new_variable(); - - // calculate 2^32 + self - rhs to avoid overflow - let brillig_opcode = Opcode::Brillig(Brillig { - inputs: vec![ - BrilligInputs::Single(Expression { - mul_terms: vec![], - linear_combinations: vec![(FieldElement::one(), self.inner)], - q_c: FieldElement::zero(), - }), - BrilligInputs::Single(Expression { - mul_terms: vec![], - linear_combinations: vec![(FieldElement::one(), rhs.inner)], - q_c: FieldElement::zero(), - }), - BrilligInputs::Single(Expression { - mul_terms: vec![], - linear_combinations: vec![], - q_c: FieldElement::from(1_u128 << self.width), - }), - ], - outputs: vec![BrilligOutputs::Simple(new_witness)], - bytecode: vec![ - brillig::Opcode::BinaryIntOp { - op: brillig::BinaryIntOp::Add, - bit_size: 127, - lhs: RegisterIndex::from(0), - rhs: RegisterIndex::from(2), - destination: RegisterIndex::from(0), - }, - brillig::Opcode::BinaryIntOp { - op: brillig::BinaryIntOp::Sub, - bit_size: 127, - lhs: RegisterIndex::from(0), - rhs: RegisterIndex::from(1), - destination: RegisterIndex::from(0), - }, - ], - predicate: None, - }); - new_opcodes.push(brillig_opcode); - let num_witness = variables.finalize(); - - // constrain subtraction - let mut sub_constraint = Expression::from(self.inner); - sub_constraint.push_addition_term(-FieldElement::one(), new_witness); - sub_constraint.push_addition_term(-FieldElement::one(), rhs.inner); - sub_constraint.q_c = FieldElement::from(1_u128 << self.width); - new_opcodes.push(Opcode::Arithmetic(sub_constraint)); - - // mod 2^width to get final result as the remainder - let (two_pow_width, extra_opcodes, num_witness) = - self.get_max_plus_one(num_witness); - new_opcodes.extend(extra_opcodes); - let (_, sub_mod, extra_opcodes, num_witness) = $name::euclidean_division( - &$name::new(new_witness), - &two_pow_width, - num_witness, - ); - new_opcodes.extend(extra_opcodes); - - (sub_mod, new_opcodes, num_witness) - } - - /// Calculate and constrain `self` - `rhs` - 1 without allowing overflow - /// This is a helper function to `euclidean_division` - // There is a `-1` because theres a case where rhs = 2^32 and remainder = 0 - pub(crate) fn sub_no_overflow( - &self, - rhs: &$name, - mut num_witness: u32, - ) -> ($name, Vec, u32) { - let mut new_opcodes = Vec::new(); - let mut variables = VariableStore::new(&mut num_witness); - let new_witness = variables.new_variable(); - - // calculate self - rhs - 1 - let brillig_opcode = Opcode::Brillig(Brillig { - inputs: vec![ - BrilligInputs::Single(Expression { - mul_terms: vec![], - linear_combinations: vec![(FieldElement::one(), self.inner)], - q_c: FieldElement::zero(), - }), - BrilligInputs::Single(Expression { - mul_terms: vec![], - linear_combinations: vec![(FieldElement::one(), rhs.inner)], - q_c: FieldElement::zero(), - }), - BrilligInputs::Single(Expression { - mul_terms: vec![], - linear_combinations: vec![], - q_c: FieldElement::one(), - }), - ], - outputs: vec![BrilligOutputs::Simple(new_witness)], - bytecode: vec![ - brillig::Opcode::BinaryIntOp { - op: brillig::BinaryIntOp::Sub, - bit_size: 127, - lhs: RegisterIndex::from(0), - rhs: RegisterIndex::from(1), - destination: RegisterIndex::from(0), - }, - brillig::Opcode::BinaryIntOp { - op: brillig::BinaryIntOp::Sub, - bit_size: 127, - lhs: RegisterIndex::from(0), - rhs: RegisterIndex::from(2), - destination: RegisterIndex::from(0), - }, - ], - predicate: None, - 
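// `sub` avoids underflow in the field by computing 2^width + self - rhs and
// then reducing modulo 2^width, which is exactly wrapping subtraction:
fn sub_mod_width(a: u32, b: u32) -> u32 {
    (((1u64 << 32) + a as u64 - b as u64) % (1u64 << 32)) as u32 // == a.wrapping_sub(b)
}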
}); - new_opcodes.push(brillig_opcode); - let num_witness = variables.finalize(); - - // constrain subtraction - let mut sub_constraint = Expression::from(self.inner); - sub_constraint.push_addition_term(-FieldElement::one(), new_witness); - sub_constraint.push_addition_term(-FieldElement::one(), rhs.inner); - sub_constraint.q_c = -FieldElement::one(); - new_opcodes.push(Opcode::Arithmetic(sub_constraint)); - - ($name::new(new_witness), new_opcodes, num_witness) - } - - /// Calculate and constrain `self` * `rhs` - pub(crate) fn mul( - &self, - rhs: &$name, - mut num_witness: u32, - ) -> ($name, Vec, u32) { - let mut new_opcodes = Vec::new(); - let mut variables = VariableStore::new(&mut num_witness); - let new_witness = variables.new_variable(); - - // calulate `self` * `rhs` with overflow - let brillig_opcode = Opcode::Brillig(Brillig { - inputs: vec![ - BrilligInputs::Single(Expression { - mul_terms: vec![], - linear_combinations: vec![(FieldElement::one(), self.inner)], - q_c: FieldElement::zero(), - }), - BrilligInputs::Single(Expression { - mul_terms: vec![], - linear_combinations: vec![(FieldElement::one(), rhs.inner)], - q_c: FieldElement::zero(), - }), - ], - outputs: vec![BrilligOutputs::Simple(new_witness)], - bytecode: vec![brillig::Opcode::BinaryFieldOp { - op: brillig::BinaryFieldOp::Mul, - lhs: RegisterIndex::from(0), - rhs: RegisterIndex::from(1), - destination: RegisterIndex::from(0), - }], - predicate: None, - }); - new_opcodes.push(brillig_opcode); - let num_witness = variables.finalize(); - - // constrain mul - let mut mul_constraint = Expression::from(new_witness); - mul_constraint.push_multiplication_term( - -FieldElement::one(), - self.inner, - rhs.inner, - ); - new_opcodes.push(Opcode::Arithmetic(mul_constraint)); - - // mod 2^width to get final result as the remainder - let (two_pow_rhs, extra_opcodes, num_witness) = self.get_max_plus_one(num_witness); - new_opcodes.extend(extra_opcodes); - let (_, mul_mod, extra_opcodes, num_witness) = - $name::euclidean_division(&$name::new(new_witness), &two_pow_rhs, num_witness); - new_opcodes.extend(extra_opcodes); - - (mul_mod, new_opcodes, num_witness) - } - - /// Calculate and constrain `self` and `rhs` - pub fn and(&self, rhs: &$name, mut num_witness: u32) -> ($name, Vec, u32) { - let mut new_opcodes = Vec::new(); - let mut variables = VariableStore::new(&mut num_witness); - let new_witness = variables.new_variable(); - let num_witness = variables.finalize(); - let and_opcode = Opcode::BlackBoxFuncCall(BlackBoxFuncCall::AND { - lhs: FunctionInput { witness: self.inner, num_bits: self.width }, - rhs: FunctionInput { witness: rhs.inner, num_bits: self.width }, - output: new_witness, - }); - new_opcodes.push(and_opcode); - - ($name::new(new_witness), new_opcodes, num_witness) - } - - /// Calculate and constrain `self` xor `rhs` - pub fn xor(&self, rhs: &$name, mut num_witness: u32) -> ($name, Vec, u32) { - let mut new_opcodes = Vec::new(); - let mut variables = VariableStore::new(&mut num_witness); - let new_witness = variables.new_variable(); - let num_witness = variables.finalize(); - let xor_opcode = Opcode::BlackBoxFuncCall(BlackBoxFuncCall::XOR { - lhs: FunctionInput { witness: self.inner, num_bits: self.width }, - rhs: FunctionInput { witness: rhs.inner, num_bits: self.width }, - output: new_witness, - }); - new_opcodes.push(xor_opcode); - - ($name::new(new_witness), new_opcodes, num_witness) - } - - /// Calculate and constrain `self` or `rhs` - pub fn or(&self, rhs: &$name, num_witness: u32) -> ($name, Vec, u32) { - let 
mut new_opcodes = Vec::new(); - - // a | b = (a & b) + (a ^ b) - let (a_and_b, extra_opcodes, num_witness) = self.and(rhs, num_witness); - new_opcodes.extend(extra_opcodes); - let (a_xor_b, extra_opcodes, num_witness) = self.xor(rhs, num_witness); - new_opcodes.extend(extra_opcodes); - let (or, extra_opcodes, num_witness) = a_and_b.add(&a_xor_b, num_witness); - new_opcodes.extend(extra_opcodes); - - (or, new_opcodes, num_witness) - } - - /// Calculate and constrain not `self` - pub(crate) fn not(&self, mut num_witness: u32) -> ($name, Vec, u32) { - let mut new_opcodes = Vec::new(); - let mut variables = VariableStore::new(&mut num_witness); - let new_witness = variables.new_variable(); - - let brillig_opcode = Opcode::Brillig(Brillig { - inputs: vec![ - BrilligInputs::Single(Expression { - mul_terms: vec![], - linear_combinations: vec![(FieldElement::one(), self.inner)], - q_c: FieldElement::zero(), - }), - BrilligInputs::Single(Expression { - mul_terms: vec![], - linear_combinations: vec![], - q_c: FieldElement::from((1_u128 << self.width) - 1), - }), - ], - outputs: vec![BrilligOutputs::Simple(new_witness)], - bytecode: vec![brillig::Opcode::BinaryIntOp { - op: brillig::BinaryIntOp::Sub, - bit_size: self.width, - lhs: RegisterIndex::from(1), - rhs: RegisterIndex::from(0), - destination: RegisterIndex::from(0), - }], - predicate: None, - }); - new_opcodes.push(brillig_opcode); - let num_witness = variables.finalize(); - - let mut not_constraint = Expression::from(new_witness); - not_constraint.push_addition_term(FieldElement::one(), self.inner); - not_constraint.q_c = -FieldElement::from((1_u128 << self.width) - 1); - new_opcodes.push(Opcode::Arithmetic(not_constraint)); - - ($name::new(new_witness), new_opcodes, num_witness) - } - - /// Calculate and constrain `self` >= `rhs` - // This should be similar to its equivalent in the Noir repo - pub(crate) fn more_than_eq_comparison( - &self, - rhs: &$name, - mut num_witness: u32, - ) -> ($name, Vec, u32) { - let mut new_opcodes = Vec::new(); - let mut variables = VariableStore::new(&mut num_witness); - let new_witness = variables.new_variable(); - let q_witness = variables.new_variable(); - let r_witness = variables.new_variable(); - - // calculate 2^32 + self - rhs - let brillig_opcode = Opcode::Brillig(Brillig { - inputs: vec![ - BrilligInputs::Single(Expression { - mul_terms: vec![], - linear_combinations: vec![(FieldElement::one(), self.inner)], - q_c: FieldElement::zero(), - }), - BrilligInputs::Single(Expression { - mul_terms: vec![], - linear_combinations: vec![(FieldElement::one(), rhs.inner)], - q_c: FieldElement::zero(), - }), - BrilligInputs::Single(Expression { - mul_terms: vec![], - linear_combinations: vec![], - q_c: FieldElement::from(1_u128 << self.width), - }), - ], - outputs: vec![BrilligOutputs::Simple(new_witness)], - bytecode: vec![ - brillig::Opcode::BinaryIntOp { - op: brillig::BinaryIntOp::Add, - bit_size: 127, - lhs: RegisterIndex::from(0), - rhs: RegisterIndex::from(2), - destination: RegisterIndex::from(0), - }, - brillig::Opcode::BinaryIntOp { - op: brillig::BinaryIntOp::Sub, - bit_size: 127, - lhs: RegisterIndex::from(0), - rhs: RegisterIndex::from(1), - destination: RegisterIndex::from(0), - }, - ], - predicate: None, - }); - new_opcodes.push(brillig_opcode); - let num_witness = variables.finalize(); - - // constrain subtraction - let mut sub_constraint = Expression::from(self.inner); - sub_constraint.push_addition_term(-FieldElement::one(), new_witness); - sub_constraint.push_addition_term(-FieldElement::one(), 
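// Two identities used by the gadgets above, checked over plain u32 values:
//   a | b == (a & b) + (a ^ b)      (the two summands have disjoint bits)
//   !a    == (2^width - 1) - a      (what the `not` constraint enforces)
#[test]
fn or_and_not_identities() {
    let (a, b) = (0xdead_beefu32, 0x1234_5678u32);
    assert_eq!(a | b, (a & b) + (a ^ b));
    assert_eq!(!a, u32::MAX - a);
}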
rhs.inner); - sub_constraint.q_c = FieldElement::from(1_u128 << self.width); - new_opcodes.push(Opcode::Arithmetic(sub_constraint)); - - let (two_pow_rhs, extra_opcodes, num_witness) = self.get_max_plus_one(num_witness); - new_opcodes.extend(extra_opcodes); - - // constraint 2^{max_bits} + a - b = q * 2^{max_bits} + r - // q = 1 if a == b - // q = 1 if a > b - // q = 0 if a < b - let quotient_opcode = Opcode::Directive( - acir::circuit::directives::Directive::Quotient(QuotientDirective { - a: new_witness.into(), - b: two_pow_rhs.inner.into(), - q: q_witness, - r: r_witness, - predicate: None, - }), - ); - new_opcodes.push(quotient_opcode); - - // make sure r in 32 bit range and q is 1 bit - let r_range_opcode = Opcode::BlackBoxFuncCall(BlackBoxFuncCall::RANGE { - input: FunctionInput { witness: r_witness, num_bits: self.width }, - }); - let q_range_opcode = Opcode::BlackBoxFuncCall(BlackBoxFuncCall::RANGE { - input: FunctionInput { witness: q_witness, num_bits: 1 }, - }); - new_opcodes.push(r_range_opcode); - new_opcodes.push(q_range_opcode); - - ($name::new(q_witness), new_opcodes, num_witness) - } - - /// Calculate and constrain `self` < `rhs` - pub fn less_than_comparison( - &self, - rhs: &$name, - num_witness: u32, - ) -> ($name, Vec, u32) { - let mut new_opcodes = Vec::new(); - let (mut comparison, extra_opcodes, num_witness) = - self.more_than_eq_comparison(rhs, num_witness); - new_opcodes.extend(extra_opcodes); - comparison.width = 1; - - // `self` < `rhs` == not `self` >= `rhs` - let (less_than, extra_opcodes, num_witness) = comparison.not(num_witness); - new_opcodes.extend(extra_opcodes); - - (less_than, new_opcodes, num_witness) - } - } - }; -} diff --git a/acvm-repo/stdlib/src/blackbox_fallbacks/uint32.rs b/acvm-repo/stdlib/src/blackbox_fallbacks/uint32.rs deleted file mode 100644 index 58314d6ba4c..00000000000 --- a/acvm-repo/stdlib/src/blackbox_fallbacks/uint32.rs +++ /dev/null @@ -1,30 +0,0 @@ -use crate::impl_uint; - -impl_uint!(UInt32, u32, 32); -impl UInt32 { - /// Load a [UInt32] from four [Witness]es each representing a [u8] - pub(crate) fn from_witnesses( - witnesses: &[Witness], - mut num_witness: u32, - ) -> (Vec, Vec, u32) { - let mut new_opcodes = Vec::new(); - let mut variables = VariableStore::new(&mut num_witness); - let mut uint = Vec::new(); - - for i in 0..witnesses.len() / 4 { - let new_witness = variables.new_variable(); - uint.push(UInt32::new(new_witness)); - let mut expr = Expression::from(new_witness); - for j in 0..4 { - let scaling_factor_value = 1 << (8 * (3 - j) as u32); - let scaling_factor = FieldElement::from(scaling_factor_value as u128); - expr.push_addition_term(-scaling_factor, witnesses[i * 4 + j]); - } - - new_opcodes.push(Opcode::Arithmetic(expr)); - } - let num_witness = variables.finalize(); - - (uint, new_opcodes, num_witness) - } -} diff --git a/acvm-repo/stdlib/src/blackbox_fallbacks/uint64.rs b/acvm-repo/stdlib/src/blackbox_fallbacks/uint64.rs deleted file mode 100644 index cddb23275cb..00000000000 --- a/acvm-repo/stdlib/src/blackbox_fallbacks/uint64.rs +++ /dev/null @@ -1,30 +0,0 @@ -use crate::impl_uint; - -impl_uint!(UInt64, u64, 64); -impl UInt64 { - /// Load a [UInt64] from eight [Witness]es each representing a [u8] - pub(crate) fn from_witnesses( - witnesses: &[Witness], - mut num_witness: u32, - ) -> (Vec, Vec, u32) { - let mut new_opcodes = Vec::new(); - let mut variables = VariableStore::new(&mut num_witness); - let mut uint = Vec::new(); - - for i in 0..witnesses.len() / 8 { - let new_witness = variables.new_variable(); - 
uint.push(UInt64::new(new_witness)); - let mut expr = Expression::from(new_witness); - for j in 0..8 { - let scaling_factor_value: u128 = 1 << (8 * (7 - j) as u32); - let scaling_factor = FieldElement::from(scaling_factor_value); - expr.push_addition_term(-scaling_factor, witnesses[i * 8 + j]); - } - - new_opcodes.push(Opcode::Arithmetic(expr)); - } - let num_witness = variables.finalize(); - - (uint, new_opcodes, num_witness) - } -} diff --git a/acvm-repo/stdlib/src/blackbox_fallbacks/uint8.rs b/acvm-repo/stdlib/src/blackbox_fallbacks/uint8.rs deleted file mode 100644 index 2ffc2cae1be..00000000000 --- a/acvm-repo/stdlib/src/blackbox_fallbacks/uint8.rs +++ /dev/null @@ -1,2 +0,0 @@ -use crate::impl_uint; -impl_uint!(UInt8, u8, 8); diff --git a/acvm-repo/stdlib/src/blackbox_fallbacks/utils.rs b/acvm-repo/stdlib/src/blackbox_fallbacks/utils.rs deleted file mode 100644 index 4921c71c9fe..00000000000 --- a/acvm-repo/stdlib/src/blackbox_fallbacks/utils.rs +++ /dev/null @@ -1,175 +0,0 @@ -use crate::helpers::VariableStore; -use acir::{ - circuit::{ - directives::Directive, - opcodes::{BlackBoxFuncCall, FunctionInput}, - Opcode, - }, - native_types::{Expression, Witness}, - FieldElement, -}; - -fn round_to_nearest_mul_8(num_bits: u32) -> u32 { - let remainder = num_bits % 8; - - if remainder == 0 { - return num_bits; - } - - num_bits + 8 - remainder -} - -pub(crate) fn round_to_nearest_byte(num_bits: u32) -> u32 { - round_to_nearest_mul_8(num_bits) / 8 -} - -pub(crate) fn boolean_expr(expr: &Expression, variables: &mut VariableStore) -> Expression { - &mul_with_witness(expr, expr, variables) - expr -} - -/// Returns an expression which represents `lhs * rhs` -/// -/// If one has multiplicative term and the other is of degree one or more, -/// the function creates [intermediate variables][`Witness`] accordingly. -/// There are two cases where we can optimize the multiplication between two expressions: -/// 1. If both expressions have at most a total degree of 1 in each term, then we can just multiply them -/// as each term in the result will be degree-2. -/// 2. If one expression is a constant, then we can just multiply the constant with the other expression -/// -/// (1) is because an [`Expression`] can hold at most a degree-2 univariate polynomial -/// which is what you get when you multiply two degree-1 univariate polynomials. -pub(crate) fn mul_with_witness( - lhs: &Expression, - rhs: &Expression, - variables: &mut VariableStore, -) -> Expression { - use std::borrow::Cow; - let lhs_is_linear = lhs.is_linear(); - let rhs_is_linear = rhs.is_linear(); - - // Case 1: Both expressions have at most a total degree of 1 in each term - if lhs_is_linear && rhs_is_linear { - return (lhs * rhs) - .expect("one of the expressions is a constant and so this should not fail"); - } - - // Case 2: One or both of the sides needs to be reduced to a degree-1 univariate polynomial - let lhs_reduced = if lhs_is_linear { - Cow::Borrowed(lhs) - } else { - Cow::Owned(variables.new_variable().into()) - }; - - // If the lhs and rhs are the same, then we do not need to reduce - // rhs, we only need to square the lhs. 
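For reference, the deleted `impl_uint` fallbacks above encode three identities (OR as AND plus XOR, NOT as subtraction from the all-ones constant, and greater-or-equal via an added 2^width offset) that can be checked with ordinary integer arithmetic. A standalone sketch in plain Rust over u32, independent of the acir/brillig types:

fn or_via_and_xor(a: u32, b: u32) -> u32 {
    // a | b = (a & b) + (a ^ b): the two terms have disjoint set bits,
    // so their sum equals the bitwise OR.
    (a & b) + (a ^ b)
}

fn not_via_subtraction(a: u32) -> u32 {
    // !a = (2^width - 1) - a, the same constant used by the Brillig subtraction above.
    u32::MAX - a
}

fn more_than_eq(a: u32, b: u32) -> bool {
    // 2^width + a - b is at least 2^width exactly when a >= b, so the quotient of
    // dividing by 2^width (the `q` witness above) is 1 for a >= b and 0 otherwise.
    let shifted = (1u64 << 32) + a as u64 - b as u64;
    (shifted >> 32) == 1
}

fn main() {
    assert_eq!(or_via_and_xor(0b1100, 0b1010), 0b1100 | 0b1010);
    assert_eq!(not_via_subtraction(0b1100), !0b1100u32);
    assert!(more_than_eq(7, 7) && more_than_eq(8, 7) && !more_than_eq(6, 7));
}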
- if lhs == rhs { - return (&*lhs_reduced * &*lhs_reduced) - .expect("Both expressions are reduced to be degree<=1"); - }; - - let rhs_reduced = if rhs_is_linear { - Cow::Borrowed(rhs) - } else { - Cow::Owned(variables.new_variable().into()) - }; - - (&*lhs_reduced * &*rhs_reduced).expect("Both expressions are reduced to be degree<=1") -} - -// Generates opcodes and directives to bit decompose the input `opcode` -// Returns the bits and the updated witness counter -// TODO:Ideally, we return the updated witness counter, or we require the input -// TODO to be a VariableStore. We are not doing this because we want migration to -// TODO be less painful -pub(crate) fn bit_decomposition( - opcode: Expression, - bit_size: u32, - mut num_witness: u32, -) -> (Vec, Vec, u32) { - let mut new_opcodes = Vec::new(); - let mut variables = VariableStore::new(&mut num_witness); - - // First create a witness for each bit - let mut bit_vector = Vec::with_capacity(bit_size as usize); - for _ in 0..bit_size { - bit_vector.push(variables.new_variable()); - } - - // Next create a directive which computes those bits. - new_opcodes.push(Opcode::Directive(Directive::ToLeRadix { - a: opcode.clone(), - b: bit_vector.clone(), - radix: 2, - })); - - // Now apply constraints to the bits such that they are the bit decomposition - // of the input and each bit is actually a bit - let mut binary_exprs = Vec::new(); - let mut bit_decomp_constraint = opcode; - let mut two_pow: FieldElement = FieldElement::one(); - let two = FieldElement::from(2_i128); - for &bit in &bit_vector { - // Bit constraint to ensure each bit is a zero or one; bit^2 - bit = 0 - let expr = boolean_expr(&bit.into(), &mut variables); - binary_exprs.push(Opcode::Arithmetic(expr)); - - // Constraint to ensure that the bits are constrained to be a bit decomposition - // of the input - // ie \sum 2^i * x_i = input - bit_decomp_constraint.push_addition_term(-two_pow, bit); - two_pow = two * two_pow; - } - - new_opcodes.extend(binary_exprs); - bit_decomp_constraint.sort(); // TODO: we have an issue open to check if this is needed. Ideally, we remove it. - new_opcodes.push(Opcode::Arithmetic(bit_decomp_constraint)); - - (new_opcodes, bit_vector, variables.finalize()) -} - -// TODO: Maybe this can be merged with `bit_decomposition` -pub(crate) fn byte_decomposition( - opcode: Expression, - num_bytes: u32, - mut num_witness: u32, -) -> (Vec, Vec, u32) { - let mut new_opcodes = Vec::new(); - let mut variables = VariableStore::new(&mut num_witness); - - // First create a witness for each byte - let mut vector = Vec::with_capacity(num_bytes as usize); - for _ in 0..num_bytes { - vector.push(variables.new_variable()); - } - - // Next create a directive which computes those byte. 
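The booleanity and weighted-sum constraints that the removed `bit_decomposition` emits amount to the check sketched below in plain Rust, with u32/u64 values standing in for field elements:

fn check_bit_decomposition(value: u32, bits: &[u32]) -> bool {
    let mut weighted_sum: u64 = 0;
    let mut two_pow: u64 = 1;
    for &bit in bits {
        // bit^2 - bit = 0 holds only for 0 and 1 (the `boolean_expr` constraint).
        if bit * bit != bit {
            return false;
        }
        // \sum 2^i * x_i must reconstruct the input (little-endian, radix 2,
        // matching the ToLeRadix directive).
        weighted_sum += two_pow * bit as u64;
        two_pow *= 2;
    }
    weighted_sum == value as u64
}

fn main() {
    // 13 = 1 + 0*2 + 1*4 + 1*8, little-endian bits as ToLeRadix would produce them.
    assert!(check_bit_decomposition(13, &[1, 0, 1, 1]));
    assert!(!check_bit_decomposition(13, &[1, 1, 1, 1])); // wrong reconstruction
    assert!(!check_bit_decomposition(13, &[3, 0, 1, 1])); // 3 is not a bit
}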
- new_opcodes.push(Opcode::Directive(Directive::ToLeRadix { - a: opcode.clone(), - b: vector.clone(), - radix: 256, - })); - vector.reverse(); - - // Now apply constraints to the bytes such that they are the byte decomposition - // of the input and each byte is actually a byte - let mut byte_exprs = Vec::new(); - let mut decomp_constraint = opcode; - let byte_shift: u128 = 256; - for (i, v) in vector.iter().enumerate() { - let range = Opcode::BlackBoxFuncCall(BlackBoxFuncCall::RANGE { - input: FunctionInput { witness: *v, num_bits: 8 }, - }); - let scaling_factor_value = byte_shift.pow(num_bytes - 1 - i as u32); - let scaling_factor = FieldElement::from(scaling_factor_value); - - decomp_constraint.push_addition_term(-scaling_factor, *v); - - byte_exprs.push(range); - } - - new_opcodes.extend(byte_exprs); - decomp_constraint.sort(); - new_opcodes.push(Opcode::Arithmetic(decomp_constraint)); - - (new_opcodes, vector, variables.finalize()) -} diff --git a/acvm-repo/stdlib/src/helpers.rs b/acvm-repo/stdlib/src/helpers.rs deleted file mode 100644 index 5ab258368f4..00000000000 --- a/acvm-repo/stdlib/src/helpers.rs +++ /dev/null @@ -1,23 +0,0 @@ -use acir::native_types::Witness; - -// Simple helper struct to keep track of the current witness index -// and create variables -pub struct VariableStore<'a> { - witness_index: &'a mut u32, -} - -impl<'a> VariableStore<'a> { - pub fn new(witness_index: &'a mut u32) -> Self { - Self { witness_index } - } - - pub fn new_variable(&mut self) -> Witness { - let witness = Witness(*self.witness_index); - *self.witness_index += 1; - witness - } - - pub fn finalize(self) -> u32 { - *self.witness_index - } -} diff --git a/acvm-repo/stdlib/src/lib.rs b/acvm-repo/stdlib/src/lib.rs deleted file mode 100644 index 9aecde631fb..00000000000 --- a/acvm-repo/stdlib/src/lib.rs +++ /dev/null @@ -1,7 +0,0 @@ -#![forbid(unsafe_code)] -#![warn(unreachable_pub)] -#![warn(clippy::semicolon_if_nothing_returned)] -#![cfg_attr(not(test), warn(unused_crate_dependencies, unused_extern_crates))] - -pub mod blackbox_fallbacks; -pub mod helpers; diff --git a/aztec_macros/Cargo.toml b/aztec_macros/Cargo.toml new file mode 100644 index 00000000000..04f74d3b022 --- /dev/null +++ b/aztec_macros/Cargo.toml @@ -0,0 +1,14 @@ +[package] +name = "aztec_macros" +version.workspace = true +authors.workspace = true +edition.workspace = true +rust-version.workspace = true +license.workspace = true +repository.workspace = true + +# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html + +[dependencies] +noirc_frontend.workspace = true +iter-extended.workspace = true diff --git a/aztec_macros/src/lib.rs b/aztec_macros/src/lib.rs new file mode 100644 index 00000000000..24f0f5a71b6 --- /dev/null +++ b/aztec_macros/src/lib.rs @@ -0,0 +1,1039 @@ +use iter_extended::vecmap; + +use noirc_frontend::macros_api::FieldElement; +use noirc_frontend::macros_api::{ + BlockExpression, CallExpression, CastExpression, Distinctness, Expression, ExpressionKind, + ForLoopStatement, ForRange, FunctionDefinition, FunctionReturnType, FunctionVisibility, + HirContext, HirExpression, HirLiteral, HirStatement, Ident, ImportStatement, IndexExpression, + LetStatement, Literal, MemberAccessExpression, MethodCallExpression, NoirFunction, NoirStruct, + Param, Path, PathKind, Pattern, PrefixExpression, SecondaryAttribute, Signedness, Span, + Statement, StatementKind, StructType, Type, TypeImpl, UnaryOp, UnresolvedType, + UnresolvedTypeData, Visibility, +}; +use 
noirc_frontend::macros_api::{CrateId, FileId}; +use noirc_frontend::macros_api::{MacroError, MacroProcessor}; +use noirc_frontend::macros_api::{ModuleDefId, NodeInterner, SortedModule, StructId}; + +pub struct AztecMacro; + +impl MacroProcessor for AztecMacro { + fn process_untyped_ast( + &self, + ast: SortedModule, + crate_id: &CrateId, + context: &HirContext, + ) -> Result { + transform(ast, crate_id, context) + } + + fn process_typed_ast(&self, crate_id: &CrateId, context: &mut HirContext) { + transform_hir(crate_id, context) + } +} + +#[derive(Debug, Clone)] +pub enum AztecMacroError { + AztecNotFound, + AztecComputeNoteHashAndNullifierNotFound { span: Span }, +} + +impl From for MacroError { + fn from(err: AztecMacroError) -> Self { + match err { + AztecMacroError::AztecNotFound {} => MacroError { + primary_message: "Aztec dependency not found. Please add aztec as a dependency in your Cargo.toml. For more information go to https://docs.aztec.network/dev_docs/debugging/aztecnr-errors#aztec-dependency-not-found-please-add-aztec-as-a-dependency-in-your-nargotoml".to_owned(), + secondary_message: None, + span: None, + }, + AztecMacroError::AztecComputeNoteHashAndNullifierNotFound { span } => MacroError { + primary_message: "compute_note_hash_and_nullifier function not found. Define it in your contract. For more information go to https://docs.aztec.network/dev_docs/debugging/aztecnr-errors#compute_note_hash_and_nullifier-function-not-found-define-it-in-your-contract".to_owned(), + secondary_message: None, + span: Some(span), + }, + } + } +} + +// +// Helper macros for creating noir ast nodes +// +fn ident(name: &str) -> Ident { + Ident::new(name.to_string(), Span::default()) +} + +fn ident_path(name: &str) -> Path { + Path::from_ident(ident(name)) +} + +fn path(ident: Ident) -> Path { + Path::from_ident(ident) +} + +fn expression(kind: ExpressionKind) -> Expression { + Expression::new(kind, Span::default()) +} + +fn variable(name: &str) -> Expression { + expression(ExpressionKind::Variable(ident_path(name))) +} + +fn variable_ident(identifier: Ident) -> Expression { + expression(ExpressionKind::Variable(path(identifier))) +} + +fn variable_path(path: Path) -> Expression { + expression(ExpressionKind::Variable(path)) +} + +fn method_call(object: Expression, method_name: &str, arguments: Vec) -> Expression { + expression(ExpressionKind::MethodCall(Box::new(MethodCallExpression { + object, + method_name: ident(method_name), + arguments, + }))) +} + +fn call(func: Expression, arguments: Vec) -> Expression { + expression(ExpressionKind::Call(Box::new(CallExpression { func: Box::new(func), arguments }))) +} + +fn pattern(name: &str) -> Pattern { + Pattern::Identifier(ident(name)) +} + +fn mutable(name: &str) -> Pattern { + Pattern::Mutable(Box::new(pattern(name)), Span::default()) +} + +fn mutable_assignment(name: &str, assigned_to: Expression) -> Statement { + make_statement(StatementKind::Let(LetStatement { + pattern: mutable(name), + r#type: make_type(UnresolvedTypeData::Unspecified), + expression: assigned_to, + })) +} + +fn mutable_reference(variable_name: &str) -> Expression { + expression(ExpressionKind::Prefix(Box::new(PrefixExpression { + operator: UnaryOp::MutableReference, + rhs: variable(variable_name), + }))) +} + +fn assignment(name: &str, assigned_to: Expression) -> Statement { + make_statement(StatementKind::Let(LetStatement { + pattern: pattern(name), + r#type: make_type(UnresolvedTypeData::Unspecified), + expression: assigned_to, + })) +} + +fn member_access(lhs: &str, rhs: 
&str) -> Expression { + expression(ExpressionKind::MemberAccess(Box::new(MemberAccessExpression { + lhs: variable(lhs), + rhs: ident(rhs), + }))) +} + +macro_rules! chained_path { + ( $base:expr $(, $tail:expr)* ) => { + { + let mut base_path = ident_path($base); + $( + base_path.segments.push(ident($tail)); + )* + base_path + } + } +} + +macro_rules! chained_dep { + ( $base:expr $(, $tail:expr)* ) => { + { + let mut base_path = ident_path($base); + base_path.kind = PathKind::Dep; + $( + base_path.segments.push(ident($tail)); + )* + base_path + } + } +} + +fn cast(lhs: Expression, ty: UnresolvedTypeData) -> Expression { + expression(ExpressionKind::Cast(Box::new(CastExpression { lhs, r#type: make_type(ty) }))) +} + +fn make_type(typ: UnresolvedTypeData) -> UnresolvedType { + UnresolvedType { typ, span: None } +} + +fn index_array(array: Ident, index: &str) -> Expression { + expression(ExpressionKind::Index(Box::new(IndexExpression { + collection: variable_path(path(array)), + index: variable(index), + }))) +} + +fn index_array_variable(array: Expression, index: &str) -> Expression { + expression(ExpressionKind::Index(Box::new(IndexExpression { + collection: array, + index: variable(index), + }))) +} + +fn import(path: Path) -> ImportStatement { + ImportStatement { path, alias: None } +} + +// +// Create AST Nodes for Aztec +// + +/// Traverses every function in the ast, calling `transform_function` which +/// determines if further processing is required +fn transform( + mut ast: SortedModule, + crate_id: &CrateId, + context: &HirContext, +) -> Result { + // Usage -> mut ast -> aztec_library::transform(&mut ast) + + // Covers all functions in the ast + for submodule in ast.submodules.iter_mut().filter(|submodule| submodule.is_contract) { + if transform_module(&mut submodule.contents, crate_id, context)? { + check_for_aztec_dependency(crate_id, context)?; + include_relevant_imports(&mut submodule.contents); + } + } + Ok(ast) +} + +// +// Transform Hir Nodes for Aztec +// + +/// Completes the Hir with data gathered from type resolution +fn transform_hir(crate_id: &CrateId, context: &mut HirContext) { + transform_events(crate_id, context); +} + +/// Includes an import to the aztec library if it has not been included yet +fn include_relevant_imports(ast: &mut SortedModule) { + // Create the aztec import path using the assumed chained_dep! 
macro + let aztec_import_path = import(chained_dep!("aztec")); + + // Check if the aztec import already exists + let is_aztec_imported = + ast.imports.iter().any(|existing_import| existing_import.path == aztec_import_path.path); + + // If aztec is not imported, add the import at the beginning + if !is_aztec_imported { + ast.imports.insert(0, aztec_import_path); + } +} + +/// Creates an error alerting the user that they have not downloaded the Aztec-noir library +fn check_for_aztec_dependency( + crate_id: &CrateId, + context: &HirContext, +) -> Result<(), (MacroError, FileId)> { + let crate_graph = &context.crate_graph[crate_id]; + let has_aztec_dependency = crate_graph.dependencies.iter().any(|dep| dep.as_name() == "aztec"); + if has_aztec_dependency { + Ok(()) + } else { + Err((AztecMacroError::AztecNotFound.into(), crate_graph.root_file_id)) + } +} + +// Check to see if the user has defined a storage struct +fn check_for_storage_definition(module: &SortedModule) -> bool { + module.types.iter().any(|r#struct| r#struct.name.0.contents == "Storage") +} + +// Check if "compute_note_hash_and_nullifier(AztecAddress,Field,Field,[Field; N]) -> [Field; 4]" is defined +fn check_for_compute_note_hash_and_nullifier_definition(module: &SortedModule) -> bool { + module.functions.iter().any(|func| { + func.def.name.0.contents == "compute_note_hash_and_nullifier" + && func.def.parameters.len() == 4 + && match &func.def.parameters[0].typ.typ { + UnresolvedTypeData::Named(path, _) => path.segments.last().unwrap().0.contents == "AztecAddress", + _ => false, + } + && func.def.parameters[1].typ.typ == UnresolvedTypeData::FieldElement + && func.def.parameters[2].typ.typ == UnresolvedTypeData::FieldElement + // checks if the 4th parameter is an array and the Box in + // Array(Option, Box) contains only fields + && match &func.def.parameters[3].typ.typ { + UnresolvedTypeData::Array(_, inner_type) => { + matches!(inner_type.typ, UnresolvedTypeData::FieldElement) + }, + _ => false, + } + // We check the return type the same way as we did the 4th parameter + && match &func.def.return_type { + FunctionReturnType::Default(_) => false, + FunctionReturnType::Ty(unresolved_type) => { + match &unresolved_type.typ { + UnresolvedTypeData::Array(_, inner_type) => { + matches!(inner_type.typ, UnresolvedTypeData::FieldElement) + }, + _ => false, + } + } + } + }) +} + +/// Checks if an attribute is a custom attribute with a specific name +fn is_custom_attribute(attr: &SecondaryAttribute, attribute_name: &str) -> bool { + if let SecondaryAttribute::Custom(custom_attr) = attr { + custom_attr.as_str() == attribute_name + } else { + false + } +} + +/// Determines if ast nodes are annotated with aztec attributes. +/// For annotated functions it calls the `transform` function which will perform the required transformations. 
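The `chained_path!` and `chained_dep!` helpers above use `macro_rules!` repetition to push each trailing segment onto a base path. The same pattern over plain strings, as a standalone sketch (the `chained!` macro below is illustrative only and not part of the crate):

// Same `$base` plus `$(, $tail)*` repetition shape as `chained_path!`,
// but joining string segments instead of building an AST `Path`.
macro_rules! chained {
    ( $base:expr $(, $tail:expr)* ) => {{
        let mut segments: Vec<&str> = vec![$base];
        $(
            segments.push($tail);
        )*
        segments.join("::")
    }};
}

fn main() {
    // e.g. the path used when injecting `let mut hasher = Hasher::new();`
    assert_eq!(chained!("aztec", "abi", "Hasher", "new"), "aztec::abi::Hasher::new");
    assert_eq!(chained!("aztec"), "aztec");
}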
+/// Returns true if an annotated node is found, false otherwise +fn transform_module( + module: &mut SortedModule, + crate_id: &CrateId, + context: &HirContext, +) -> Result { + let mut has_transformed_module = false; + + // Check for a user defined storage struct + let storage_defined = check_for_storage_definition(module); + + if storage_defined && !check_for_compute_note_hash_and_nullifier_definition(module) { + let crate_graph = &context.crate_graph[crate_id]; + return Err(( + AztecMacroError::AztecComputeNoteHashAndNullifierNotFound { span: Span::default() } + .into(), + crate_graph.root_file_id, + )); + } + + for structure in module.types.iter() { + if structure.attributes.iter().any(|attr| matches!(attr, SecondaryAttribute::Event)) { + module.impls.push(generate_selector_impl(structure)); + has_transformed_module = true; + } + } + + for func in module.functions.iter_mut() { + for secondary_attribute in func.def.attributes.secondary.clone() { + if is_custom_attribute(&secondary_attribute, "aztec(private)") { + transform_function("Private", func, storage_defined); + has_transformed_module = true; + } else if is_custom_attribute(&secondary_attribute, "aztec(public)") { + transform_function("Public", func, storage_defined); + has_transformed_module = true; + } + } + // Add the storage struct to the beginning of the function if it is unconstrained in an aztec contract + if storage_defined && func.def.is_unconstrained { + transform_unconstrained(func); + has_transformed_module = true; + } + } + Ok(has_transformed_module) +} + +/// If it does, it will insert the following things: +/// - A new Input that is provided for a kernel app circuit, named: {Public/Private}ContextInputs +/// - Hashes all of the function input variables +/// - This instantiates a helper function +fn transform_function(ty: &str, func: &mut NoirFunction, storage_defined: bool) { + let context_name = format!("{}Context", ty); + let inputs_name = format!("{}ContextInputs", ty); + let return_type_name = format!("{}CircuitPublicInputs", ty); + + // Add access to the storage struct + if storage_defined { + let storage_def = abstract_storage(&ty.to_lowercase(), false); + func.def.body.0.insert(0, storage_def); + } + + // Insert the context creation as the first action + let create_context = create_context(&context_name, &func.def.parameters); + func.def.body.0.splice(0..0, (create_context).iter().cloned()); + + // Add the inputs to the params + let input = create_inputs(&inputs_name); + func.def.parameters.insert(0, input); + + // Abstract return types such that they get added to the kernel's return_values + if let Some(return_values) = abstract_return_values(func) { + func.def.body.0.push(return_values); + } + + // Push the finish method call to the end of the function + let finish_def = create_context_finish(); + func.def.body.0.push(finish_def); + + let return_type = create_return_type(&return_type_name); + func.def.return_type = return_type; + func.def.return_visibility = Visibility::Public; + + // Distinct return types are only required for private functions + // Public functions should have open auto-inferred + match ty { + "Private" => func.def.return_distinctness = Distinctness::Distinct, + "Public" => func.def.is_open = true, + _ => (), + } +} + +/// Transform Unconstrained +/// +/// Inserts the following code at the beginning of an unconstrained function +/// ```noir +/// let storage = Storage::init(Context::none()); +/// ``` +/// +/// This will allow developers to access their contract' storage struct in 
unconstrained functions +fn transform_unconstrained(func: &mut NoirFunction) { + func.def.body.0.insert(0, abstract_storage("Unconstrained", true)); +} + +fn collect_crate_structs(crate_id: &CrateId, context: &HirContext) -> Vec { + context + .def_map(crate_id) + .expect("ICE: Missing crate in def_map") + .modules() + .iter() + .flat_map(|(_, module)| { + module.type_definitions().filter_map(|typ| { + if let ModuleDefId::TypeId(struct_id) = typ { + Some(struct_id) + } else { + None + } + }) + }) + .collect() +} + +/// Substitutes the signature literal that was introduced in the selector method previously with the actual signature. +fn transform_event(struct_id: StructId, interner: &mut NodeInterner) { + let struct_type = interner.get_struct(struct_id); + let selector_id = interner + .lookup_method(&Type::Struct(struct_type, vec![]), struct_id, "selector", false) + .expect("Selector method not found"); + let selector_function = interner.function(&selector_id); + + let compute_selector_statement = interner.statement( + selector_function + .block(interner) + .statements() + .first() + .expect("Compute selector statement not found"), + ); + + let compute_selector_expression = match compute_selector_statement { + HirStatement::Expression(expression_id) => match interner.expression(&expression_id) { + HirExpression::Call(hir_call_expression) => Some(hir_call_expression), + _ => None, + }, + _ => None, + } + .expect("Compute selector statement is not a call expression"); + + let first_arg_id = compute_selector_expression + .arguments + .first() + .expect("Missing argument for compute selector"); + + match interner.expression(first_arg_id) { + HirExpression::Literal(HirLiteral::Str(signature)) + if signature == SIGNATURE_PLACEHOLDER => + { + let selector_literal_id = first_arg_id; + + let structure = interner.get_struct(struct_id); + let signature = event_signature(&structure.borrow()); + interner.update_expression(*selector_literal_id, |expr| { + *expr = HirExpression::Literal(HirLiteral::Str(signature.clone())); + }); + + // Also update the type! It might have a different length now than the placeholder. + interner.push_expr_type( + selector_literal_id, + Type::String(Box::new(Type::Constant(signature.len() as u64))), + ); + } + _ => unreachable!("Signature placeholder literal does not match"), + } +} + +fn transform_events(crate_id: &CrateId, context: &mut HirContext) { + for struct_id in collect_crate_structs(crate_id, context) { + let attributes = context.def_interner.struct_attributes(&struct_id); + if attributes.iter().any(|attr| matches!(attr, SecondaryAttribute::Event)) { + transform_event(struct_id, &mut context.def_interner); + } + } +} + +const SIGNATURE_PLACEHOLDER: &str = "SIGNATURE_PLACEHOLDER"; + +/// Generates the impl for an event selector +/// +/// TODO(https://github.com/AztecProtocol/aztec-packages/issues/3590): Make this point to aztec-nr once the issue is fixed. +/// Inserts the following code: +/// ```noir +/// impl SomeStruct { +/// fn selector() -> FunctionSelector { +/// protocol_types::abis::function_selector::FunctionSelector::from_signature("SIGNATURE_PLACEHOLDER") +/// } +/// } +/// ``` +/// +/// This allows developers to emit events without having to write the signature of the event every time they emit it. +/// The signature cannot be known at this point since types are not resolved yet, so we use a signature placeholder. +/// It'll get resolved after by transforming the HIR. 
+fn generate_selector_impl(structure: &NoirStruct) -> TypeImpl { + let struct_type = make_type(UnresolvedTypeData::Named(path(structure.name.clone()), vec![])); + + // TODO(https://github.com/AztecProtocol/aztec-packages/issues/3590): Make this point to aztec-nr once the issue is fixed. + let selector_path = + chained_path!("protocol_types", "abis", "function_selector", "FunctionSelector"); + let mut from_signature_path = selector_path.clone(); + from_signature_path.segments.push(ident("from_signature")); + + let selector_fun_body = BlockExpression(vec![make_statement(StatementKind::Expression(call( + variable_path(from_signature_path), + vec![expression(ExpressionKind::Literal(Literal::Str(SIGNATURE_PLACEHOLDER.to_string())))], + )))]); + + // Define `FunctionSelector` return type + let return_type = + FunctionReturnType::Ty(make_type(UnresolvedTypeData::Named(selector_path, vec![]))); + + let mut selector_fn_def = FunctionDefinition::normal( + &ident("selector"), + &vec![], + &[], + &selector_fun_body, + &[], + &return_type, + ); + + selector_fn_def.visibility = FunctionVisibility::Public; + + // Seems to be necessary on contract modules + selector_fn_def.return_visibility = Visibility::Public; + + TypeImpl { + object_type: struct_type, + type_span: structure.span, + generics: vec![], + methods: vec![NoirFunction::normal(selector_fn_def)], + } +} + +/// Helper function that returns what the private context would look like in the ast +/// This should make it available to be consumed within aztec private annotated functions. +/// +/// The replaced code: +/// ```noir +/// /// Before +/// fn foo(inputs: PrivateContextInputs) { +/// // ... +/// } +/// +/// /// After +/// #[aztec(private)] +/// fn foo() { +/// // ... +/// } +fn create_inputs(ty: &str) -> Param { + let context_ident = ident("inputs"); + let context_pattern = Pattern::Identifier(context_ident); + let type_path = chained_path!("aztec", "abi", ty); + let context_type = make_type(UnresolvedTypeData::Named(type_path, vec![])); + let visibility = Visibility::Private; + + Param { pattern: context_pattern, typ: context_type, visibility, span: Span::default() } +} + +/// Creates the private context object to be accessed within the function, the parameters need to be extracted to be +/// appended into the args hash object. 
+/// +/// The replaced code: +/// ```noir +/// #[aztec(private)] +/// fn foo(structInput: SomeStruct, arrayInput: [u8; 10], fieldInput: Field) -> Field { +/// // Create the hasher object +/// let mut hasher = Hasher::new(); +/// +/// // struct inputs call serialize on them to add an array of fields +/// hasher.add_multiple(structInput.serialize()); +/// +/// // Array inputs are iterated over and each element is added to the hasher (as a field) +/// for i in 0..arrayInput.len() { +/// hasher.add(arrayInput[i] as Field); +/// } +/// // Field inputs are added to the hasher +/// hasher.add({ident}); +/// +/// // Create the context +/// // The inputs (injected by this `create_inputs`) and completed hash object are passed to the context +/// let mut context = PrivateContext::new(inputs, hasher.hash()); +/// } +/// ``` +fn create_context(ty: &str, params: &[Param]) -> Vec { + let mut injected_expressions: Vec = vec![]; + + // `let mut hasher = Hasher::new();` + let let_hasher = mutable_assignment( + "hasher", // Assigned to + call( + variable_path(chained_path!("aztec", "abi", "Hasher", "new")), // Path + vec![], // args + ), + ); + + // Completes: `let mut hasher = Hasher::new();` + injected_expressions.push(let_hasher); + + // Iterate over each of the function parameters, adding to them to the hasher + params.iter().for_each(|Param { pattern, typ, span: _, visibility: _ }| { + match pattern { + Pattern::Identifier(identifier) => { + // Match the type to determine the padding to do + let unresolved_type = &typ.typ; + let expression = match unresolved_type { + // `hasher.add_multiple({ident}.serialize())` + UnresolvedTypeData::Named(..) => add_struct_to_hasher(identifier), + UnresolvedTypeData::Array(_, arr_type) => { + add_array_to_hasher(identifier, arr_type) + } + // `hasher.add({ident})` + UnresolvedTypeData::FieldElement => add_field_to_hasher(identifier), + // Add the integer to the hasher, casted to a field + // `hasher.add({ident} as Field)` + UnresolvedTypeData::Integer(..) | UnresolvedTypeData::Bool => { + add_cast_to_hasher(identifier) + } + _ => unreachable!("[Aztec Noir] Provided parameter type is not supported"), + }; + injected_expressions.push(expression); + } + _ => todo!(), // Maybe unreachable? + } + }); + + // Create the inputs to the context + let inputs_expression = variable("inputs"); + // `hasher.hash()` + let hash_call = method_call( + variable("hasher"), // variable + "hash", // method name + vec![], // args + ); + + // let mut context = {ty}::new(inputs, hash); + let let_context = mutable_assignment( + "context", // Assigned to + call( + variable_path(chained_path!("aztec", "context", ty, "new")), // Path + vec![inputs_expression, hash_call], // args + ), + ); + injected_expressions.push(let_context); + + // Return all expressions that will be injected by the hasher + injected_expressions +} + +/// Abstract Return Type +/// +/// This function intercepts the function's current return type and replaces it with pushes +/// To the kernel +/// +/// The replaced code: +/// ```noir +/// /// Before +/// #[aztec(private)] +/// fn foo() -> abi::PrivateCircuitPublicInputs { +/// // ... +/// let my_return_value: Field = 10; +/// context.return_values.push(my_return_value); +/// } +/// +/// /// After +/// #[aztec(private)] +/// fn foo() -> Field { +/// // ... +/// let my_return_value: Field = 10; +/// my_return_value +/// } +/// ``` +/// Similarly; Structs will be pushed to the context, after serialize() is called on them. 
+/// Arrays will be iterated over and each element will be pushed to the context. +/// Any primitive type that can be cast will be casted to a field and pushed to the context. +fn abstract_return_values(func: &NoirFunction) -> Option { + let current_return_type = func.return_type().typ; + let len = func.def.body.len(); + let last_statement = &func.def.body.0[len - 1]; + + // TODO: (length, type) => We can limit the size of the array returned to be limited by kernel size + // Doesn't need done until we have settled on a kernel size + // TODO: support tuples here and in inputs -> convert into an issue + + // Check if the return type is an expression, if it is, we can handle it + match last_statement { + Statement { kind: StatementKind::Expression(expression), .. } => { + match current_return_type { + // Call serialize on structs, push the whole array, calling push_array + UnresolvedTypeData::Named(..) => Some(make_struct_return_type(expression.clone())), + UnresolvedTypeData::Array(..) => Some(make_array_return_type(expression.clone())), + // Cast these types to a field before pushing + UnresolvedTypeData::Bool | UnresolvedTypeData::Integer(..) => { + Some(make_castable_return_type(expression.clone())) + } + UnresolvedTypeData::FieldElement => Some(make_return_push(expression.clone())), + _ => None, + } + } + _ => None, + } +} + +/// Abstract storage +/// +/// For private functions: +/// ```noir +/// #[aztec(private)] +/// fn lol() { +/// let storage = Storage::init(Context::private(context)); +/// } +/// ``` +/// +/// For public functions: +/// ```noir +/// #[aztec(public)] +/// fn lol() { +/// let storage = Storage::init(Context::public(context)); +/// } +/// ``` +/// +/// For unconstrained functions: +/// ```noir +/// unconstrained fn lol() { +/// let storage = Storage::init(Context::none()); +/// } +fn abstract_storage(typ: &str, unconstrained: bool) -> Statement { + let init_context_call = if unconstrained { + call( + variable_path(chained_path!("aztec", "context", "Context", "none")), // Path + vec![], // args + ) + } else { + call( + variable_path(chained_path!("aztec", "context", "Context", typ)), // Path + vec![mutable_reference("context")], // args + ) + }; + + assignment( + "storage", // Assigned to + call( + variable_path(chained_path!("Storage", "init")), // Path + vec![init_context_call], // args + ), + ) +} + +/// Context Return Values +/// +/// Creates an instance to the context return values +/// ```noir +/// `context.return_values` +/// ``` +fn context_return_values() -> Expression { + member_access("context", "return_values") +} + +fn make_statement(kind: StatementKind) -> Statement { + Statement { span: Span::default(), kind } +} + +/// Make return Push +/// +/// Translates to: +/// `context.return_values.push({push_value})` +fn make_return_push(push_value: Expression) -> Statement { + make_statement(StatementKind::Semi(method_call( + context_return_values(), + "push", + vec![push_value], + ))) +} + +/// Make Return push array +/// +/// Translates to: +/// `context.return_values.push_array({push_value})` +fn make_return_push_array(push_value: Expression) -> Statement { + make_statement(StatementKind::Semi(method_call( + context_return_values(), + "push_array", + vec![push_value], + ))) +} + +/// Make struct return type +/// +/// Translates to: +/// ```noir +/// `context.return_values.push_array({push_value}.serialize())` +fn make_struct_return_type(expression: Expression) -> Statement { + let serialized_call = method_call( + expression, // variable + "serialize", // 
method name + vec![], // args + ); + make_return_push_array(serialized_call) +} + +/// Make array return type +/// +/// Translates to: +/// ```noir +/// for i in 0..{ident}.len() { +/// context.return_values.push({ident}[i] as Field) +/// } +/// ``` +fn make_array_return_type(expression: Expression) -> Statement { + let inner_cast_expression = + cast(index_array_variable(expression.clone(), "i"), UnresolvedTypeData::FieldElement); + let assignment = make_statement(StatementKind::Semi(method_call( + context_return_values(), // variable + "push", // method name + vec![inner_cast_expression], + ))); + + create_loop_over(expression, vec![assignment]) +} + +/// Castable return type +/// +/// Translates to: +/// ```noir +/// context.return_values.push({ident} as Field) +/// ``` +fn make_castable_return_type(expression: Expression) -> Statement { + // Cast these types to a field before pushing + let cast_expression = cast(expression, UnresolvedTypeData::FieldElement); + make_return_push(cast_expression) +} + +/// Create Return Type +/// +/// Public functions return abi::PublicCircuitPublicInputs while +/// private functions return abi::PrivateCircuitPublicInputs +/// +/// This call constructs an ast token referencing the above types +/// The name is set in the function above `transform`, hence the +/// whole token name is passed in +/// +/// The replaced code: +/// ```noir +/// +/// /// Before +/// fn foo() -> abi::PrivateCircuitPublicInputs { +/// // ... +/// } +/// +/// /// After +/// #[aztec(private)] +/// fn foo() { +/// // ... +/// } +fn create_return_type(ty: &str) -> FunctionReturnType { + let return_path = chained_path!("aztec", "abi", ty); + + let ty = make_type(UnresolvedTypeData::Named(return_path, vec![])); + FunctionReturnType::Ty(ty) +} + +/// Create Context Finish +/// +/// Each aztec function calls `context.finish()` at the end of a function +/// to return values required by the kernel. +/// +/// The replaced code: +/// ```noir +/// /// Before +/// fn foo() -> abi::PrivateCircuitPublicInputs { +/// // ... +/// context.finish() +/// } +/// +/// /// After +/// #[aztec(private)] +/// fn foo() { +/// // ... 
+/// } +fn create_context_finish() -> Statement { + let method_call = method_call( + variable("context"), // variable + "finish", // method name + vec![], // args + ); + make_statement(StatementKind::Expression(method_call)) +} + +// +// Methods to create hasher inputs +// + +fn add_struct_to_hasher(identifier: &Ident) -> Statement { + // If this is a struct, we call serialize and add the array to the hasher + let serialized_call = method_call( + variable_path(path(identifier.clone())), // variable + "serialize", // method name + vec![], // args + ); + + make_statement(StatementKind::Semi(method_call( + variable("hasher"), // variable + "add_multiple", // method name + vec![serialized_call], // args + ))) +} + +fn create_loop_over(var: Expression, loop_body: Vec) -> Statement { + // If this is an array of primitive types (integers / fields) we can add them each to the hasher + // casted to a field + let span = var.span; + + // `array.len()` + let end_range_expression = method_call( + var, // variable + "len", // method name + vec![], // args + ); + + // What will be looped over + // - `hasher.add({ident}[i] as Field)` + let for_loop_block = expression(ExpressionKind::Block(BlockExpression(loop_body))); + + // `for i in 0..{ident}.len()` + make_statement(StatementKind::For(ForLoopStatement { + range: ForRange::Range( + expression(ExpressionKind::Literal(Literal::Integer( + FieldElement::from(i128::from(0)), + false, + ))), + end_range_expression, + ), + identifier: ident("i"), + block: for_loop_block, + span, + })) +} + +fn add_array_to_hasher(identifier: &Ident, arr_type: &UnresolvedType) -> Statement { + // If this is an array of primitive types (integers / fields) we can add them each to the hasher + // casted to a field + + // Wrap in the semi thing - does that mean ended with semi colon? + // `hasher.add({ident}[i] as Field)` + + let arr_index = index_array(identifier.clone(), "i"); + let (add_expression, hasher_method_name) = match arr_type.typ { + UnresolvedTypeData::Named(..) => { + let hasher_method_name = "add_multiple".to_owned(); + let call = method_call( + // All serialize on each element + arr_index, // variable + "serialize", // method name + vec![], // args + ); + (call, hasher_method_name) + } + _ => { + let hasher_method_name = "add".to_owned(); + let call = cast( + arr_index, // lhs - `ident[i]` + UnresolvedTypeData::FieldElement, // cast to - `as Field` + ); + (call, hasher_method_name) + } + }; + + let block_statement = make_statement(StatementKind::Semi(method_call( + variable("hasher"), // variable + &hasher_method_name, // method name + vec![add_expression], + ))); + + create_loop_over(variable_ident(identifier.clone()), vec![block_statement]) +} + +fn add_field_to_hasher(identifier: &Ident) -> Statement { + // `hasher.add({ident})` + let ident = variable_path(path(identifier.clone())); + make_statement(StatementKind::Semi(method_call( + variable("hasher"), // variable + "add", // method name + vec![ident], // args + ))) +} + +fn add_cast_to_hasher(identifier: &Ident) -> Statement { + // `hasher.add({ident} as Field)` + // `{ident} as Field` + let cast_operation = cast( + variable_path(path(identifier.clone())), // lhs + UnresolvedTypeData::FieldElement, // rhs + ); + + // `hasher.add({ident} as Field)` + make_statement(StatementKind::Semi(method_call( + variable("hasher"), // variable + "add", // method name + vec![cast_operation], // args + ))) +} + +/// Computes the aztec signature for a resolved type. 
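The hasher helpers above flatten every supported parameter shape into hasher inputs: structs are serialized and passed to `add_multiple`, arrays are looped over element by element, and scalars are cast to a field and passed to `add`. A standalone sketch of that flattening, with a plain `Vec<u128>` accumulator standing in for the Noir-side `Hasher` (the stand-in type is an assumption made purely for illustration):

// Plain-Rust stand-in for the Noir `Hasher` the generated code targets;
// only the `add` / `add_multiple` call shapes mirror the helpers above.
struct Hasher {
    inputs: Vec<u128>,
}

impl Hasher {
    fn new() -> Self {
        Self { inputs: Vec::new() }
    }
    fn add(&mut self, value: u128) {
        self.inputs.push(value);
    }
    fn add_multiple(&mut self, values: &[u128]) {
        self.inputs.extend_from_slice(values);
    }
}

fn main() {
    let mut hasher = Hasher::new();
    // struct parameter: `hasher.add_multiple({ident}.serialize())`
    hasher.add_multiple(&[1, 2, 3]);
    // array parameter: `for i in 0..arr.len() { hasher.add(arr[i] as Field) }`
    for element in [10u8, 20, 30] {
        hasher.add(element as u128);
    }
    // integer / bool parameter: `hasher.add({ident} as Field)`
    hasher.add(true as u128);
    assert_eq!(hasher.inputs.len(), 7);
}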
+fn signature_of_type(typ: &Type) -> String { + match typ { + Type::Integer(Signedness::Signed, bit_size) => format!("i{}", bit_size), + Type::Integer(Signedness::Unsigned, bit_size) => format!("u{}", bit_size), + Type::FieldElement => "Field".to_owned(), + Type::Bool => "bool".to_owned(), + Type::Array(len, typ) => { + if let Type::Constant(len) = **len { + format!("[{};{len}]", signature_of_type(typ)) + } else { + unimplemented!("Cannot generate signature for array with length type {:?}", typ) + } + } + Type::Struct(def, args) => { + let fields = def.borrow().get_fields(args); + let fields = vecmap(fields, |(_, typ)| signature_of_type(&typ)); + format!("({})", fields.join(",")) + } + Type::Tuple(types) => { + let fields = vecmap(types, signature_of_type); + format!("({})", fields.join(",")) + } + _ => unimplemented!("Cannot generate signature for type {:?}", typ), + } +} + +/// Computes the signature for a resolved event type. +/// It has the form 'EventName(Field,(Field),[u8;2])' +fn event_signature(event: &StructType) -> String { + let fields = vecmap(event.get_fields(&[]), |(_, typ)| signature_of_type(&typ)); + format!("{}({})", event.name.0.contents, fields.join(",")) +} diff --git a/bootstrap.sh b/bootstrap.sh new file mode 100755 index 00000000000..5ebe7ade090 --- /dev/null +++ b/bootstrap.sh @@ -0,0 +1,19 @@ +#!/usr/bin/env bash +set -eu + +cd $(dirname "$0") + +CMD=${1:-} + +if [ -n "$CMD" ]; then + if [ "$CMD" = "clean" ]; then + git clean -fdx + exit 0 + else + echo "Unknown command: $CMD" + exit 1 + fi +fi + +./scripts/bootstrap_native.sh +./scripts/bootstrap_packages.sh \ No newline at end of file diff --git a/compiler/fm/Cargo.toml b/compiler/fm/Cargo.toml index 9e4309693ed..42e4b0c25d7 100644 --- a/compiler/fm/Cargo.toml +++ b/compiler/fm/Cargo.toml @@ -9,9 +9,8 @@ license.workspace = true [dependencies] codespan-reporting.workspace = true -rust-embed = "6.6.0" serde.workspace = true [dev-dependencies] -tempfile = "3.2.0" +tempfile.workspace = true iter-extended.workspace = true diff --git a/compiler/fm/build.rs b/compiler/fm/build.rs deleted file mode 100644 index 747ab4fe1a2..00000000000 --- a/compiler/fm/build.rs +++ /dev/null @@ -1,20 +0,0 @@ -use std::path::Path; - -/// Expects that the given directory is an existing path -fn rerun_if_stdlib_changes(directory: &Path) { - for entry in std::fs::read_dir(directory).unwrap() { - let path = entry.unwrap().path(); - - if path.is_dir() { - rerun_if_stdlib_changes(&path); - } else { - // Tell Cargo that if the given file changes, to rerun this build script. 
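For reference, `signature_of_type` and `event_signature` above produce strings of the documented form `EventName(Field,(Field),[u8;2])`. A standalone sketch of the same formatting rules over a simplified type enum (the `Ty` enum is illustrative, not the compiler's `Type`):

// Simplified mirror of the signature rules above, over a toy type enum.
enum Ty {
    Field,
    Uint(u32),
    Array(u64, Box<Ty>),
    Tuple(Vec<Ty>),
}

fn signature(ty: &Ty) -> String {
    match ty {
        Ty::Field => "Field".to_owned(),
        Ty::Uint(bits) => format!("u{bits}"),
        Ty::Array(len, inner) => format!("[{};{}]", signature(inner), len),
        Ty::Tuple(items) => {
            let items: Vec<String> = items.iter().map(signature).collect();
            format!("({})", items.join(","))
        }
    }
}

fn main() {
    let fields = [
        Ty::Field,
        Ty::Tuple(vec![Ty::Field]),
        Ty::Array(2, Box::new(Ty::Uint(8))),
    ];
    let fields: Vec<String> = fields.iter().map(signature).collect();
    // Matches the documented form: MyEvent(Field,(Field),[u8;2])
    assert_eq!(format!("MyEvent({})", fields.join(",")), "MyEvent(Field,(Field),[u8;2])");
}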
- println!("cargo:rerun-if-changed={}", path.to_string_lossy()); - } - } -} - -fn main() { - let stdlib_src_dir = Path::new("../../noir_stdlib/"); - rerun_if_stdlib_changes(stdlib_src_dir); -} diff --git a/compiler/fm/src/file_map.rs b/compiler/fm/src/file_map.rs index 199723a28b5..c4d7002a082 100644 --- a/compiler/fm/src/file_map.rs +++ b/compiler/fm/src/file_map.rs @@ -1,11 +1,12 @@ use codespan_reporting::files::{Error, Files, SimpleFile, SimpleFiles}; use serde::{Deserialize, Serialize}; +use std::collections::HashMap; use std::{ops::Range, path::PathBuf}; // XXX: File and FileMap serve as opaque types, so that the rest of the library does not need to import the dependency // or worry about when we change the dep -#[derive(Clone, Debug)] +#[derive(Clone, Debug, Eq, Hash, PartialEq)] pub struct PathString(PathBuf); impl std::fmt::Display for PathString { @@ -29,8 +30,11 @@ impl From<&PathBuf> for PathString { PathString::from(pb.to_owned()) } } -#[derive(Debug)] -pub struct FileMap(SimpleFiles); +#[derive(Debug, Clone)] +pub struct FileMap { + files: SimpleFiles, + name_to_id: HashMap, +} // XXX: Note that we derive Default here due to ModuleOrigin requiring us to set a FileId #[derive( @@ -59,17 +63,22 @@ impl<'input> File<'input> { impl FileMap { pub fn add_file(&mut self, file_name: PathString, code: String) -> FileId { - let file_id = self.0.add(file_name, code); - FileId(file_id) + let file_id = FileId(self.files.add(file_name.clone(), code)); + self.name_to_id.insert(file_name, file_id); + file_id } + pub fn get_file(&self, file_id: FileId) -> Option { - self.0.get(file_id.0).map(File).ok() + self.files.get(file_id.0).map(File).ok() } -} + pub fn get_file_id(&self, file_name: &PathString) -> Option { + self.name_to_id.get(file_name).cloned() + } +} impl Default for FileMap { fn default() -> Self { - FileMap(SimpleFiles::new()) + FileMap { files: SimpleFiles::new(), name_to_id: HashMap::new() } } } @@ -79,18 +88,18 @@ impl<'a> Files<'a> for FileMap { type Source = &'a str; fn name(&self, file_id: Self::FileId) -> Result { - Ok(self.0.get(file_id.as_usize())?.name().clone()) + Ok(self.files.get(file_id.as_usize())?.name().clone()) } fn source(&'a self, file_id: Self::FileId) -> Result { - Ok(self.0.get(file_id.as_usize())?.source().as_ref()) + Ok(self.files.get(file_id.as_usize())?.source().as_ref()) } fn line_index(&self, file_id: Self::FileId, byte_index: usize) -> Result { - self.0.get(file_id.as_usize())?.line_index((), byte_index) + self.files.get(file_id.as_usize())?.line_index((), byte_index) } fn line_range(&self, file_id: Self::FileId, line_index: usize) -> Result, Error> { - self.0.get(file_id.as_usize())?.line_range((), line_index) + self.files.get(file_id.as_usize())?.line_range((), line_index) } } diff --git a/compiler/fm/src/file_reader.rs b/compiler/fm/src/file_reader.rs deleted file mode 100644 index d17aefeda7e..00000000000 --- a/compiler/fm/src/file_reader.rs +++ /dev/null @@ -1,49 +0,0 @@ -use rust_embed::RustEmbed; -use std::io::{Error, ErrorKind}; -use std::path::Path; - -// Based on the environment, we either read files using the rust standard library or we -// read files using the javascript host function - -pub type FileReader = dyn Fn(&Path) -> std::io::Result + Send; - -#[derive(RustEmbed)] -#[folder = "../../noir_stdlib/src"] -#[cfg_attr(not(target_os = "windows"), prefix = "std/")] -#[cfg_attr(target_os = "windows", prefix = r"std\")] // Note reversed slash direction -struct StdLibAssets; - -#[cfg(target_os = "windows")] -pub(super) fn 
is_stdlib_asset(path: &Path) -> bool { - path.starts_with("std\\") -} - -#[cfg(not(target_os = "windows"))] -pub(super) fn is_stdlib_asset(path: &Path) -> bool { - path.starts_with("std/") -} - -fn get_stdlib_asset(path: &Path) -> std::io::Result { - if !is_stdlib_asset(path) { - return Err(Error::new(ErrorKind::InvalidInput, "requested a non-stdlib asset")); - } - - match StdLibAssets::get(path.to_str().unwrap()) { - Some(std_lib_asset) => { - Ok(std::str::from_utf8(std_lib_asset.data.as_ref()).unwrap().to_string()) - } - - None => Err(Error::new(ErrorKind::NotFound, "invalid stdlib path")), - } -} - -pub(crate) fn read_file_to_string( - path_to_file: &Path, - get_non_stdlib_asset: &impl Fn(&Path) -> std::io::Result, -) -> std::io::Result { - if is_stdlib_asset(path_to_file) { - get_stdlib_asset(path_to_file) - } else { - get_non_stdlib_asset(path_to_file) - } -} diff --git a/compiler/fm/src/lib.rs b/compiler/fm/src/lib.rs index 6e11489f7f2..55fb762479f 100644 --- a/compiler/fm/src/lib.rs +++ b/compiler/fm/src/lib.rs @@ -4,11 +4,11 @@ #![warn(clippy::semicolon_if_nothing_returned)] mod file_map; -mod file_reader; pub use file_map::{File, FileId, FileMap, PathString}; -use file_reader::is_stdlib_asset; -pub use file_reader::FileReader; + +// Re-export for the lsp +pub use codespan_reporting::files as codespan_files; use std::{ collections::HashMap, @@ -16,13 +16,12 @@ use std::{ }; pub const FILE_EXTENSION: &str = "nr"; - +#[derive(Clone)] pub struct FileManager { root: PathBuf, - file_map: file_map::FileMap, + file_map: FileMap, id_to_path: HashMap, path_to_id: HashMap, - file_reader: Box, } impl std::fmt::Debug for FileManager { @@ -37,13 +36,12 @@ impl std::fmt::Debug for FileManager { } impl FileManager { - pub fn new(root: &Path, file_reader: Box) -> Self { + pub fn new(root: &Path) -> Self { Self { root: root.normalize(), file_map: Default::default(), id_to_path: Default::default(), path_to_id: Default::default(), - file_reader, } } @@ -51,25 +49,32 @@ impl FileManager { &self.file_map } - pub fn add_file(&mut self, file_name: &Path) -> Option { - // Handle both relative file paths and std/lib virtual paths. - let resolved_path: PathBuf = if is_stdlib_asset(file_name) { - // Special case for stdlib where we want to read specifically the `std/` relative path - // TODO: The stdlib path should probably be an absolute path rooted in something people would never create - file_name.to_path_buf() - } else { - self.root.join(file_name).normalize() - }; - - // Check that the resolved path already exists in the file map, if it is, we return it. - if let Some(file_id) = self.path_to_id.get(&resolved_path) { + /// Adds a source file to the [`FileManager`]. + /// + /// The `file_name` is expected to be relative to the [`FileManager`]'s root directory. + pub fn add_file_with_source(&mut self, file_name: &Path, source: String) -> Option { + let file_name = self.root.join(file_name); + self.add_file_with_source_canonical_path(&file_name, source) + } + + /// Adds a source file to the [`FileManager`] using a path which is not appended to the root path. + /// + /// This should only be used for the stdlib as these files do not exist on the user's filesystem. + pub fn add_file_with_source_canonical_path( + &mut self, + file_name: &Path, + source: String, + ) -> Option { + let file_name = file_name.normalize(); + // Check that the file name already exists in the file map, if it is, we return it. 
+ if let Some(file_id) = self.path_to_id.get(&file_name) { return Some(*file_id); } + let file_name_path_buf = file_name.to_path_buf(); // Otherwise we add the file - let source = file_reader::read_file_to_string(&resolved_path, &self.file_reader).ok()?; - let file_id = self.file_map.add_file(resolved_path.clone().into(), source); - self.register_path(file_id, resolved_path); + let file_id = self.file_map.add_file(file_name_path_buf.clone().into(), source); + self.register_path(file_id, file_name_path_buf); Some(file_id) } @@ -83,9 +88,9 @@ impl FileManager { assert!(old_value.is_none(), "ice: the same path was inserted into the file manager twice"); } - pub fn fetch_file(&self, file_id: FileId) -> File { + pub fn fetch_file(&self, file_id: FileId) -> &str { // Unwrap as we ensure that all file_id's map to a corresponding file in the file map - self.file_map.get_file(file_id).unwrap() + self.file_map.get_file(file_id).unwrap().source() } pub fn path(&self, file_id: FileId) -> &Path { @@ -94,39 +99,9 @@ impl FileManager { self.id_to_path.get(&file_id).unwrap().as_path() } - pub fn find_module(&mut self, anchor: FileId, mod_name: &str) -> Result { - let anchor_path = self.path(anchor).with_extension(""); - let anchor_dir = anchor_path.parent().unwrap(); - - // if `anchor` is a `main.nr`, `lib.nr`, `mod.nr` or `{mod_name}.nr`, we check siblings of - // the anchor at `base/mod_name.nr`. - let candidate = if should_check_siblings_for_module(&anchor_path, anchor_dir) { - anchor_dir.join(format!("{mod_name}.{FILE_EXTENSION}")) - } else { - // Otherwise, we check for children of the anchor at `base/anchor/mod_name.nr` - anchor_path.join(format!("{mod_name}.{FILE_EXTENSION}")) - }; - - self.add_file(&candidate).ok_or_else(|| candidate.as_os_str().to_string_lossy().to_string()) - } -} - -/// Returns true if a module's child module's are expected to be in the same directory. -/// Returns false if they are expected to be in a subdirectory matching the name of the module. -fn should_check_siblings_for_module(module_path: &Path, parent_path: &Path) -> bool { - if let Some(filename) = module_path.file_stem() { - // This check also means a `main.nr` or `lib.nr` file outside of the crate root would - // check its same directory for child modules instead of a subdirectory. Should we prohibit - // `main.nr` and `lib.nr` files outside of the crate root? - filename == "main" - || filename == "lib" - || filename == "mod" - || Some(filename) == parent_path.file_stem() - } else { - // If there's no filename, we arbitrarily return true. - // Alternatively, we could panic, but this is left to a different step where we - // ideally have some source location to issue an error. 
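The new `add_file_with_source` path above joins the file name onto the root, normalizes it, and returns the existing id when that path was already registered, so the same file added via two spellings resolves to a single `FileId`. A standalone sketch of that bookkeeping using std paths and a HashMap (the `Files` struct is illustrative, not the real FileManager, and path normalization is elided):

use std::collections::HashMap;
use std::path::{Path, PathBuf};

// Minimal stand-in for the path -> file id bookkeeping described above.
struct Files {
    root: PathBuf,
    path_to_id: HashMap<PathBuf, usize>,
    sources: Vec<String>,
}

impl Files {
    fn new(root: &Path) -> Self {
        Self { root: root.to_path_buf(), path_to_id: HashMap::new(), sources: Vec::new() }
    }

    fn add_file_with_source(&mut self, file_name: &Path, source: String) -> usize {
        // Join onto the root, as the real method does before normalizing.
        let path = self.root.join(file_name);
        if let Some(&id) = self.path_to_id.get(&path) {
            return id; // the same path added twice resolves to the same id
        }
        let id = self.sources.len();
        self.sources.push(source);
        self.path_to_id.insert(path, id);
        id
    }
}

fn main() {
    let mut files = Files::new(Path::new("/project/src"));
    let a = files.add_file_with_source(Path::new("lib.nr"), "fn foo() {}".to_string());
    let b = files.add_file_with_source(Path::new("lib.nr"), "fn foo() {}".to_string());
    assert_eq!(a, b);
}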
- true + // TODO: This should accept a &Path instead of a PathBuf + pub fn name_to_id(&self, file_name: PathBuf) -> Option { + self.file_map.get_file_id(&PathString::from_path(file_name)) } } @@ -212,25 +187,11 @@ mod tests { use super::*; use tempfile::{tempdir, TempDir}; - fn create_dummy_file(dir: &TempDir, file_name: &Path) { + // Returns the absolute path to the file + fn create_dummy_file(dir: &TempDir, file_name: &Path) -> PathBuf { let file_path = dir.path().join(file_name); - let _file = std::fs::File::create(file_path).unwrap(); - } - - #[test] - fn path_resolve_file_module() { - let dir = tempdir().unwrap(); - - let entry_file_name = Path::new("my_dummy_file.nr"); - create_dummy_file(&dir, entry_file_name); - - let mut fm = FileManager::new(dir.path(), Box::new(|path| std::fs::read_to_string(path))); - - let file_id = fm.add_file(entry_file_name).unwrap(); - - let dep_file_name = Path::new("foo.nr"); - create_dummy_file(&dir, dep_file_name); - fm.find_module(file_id, "foo").unwrap_err(); + let _file = std::fs::File::create(&file_path).unwrap(); + file_path } #[test] @@ -239,52 +200,13 @@ mod tests { let file_name = Path::new("foo.nr"); create_dummy_file(&dir, file_name); - let mut fm = FileManager::new(dir.path(), Box::new(|path| std::fs::read_to_string(path))); + let mut fm = FileManager::new(dir.path()); - let file_id = fm.add_file(file_name).unwrap(); + let file_id = fm.add_file_with_source(file_name, "fn foo() {}".to_string()).unwrap(); assert!(fm.path(file_id).ends_with("foo.nr")); } - #[test] - fn path_resolve_sub_module() { - let dir = tempdir().unwrap(); - let mut fm = FileManager::new(dir.path(), Box::new(|path| std::fs::read_to_string(path))); - - // Create a lib.nr file at the root. - // we now have dir/lib.nr - let file_name = Path::new("lib.nr"); - create_dummy_file(&dir, file_name); - - let file_id = fm.add_file(file_name).unwrap(); - - // Create a sub directory - // we now have: - // - dir/lib.nr - // - dir/sub_dir - let sub_dir = TempDir::new_in(&dir).unwrap(); - let sub_dir_name = sub_dir.path().file_name().unwrap().to_str().unwrap(); - - // Add foo.nr to the subdirectory - // we no have: - // - dir/lib.nr - // - dir/sub_dir/foo.nr - create_dummy_file(&sub_dir, Path::new("foo.nr")); - - // Add a parent module for the sub_dir - // we no have: - // - dir/lib.nr - // - dir/sub_dir.nr - // - dir/sub_dir/foo.nr - create_dummy_file(&dir, Path::new(&format!("{sub_dir_name}.nr"))); - - // First check for the sub_dir.nr file and add it to the FileManager - let sub_dir_file_id = fm.find_module(file_id, sub_dir_name).unwrap(); - - // Now check for files in it's subdirectory - fm.find_module(sub_dir_file_id, "foo").unwrap(); - } - /// Tests that two identical files that have different paths are treated as the same file /// e.g. if we start in the dir ./src and have a file ../../foo.nr /// that should be treated as the same file as ../ starting in ./ @@ -295,7 +217,7 @@ mod tests { let sub_dir = TempDir::new_in(&dir).unwrap(); let sub_sub_dir = TempDir::new_in(&sub_dir).unwrap(); - let mut fm = FileManager::new(dir.path(), Box::new(|path| std::fs::read_to_string(path))); + let mut fm = FileManager::new(dir.path()); // Create a lib.nr file at the root. 
let file_name = Path::new("lib.nr"); @@ -305,8 +227,9 @@ mod tests { let second_file_name = PathBuf::from(sub_sub_dir.path()).join("./../../lib.nr"); // Add both files to the file manager - let file_id = fm.add_file(file_name).unwrap(); - let second_file_id = fm.add_file(&second_file_name).unwrap(); + let file_id = fm.add_file_with_source(file_name, "fn foo() {}".to_string()).unwrap(); + let second_file_id = + fm.add_file_with_source(&second_file_name, "fn foo() {}".to_string()).unwrap(); assert_eq!(file_id, second_file_id); } diff --git a/compiler/integration-tests/circuits/main/Nargo.toml b/compiler/integration-tests/circuits/main/Nargo.toml deleted file mode 100644 index cc5a0a357fa..00000000000 --- a/compiler/integration-tests/circuits/main/Nargo.toml +++ /dev/null @@ -1,5 +0,0 @@ -[package] -name = "main" -type = "bin" -authors = [""] -[dependencies] diff --git a/compiler/integration-tests/circuits/recursion/src/main.nr b/compiler/integration-tests/circuits/recursion/src/main.nr index 7cf956d1950..173207766fb 100644 --- a/compiler/integration-tests/circuits/recursion/src/main.nr +++ b/compiler/integration-tests/circuits/recursion/src/main.nr @@ -2,13 +2,14 @@ use dep::std; fn main( verification_key: [Field; 114], - proof: [Field; 94], + proof: [Field; 93], public_inputs: [Field; 1], - key_hash: Field, - input_aggregation_object: [Field; 16] -) -> pub [Field; 16] { - let vk : [Field] = verification_key; - let p : [Field] = proof; - let pi : [Field] = public_inputs; - std::verify_proof(vk, p, pi, key_hash, input_aggregation_object) + key_hash: Field +) { + std::verify_proof( + verification_key.as_slice(), + proof.as_slice(), + public_inputs.as_slice(), + key_hash + ) } diff --git a/compiler/integration-tests/hardhat.config.ts b/compiler/integration-tests/hardhat.config.ts index 2c68b3fae0f..af4728c68ec 100644 --- a/compiler/integration-tests/hardhat.config.ts +++ b/compiler/integration-tests/hardhat.config.ts @@ -12,6 +12,9 @@ const config: HardhatUserConfig = { }, }, }, + mocha: { + timeout: 5 * 60 * 1000, + }, }; export default config; diff --git a/compiler/integration-tests/package.json b/compiler/integration-tests/package.json index 344d1361263..c4e424df480 100644 --- a/compiler/integration-tests/package.json +++ b/compiler/integration-tests/package.json @@ -16,7 +16,6 @@ "@noir-lang/backend_barretenberg": "workspace:*", "@noir-lang/noir_js": "workspace:*", "@noir-lang/noir_wasm": "workspace:*", - "@noir-lang/source-resolver": "workspace:*", "@nomicfoundation/hardhat-chai-matchers": "^2.0.0", "@nomicfoundation/hardhat-ethers": "^3.0.0", "@web/dev-server-esbuild": "^0.3.6", diff --git a/compiler/integration-tests/scripts/codegen-verifiers.sh b/compiler/integration-tests/scripts/codegen-verifiers.sh index d9d9d4f243f..e377a3ee3f8 100644 --- a/compiler/integration-tests/scripts/codegen-verifiers.sh +++ b/compiler/integration-tests/scripts/codegen-verifiers.sh @@ -1,20 +1,26 @@ -#!/bin/bash +#!/usr/bin/env bash self_path=$(dirname "$(readlink -f "$0")") repo_root=$self_path/../../.. 
# Run codegen-verifier for 1_mul -mul_dir=$repo_root/tooling/nargo_cli/tests/execution_success/1_mul +mul_dir=$repo_root/test_programs/execution_success/1_mul nargo --program-dir $mul_dir codegen-verifier -# Run codegen-verifier for main -main_dir=$repo_root/compiler/integration-tests/circuits/main -nargo --program-dir $main_dir codegen-verifier +# Run codegen-verifier for assert_statement +assert_statement_dir=$repo_root/test_programs/execution_success/assert_statement +nargo --program-dir $assert_statement_dir codegen-verifier + +# Run codegen-verifier for recursion +recursion_dir=$repo_root/compiler/integration-tests/circuits/recursion +nargo --program-dir $recursion_dir codegen-verifier # Copy compiled contracts from the root of compiler/integration-tests contracts_dir=$self_path/../contracts +rm -rf $contracts_dir mkdir $contracts_dir cp $mul_dir/contract/1_mul/plonk_vk.sol $contracts_dir/1_mul.sol -cp $main_dir/contract/main/plonk_vk.sol $contracts_dir/main.sol +cp $assert_statement_dir/contract/assert_statement/plonk_vk.sol $contracts_dir/assert_statement.sol +cp $recursion_dir/contract/recursion/plonk_vk.sol $contracts_dir/recursion.sol diff --git a/compiler/integration-tests/test/browser/compile_prove_verify.test.ts b/compiler/integration-tests/test/browser/compile_prove_verify.test.ts index 95a1aa502ad..0a829def09e 100644 --- a/compiler/integration-tests/test/browser/compile_prove_verify.test.ts +++ b/compiler/integration-tests/test/browser/compile_prove_verify.test.ts @@ -1,29 +1,20 @@ import { expect } from '@esm-bundle/chai'; -import { Logger } from 'tslog'; import * as TOML from 'smol-toml'; -import { initializeResolver } from '@noir-lang/source-resolver'; -import newCompiler, { CompiledProgram, compile, init_log_level as compilerLogLevel } from '@noir-lang/noir_wasm'; +import { compile, createFileManager } from '@noir-lang/noir_wasm'; import { Noir } from '@noir-lang/noir_js'; import { InputMap } from '@noir-lang/noirc_abi'; import { BarretenbergBackend } from '@noir-lang/backend_barretenberg'; import { getFile } from './utils.js'; -import { TEST_LOG_LEVEL } from '../environment.js'; - -const logger = new Logger({ name: 'test', minLevel: TEST_LOG_LEVEL }); - -await newCompiler(); - -compilerLogLevel('INFO'); const test_cases = [ { - case: 'tooling/nargo_cli/tests/execution_success/1_mul', + case: 'test_programs/execution_success/1_mul', numPublicInputs: 0, }, { - case: 'compiler/integration-tests/circuits/main', + case: 'test_programs/execution_success/assert_statement', numPublicInputs: 1, }, ]; @@ -32,15 +23,11 @@ const suite = Mocha.Suite.create(mocha.suite, 'Noir end to end test'); suite.timeout(60 * 20e3); //20mins -function getCircuit(noirSource: string): CompiledProgram { - // eslint-disable-next-line @typescript-eslint/no-unused-vars - initializeResolver((id: string) => { - logger.debug('source-resolver: resolving:', id); - return noirSource; - }); - - // We're ignoring this in the resolver but pass in something sensible. 
- const result = compile('/main.nr'); +async function getCircuit(projectPath: string) { + const fm = createFileManager('/'); + await fm.writeFile('./src/main.nr', await getFile(`${projectPath}/src/main.nr`)); + await fm.writeFile('./Nargo.toml', await getFile(`${projectPath}/Nargo.toml`)); + const result = await compile(fm); if (!('program' in result)) { throw new Error('Compilation failed'); } @@ -54,11 +41,9 @@ test_cases.forEach((testInfo) => { const base_relative_path = '../../../../..'; const test_case = testInfo.case; - const noir_source = await getFile(`${base_relative_path}/${test_case}/src/main.nr`); - - let noir_program: CompiledProgram; + let noir_program; try { - noir_program = getCircuit(noir_source); + noir_program = await getCircuit(`${base_relative_path}/${test_case}`); expect(noir_program, 'Compile output ').to.be.an('object'); } catch (e) { @@ -69,7 +54,7 @@ test_cases.forEach((testInfo) => { const backend = new BarretenbergBackend(noir_program); const program = new Noir(noir_program, backend); - const prover_toml = await getFile(`${base_relative_path}/${test_case}/Prover.toml`); + const prover_toml = await new Response(await getFile(`${base_relative_path}/${test_case}/Prover.toml`)).text(); const inputs: InputMap = TOML.parse(prover_toml) as InputMap; // JS Proving diff --git a/compiler/integration-tests/test/browser/recursion.test.ts b/compiler/integration-tests/test/browser/recursion.test.ts index dbf74882654..80199de5701 100644 --- a/compiler/integration-tests/test/browser/recursion.test.ts +++ b/compiler/integration-tests/test/browser/recursion.test.ts @@ -2,60 +2,46 @@ import { expect } from '@esm-bundle/chai'; import { TEST_LOG_LEVEL } from '../environment.js'; import { Logger } from 'tslog'; -import { initializeResolver } from '@noir-lang/source-resolver'; -import newCompiler, { CompiledProgram, compile, init_log_level as compilerLogLevel } from '@noir-lang/noir_wasm'; import { acvm, abi, Noir } from '@noir-lang/noir_js'; import * as TOML from 'smol-toml'; import { BarretenbergBackend } from '@noir-lang/backend_barretenberg'; import { getFile } from './utils.js'; import { Field, InputMap } from '@noir-lang/noirc_abi'; +import { createFileManager, compile } from '@noir-lang/noir_wasm'; const logger = new Logger({ name: 'test', minLevel: TEST_LOG_LEVEL }); const { default: initACVM } = acvm; const { default: newABICoder } = abi; -await newCompiler(); await newABICoder(); await initACVM(); -compilerLogLevel('INFO'); - const base_relative_path = '../../../../..'; -const circuit_main = 'compiler/integration-tests/circuits/main'; +const circuit_main = 'test_programs/execution_success/assert_statement'; const circuit_recursion = 'compiler/integration-tests/circuits/recursion'; -function getCircuit(noirSource: string): CompiledProgram { - // eslint-disable-next-line @typescript-eslint/no-unused-vars - initializeResolver((id: string) => { - logger.debug('source-resolver: resolving:', id); - return noirSource; - }); - - // We're ignoring this in the resolver but pass in something sensible. 
- const result = compile('/main.nr'); +async function getCircuit(projectPath: string) { + const fm = createFileManager('/'); + await fm.writeFile('./src/main.nr', await getFile(`${projectPath}/src/main.nr`)); + await fm.writeFile('./Nargo.toml', await getFile(`${projectPath}/Nargo.toml`)); + const result = await compile(fm); if (!('program' in result)) { throw new Error('Compilation failed'); } - return result.program; } describe('It compiles noir program code, receiving circuit bytes and abi object.', () => { - let circuit_main_source; let circuit_main_toml; - let circuit_recursion_source; before(async () => { - circuit_main_source = await getFile(`${base_relative_path}/${circuit_main}/src/main.nr`); - circuit_main_toml = await getFile(`${base_relative_path}/${circuit_main}/Prover.toml`); - - circuit_recursion_source = await getFile(`${base_relative_path}/${circuit_recursion}/src/main.nr`); + circuit_main_toml = await new Response(await getFile(`${base_relative_path}/${circuit_main}/Prover.toml`)).text(); }); it('Should generate valid inner proof for correct input, then verify proof within a proof', async () => { - const main_program = getCircuit(circuit_main_source); + const main_program = await getCircuit(`${base_relative_path}/${circuit_main}`); const main_inputs: InputMap = TOML.parse(circuit_main_toml) as InputMap; const main_backend = new BarretenbergBackend(main_program); @@ -80,12 +66,11 @@ describe('It compiles noir program code, receiving circuit bytes and abi object. proof: proofAsFields, public_inputs: [main_inputs.y as Field], key_hash: vkHash, - input_aggregation_object: ['0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0'], }; logger.debug('recursion_inputs', recursion_inputs); - const recursion_program = await getCircuit(circuit_recursion_source); + const recursion_program = await getCircuit(`${base_relative_path}/${circuit_recursion}`); const recursion_backend = new BarretenbergBackend(recursion_program); diff --git a/compiler/integration-tests/test/browser/utils.ts b/compiler/integration-tests/test/browser/utils.ts index 35588407193..a5e3fe1bc61 100644 --- a/compiler/integration-tests/test/browser/utils.ts +++ b/compiler/integration-tests/test/browser/utils.ts @@ -1,8 +1,8 @@ -export async function getFile(file_path: string): Promise { +export async function getFile(file_path: string): Promise> { const file_url = new URL(file_path, import.meta.url); const response = await fetch(file_url); if (!response.ok) throw new Error('Network response was not OK'); - return await response.text(); + return response.body as ReadableStream; } diff --git a/compiler/integration-tests/test/node/onchain_recursive_verification.test.ts b/compiler/integration-tests/test/node/onchain_recursive_verification.test.ts new file mode 100644 index 00000000000..9cdd80edc15 --- /dev/null +++ b/compiler/integration-tests/test/node/onchain_recursive_verification.test.ts @@ -0,0 +1,78 @@ +import { expect } from 'chai'; +import { ethers } from 'hardhat'; + +import { readFileSync } from 'node:fs'; +import { resolve, join } from 'path'; +import toml from 'toml'; + +import { Noir } from '@noir-lang/noir_js'; +import { BarretenbergBackend } from '@noir-lang/backend_barretenberg'; +import { Field, InputMap } from '@noir-lang/noirc_abi'; + +import { compile, createFileManager } from '@noir-lang/noir_wasm'; + +it(`smart contract can verify a recursive proof`, async () => { + const basePath = resolve(join(__dirname, '../../../../')); + const fm = createFileManager(basePath); + const 
innerCompilationResult = await compile( + fm, + join(basePath, './test_programs/execution_success/assert_statement'), + ); + if (!('program' in innerCompilationResult)) { + throw new Error('Compilation failed'); + } + const innerProgram = innerCompilationResult.program; + + const recursionCompilationResult = await compile( + fm, + join(basePath, './compiler/integration-tests/circuits/recursion'), + ); + if (!('program' in recursionCompilationResult)) { + throw new Error('Compilation failed'); + } + const recursionProgram = recursionCompilationResult.program; + + // Intermediate proof + + const inner_backend = new BarretenbergBackend(innerProgram); + const inner = new Noir(innerProgram); + + const inner_prover_toml = readFileSync( + join(basePath, `./test_programs/execution_success/assert_statement/Prover.toml`), + ).toString(); + + const inner_inputs = toml.parse(inner_prover_toml); + + const { witness: main_witness } = await inner.execute(inner_inputs); + const intermediate_proof = await inner_backend.generateIntermediateProof(main_witness); + + expect(await inner_backend.verifyIntermediateProof(intermediate_proof)).to.be.true; + + const { proofAsFields, vkAsFields, vkHash } = await inner_backend.generateIntermediateProofArtifacts( + intermediate_proof, + 1, // 1 public input + ); + + // Final proof + + const recursion_backend = new BarretenbergBackend(recursionProgram); + const recursion = new Noir(recursionProgram, recursion_backend); + + const recursion_inputs: InputMap = { + verification_key: vkAsFields, + proof: proofAsFields, + public_inputs: [inner_inputs.y as Field], + key_hash: vkHash, + }; + + const recursion_proof = await recursion.generateFinalProof(recursion_inputs); + expect(await recursion.verifyFinalProof(recursion_proof)).to.be.true; + + // Smart contract verification + + const contract = await ethers.deployContract('contracts/recursion.sol:UltraVerifier', []); + + const result = await contract.verify.staticCall(recursion_proof.proof, recursion_proof.publicInputs); + + expect(result).to.be.true; +}); diff --git a/compiler/integration-tests/test/node/smart_contract_verifier.test.ts b/compiler/integration-tests/test/node/smart_contract_verifier.test.ts index 738bc2df8dd..d870956ea7a 100644 --- a/compiler/integration-tests/test/node/smart_contract_verifier.test.ts +++ b/compiler/integration-tests/test/node/smart_contract_verifier.test.ts @@ -5,21 +5,20 @@ import { readFileSync } from 'node:fs'; import { resolve } from 'path'; import toml from 'toml'; -import { compile, init_log_level as compilerLogLevel } from '@noir-lang/noir_wasm'; import { Noir } from '@noir-lang/noir_js'; import { BarretenbergBackend } from '@noir-lang/backend_barretenberg'; -compilerLogLevel('INFO'); +import { compile, createFileManager } from '@noir-lang/noir_wasm'; const test_cases = [ { - case: 'tooling/nargo_cli/tests/execution_success/1_mul', + case: 'test_programs/execution_success/1_mul', compiled: 'contracts/1_mul.sol:UltraVerifier', numPublicInputs: 0, }, { - case: 'compiler/integration-tests/circuits/main', - compiled: 'contracts/main.sol:UltraVerifier', + case: 'test_programs/execution_success/assert_statement', + compiled: 'contracts/assert_statement.sol:UltraVerifier', numPublicInputs: 1, }, ]; @@ -31,9 +30,8 @@ test_cases.forEach((testInfo) => { const base_relative_path = '../..'; const test_case = testInfo.case; - const noir_source_path = resolve(`${base_relative_path}/${test_case}/src/main.nr`); - - const compileResult = compile(noir_source_path); + const fm = 
createFileManager(resolve(`${base_relative_path}/${test_case}`)); + const compileResult = await compile(fm); if (!('program' in compileResult)) { throw new Error('Compilation failed'); } @@ -57,7 +55,7 @@ test_cases.forEach((testInfo) => { // Smart contract verification - const contract = await ethers.deployContract(testInfo.compiled, [], {}); + const contract = await ethers.deployContract(testInfo.compiled, []); const result = await contract.verify(proofData.proof, proofData.publicInputs); diff --git a/compiler/noirc_driver/Cargo.toml b/compiler/noirc_driver/Cargo.toml index 09044b39323..eb9650e8aec 100644 --- a/compiler/noirc_driver/Cargo.toml +++ b/compiler/noirc_driver/Cargo.toml @@ -21,3 +21,7 @@ iter-extended.workspace = true fm.workspace = true serde.workspace = true fxhash.workspace = true +rust-embed.workspace = true +tracing.workspace = true + +aztec_macros = { path = "../../aztec_macros" } diff --git a/compiler/noirc_driver/build.rs b/compiler/noirc_driver/build.rs index 6bef7f1fda7..73a56142075 100644 --- a/compiler/noirc_driver/build.rs +++ b/compiler/noirc_driver/build.rs @@ -1,4 +1,5 @@ const GIT_COMMIT: &&str = &"GIT_COMMIT"; +use std::path::Path; fn main() { // Only use build_data if the environment variable isn't set @@ -8,4 +9,21 @@ fn main() { build_data::set_GIT_DIRTY(); build_data::no_debug_rebuilds(); } + + let stdlib_src_dir = Path::new("../../noir_stdlib/"); + rerun_if_stdlib_changes(stdlib_src_dir); +} + +/// Expects that the given directory is an existing path +fn rerun_if_stdlib_changes(directory: &Path) { + for entry in std::fs::read_dir(directory).unwrap() { + let path = entry.unwrap().path(); + + if path.is_dir() { + rerun_if_stdlib_changes(&path); + } else { + // Tell Cargo that if the given file changes, to rerun this build script. + println!("cargo:rerun-if-changed={}", path.to_string_lossy()); + } + } } diff --git a/compiler/noirc_driver/src/abi_gen.rs b/compiler/noirc_driver/src/abi_gen.rs new file mode 100644 index 00000000000..e546cd822b7 --- /dev/null +++ b/compiler/noirc_driver/src/abi_gen.rs @@ -0,0 +1,145 @@ +use std::collections::BTreeMap; + +use acvm::acir::native_types::Witness; +use iter_extended::{btree_map, vecmap}; +use noirc_abi::{Abi, AbiParameter, AbiReturnType, AbiType}; +use noirc_frontend::{ + hir::Context, + hir_def::{function::Param, stmt::HirPattern}, + node_interner::{FuncId, NodeInterner}, + Visibility, +}; +use std::ops::Range; + +/// Arranges a function signature and a generated circuit's return witnesses into a +/// `noirc_abi::Abi`. +pub(super) fn gen_abi( + context: &Context, + func_id: &FuncId, + input_witnesses: Vec, + return_witnesses: Vec, + return_visibility: Visibility, +) -> Abi { + let (parameters, return_type) = compute_function_abi(context, func_id); + let param_witnesses = param_witnesses_from_abi_param(¶meters, input_witnesses); + let return_type = return_type + .map(|typ| AbiReturnType { abi_type: typ, visibility: return_visibility.into() }); + Abi { parameters, return_type, param_witnesses, return_witnesses } +} + +pub(super) fn compute_function_abi( + context: &Context, + func_id: &FuncId, +) -> (Vec, Option) { + let func_meta = context.def_interner.function_meta(func_id); + + let (parameters, return_type) = func_meta.function_signature(); + let parameters = into_abi_params(context, parameters); + let return_type = return_type.map(|typ| AbiType::from_type(context, &typ)); + (parameters, return_type) +} + +/// Attempts to retrieve the name of this parameter. 
Returns None +/// if this parameter is a tuple or struct pattern. +fn get_param_name<'a>(pattern: &HirPattern, interner: &'a NodeInterner) -> Option<&'a str> { + match pattern { + HirPattern::Identifier(ident) => Some(interner.definition_name(ident.id)), + HirPattern::Mutable(pattern, _) => get_param_name(pattern, interner), + HirPattern::Tuple(_, _) => None, + HirPattern::Struct(_, _, _) => None, + } +} + +fn into_abi_params(context: &Context, params: Vec) -> Vec { + vecmap(params, |(pattern, typ, vis)| { + let param_name = get_param_name(&pattern, &context.def_interner) + .expect("Abi for tuple and struct parameters is unimplemented") + .to_owned(); + let as_abi = AbiType::from_type(context, &typ); + AbiParameter { name: param_name, typ: as_abi, visibility: vis.into() } + }) +} + +// Takes each abi parameter and shallowly maps to the expected witness range in which the +// parameter's constituent values live. +fn param_witnesses_from_abi_param( + abi_params: &Vec, + input_witnesses: Vec, +) -> BTreeMap>> { + let mut idx = 0_usize; + if input_witnesses.is_empty() { + return BTreeMap::new(); + } + + btree_map(abi_params, |param| { + let num_field_elements_needed = param.typ.field_count() as usize; + let param_witnesses = &input_witnesses[idx..idx + num_field_elements_needed]; + + // It's likely that `param_witnesses` will consist of mostly incrementing witness indices. + // We then want to collapse these into `Range`s to save space. + let param_witnesses = collapse_ranges(param_witnesses); + idx += num_field_elements_needed; + (param.name.clone(), param_witnesses) + }) +} + +/// Takes a vector of [`Witnesses`][`Witness`] and collapses it into a vector of [`Range`]s of [`Witnesses`][`Witness`]. +fn collapse_ranges(witnesses: &[Witness]) -> Vec> { + if witnesses.is_empty() { + return Vec::new(); + } + let mut wit = Vec::new(); + let mut last_wit: Witness = witnesses[0]; + + for (i, witness) in witnesses.iter().enumerate() { + if i == 0 { + continue; + }; + let witness_index = witness.witness_index(); + let prev_witness_index = witnesses[i - 1].witness_index(); + if witness_index != prev_witness_index + 1 { + wit.push(last_wit..Witness(prev_witness_index + 1)); + last_wit = *witness; + }; + } + + let last_witness = witnesses.last().unwrap().witness_index(); + wit.push(last_wit..Witness(last_witness + 1)); + + wit +} + +#[cfg(test)] +mod test { + use std::ops::Range; + + use acvm::acir::native_types::Witness; + + use super::collapse_ranges; + + #[test] + fn collapses_single_range() { + let witnesses: Vec<_> = vec![1, 2, 3].into_iter().map(Witness::from).collect(); + + let collapsed_witnesses = collapse_ranges(&witnesses); + + assert_eq!(collapsed_witnesses, vec![Range { start: Witness(1), end: Witness(4) },]); + } + + #[test] + fn collapse_ranges_correctly() { + let witnesses: Vec<_> = + vec![1, 2, 3, 5, 6, 2, 3, 4].into_iter().map(Witness::from).collect(); + + let collapsed_witnesses = collapse_ranges(&witnesses); + + assert_eq!( + collapsed_witnesses, + vec![ + Range { start: Witness(1), end: Witness(4) }, + Range { start: Witness(5), end: Witness(7) }, + Range { start: Witness(2), end: Witness(5) } + ] + ); + } +} diff --git a/compiler/noirc_driver/src/debug.rs b/compiler/noirc_driver/src/debug.rs index 144e636b534..84a3e143357 100644 --- a/compiler/noirc_driver/src/debug.rs +++ b/compiler/noirc_driver/src/debug.rs @@ -31,7 +31,7 @@ pub(crate) fn filter_relevant_files( let mut file_map = BTreeMap::new(); for file_id in files_with_debug_symbols { - let file_source = 
file_manager.fetch_file(file_id).source(); + let file_source = file_manager.fetch_file(file_id); file_map.insert( file_id, diff --git a/compiler/noirc_driver/src/lib.rs b/compiler/noirc_driver/src/lib.rs index ec7a5091ffd..7136580b770 100644 --- a/compiler/noirc_driver/src/lib.rs +++ b/compiler/noirc_driver/src/lib.rs @@ -4,23 +4,27 @@ #![warn(clippy::semicolon_if_nothing_returned)] use clap::Args; -use fm::FileId; +use fm::{FileId, FileManager}; use iter_extended::vecmap; use noirc_abi::{AbiParameter, AbiType, ContractEvent}; use noirc_errors::{CustomDiagnostic, FileDiagnostic}; +use noirc_evaluator::create_circuit; use noirc_evaluator::errors::RuntimeError; -use noirc_evaluator::{create_circuit, into_abi_params}; use noirc_frontend::graph::{CrateId, CrateName}; use noirc_frontend::hir::def_map::{Contract, CrateDefMap}; use noirc_frontend::hir::Context; +use noirc_frontend::macros_api::MacroProcessor; use noirc_frontend::monomorphization::monomorphize; use noirc_frontend::node_interner::FuncId; use serde::{Deserialize, Serialize}; use std::path::Path; +use tracing::info; +mod abi_gen; mod contract; mod debug; mod program; +mod stdlib; use debug::filter_relevant_files; @@ -59,6 +63,14 @@ pub struct CompileOptions { /// Suppress warnings #[arg(long, conflicts_with = "deny_warnings")] pub silence_warnings: bool, + + /// Output ACIR gzipped bytecode instead of the JSON artefact + #[arg(long, hide = true)] + pub only_acir: bool, + + /// Disables the builtin macros being used in the compiler + #[arg(long, hide = true)] + pub disable_macros: bool, } /// Helper type used to signify where only warnings are expected in file diagnostics @@ -70,13 +82,44 @@ pub type ErrorsAndWarnings = Vec; /// Helper type for connecting a compilation artifact to the errors or warnings which were produced during compilation. pub type CompilationResult = Result<(T, Warnings), ErrorsAndWarnings>; +/// Helper method to return a file manager instance with the stdlib already added +/// +/// TODO: This should become the canonical way to create a file manager and +/// TODO if we use a File manager trait, we can move file manager into this crate +/// TODO as a module +pub fn file_manager_with_stdlib(root: &Path) -> FileManager { + let mut file_manager = FileManager::new(root); + + add_stdlib_source_to_file_manager(&mut file_manager); + + file_manager +} + +/// Adds the source code for the stdlib into the file manager +fn add_stdlib_source_to_file_manager(file_manager: &mut FileManager) { + // Add the stdlib contents to the file manager, since every package automatically has a dependency + // on the stdlib. For other dependencies, we read the package.Dependencies file to add their file + // contents to the file manager. However since the dependency on the stdlib is implicit, we need + // to manually add it here. + let stdlib_paths_with_source = stdlib::stdlib_paths_with_source(); + for (path, source) in stdlib_paths_with_source { + file_manager.add_file_with_source_canonical_path(Path::new(&path), source); + } +} + /// Adds the file from the file system at `Path` to the crate graph as a root file +/// +/// Note: This methods adds the stdlib as a dependency to the crate. +/// This assumes that the stdlib has already been added to the file manager. 
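(Editorial aside, before `prepare_crate` below: a minimal caller-side sketch of the flow these helpers imply, using only the APIs visible in this diff — `file_manager_with_stdlib` and `FileManager::add_file_with_source`. It is not the actual nargo driver code; the relative path `src/main.nr` and the helper name are illustrative assumptions. Sources, including the embedded stdlib, are loaded into the `FileManager` up front, and `prepare_crate` later resolves them by name via `name_to_id` instead of reading from disk.)

use std::path::Path;

// Sketch only: builds a FileManager seeded with the embedded stdlib and the
// package's entry point, matching the pre-loading that `prepare_crate` now expects.
fn file_manager_for_package(root: &Path, main_source: String) -> fm::FileManager {
    let mut file_manager = noirc_driver::file_manager_with_stdlib(root);
    // Every source file must be registered before the compiler runs;
    // `prepare_crate` only looks files up with `name_to_id` and panics otherwise.
    file_manager
        .add_file_with_source(Path::new("src/main.nr"), main_source)
        .expect("src/main.nr should not already be in the file manager");
    file_manager
}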
pub fn prepare_crate(context: &mut Context, file_name: &Path) -> CrateId { let path_to_std_lib_file = Path::new(STD_CRATE_NAME).join("lib.nr"); - let std_file_id = context.file_manager.add_file(&path_to_std_lib_file).unwrap(); + let std_file_id = context + .file_manager + .name_to_id(path_to_std_lib_file) + .expect("stdlib file id is expected to be present"); let std_crate_id = context.crate_graph.add_stdlib(std_file_id); - let root_file_id = context.file_manager.add_file(file_name).unwrap(); + let root_file_id = context.file_manager.name_to_id(file_name.to_path_buf()).unwrap_or_else(|| panic!("files are expected to be added to the FileManager before reaching the compiler file_path: {file_name:?}")); let root_crate_id = context.crate_graph.add_crate_root(root_file_id); @@ -87,7 +130,10 @@ pub fn prepare_crate(context: &mut Context, file_name: &Path) -> CrateId { // Adds the file from the file system at `Path` to the crate graph pub fn prepare_dependency(context: &mut Context, file_name: &Path) -> CrateId { - let root_file_id = context.file_manager.add_file(file_name).unwrap(); + let root_file_id = context + .file_manager + .name_to_id(file_name.to_path_buf()) + .unwrap_or_else(|| panic!("files are expected to be added to the FileManager before reaching the compiler file_path: {file_name:?}")); let crate_id = context.crate_graph.add_crate(root_file_id); @@ -115,13 +161,21 @@ pub fn add_dep( /// /// This returns a (possibly empty) vector of any warnings found on success. /// On error, this returns a non-empty vector of warnings and error messages, with at least one error. +#[tracing::instrument(level = "trace", skip(context))] pub fn check_crate( context: &mut Context, crate_id: CrateId, deny_warnings: bool, + disable_macros: bool, ) -> CompilationResult<()> { + let macros: Vec<&dyn MacroProcessor> = if disable_macros { + vec![] + } else { + vec![&aztec_macros::AztecMacro as &dyn MacroProcessor] + }; + let mut errors = vec![]; - let diagnostics = CrateDefMap::collect_defs(crate_id, context); + let diagnostics = CrateDefMap::collect_defs(crate_id, context, macros); errors.extend(diagnostics.into_iter().map(|(error, file_id)| { let diagnostic: CustomDiagnostic = error.into(); diagnostic.in_file(file_id) @@ -140,12 +194,7 @@ pub fn compute_function_abi( ) -> Option<(Vec, Option)> { let main_function = context.get_main_function(crate_id)?; - let func_meta = context.def_interner.function_meta(&main_function); - - let (parameters, return_type) = func_meta.into_function_signature(); - let parameters = into_abi_params(context, parameters); - let return_type = return_type.map(|typ| AbiType::from_type(context, &typ)); - Some((parameters, return_type)) + Some(abi_gen::compute_function_abi(context, &main_function)) } /// Run the frontend to check the crate for errors then compile the main function if there were none @@ -159,7 +208,8 @@ pub fn compile_main( cached_program: Option, force_compile: bool, ) -> CompilationResult { - let (_, mut warnings) = check_crate(context, crate_id, options.deny_warnings)?; + let (_, mut warnings) = + check_crate(context, crate_id, options.deny_warnings, options.disable_macros)?; let main = context.get_main_function(&crate_id).ok_or_else(|| { // TODO(#2155): This error might be a better to exist in Nargo @@ -192,7 +242,8 @@ pub fn compile_contract( crate_id: CrateId, options: &CompileOptions, ) -> CompilationResult { - let (_, warnings) = check_crate(context, crate_id, options.deny_warnings)?; + let (_, warnings) = + check_crate(context, crate_id, options.deny_warnings, 
options.disable_macros)?; // TODO: We probably want to error if contracts is empty let contracts = context.get_all_contracts(&crate_id); @@ -324,6 +375,7 @@ fn compile_contract_inner( /// Compile the current crate using `main_function` as the entrypoint. /// /// This function assumes [`check_crate`] is called beforehand. +#[tracing::instrument(level = "trace", skip_all, fields(function_name = context.function_name(&main_function)))] pub fn compile_no_check( context: &Context, options: &CompileOptions, @@ -342,12 +394,15 @@ pub fn compile_no_check( force_compile || options.print_acir || options.show_brillig || options.show_ssa; if !force_compile && hashes_match { + info!("Program matches existing artifact, returning early"); return Ok(cached_program.expect("cache must exist for hashes to match")); } + let visibility = program.return_visibility; + let (circuit, debug, input_witnesses, return_witnesses, warnings) = + create_circuit(program, options.show_ssa, options.show_brillig)?; - let (circuit, debug, abi, warnings) = - create_circuit(context, program, options.show_ssa, options.show_brillig)?; - + let abi = + abi_gen::gen_abi(context, &main_function, input_witnesses, return_witnesses, visibility); let file_map = filter_relevant_files(&[debug.clone()], &context.file_manager); Ok(CompiledProgram { diff --git a/compiler/noirc_driver/src/stdlib.rs b/compiler/noirc_driver/src/stdlib.rs new file mode 100644 index 00000000000..5a91e3f45b5 --- /dev/null +++ b/compiler/noirc_driver/src/stdlib.rs @@ -0,0 +1,24 @@ +use rust_embed::RustEmbed; + +#[derive(RustEmbed)] +#[folder = "../../noir_stdlib/src"] +#[cfg_attr(not(target_os = "windows"), prefix = "std/")] +#[cfg_attr(target_os = "windows", prefix = r"std\")] // Note reversed slash direction +struct StdLibAssets; + +// Returns a vector of tuples containing the path to a stdlib file in the std lib crate +// along with the source code of that file. +// +// This is needed because when we preload the file manager, it needs to know where +// the source code for the stdlib is. The stdlib is treated special because it comes with +// the compiler and is never included as a dependency like other user defined crates. +pub(crate) fn stdlib_paths_with_source() -> Vec<(String, String)> { + StdLibAssets::iter() + .map(|path| { + let source = std::str::from_utf8(StdLibAssets::get(&path).unwrap().data.as_ref()) + .unwrap() + .to_string(); + (path.to_string(), source) + }) + .collect() +} diff --git a/compiler/noirc_errors/Cargo.toml b/compiler/noirc_errors/Cargo.toml index 0cb6afc73bd..02e97b2c670 100644 --- a/compiler/noirc_errors/Cargo.toml +++ b/compiler/noirc_errors/Cargo.toml @@ -15,3 +15,4 @@ fm.workspace = true chumsky.workspace = true serde.workspace = true serde_with = "3.2.0" +tracing.workspace = true \ No newline at end of file diff --git a/compiler/noirc_errors/src/debug_info.rs b/compiler/noirc_errors/src/debug_info.rs index 888c24adc1a..ee40ced19bf 100644 --- a/compiler/noirc_errors/src/debug_info.rs +++ b/compiler/noirc_errors/src/debug_info.rs @@ -38,6 +38,7 @@ impl DebugInfo { /// The [`OpcodeLocation`]s are generated with the ACIR, but passing the ACIR through a transformation step /// renders the old `OpcodeLocation`s invalid. The AcirTransformationMap is able to map the old `OpcodeLocation` to the new ones. /// Note: One old `OpcodeLocation` might have transformed into more than one new `OpcodeLocation`. 
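(Editorial aside on the remapping described just above: a small, self-contained sketch of the idea, using hypothetical stand-in types rather than the acvm `AcirTransformationMap` API. Each old opcode location's call stack is re-attached to every new location that the old opcode was transformed into, which is how one old location can fan out into several new ones.)

use std::collections::{BTreeMap, HashMap};

// Hypothetical stand-ins for OpcodeLocation and its associated call stack.
type OpcodeIndex = usize;
type CallStack = Vec<String>;

// Re-keys a location map after a transformation in which one old opcode
// may have become several new opcodes (or been removed entirely).
fn remap_locations(
    old_locations: BTreeMap<OpcodeIndex, CallStack>,
    fan_out: &HashMap<OpcodeIndex, Vec<OpcodeIndex>>,
) -> BTreeMap<OpcodeIndex, CallStack> {
    let mut new_locations = BTreeMap::new();
    for (old_index, call_stack) in old_locations {
        // An old index missing from `fan_out` simply disappears from the new map.
        for &new_index in fan_out.get(&old_index).into_iter().flatten() {
            new_locations.insert(new_index, call_stack.clone());
        }
    }
    new_locations
}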
+ #[tracing::instrument(level = "trace", skip(self, update_map))] pub fn update_acir(&mut self, update_map: AcirTransformationMap) { let old_locations = mem::take(&mut self.locations); diff --git a/compiler/noirc_errors/src/position.rs b/compiler/noirc_errors/src/position.rs index e308eb9a2c7..007ec58ca27 100644 --- a/compiler/noirc_errors/src/position.rs +++ b/compiler/noirc_errors/src/position.rs @@ -65,6 +65,10 @@ impl Span { Span::inclusive(start, start) } + pub fn empty(position: u32) -> Span { + Span::from(position..position) + } + #[must_use] pub fn merge(self, other: Span) -> Span { Span(self.0.merge(other.0)) @@ -81,6 +85,16 @@ impl Span { pub fn end(&self) -> u32 { self.0.end().into() } + + pub fn contains(&self, other: &Span) -> bool { + self.start() <= other.start() && self.end() >= other.end() + } + + pub fn is_smaller(&self, other: &Span) -> bool { + let self_distance = self.end() - self.start(); + let other_distance = other.end() - other.start(); + self_distance < other_distance + } } impl From for Range { @@ -129,4 +143,8 @@ impl Location { pub fn dummy() -> Self { Self { span: Span::single_char(0), file: FileId::dummy() } } + + pub fn contains(&self, other: &Location) -> bool { + self.file == other.file && self.span.contains(&other.span) + } } diff --git a/compiler/noirc_evaluator/Cargo.toml b/compiler/noirc_evaluator/Cargo.toml index c9f5f28478b..a8f0e8d83a9 100644 --- a/compiler/noirc_evaluator/Cargo.toml +++ b/compiler/noirc_evaluator/Cargo.toml @@ -10,7 +10,6 @@ license.workspace = true [dependencies] noirc_frontend.workspace = true noirc_errors.workspace = true -noirc_abi.workspace = true acvm.workspace = true fxhash.workspace = true iter-extended.workspace = true @@ -18,3 +17,4 @@ thiserror.workspace = true num-bigint = "0.4" im = { version = "15.1", features = ["serde"] } serde.workspace = true +tracing.workspace = true \ No newline at end of file diff --git a/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_black_box.rs b/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_black_box.rs index 0d97dd12601..a6d3220fa85 100644 --- a/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_black_box.rs +++ b/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_black_box.rs @@ -1,9 +1,9 @@ -use acvm::acir::{ - brillig::{BlackBoxOp, HeapVector, RegisterOrMemory}, - BlackBoxFunc, -}; +use acvm::acir::{brillig::BlackBoxOp, BlackBoxFunc}; -use crate::brillig::brillig_ir::BrilligContext; +use crate::brillig::brillig_ir::{ + brillig_variable::{BrilligVariable, BrilligVector}, + BrilligContext, +}; /// Transforms SSA's black box function calls into the corresponding brillig instructions /// Extracting arguments and results from the SSA function call @@ -11,31 +11,31 @@ use crate::brillig::brillig_ir::BrilligContext; pub(crate) fn convert_black_box_call( brillig_context: &mut BrilligContext, bb_func: &BlackBoxFunc, - function_arguments: &[RegisterOrMemory], - function_results: &[RegisterOrMemory], + function_arguments: &[BrilligVariable], + function_results: &[BrilligVariable], ) { match bb_func { BlackBoxFunc::SHA256 => { - if let ([message], [RegisterOrMemory::HeapArray(result_array)]) = + if let ([message], [BrilligVariable::BrilligArray(result_array)]) = (function_arguments, function_results) { let message_vector = convert_array_or_vector(brillig_context, message, bb_func); brillig_context.black_box_op_instruction(BlackBoxOp::Sha256 { - message: message_vector, - output: *result_array, + message: message_vector.to_heap_vector(), + output: 
result_array.to_heap_array(), }); } else { unreachable!("ICE: SHA256 expects one array argument and one array result") } } BlackBoxFunc::Blake2s => { - if let ([message], [RegisterOrMemory::HeapArray(result_array)]) = + if let ([message], [BrilligVariable::BrilligArray(result_array)]) = (function_arguments, function_results) { let message_vector = convert_array_or_vector(brillig_context, message, bb_func); brillig_context.black_box_op_instruction(BlackBoxOp::Blake2s { - message: message_vector, - output: *result_array, + message: message_vector.to_heap_vector(), + output: result_array.to_heap_array(), }); } else { unreachable!("ICE: Blake2s expects one array argument and one array result") @@ -43,66 +43,75 @@ pub(crate) fn convert_black_box_call( } BlackBoxFunc::Keccak256 => { if let ( - [message, RegisterOrMemory::RegisterIndex(array_size)], - [RegisterOrMemory::HeapArray(result_array)], + [message, BrilligVariable::Simple(array_size)], + [BrilligVariable::BrilligArray(result_array)], ) = (function_arguments, function_results) { let mut message_vector = convert_array_or_vector(brillig_context, message, bb_func); message_vector.size = *array_size; brillig_context.black_box_op_instruction(BlackBoxOp::Keccak256 { - message: message_vector, - output: *result_array, + message: message_vector.to_heap_vector(), + output: result_array.to_heap_array(), }); } else { unreachable!("ICE: Keccak256 expects message, message size and result array") } } - BlackBoxFunc::HashToField128Security => { - if let ([message], [RegisterOrMemory::RegisterIndex(result_register)]) = - (function_arguments, function_results) + BlackBoxFunc::EcdsaSecp256k1 => { + if let ( + [BrilligVariable::BrilligArray(public_key_x), BrilligVariable::BrilligArray(public_key_y), BrilligVariable::BrilligArray(signature), message], + [BrilligVariable::Simple(result_register)], + ) = (function_arguments, function_results) { - let message_vector = convert_array_or_vector(brillig_context, message, bb_func); - brillig_context.black_box_op_instruction(BlackBoxOp::HashToField128Security { - message: message_vector, - output: *result_register, + let message_hash_vector = + convert_array_or_vector(brillig_context, message, bb_func); + brillig_context.black_box_op_instruction(BlackBoxOp::EcdsaSecp256k1 { + hashed_msg: message_hash_vector.to_heap_vector(), + public_key_x: public_key_x.to_heap_array(), + public_key_y: public_key_y.to_heap_array(), + signature: signature.to_heap_array(), + result: *result_register, }); } else { - unreachable!("ICE: HashToField128Security expects one array argument and one register result") + unreachable!( + "ICE: EcdsaSecp256k1 expects four array arguments and one register result" + ) } } - BlackBoxFunc::EcdsaSecp256k1 => { + BlackBoxFunc::EcdsaSecp256r1 => { if let ( - [RegisterOrMemory::HeapArray(public_key_x), RegisterOrMemory::HeapArray(public_key_y), RegisterOrMemory::HeapArray(signature), message], - [RegisterOrMemory::RegisterIndex(result_register)], + [BrilligVariable::BrilligArray(public_key_x), BrilligVariable::BrilligArray(public_key_y), BrilligVariable::BrilligArray(signature), message], + [BrilligVariable::Simple(result_register)], ) = (function_arguments, function_results) { let message_hash_vector = convert_array_or_vector(brillig_context, message, bb_func); - brillig_context.black_box_op_instruction(BlackBoxOp::EcdsaSecp256k1 { - hashed_msg: message_hash_vector, - public_key_x: *public_key_x, - public_key_y: *public_key_y, - signature: *signature, + 
brillig_context.black_box_op_instruction(BlackBoxOp::EcdsaSecp256r1 { + hashed_msg: message_hash_vector.to_heap_vector(), + public_key_x: public_key_x.to_heap_array(), + public_key_y: public_key_y.to_heap_array(), + signature: signature.to_heap_array(), result: *result_register, }); } else { unreachable!( - "ICE: EcdsaSecp256k1 expects four array arguments and one register result" + "ICE: EcdsaSecp256r1 expects four array arguments and one register result" ) } } + BlackBoxFunc::PedersenCommitment => { if let ( - [message, RegisterOrMemory::RegisterIndex(domain_separator)], - [RegisterOrMemory::HeapArray(result_array)], + [message, BrilligVariable::Simple(domain_separator)], + [BrilligVariable::BrilligArray(result_array)], ) = (function_arguments, function_results) { let message_vector = convert_array_or_vector(brillig_context, message, bb_func); brillig_context.black_box_op_instruction(BlackBoxOp::PedersenCommitment { - inputs: message_vector, + inputs: message_vector.to_heap_vector(), domain_separator: *domain_separator, - output: *result_array, + output: result_array.to_heap_array(), }); } else { unreachable!("ICE: Pedersen expects one array argument, a register for the domain separator, and one array result") @@ -110,13 +119,13 @@ pub(crate) fn convert_black_box_call( } BlackBoxFunc::PedersenHash => { if let ( - [message, RegisterOrMemory::RegisterIndex(domain_separator)], - [RegisterOrMemory::RegisterIndex(result)], + [message, BrilligVariable::Simple(domain_separator)], + [BrilligVariable::Simple(result)], ) = (function_arguments, function_results) { let message_vector = convert_array_or_vector(brillig_context, message, bb_func); brillig_context.black_box_op_instruction(BlackBoxOp::PedersenHash { - inputs: message_vector, + inputs: message_vector.to_heap_vector(), domain_separator: *domain_separator, output: *result, }); @@ -126,8 +135,8 @@ pub(crate) fn convert_black_box_call( } BlackBoxFunc::SchnorrVerify => { if let ( - [RegisterOrMemory::RegisterIndex(public_key_x), RegisterOrMemory::RegisterIndex(public_key_y), RegisterOrMemory::HeapArray(signature), message], - [RegisterOrMemory::RegisterIndex(result_register)], + [BrilligVariable::Simple(public_key_x), BrilligVariable::Simple(public_key_y), BrilligVariable::BrilligArray(signature), message], + [BrilligVariable::Simple(result_register)], ) = (function_arguments, function_results) { let message_hash = convert_array_or_vector(brillig_context, message, bb_func); @@ -135,8 +144,8 @@ pub(crate) fn convert_black_box_call( brillig_context.black_box_op_instruction(BlackBoxOp::SchnorrVerify { public_key_x: *public_key_x, public_key_y: *public_key_y, - message: message_hash, - signature, + message: message_hash.to_heap_vector(), + signature: signature.to_heap_vector(), result: *result_register, }); } else { @@ -145,14 +154,14 @@ pub(crate) fn convert_black_box_call( } BlackBoxFunc::FixedBaseScalarMul => { if let ( - [RegisterOrMemory::RegisterIndex(low), RegisterOrMemory::RegisterIndex(high)], - [RegisterOrMemory::HeapArray(result_array)], + [BrilligVariable::Simple(low), BrilligVariable::Simple(high)], + [BrilligVariable::BrilligArray(result_array)], ) = (function_arguments, function_results) { brillig_context.black_box_op_instruction(BlackBoxOp::FixedBaseScalarMul { low: *low, high: *high, - result: *result_array, + result: result_array.to_heap_array(), }); } else { unreachable!( @@ -160,18 +169,35 @@ pub(crate) fn convert_black_box_call( ) } } - _ => unimplemented!("ICE: Black box function {:?} is not implemented", bb_func), + 
BlackBoxFunc::AND => { + unreachable!("ICE: `BlackBoxFunc::AND` calls should be transformed into a `BinaryOp`") + } + BlackBoxFunc::XOR => { + unreachable!("ICE: `BlackBoxFunc::XOR` calls should be transformed into a `BinaryOp`") + } + BlackBoxFunc::RANGE => unreachable!( + "ICE: `BlackBoxFunc::RANGE` calls should be transformed into a `Instruction::Cast`" + ), + BlackBoxFunc::RecursiveAggregation => unimplemented!( + "ICE: `BlackBoxFunc::RecursiveAggregation` is not implemented by the Brillig VM" + ), + BlackBoxFunc::Blake3 => { + unimplemented!("ICE: `BlackBoxFunc::Blake3` is not implemented by the Brillig VM") + } + BlackBoxFunc::Keccakf1600 => { + unimplemented!("ICE: `BlackBoxFunc::Keccakf1600` is not implemented by the Brillig VM") + } } } fn convert_array_or_vector( brillig_context: &mut BrilligContext, - array_or_vector: &RegisterOrMemory, + array_or_vector: &BrilligVariable, bb_func: &BlackBoxFunc, -) -> HeapVector { +) -> BrilligVector { match array_or_vector { - RegisterOrMemory::HeapArray(array) => brillig_context.array_to_vector(array), - RegisterOrMemory::HeapVector(vector) => *vector, + BrilligVariable::BrilligArray(array) => brillig_context.array_to_vector(array), + BrilligVariable::BrilligVector(vector) => *vector, _ => unreachable!( "ICE: {} expected an array or a vector, but got {:?}", bb_func.name(), diff --git a/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_block.rs b/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_block.rs index 18fd822b07d..db005d9d438 100644 --- a/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_block.rs +++ b/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_block.rs @@ -1,6 +1,6 @@ +use crate::brillig::brillig_ir::brillig_variable::{BrilligArray, BrilligVariable, BrilligVector}; use crate::brillig::brillig_ir::{ - extract_heap_array, extract_register, extract_registers, BrilligBinaryOp, BrilligContext, - BRILLIG_INTEGER_ARITHMETIC_BIT_SIZE, + BrilligBinaryOp, BrilligContext, BRILLIG_INTEGER_ARITHMETIC_BIT_SIZE, }; use crate::ssa::ir::dfg::CallStack; use crate::ssa::ir::{ @@ -13,7 +13,7 @@ use crate::ssa::ir::{ types::{NumericType, Type}, value::{Value, ValueId}, }; -use acvm::acir::brillig::{BinaryFieldOp, BinaryIntOp, HeapArray, RegisterIndex, RegisterOrMemory}; +use acvm::acir::brillig::{BinaryFieldOp, BinaryIntOp, RegisterIndex, RegisterOrMemory}; use acvm::brillig_vm::brillig::HeapVector; use acvm::FieldElement; use fxhash::{FxHashMap as HashMap, FxHashSet as HashSet}; @@ -53,7 +53,7 @@ impl<'block> BrilligBlock<'block> { variables .get_available_variables(function_context) .into_iter() - .flat_map(extract_registers) + .flat_map(|variable| variable.extract_registers()) .collect(), ); let last_uses = function_context.liveness.get_last_uses(&block_id).clone(); @@ -159,7 +159,7 @@ impl<'block> BrilligBlock<'block> { .iter() .flat_map(|value_id| { let return_variable = self.convert_ssa_value(*value_id, dfg); - extract_registers(return_variable) + return_variable.extract_registers() }) .collect(); self.brillig_context.return_instruction(&return_registers); @@ -168,32 +168,44 @@ impl<'block> BrilligBlock<'block> { } /// Passes an arbitrary variable from the registers of the source to the registers of the destination - fn pass_variable(&mut self, source: RegisterOrMemory, destination: RegisterOrMemory) { + fn pass_variable(&mut self, source: BrilligVariable, destination: BrilligVariable) { match (source, destination) { ( - RegisterOrMemory::RegisterIndex(source_register), - 
RegisterOrMemory::RegisterIndex(destination_register), + BrilligVariable::Simple(source_register), + BrilligVariable::Simple(destination_register), ) => { self.brillig_context.mov_instruction(destination_register, source_register); } ( - RegisterOrMemory::HeapArray(HeapArray { pointer: source_pointer, .. }), - RegisterOrMemory::HeapArray(HeapArray { pointer: destination_pointer, .. }), + BrilligVariable::BrilligArray(BrilligArray { + pointer: source_pointer, + size: _, + rc: source_rc, + }), + BrilligVariable::BrilligArray(BrilligArray { + pointer: destination_pointer, + size: _, + rc: destination_rc, + }), ) => { self.brillig_context.mov_instruction(destination_pointer, source_pointer); + self.brillig_context.mov_instruction(destination_rc, source_rc); } ( - RegisterOrMemory::HeapVector(HeapVector { + BrilligVariable::BrilligVector(BrilligVector { pointer: source_pointer, size: source_size, + rc: source_rc, }), - RegisterOrMemory::HeapVector(HeapVector { + BrilligVariable::BrilligVector(BrilligVector { pointer: destination_pointer, size: destination_size, + rc: destination_rc, }), ) => { self.brillig_context.mov_instruction(destination_pointer, source_pointer); self.brillig_context.mov_instruction(destination_size, source_size); + self.brillig_context.mov_instruction(destination_rc, source_rc); } (_, _) => { unreachable!("ICE: Cannot pass value from {:?} to {:?}", source, destination); @@ -214,7 +226,7 @@ impl<'block> BrilligBlock<'block> { // In the case of arrays, the values should already be in memory and the register should // Be a valid pointer to the array. // For slices, two registers are passed, the pointer to the data and a register holding the size of the slice. - Type::Numeric(_) | Type::Array(..) | Type::Slice(..) | Type::Reference => { + Type::Numeric(_) | Type::Array(..) | Type::Slice(..) | Type::Reference(_) => { self.variables.get_block_param( self.function_context, self.block_id, @@ -264,7 +276,25 @@ impl<'block> BrilligBlock<'block> { result_value, dfg, ); - self.brillig_context.allocate_variable_instruction(address_register); + match dfg.type_of_value(result_value) { + Type::Reference(element) => match *element { + Type::Array(..) => { + self.brillig_context + .allocate_array_reference_instruction(address_register); + } + Type::Slice(..) => { + self.brillig_context + .allocate_vector_reference_instruction(address_register); + } + _ => { + self.brillig_context + .allocate_simple_reference_instruction(address_register); + } + }, + _ => { + unreachable!("ICE: Allocate on non-reference type") + } + } } Instruction::Store { address, value } => { let address_register = self.convert_ssa_register_value(*address, dfg); @@ -299,10 +329,11 @@ impl<'block> BrilligBlock<'block> { Value::ForeignFunction(func_name) => { let result_ids = dfg.instruction_results(instruction_id); - let input_registers = - vecmap(arguments, |value_id| self.convert_ssa_value(*value_id, dfg)); + let input_registers = vecmap(arguments, |value_id| { + self.convert_ssa_value(*value_id, dfg).to_register_or_memory() + }); let output_registers = vecmap(result_ids, |value_id| { - self.allocate_external_call_result(*value_id, dfg) + self.allocate_external_call_result(*value_id, dfg).to_register_or_memory() }); self.brillig_context.foreign_call_instruction( func_name.to_owned(), @@ -388,7 +419,7 @@ impl<'block> BrilligBlock<'block> { // or an array in the case of an array. 
if let Type::Numeric(_) = dfg.type_of_value(param_id) { let len_variable = self.convert_ssa_value(arguments[0], dfg); - let len_register_index = extract_register(len_variable); + let len_register_index = len_variable.extract_register(); self.brillig_context.mov_instruction(result_register, len_register_index); } else { self.convert_ssa_array_len(arguments[0], result_register, dfg); @@ -416,29 +447,29 @@ impl<'block> BrilligBlock<'block> { let results = dfg.instruction_results(instruction_id); - let target_len_variable = self.variables.define_variable( + let target_len = self.variables.define_register_variable( self.function_context, self.brillig_context, results[0], dfg, ); - let target_len = extract_register(target_len_variable); - - let target_slice = self.variables.define_variable( - self.function_context, - self.brillig_context, - results[1], - dfg, - ); - let heap_vec = self.brillig_context.extract_heap_vector(target_slice); + let target_vector = self + .variables + .define_variable( + self.function_context, + self.brillig_context, + results[1], + dfg, + ) + .extract_vector(); // Update the user-facing slice length self.brillig_context.mov_instruction(target_len, limb_count); self.brillig_context.radix_instruction( source, - heap_vec, + target_vector, radix, limb_count, matches!(endianness, Endian::Big), @@ -456,24 +487,29 @@ impl<'block> BrilligBlock<'block> { results[0], dfg, ); - let target_len = extract_register(target_len_variable); + let target_len = target_len_variable.extract_register(); - let target_slice = self.variables.define_variable( + let target_vector = match self.variables.define_variable( self.function_context, self.brillig_context, results[1], dfg, - ); + ) { + BrilligVariable::BrilligArray(array) => { + self.brillig_context.array_to_vector(&array) + } + BrilligVariable::BrilligVector(vector) => vector, + BrilligVariable::Simple(..) => unreachable!("ICE: ToBits on non-array"), + }; let radix = self.brillig_context.make_constant(2_usize.into()); - let heap_vec = self.brillig_context.extract_heap_vector(target_slice); // Update the user-facing slice length self.brillig_context.mov_instruction(target_len, limb_count); self.brillig_context.radix_instruction( source, - heap_vec, + target_vector, radix, limb_count, matches!(endianness, Endian::Big), @@ -485,7 +521,7 @@ impl<'block> BrilligBlock<'block> { unreachable!("unsupported function call type {:?}", dfg[*func]) } }, - Instruction::Truncate { value, .. } => { + Instruction::Truncate { value, bit_size, .. 
} => { let result_ids = dfg.instruction_results(instruction_id); let destination_register = self.variables.define_register_variable( self.function_context, @@ -494,9 +530,13 @@ impl<'block> BrilligBlock<'block> { dfg, ); let source_register = self.convert_ssa_register_value(*value, dfg); - self.brillig_context.truncate_instruction(destination_register, source_register); + self.brillig_context.truncate_instruction( + destination_register, + source_register, + *bit_size, + ); } - Instruction::Cast(value, target_type) => { + Instruction::Cast(value, _) => { let result_ids = dfg.instruction_results(instruction_id); let destination_register = self.variables.define_register_variable( self.function_context, @@ -505,12 +545,7 @@ impl<'block> BrilligBlock<'block> { dfg, ); let source_register = self.convert_ssa_register_value(*value, dfg); - self.convert_cast( - destination_register, - source_register, - target_type, - &dfg.type_of_value(*value), - ); + self.convert_cast(destination_register, source_register); } Instruction::ArrayGet { array, index } => { let result_ids = dfg.instruction_results(instruction_id); @@ -523,8 +558,8 @@ impl<'block> BrilligBlock<'block> { let array_variable = self.convert_ssa_value(*array, dfg); let array_pointer = match array_variable { - RegisterOrMemory::HeapArray(HeapArray { pointer, .. }) => pointer, - RegisterOrMemory::HeapVector(HeapVector { pointer, .. }) => pointer, + BrilligVariable::BrilligArray(BrilligArray { pointer, .. }) => pointer, + BrilligVariable::BrilligVector(BrilligVector { pointer, .. }) => pointer, _ => unreachable!("ICE: array get on non-array"), }; @@ -574,6 +609,14 @@ impl<'block> BrilligBlock<'block> { self.brillig_context.deallocate_register(condition); self.brillig_context.deallocate_register(right); } + Instruction::IncrementRc { value } => { + let rc_register = match self.convert_ssa_value(*value, dfg) { + BrilligVariable::BrilligArray(BrilligArray { rc, .. }) + | BrilligVariable::BrilligVector(BrilligVector { rc, .. }) => rc, + _ => unreachable!("ICE: increment rc on non-array"), + }; + self.brillig_context.usize_op_in_place(rc_register, BinaryIntOp::Add, 1); + } _ => todo!("ICE: Instruction not supported {instruction:?}"), }; @@ -598,10 +641,7 @@ impl<'block> BrilligBlock<'block> { // Convert the arguments to registers casting those to the types of the receiving function let argument_registers: Vec = arguments .iter() - .flat_map(|argument_id| { - let variable_to_pass = self.convert_ssa_value(*argument_id, dfg); - extract_registers(variable_to_pass) - }) + .flat_map(|argument_id| self.convert_ssa_value(*argument_id, dfg).extract_registers()) .collect(); let result_ids = dfg.instruction_results(instruction_id); @@ -637,7 +677,7 @@ impl<'block> BrilligBlock<'block> { // Collect the registers that should have been returned let returned_registers: Vec = variables_assigned_to .iter() - .flat_map(|returned_variable| extract_registers(*returned_variable)) + .flat_map(|returned_variable| returned_variable.extract_registers()) .collect(); assert!( @@ -654,17 +694,13 @@ impl<'block> BrilligBlock<'block> { &mut self, array_pointer: RegisterIndex, index_register: RegisterIndex, - destination_variable: RegisterOrMemory, + destination_variable: BrilligVariable, ) { match destination_variable { - RegisterOrMemory::RegisterIndex(destination_register) => { + BrilligVariable::Simple(destination_register) => { self.brillig_context.array_get(array_pointer, index_register, destination_register); } - RegisterOrMemory::HeapArray(HeapArray { pointer, .. 
}) => { - self.brillig_context.array_get(array_pointer, index_register, pointer); - } - RegisterOrMemory::HeapVector(..) => { - // Vectors are stored as references inside arrays to be able to match SSA indexes + BrilligVariable::BrilligArray(..) | BrilligVariable::BrilligVector(..) => { let reference = self.brillig_context.allocate_register(); self.brillig_context.array_get(array_pointer, index_register, reference); self.brillig_context.load_variable_instruction(destination_variable, reference); @@ -677,25 +713,30 @@ impl<'block> BrilligBlock<'block> { /// With a specific value changed. fn convert_ssa_array_set( &mut self, - source_variable: RegisterOrMemory, - destination_variable: RegisterOrMemory, + source_variable: BrilligVariable, + destination_variable: BrilligVariable, index_register: RegisterIndex, - value_variable: RegisterOrMemory, + value_variable: BrilligVariable, ) { let destination_pointer = match destination_variable { - RegisterOrMemory::HeapArray(HeapArray { pointer, .. }) => pointer, - RegisterOrMemory::HeapVector(HeapVector { pointer, .. }) => pointer, + BrilligVariable::BrilligArray(BrilligArray { pointer, .. }) => pointer, + BrilligVariable::BrilligVector(BrilligVector { pointer, .. }) => pointer, _ => unreachable!("ICE: array set returns non-array"), }; - // First issue a array copy to the destination + let reference_count = match source_variable { + BrilligVariable::BrilligArray(BrilligArray { rc, .. }) + | BrilligVariable::BrilligVector(BrilligVector { rc, .. }) => rc, + _ => unreachable!("ICE: array set on non-array"), + }; + let (source_pointer, source_size_as_register) = match source_variable { - RegisterOrMemory::HeapArray(HeapArray { size, pointer }) => { + BrilligVariable::BrilligArray(BrilligArray { size, pointer, rc: _ }) => { let source_size_register = self.brillig_context.allocate_register(); self.brillig_context.const_instruction(source_size_register, size.into()); (pointer, source_size_register) } - RegisterOrMemory::HeapVector(HeapVector { size, pointer }) => { + BrilligVariable::BrilligVector(BrilligVector { size, pointer, rc: _ }) => { let source_size_register = self.brillig_context.allocate_register(); self.brillig_context.mov_instruction(source_size_register, size); (pointer, source_size_register) @@ -703,51 +744,96 @@ impl<'block> BrilligBlock<'block> { _ => unreachable!("ICE: array set on non-array"), }; - self.brillig_context - .allocate_array_instruction(destination_pointer, source_size_as_register); + let one = self.brillig_context.make_constant(1_usize.into()); + let condition = self.brillig_context.allocate_register(); - self.brillig_context.copy_array_instruction( - source_pointer, - destination_pointer, - source_size_as_register, + self.brillig_context.binary_instruction( + reference_count, + one, + condition, + BrilligBinaryOp::Field { op: BinaryFieldOp::Equals }, ); - if let RegisterOrMemory::HeapVector(HeapVector { size: target_size, .. 
}) = - destination_variable - { - self.brillig_context.mov_instruction(target_size, source_size_as_register); + self.brillig_context.branch_instruction(condition, |ctx, cond| { + if cond { + // Reference count is 1, so we can mutate the array directly + ctx.mov_instruction(destination_pointer, source_pointer); + } else { + // First issue an array copy to the destination + ctx.allocate_array_instruction(destination_pointer, source_size_as_register); + + ctx.copy_array_instruction( + source_pointer, + destination_pointer, + source_size_as_register, + ); + } + }); + + match destination_variable { + BrilligVariable::BrilligArray(BrilligArray { rc: target_rc, .. }) => { + self.brillig_context.const_instruction(target_rc, 1_usize.into()); + } + BrilligVariable::BrilligVector(BrilligVector { + size: target_size, + rc: target_rc, + .. + }) => { + self.brillig_context.mov_instruction(target_size, source_size_as_register); + self.brillig_context.const_instruction(target_rc, 1_usize.into()); + } + _ => unreachable!("ICE: array set on non-array"), } // Then set the value in the newly created array self.store_variable_in_array(destination_pointer, index_register, value_variable); self.brillig_context.deallocate_register(source_size_as_register); + self.brillig_context.deallocate_register(one); + self.brillig_context.deallocate_register(condition); } - pub(crate) fn store_variable_in_array( - &mut self, + pub(crate) fn store_variable_in_array_with_ctx( + ctx: &mut BrilligContext, destination_pointer: RegisterIndex, index_register: RegisterIndex, - value_variable: RegisterOrMemory, + value_variable: BrilligVariable, ) { match value_variable { - RegisterOrMemory::RegisterIndex(value_register) => { - self.brillig_context.array_set(destination_pointer, index_register, value_register); + BrilligVariable::Simple(value_register) => { + ctx.array_set(destination_pointer, index_register, value_register); } - RegisterOrMemory::HeapArray(HeapArray { pointer, ..
}) => { - self.brillig_context.array_set(destination_pointer, index_register, pointer); + BrilligVariable::BrilligArray(_) => { + let reference: RegisterIndex = ctx.allocate_register(); + ctx.allocate_array_reference_instruction(reference); + ctx.store_variable_instruction(reference, value_variable); + ctx.array_set(destination_pointer, index_register, reference); + ctx.deallocate_register(reference); } - RegisterOrMemory::HeapVector(_) => { - // Vectors are stored as references inside arrays to be able to match SSA indexes - let reference = self.brillig_context.allocate_register(); - self.brillig_context.allocate_variable_instruction(reference); - self.brillig_context.store_variable_instruction(reference, value_variable); - self.brillig_context.array_set(destination_pointer, index_register, reference); - self.brillig_context.deallocate_register(reference); + BrilligVariable::BrilligVector(_) => { + let reference = ctx.allocate_register(); + ctx.allocate_vector_reference_instruction(reference); + ctx.store_variable_instruction(reference, value_variable); + ctx.array_set(destination_pointer, index_register, reference); + ctx.deallocate_register(reference); } } } + pub(crate) fn store_variable_in_array( + &mut self, + destination_pointer: RegisterIndex, + index_register: RegisterIndex, + value_variable: BrilligVariable, + ) { + Self::store_variable_in_array_with_ctx( + self.brillig_context, + destination_pointer, + index_register, + value_variable, + ); + } + /// Convert the SSA slice operations to brillig slice operations fn convert_ssa_slice_intrinsic_call( &mut self, @@ -770,7 +856,7 @@ impl<'block> BrilligBlock<'block> { results[0], dfg, ) { - RegisterOrMemory::RegisterIndex(register_index) => register_index, + BrilligVariable::Simple(register_index) => register_index, _ => unreachable!("ICE: first value of a slice must be a register index"), }; @@ -781,7 +867,7 @@ impl<'block> BrilligBlock<'block> { dfg, ); - let target_vector = self.brillig_context.extract_heap_vector(target_variable); + let target_vector = target_variable.extract_vector(); let item_values = vecmap(&arguments[2..element_size + 2], |arg| { self.convert_ssa_value(*arg, dfg) }); @@ -797,7 +883,7 @@ impl<'block> BrilligBlock<'block> { results[0], dfg, ) { - RegisterOrMemory::RegisterIndex(register_index) => register_index, + BrilligVariable::Simple(register_index) => register_index, _ => unreachable!("ICE: first value of a slice must be a register index"), }; @@ -807,7 +893,7 @@ impl<'block> BrilligBlock<'block> { results[1], dfg, ); - let target_vector = self.brillig_context.extract_heap_vector(target_variable); + let target_vector = target_variable.extract_vector(); let item_values = vecmap(&arguments[2..element_size + 2], |arg| { self.convert_ssa_value(*arg, dfg) }); @@ -823,7 +909,7 @@ impl<'block> BrilligBlock<'block> { results[0], dfg, ) { - RegisterOrMemory::RegisterIndex(register_index) => register_index, + BrilligVariable::Simple(register_index) => register_index, _ => unreachable!("ICE: first value of a slice must be a register index"), }; @@ -834,7 +920,7 @@ impl<'block> BrilligBlock<'block> { dfg, ); - let target_vector = self.brillig_context.extract_heap_vector(target_variable); + let target_vector = target_variable.extract_vector(); let pop_variables = vecmap(&results[2..element_size + 2], |result| { self.variables.define_variable( @@ -856,7 +942,7 @@ impl<'block> BrilligBlock<'block> { results[element_size], dfg, ) { - RegisterOrMemory::RegisterIndex(register_index) => register_index, + 
BrilligVariable::Simple(register_index) => register_index, _ => unreachable!("ICE: first value of a slice must be a register index"), }; @@ -875,7 +961,7 @@ impl<'block> BrilligBlock<'block> { results[element_size + 1], dfg, ); - let target_vector = self.brillig_context.extract_heap_vector(target_variable); + let target_vector = target_variable.extract_vector(); self.update_slice_length(target_len, arguments[0], dfg, BinaryIntOp::Sub); @@ -888,7 +974,7 @@ impl<'block> BrilligBlock<'block> { results[0], dfg, ) { - RegisterOrMemory::RegisterIndex(register_index) => register_index, + BrilligVariable::Simple(register_index) => register_index, _ => unreachable!("ICE: first value of a slice must be a register index"), }; @@ -900,7 +986,7 @@ impl<'block> BrilligBlock<'block> { dfg, ); - let target_vector = self.brillig_context.extract_heap_vector(target_variable); + let target_vector = target_variable.extract_vector(); // Remove if indexing in insert is changed to flattened indexing // https://github.com/noir-lang/noir/issues/1889#issuecomment-1668048587 @@ -931,7 +1017,7 @@ impl<'block> BrilligBlock<'block> { results[0], dfg, ) { - RegisterOrMemory::RegisterIndex(register_index) => register_index, + BrilligVariable::Simple(register_index) => register_index, _ => unreachable!("ICE: first value of a slice must be a register index"), }; @@ -943,7 +1029,7 @@ impl<'block> BrilligBlock<'block> { target_id, dfg, ); - let target_vector = self.brillig_context.extract_heap_vector(target_variable); + let target_vector = target_variable.extract_vector(); // Remove if indexing in remove is changed to flattened indexing // https://github.com/noir-lang/noir/issues/1889#issuecomment-1668048587 @@ -998,50 +1084,18 @@ impl<'block> BrilligBlock<'block> { binary_op: BinaryIntOp, ) { let source_len_variable = self.convert_ssa_value(source_value, dfg); - let source_len = extract_register(source_len_variable); + let source_len = source_len_variable.extract_register(); self.brillig_context.usize_op(source_len, target_len, binary_op, 1); } /// Converts an SSA cast to a sequence of Brillig opcodes. /// Casting is only necessary when shrinking the bit size of a numeric value. - fn convert_cast( - &mut self, - destination: RegisterIndex, - source: RegisterIndex, - target_type: &Type, - source_type: &Type, - ) { - fn numeric_to_bit_size(typ: &NumericType) -> u32 { - match typ { - NumericType::Signed { bit_size } | NumericType::Unsigned { bit_size } => *bit_size, - NumericType::NativeField => FieldElement::max_num_bits(), - } - } - // Casting is only valid for numeric types - // This should be checked by the frontend, so we panic if this is the case - let (source_numeric_type, target_numeric_type) = match (source_type, target_type) { - (Type::Numeric(source_numeric_type), Type::Numeric(target_numeric_type)) => { - (source_numeric_type, target_numeric_type) - } - _ => unimplemented!("The cast operation is only valid for integers."), - }; - let source_bit_size = numeric_to_bit_size(source_numeric_type); - let target_bit_size = numeric_to_bit_size(target_numeric_type); - // Casting from a larger bit size to a smaller bit size (narrowing cast) - // requires a cast instruction. - // If its a widening cast, ie casting from a smaller bit size to a larger bit size - // we simply put a mov instruction as a no-op - // - // Field elements by construction always have the largest bit size - // This means that casting to a Field element, will always be a widening cast - // and therefore a no-op. 
Conversely, casting from a Field element - // will always be a narrowing cast and therefore a cast instruction - if source_bit_size > target_bit_size { - self.brillig_context.cast_instruction(destination, source, target_bit_size); - } else { - self.brillig_context.mov_instruction(destination, source); - } + fn convert_cast(&mut self, destination: RegisterIndex, source: RegisterIndex) { + // We assume that `source` is a valid `target_type` as it's expected that a truncate instruction was emitted + // to ensure this is the case. + + self.brillig_context.mov_instruction(destination, source); } /// Converts the Binary instruction into a sequence of Brillig opcodes. @@ -1064,7 +1118,7 @@ impl<'block> BrilligBlock<'block> { } /// Converts an SSA `ValueId` into a `RegisterOrMemory`. Initializes if necessary. - fn convert_ssa_value(&mut self, value_id: ValueId, dfg: &DataFlowGraph) -> RegisterOrMemory { + fn convert_ssa_value(&mut self, value_id: ValueId, dfg: &DataFlowGraph) -> BrilligVariable { let value_id = dfg.resolve(value_id); let value = &dfg[value_id]; @@ -1082,7 +1136,7 @@ impl<'block> BrilligBlock<'block> { } else { let new_variable = self.variables.allocate_constant(self.brillig_context, value_id, dfg); - let register_index = extract_register(new_variable); + let register_index = new_variable.extract_register(); self.brillig_context.const_instruction(register_index, (*constant).into()); new_variable @@ -1097,19 +1151,21 @@ impl<'block> BrilligBlock<'block> { // Initialize the variable let pointer = match new_variable { - RegisterOrMemory::HeapArray(heap_array) => { + BrilligVariable::BrilligArray(brillig_array) => { + self.brillig_context + .allocate_fixed_length_array(brillig_array.pointer, array.len()); self.brillig_context - .allocate_fixed_length_array(heap_array.pointer, array.len()); + .const_instruction(brillig_array.rc, 1_usize.into()); - heap_array.pointer + brillig_array.pointer } - RegisterOrMemory::HeapVector(heap_vector) => { + BrilligVariable::BrilligVector(vector) => { + self.brillig_context.const_instruction(vector.size, array.len().into()); self.brillig_context - .const_instruction(heap_vector.size, array.len().into()); - self.brillig_context - .allocate_array_instruction(heap_vector.pointer, heap_vector.size); + .allocate_array_instruction(vector.pointer, vector.size); + self.brillig_context.const_instruction(vector.rc, 1_usize.into()); - heap_vector.pointer + vector.pointer } _ => unreachable!( "ICE: Cannot initialize array value created as {new_variable:?}" @@ -1138,7 +1194,7 @@ impl<'block> BrilligBlock<'block> { new_variable } } - _ => { + Value::Function(_) | Value::Intrinsic(_) | Value::ForeignFunction(_) => { todo!("ICE: Cannot convert value {value:?}") } } @@ -1151,14 +1207,14 @@ impl<'block> BrilligBlock<'block> { dfg: &DataFlowGraph, ) -> RegisterIndex { let variable = self.convert_ssa_value(value_id, dfg); - extract_register(variable) + variable.extract_register() } fn allocate_external_call_result( &mut self, result: ValueId, dfg: &DataFlowGraph, - ) -> RegisterOrMemory { + ) -> BrilligVariable { let typ = dfg[result].get_type(); match typ { Type::Numeric(_) => self.variables.define_variable( @@ -1175,8 +1231,10 @@ impl<'block> BrilligBlock<'block> { result, dfg, ); - let array = extract_heap_array(variable); + let array = variable.extract_array(); self.brillig_context.allocate_fixed_length_array(array.pointer, array.size); + self.brillig_context.const_instruction(array.rc, 1_usize.into()); + variable } Type::Slice(_) => { @@ -1186,12 +1244,14 @@ 
impl<'block> BrilligBlock<'block> { result, dfg, ); - let vector = self.brillig_context.extract_heap_vector(variable); + let vector = variable.extract_vector(); // Set the pointer to the current stack frame // The stack pointer will then be updated by the caller of this method // once the external call is resolved and the array size is known self.brillig_context.set_array_pointer(vector.pointer); + self.brillig_context.const_instruction(vector.rc, 1_usize.into()); + variable } _ => { @@ -1201,7 +1261,7 @@ impl<'block> BrilligBlock<'block> { } /// Gets the "user-facing" length of an array. - /// An array of structs with two fields would be stored as an 2 * array.len() heap array/heap vector. + /// An array of structs with two fields would be stored as an 2 * array.len() array/vector. /// So we divide the length by the number of subitems in an item to get the user-facing length. fn convert_ssa_array_len( &mut self, @@ -1213,11 +1273,11 @@ impl<'block> BrilligBlock<'block> { let element_size = dfg.type_of_value(array_id).element_size(); match array_variable { - RegisterOrMemory::HeapArray(HeapArray { size, .. }) => { + BrilligVariable::BrilligArray(BrilligArray { size, .. }) => { self.brillig_context .const_instruction(result_register, (size / element_size).into()); } - RegisterOrMemory::HeapVector(HeapVector { size, .. }) => { + BrilligVariable::BrilligVector(BrilligVector { size, .. }) => { self.brillig_context.usize_op( size, result_register, @@ -1240,7 +1300,7 @@ pub(crate) fn type_of_binary_operation(lhs_type: &Type, rhs_type: &Type) -> Type (_, Type::Function) | (Type::Function, _) => { unreachable!("Functions are invalid in binary operations") } - (_, Type::Reference) | (Type::Reference, _) => { + (_, Type::Reference(_)) | (Type::Reference(_), _) => { unreachable!("References are invalid in binary operations") } (_, Type::Array(..)) | (Type::Array(..), _) => { diff --git a/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_block_variables.rs b/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_block_variables.rs index eb7bab8c971..f2e698c0aa9 100644 --- a/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_block_variables.rs +++ b/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_block_variables.rs @@ -1,8 +1,11 @@ -use acvm::brillig_vm::brillig::{HeapArray, HeapVector, RegisterIndex, RegisterOrMemory}; +use acvm::brillig_vm::brillig::RegisterIndex; use fxhash::{FxHashMap as HashMap, FxHashSet as HashSet}; use crate::{ - brillig::brillig_ir::{extract_register, BrilligContext}, + brillig::brillig_ir::{ + brillig_variable::{BrilligArray, BrilligVariable, BrilligVector}, + BrilligContext, + }, ssa::ir::{ basic_block::BasicBlockId, dfg::DataFlowGraph, @@ -16,7 +19,7 @@ use super::brillig_fn::FunctionContext; #[derive(Debug, Default)] pub(crate) struct BlockVariables { available_variables: HashSet, - available_constants: HashMap, + available_constants: HashMap, } impl BlockVariables { @@ -32,7 +35,7 @@ impl BlockVariables { pub(crate) fn get_available_variables( &self, function_context: &FunctionContext, - ) -> Vec { + ) -> Vec { self.available_variables .iter() .map(|value_id| { @@ -52,7 +55,7 @@ impl BlockVariables { brillig_context: &mut BrilligContext, value_id: ValueId, dfg: &DataFlowGraph, - ) -> RegisterOrMemory { + ) -> BrilligVariable { let value_id = dfg.resolve(value_id); let variable = allocate_value(value_id, brillig_context, dfg); @@ -74,7 +77,7 @@ impl BlockVariables { dfg: &DataFlowGraph, ) -> RegisterIndex { let variable = 
self.define_variable(function_context, brillig_context, value, dfg); - extract_register(variable) + variable.extract_register() } /// Removes a variable so it's not used anymore within this block. @@ -88,7 +91,7 @@ impl BlockVariables { function_context: &FunctionContext, value_id: ValueId, dfg: &DataFlowGraph, - ) -> RegisterOrMemory { + ) -> BrilligVariable { let value_id = dfg.resolve(value_id); if let Some(constant) = self.available_constants.get(&value_id) { *constant @@ -112,7 +115,7 @@ impl BlockVariables { brillig_context: &mut BrilligContext, value_id: ValueId, dfg: &DataFlowGraph, - ) -> RegisterOrMemory { + ) -> BrilligVariable { let value_id = dfg.resolve(value_id); let constant = allocate_value(value_id, brillig_context, dfg); self.available_constants.insert(value_id, constant); @@ -124,7 +127,7 @@ impl BlockVariables { &mut self, value_id: ValueId, dfg: &DataFlowGraph, - ) -> Option { + ) -> Option { let value_id = dfg.resolve(value_id); self.available_constants.get(&value_id).cloned() } @@ -141,7 +144,7 @@ impl BlockVariables { block_id: BasicBlockId, value_id: ValueId, dfg: &DataFlowGraph, - ) -> RegisterOrMemory { + ) -> BrilligVariable { let value_id = dfg.resolve(value_id); assert!( function_context @@ -166,25 +169,34 @@ pub(crate) fn allocate_value( value_id: ValueId, brillig_context: &mut BrilligContext, dfg: &DataFlowGraph, -) -> RegisterOrMemory { +) -> BrilligVariable { let typ = dfg.type_of_value(value_id); match typ { - Type::Numeric(_) | Type::Reference => { + Type::Numeric(_) | Type::Reference(_) => { let register = brillig_context.allocate_register(); - RegisterOrMemory::RegisterIndex(register) + BrilligVariable::Simple(register) } Type::Array(item_typ, elem_count) => { let pointer_register = brillig_context.allocate_register(); + let rc_register = brillig_context.allocate_register(); let size = compute_array_length(&item_typ, elem_count); - RegisterOrMemory::HeapArray(HeapArray { pointer: pointer_register, size }) + + BrilligVariable::BrilligArray(BrilligArray { + pointer: pointer_register, + size, + rc: rc_register, + }) } Type::Slice(_) => { let pointer_register = brillig_context.allocate_register(); let size_register = brillig_context.allocate_register(); - RegisterOrMemory::HeapVector(HeapVector { + let rc_register = brillig_context.allocate_register(); + + BrilligVariable::BrilligVector(BrilligVector { pointer: pointer_register, size: size_register, + rc: rc_register, }) } Type::Function => { diff --git a/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_fn.rs b/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_fn.rs index ec72ceb2909..026def4ef11 100644 --- a/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_fn.rs +++ b/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_fn.rs @@ -1,9 +1,9 @@ -use acvm::brillig_vm::brillig::RegisterOrMemory; use iter_extended::vecmap; use crate::{ brillig::brillig_ir::{ artifact::{BrilligParameter, Label}, + brillig_variable::BrilligVariable, BrilligContext, }, ssa::ir::{ @@ -21,7 +21,7 @@ use super::{brillig_block_variables::allocate_value, variable_liveness::Variable pub(crate) struct FunctionContext { pub(crate) function_id: FunctionId, /// Map from SSA values its allocation. Since values can be only defined once in SSA form, we insert them here on when we allocate them at their definition. - pub(crate) ssa_value_allocations: HashMap, + pub(crate) ssa_value_allocations: HashMap, /// Block parameters are pre allocated at the function level. 
pub(crate) block_parameters: HashMap>, /// The block ids of the function in reverse post order. @@ -72,7 +72,7 @@ impl FunctionContext { fn ssa_type_to_parameter(typ: &Type) -> BrilligParameter { match typ { - Type::Numeric(_) | Type::Reference => BrilligParameter::Simple, + Type::Numeric(_) | Type::Reference(_) => BrilligParameter::Simple, Type::Array(item_type, size) => BrilligParameter::Array( vecmap(item_type.iter(), |item_typ| { FunctionContext::ssa_type_to_parameter(item_typ) diff --git a/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_slice_ops.rs b/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_slice_ops.rs index 211d670e7d8..6402e6f9d97 100644 --- a/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_slice_ops.rs +++ b/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_slice_ops.rs @@ -1,13 +1,15 @@ -use acvm::brillig_vm::brillig::{BinaryIntOp, HeapVector, RegisterIndex, RegisterOrMemory}; +use acvm::brillig_vm::brillig::{BinaryIntOp, RegisterIndex}; + +use crate::brillig::brillig_ir::brillig_variable::{BrilligVariable, BrilligVector}; use super::brillig_block::BrilligBlock; impl<'block> BrilligBlock<'block> { pub(crate) fn slice_push_back_operation( &mut self, - target_vector: HeapVector, - source_vector: HeapVector, - variables_to_insert: &[RegisterOrMemory], + target_vector: BrilligVector, + source_vector: BrilligVector, + variables_to_insert: &[BrilligVariable], ) { // First we need to allocate the target vector incrementing the size by variables_to_insert.len() self.brillig_context.usize_op( @@ -17,6 +19,8 @@ impl<'block> BrilligBlock<'block> { variables_to_insert.len(), ); self.brillig_context.allocate_array_instruction(target_vector.pointer, target_vector.size); + // We initialize the RC of the target vector to 1 + self.brillig_context.const_instruction(target_vector.rc, 1_usize.into()); // Now we copy the source vector into the target vector self.brillig_context.copy_array_instruction( @@ -40,9 +44,9 @@ impl<'block> BrilligBlock<'block> { pub(crate) fn slice_push_front_operation( &mut self, - target_vector: HeapVector, - source_vector: HeapVector, - variables_to_insert: &[RegisterOrMemory], + target_vector: BrilligVector, + source_vector: BrilligVector, + variables_to_insert: &[BrilligVariable], ) { // First we need to allocate the target vector incrementing the size by variables_to_insert.len() self.brillig_context.usize_op( @@ -52,6 +56,8 @@ impl<'block> BrilligBlock<'block> { variables_to_insert.len(), ); self.brillig_context.allocate_array_instruction(target_vector.pointer, target_vector.size); + // We initialize the RC of the target vector to 1 + self.brillig_context.const_instruction(target_vector.rc, 1_usize.into()); // Now we offset the target pointer by variables_to_insert.len() let destination_copy_pointer = self.brillig_context.allocate_register(); @@ -81,9 +87,9 @@ impl<'block> BrilligBlock<'block> { pub(crate) fn slice_pop_front_operation( &mut self, - target_vector: HeapVector, - source_vector: HeapVector, - removed_items: &[RegisterOrMemory], + target_vector: BrilligVector, + source_vector: BrilligVector, + removed_items: &[BrilligVariable], ) { // First we need to allocate the target vector decrementing the size by removed_items.len() self.brillig_context.usize_op( @@ -93,6 +99,8 @@ impl<'block> BrilligBlock<'block> { removed_items.len(), ); self.brillig_context.allocate_array_instruction(target_vector.pointer, target_vector.size); + // We initialize the RC of the target vector to 1 + 
self.brillig_context.const_instruction(target_vector.rc, 1_usize.into()); // Now we offset the source pointer by removed_items.len() let source_copy_pointer = self.brillig_context.allocate_register(); @@ -121,9 +129,9 @@ impl<'block> BrilligBlock<'block> { pub(crate) fn slice_pop_back_operation( &mut self, - target_vector: HeapVector, - source_vector: HeapVector, - removed_items: &[RegisterOrMemory], + target_vector: BrilligVector, + source_vector: BrilligVector, + removed_items: &[BrilligVariable], ) { // First we need to allocate the target vector decrementing the size by removed_items.len() self.brillig_context.usize_op( @@ -133,6 +141,8 @@ impl<'block> BrilligBlock<'block> { removed_items.len(), ); self.brillig_context.allocate_array_instruction(target_vector.pointer, target_vector.size); + // We initialize the RC of the target vector to 1 + self.brillig_context.const_instruction(target_vector.rc, 1_usize.into()); // Now we copy all elements except the last items into the target vector self.brillig_context.copy_array_instruction( @@ -156,10 +166,10 @@ impl<'block> BrilligBlock<'block> { pub(crate) fn slice_insert_operation( &mut self, - target_vector: HeapVector, - source_vector: HeapVector, + target_vector: BrilligVector, + source_vector: BrilligVector, index: RegisterIndex, - items: &[RegisterOrMemory], + items: &[BrilligVariable], ) { // First we need to allocate the target vector incrementing the size by items.len() self.brillig_context.usize_op( @@ -169,6 +179,8 @@ impl<'block> BrilligBlock<'block> { items.len(), ); self.brillig_context.allocate_array_instruction(target_vector.pointer, target_vector.size); + // We initialize the RC of the target vector to 1 + self.brillig_context.const_instruction(target_vector.rc, 1_usize.into()); // Copy the elements to the left of the index self.brillig_context.copy_array_instruction( @@ -226,10 +238,10 @@ impl<'block> BrilligBlock<'block> { pub(crate) fn slice_remove_operation( &mut self, - target_vector: HeapVector, - source_vector: HeapVector, + target_vector: BrilligVector, + source_vector: BrilligVector, index: RegisterIndex, - removed_items: &[RegisterOrMemory], + removed_items: &[BrilligVariable], ) { // First we need to allocate the target vector decrementing the size by removed_items.len() self.brillig_context.usize_op( @@ -239,6 +251,8 @@ impl<'block> BrilligBlock<'block> { removed_items.len(), ); self.brillig_context.allocate_array_instruction(target_vector.pointer, target_vector.size); + // We initialize the RC of the target vector to 1 + self.brillig_context.const_instruction(target_vector.rc, 1_usize.into()); // Copy the elements to the left of the index self.brillig_context.copy_array_instruction( @@ -297,11 +311,11 @@ impl<'block> BrilligBlock<'block> { pub(crate) fn convert_array_or_vector_to_vector( &mut self, - source_variable: RegisterOrMemory, - ) -> HeapVector { + source_variable: BrilligVariable, + ) -> BrilligVector { match source_variable { - RegisterOrMemory::HeapVector(source_vector) => source_vector, - RegisterOrMemory::HeapArray(source_array) => { + BrilligVariable::BrilligVector(source_vector) => source_vector, + BrilligVariable::BrilligArray(source_array) => { self.brillig_context.array_to_vector(&source_array) } _ => unreachable!("ICE: unsupported slice push back source {:?}", source_variable), @@ -313,13 +327,16 @@ impl<'block> BrilligBlock<'block> { mod tests { use std::vec; - use acvm::acir::brillig::{HeapVector, Value}; - use acvm::brillig_vm::brillig::{RegisterIndex, RegisterOrMemory}; + use 
acvm::acir::brillig::Value; + use acvm::brillig_vm::brillig::RegisterIndex; use crate::brillig::brillig_gen::brillig_block::BrilligBlock; use crate::brillig::brillig_gen::brillig_block_variables::BlockVariables; use crate::brillig::brillig_gen::brillig_fn::FunctionContext; use crate::brillig::brillig_ir::artifact::BrilligParameter; + use crate::brillig::brillig_ir::brillig_variable::{ + BrilligArray, BrilligVariable, BrilligVector, + }; use crate::brillig::brillig_ir::tests::{ create_and_run_vm, create_context, create_entry_point_bytecode, }; @@ -373,33 +390,44 @@ mod tests { let (_, mut function_context, mut context) = create_test_environment(); // Allocate the parameters - let array_pointer = context.allocate_register(); + let array_variable = BrilligArray { + pointer: context.allocate_register(), + size: array.len(), + rc: context.allocate_register(), + }; let item_to_insert = context.allocate_register(); // Cast the source array to a vector - let array_size = context.make_constant(array.len().into()); + let source_vector = context.array_to_vector(&array_variable); // Allocate the results - let copied_array_pointer = context.allocate_register(); - let copied_array_size = context.allocate_register(); + let target_vector = BrilligVector { + pointer: context.allocate_register(), + size: context.allocate_register(), + rc: context.allocate_register(), + }; let mut block = create_brillig_block(&mut function_context, &mut context); if push_back { block.slice_push_back_operation( - HeapVector { pointer: copied_array_pointer, size: copied_array_size }, - HeapVector { pointer: array_pointer, size: array_size }, - &[RegisterOrMemory::RegisterIndex(item_to_insert)], + target_vector, + source_vector, + &[BrilligVariable::Simple(item_to_insert)], ); } else { block.slice_push_front_operation( - HeapVector { pointer: copied_array_pointer, size: copied_array_size }, - HeapVector { pointer: array_pointer, size: array_size }, - &[RegisterOrMemory::RegisterIndex(item_to_insert)], + target_vector, + source_vector, + &[BrilligVariable::Simple(item_to_insert)], ); } - context.return_instruction(&[copied_array_pointer, copied_array_size]); + context.return_instruction(&[ + target_vector.pointer, + target_vector.rc, + target_vector.size, + ]); let bytecode = create_entry_point_bytecode(context, arguments, returns).byte_code; let vm = create_and_run_vm( @@ -465,34 +493,45 @@ mod tests { let (_, mut function_context, mut context) = create_test_environment(); // Allocate the parameters - let array_pointer = context.allocate_register(); + let array_variable = BrilligArray { + pointer: context.allocate_register(), + size: array.len(), + rc: context.allocate_register(), + }; // Cast the source array to a vector - let array_size = context.make_constant(array.len().into()); + let source_vector = context.array_to_vector(&array_variable); // Allocate the results - let copied_array_pointer = context.allocate_register(); + let target_vector = BrilligVector { + pointer: context.allocate_register(), + size: context.allocate_register(), + rc: context.allocate_register(), + }; let removed_item = context.allocate_register(); - let copied_array_size = context.allocate_register(); - let mut block = create_brillig_block(&mut function_context, &mut context); if pop_back { block.slice_pop_back_operation( - HeapVector { pointer: copied_array_pointer, size: copied_array_size }, - HeapVector { pointer: array_pointer, size: array_size }, - &[RegisterOrMemory::RegisterIndex(removed_item)], + target_vector, + source_vector, + 
&[BrilligVariable::Simple(removed_item)], ); } else { block.slice_pop_front_operation( - HeapVector { pointer: copied_array_pointer, size: copied_array_size }, - HeapVector { pointer: array_pointer, size: array_size }, - &[RegisterOrMemory::RegisterIndex(removed_item)], + target_vector, + source_vector, + &[BrilligVariable::Simple(removed_item)], ); } - context.return_instruction(&[copied_array_pointer, copied_array_size, removed_item]); + context.return_instruction(&[ + target_vector.pointer, + target_vector.rc, + target_vector.size, + removed_item, + ]); let bytecode = create_entry_point_bytecode(context, arguments, returns).byte_code; let vm = create_and_run_vm(array.clone(), vec![Value::from(0_usize)], &bytecode); @@ -557,28 +596,38 @@ mod tests { let (_, mut function_context, mut context) = create_test_environment(); // Allocate the parameters - let array_pointer = context.allocate_register(); + let array_variable = BrilligArray { + pointer: context.allocate_register(), + size: array.len(), + rc: context.allocate_register(), + }; let item_to_insert = context.allocate_register(); let index_to_insert = context.allocate_register(); // Cast the source array to a vector - let array_size = context.make_constant(array.len().into()); + let source_vector = context.array_to_vector(&array_variable); // Allocate the results - let copied_array_pointer = context.allocate_register(); - - let copied_array_size = context.allocate_register(); + let target_vector = BrilligVector { + pointer: context.allocate_register(), + size: context.allocate_register(), + rc: context.allocate_register(), + }; let mut block = create_brillig_block(&mut function_context, &mut context); block.slice_insert_operation( - HeapVector { pointer: copied_array_pointer, size: copied_array_size }, - HeapVector { pointer: array_pointer, size: array_size }, + target_vector, + source_vector, index_to_insert, - &[RegisterOrMemory::RegisterIndex(item_to_insert)], + &[BrilligVariable::Simple(item_to_insert)], ); - context.return_instruction(&[copied_array_pointer, copied_array_size]); + context.return_instruction(&[ + target_vector.pointer, + target_vector.rc, + target_vector.size, + ]); let bytecode = create_entry_point_bytecode(context, arguments, returns).byte_code; let vm = create_and_run_vm( @@ -679,28 +728,39 @@ mod tests { let (_, mut function_context, mut context) = create_test_environment(); // Allocate the parameters - let array_pointer = context.allocate_register(); + let array_variable = BrilligArray { + pointer: context.allocate_register(), + size: array.len(), + rc: context.allocate_register(), + }; let index_to_insert = context.allocate_register(); // Cast the source array to a vector - let array_size = context.make_constant(array.len().into()); + let source_vector = context.array_to_vector(&array_variable); // Allocate the results - let copied_array_pointer = context.allocate_register(); + let target_vector = BrilligVector { + pointer: context.allocate_register(), + size: context.allocate_register(), + rc: context.allocate_register(), + }; let removed_item = context.allocate_register(); - let copied_array_size = context.allocate_register(); - let mut block = create_brillig_block(&mut function_context, &mut context); block.slice_remove_operation( - HeapVector { pointer: copied_array_pointer, size: copied_array_size }, - HeapVector { pointer: array_pointer, size: array_size }, + target_vector, + source_vector, index_to_insert, - &[RegisterOrMemory::RegisterIndex(removed_item)], + &[BrilligVariable::Simple(removed_item)], 
); - context.return_instruction(&[copied_array_pointer, copied_array_size, removed_item]); + context.return_instruction(&[ + target_vector.pointer, + target_vector.rc, + target_vector.size, + removed_item, + ]); let bytecode = create_entry_point_bytecode(context, arguments, returns).byte_code; let vm = create_and_run_vm(array.clone(), vec![Value::from(0_usize), index], &bytecode); diff --git a/compiler/noirc_evaluator/src/brillig/brillig_gen/variable_liveness.rs b/compiler/noirc_evaluator/src/brillig/brillig_gen/variable_liveness.rs index d57196288bf..05978c2c6ab 100644 --- a/compiler/noirc_evaluator/src/brillig/brillig_gen/variable_liveness.rs +++ b/compiler/noirc_evaluator/src/brillig/brillig_gen/variable_liveness.rs @@ -332,7 +332,7 @@ mod test { let v0 = builder.add_parameter(Type::field()); let v1 = builder.add_parameter(Type::field()); - let v3 = builder.insert_allocate(); + let v3 = builder.insert_allocate(Type::field()); let zero = builder.numeric_constant(0u128, Type::field()); builder.insert_store(v3, zero); @@ -439,7 +439,7 @@ mod test { let v0 = builder.add_parameter(Type::field()); let v1 = builder.add_parameter(Type::field()); - let v3 = builder.insert_allocate(); + let v3 = builder.insert_allocate(Type::field()); let zero = builder.numeric_constant(0u128, Type::field()); builder.insert_store(v3, zero); diff --git a/compiler/noirc_evaluator/src/brillig/brillig_ir.rs b/compiler/noirc_evaluator/src/brillig/brillig_ir.rs index 880ae95dcd7..3c4e77b09ec 100644 --- a/compiler/noirc_evaluator/src/brillig/brillig_ir.rs +++ b/compiler/noirc_evaluator/src/brillig/brillig_ir.rs @@ -5,6 +5,7 @@ //! ssa types and types in this module. //! A similar paradigm can be seen with the `acir_ir` module. pub(crate) mod artifact; +pub(crate) mod brillig_variable; pub(crate) mod debug_show; pub(crate) mod registers; @@ -14,12 +15,13 @@ use crate::ssa::ir::dfg::CallStack; use self::{ artifact::{BrilligArtifact, UnresolvedJumpLocation}, + brillig_variable::{BrilligArray, BrilligVariable, BrilligVector}, registers::BrilligRegistersContext, }; use acvm::{ acir::brillig::{ - BinaryFieldOp, BinaryIntOp, BlackBoxOp, HeapArray, HeapVector, Opcode as BrilligOpcode, - RegisterIndex, RegisterOrMemory, Value, + BinaryFieldOp, BinaryIntOp, BlackBoxOp, Opcode as BrilligOpcode, RegisterIndex, + RegisterOrMemory, Value, }, FieldElement, }; @@ -88,6 +90,8 @@ pub(crate) struct BrilligContext { context_label: String, /// Section label, used to separate sections of code section_label: usize, + /// Stores the next available section + next_section: usize, /// IR printer debug_show: DebugShow, } @@ -100,6 +104,7 @@ impl BrilligContext { registers: BrilligRegistersContext::new(), context_label: String::default(), section_label: 0, + next_section: 1, debug_show: DebugShow::new(enable_debug_trace), } } @@ -161,10 +166,14 @@ impl BrilligContext { /// Allocates a variable in memory and stores the /// pointer to the array in `pointer_register` - pub(crate) fn allocate_variable_instruction(&mut self, pointer_register: RegisterIndex) { + fn allocate_variable_reference_instruction( + &mut self, + pointer_register: RegisterIndex, + size: usize, + ) { self.debug_show.allocate_instruction(pointer_register); - // A variable can be stored in up to two values, so we reserve two values for that. - let size_register = self.make_constant(2_u128.into()); + // A variable can be stored in up to three values, so we reserve three values for that. 
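// Concretely, the reserved cell counts per variable kind are: one cell for a simple value (the value itself), two for an array reference (pointer, then reference count), and three for a vector reference (pointer, then size, then reference count); see `BrilligArray::registers_count` and `BrilligVector::registers_count`.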
+ let size_register = self.make_constant(size.into()); self.push_opcode(BrilligOpcode::Mov { destination: pointer_register, source: ReservedRegisters::stack_pointer(), @@ -177,6 +186,30 @@ impl BrilligContext { ); } + pub(crate) fn allocate_simple_reference_instruction( + &mut self, + pointer_register: RegisterIndex, + ) { + self.allocate_variable_reference_instruction(pointer_register, 1); + } + + pub(crate) fn allocate_array_reference_instruction(&mut self, pointer_register: RegisterIndex) { + self.allocate_variable_reference_instruction( + pointer_register, + BrilligArray::registers_count(), + ); + } + + pub(crate) fn allocate_vector_reference_instruction( + &mut self, + pointer_register: RegisterIndex, + ) { + self.allocate_variable_reference_instruction( + pointer_register, + BrilligVector::registers_count(), + ); + } + /// Gets the value in the array at index `index` and stores it in `result` pub(crate) fn array_get( &mut self, @@ -253,8 +286,8 @@ impl BrilligContext { { let iterator_register = self.make_constant(0_u128.into()); - let loop_label = self.next_section_label(); - self.enter_next_section(); + let (loop_section, loop_label) = self.reserve_next_section_label(); + self.enter_section(loop_section); // Loop body @@ -267,7 +300,7 @@ impl BrilligContext { BinaryIntOp::LessThan, ); - let exit_loop_label = self.next_section_label(); + let (exit_loop_section, exit_loop_label) = self.reserve_next_section_label(); self.not_instruction(iterator_less_than_iterations, 1, iterator_less_than_iterations); self.jump_if_instruction(iterator_less_than_iterations, exit_loop_label); @@ -281,12 +314,41 @@ impl BrilligContext { self.jump_instruction(loop_label); // Exit the loop - self.enter_next_section(); + self.enter_section(exit_loop_section); + // Deallocate our temporary registers self.deallocate_register(iterator_less_than_iterations); self.deallocate_register(iterator_register); } + /// This instruction will issue an if-then branch that will check if the condition is true + /// and if so, perform the instructions given in `f(self, true)` and otherwise perform the + /// instructions given in `f(self, false)`. A boolean is passed instead of two separate + /// functions to allow the given function to mutably alias its environment. 
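// A minimal usage sketch of the section-based branching helper defined below.
// The closure runs twice, once per branch, and each run appends its opcodes to
// a separate section; the wrapper function and register names here are assumed
// purely for illustration.
fn emit_select_one_or_zero(
    ctx: &mut BrilligContext,
    condition: RegisterIndex,
    output: RegisterIndex,
) {
    let one = ctx.make_constant(1_usize.into());
    ctx.branch_instruction(condition, |ctx, is_then_branch| {
        if is_then_branch {
            // Emitted into the "then" section.
            ctx.mov_instruction(output, one);
        } else {
            // Emitted into the "otherwise" section.
            ctx.const_instruction(output, 0_usize.into());
        }
    });
    ctx.deallocate_register(one);
}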
+ pub(crate) fn branch_instruction( + &mut self, + condition: RegisterIndex, + mut f: impl FnMut(&mut BrilligContext, bool), + ) { + // Reserve 3 sections + let (then_section, then_label) = self.reserve_next_section_label(); + let (otherwise_section, otherwise_label) = self.reserve_next_section_label(); + let (end_section, end_label) = self.reserve_next_section_label(); + + self.jump_if_instruction(condition, then_label.clone()); + self.jump_instruction(otherwise_label.clone()); + + self.enter_section(then_section); + f(self, true); + self.jump_instruction(end_label.clone()); + + self.enter_section(otherwise_section); + f(self, false); + self.jump_instruction(end_label.clone()); + + self.enter_section(end_section); + } + /// Adds a label to the next opcode pub(crate) fn enter_context(&mut self, label: T) { self.debug_show.enter_context(label.to_string()); @@ -299,23 +361,25 @@ impl BrilligContext { .add_label_at_position(self.current_section_label(), self.obj.index_of_next_opcode()); } - /// Increments the section label and adds a section label to the next opcode - fn enter_next_section(&mut self) { - self.section_label += 1; + /// Enter the given section + fn enter_section(&mut self, section: usize) { + self.section_label = section; self.obj .add_label_at_position(self.current_section_label(), self.obj.index_of_next_opcode()); } + /// Create, reserve, and return a new section label. + fn reserve_next_section_label(&mut self) -> (usize, String) { + let section = self.next_section; + self.next_section += 1; + (section, self.compute_section_label(section)) + } + /// Internal function used to compute the section labels fn compute_section_label(&self, section: usize) -> String { format!("{}-{}", self.context_label, section) } - /// Returns the next section label - fn next_section_label(&self) -> String { - self.compute_section_label(self.section_label + 1) - } - /// Returns the current section label fn current_section_label(&self) -> String { self.compute_section_label(self.section_label) @@ -371,15 +435,13 @@ impl BrilligContext { assert_message: Option, ) { self.debug_show.constrain_instruction(condition); - self.add_unresolved_jump( - BrilligOpcode::JumpIf { condition, location: 0 }, - self.next_section_label(), - ); + let (next_section, next_label) = self.reserve_next_section_label(); + self.add_unresolved_jump(BrilligOpcode::JumpIf { condition, location: 0 }, next_label); self.push_opcode(BrilligOpcode::Trap); if let Some(assert_message) = assert_message { self.obj.add_assert_message_to_last_opcode(assert_message); } - self.enter_next_section(); + self.enter_section(next_section); } /// Processes a return instruction. @@ -528,17 +590,24 @@ impl BrilligContext { /// Loads a variable stored previously pub(crate) fn load_variable_instruction( &mut self, - destination: RegisterOrMemory, + destination: BrilligVariable, variable_pointer: RegisterIndex, ) { match destination { - RegisterOrMemory::RegisterIndex(register_index) => { + BrilligVariable::Simple(register_index) => { self.load_instruction(register_index, variable_pointer); } - RegisterOrMemory::HeapArray(HeapArray { pointer, .. 
}) => { + BrilligVariable::BrilligArray(BrilligArray { pointer, size: _, rc }) => { self.load_instruction(pointer, variable_pointer); + + let rc_pointer = self.allocate_register(); + self.mov_instruction(rc_pointer, variable_pointer); + self.usize_op_in_place(rc_pointer, BinaryIntOp::Add, 1_usize); + + self.load_instruction(rc, rc_pointer); + self.deallocate_register(rc_pointer); } - RegisterOrMemory::HeapVector(HeapVector { pointer, size }) => { + BrilligVariable::BrilligVector(BrilligVector { pointer, size, rc }) => { self.load_instruction(pointer, variable_pointer); let size_pointer = self.allocate_register(); @@ -547,6 +616,13 @@ impl BrilligContext { self.load_instruction(size, size_pointer); self.deallocate_register(size_pointer); + + let rc_pointer = self.allocate_register(); + self.mov_instruction(rc_pointer, variable_pointer); + self.usize_op_in_place(rc_pointer, BinaryIntOp::Add, 2_usize); + + self.load_instruction(rc, rc_pointer); + self.deallocate_register(rc_pointer); } } } @@ -565,32 +641,38 @@ impl BrilligContext { pub(crate) fn store_variable_instruction( &mut self, variable_pointer: RegisterIndex, - source: RegisterOrMemory, + source: BrilligVariable, ) { - let size_pointer = self.allocate_register(); - self.mov_instruction(size_pointer, variable_pointer); - self.usize_op_in_place(size_pointer, BinaryIntOp::Add, 1_usize); - match source { - RegisterOrMemory::RegisterIndex(register_index) => { + BrilligVariable::Simple(register_index) => { self.store_instruction(variable_pointer, register_index); - let size_constant = self.make_constant(Value::from(1_usize)); - self.store_instruction(size_pointer, size_constant); - self.deallocate_register(size_constant); } - RegisterOrMemory::HeapArray(HeapArray { pointer, size }) => { + BrilligVariable::BrilligArray(BrilligArray { pointer, size: _, rc }) => { self.store_instruction(variable_pointer, pointer); - let size_constant = self.make_constant(Value::from(size)); - self.store_instruction(size_pointer, size_constant); - self.deallocate_register(size_constant); + + let rc_pointer: RegisterIndex = self.allocate_register(); + self.mov_instruction(rc_pointer, variable_pointer); + self.usize_op_in_place(rc_pointer, BinaryIntOp::Add, 1_usize); + self.store_instruction(rc_pointer, rc); + self.deallocate_register(rc_pointer); } - RegisterOrMemory::HeapVector(HeapVector { pointer, size }) => { + BrilligVariable::BrilligVector(BrilligVector { pointer, size, rc }) => { self.store_instruction(variable_pointer, pointer); + + let size_pointer = self.allocate_register(); + self.mov_instruction(size_pointer, variable_pointer); + self.usize_op_in_place(size_pointer, BinaryIntOp::Add, 1_usize); self.store_instruction(size_pointer, size); + + let rc_pointer: RegisterIndex = self.allocate_register(); + self.mov_instruction(rc_pointer, variable_pointer); + self.usize_op_in_place(rc_pointer, BinaryIntOp::Add, 2_usize); + self.store_instruction(rc_pointer, rc); + + self.deallocate_register(size_pointer); + self.deallocate_register(rc_pointer); } } - - self.deallocate_register(size_pointer); } /// Emits a truncate instruction. @@ -605,10 +687,29 @@ impl BrilligContext { &mut self, destination_of_truncated_value: RegisterIndex, value_to_truncate: RegisterIndex, + bit_size: u32, ) { - // Effectively a no-op because brillig already has implicit truncation on integer - // operations. We need only copy the value to it's destination. 
- self.mov_instruction(destination_of_truncated_value, value_to_truncate); + self.debug_show.truncate_instruction( + destination_of_truncated_value, + value_to_truncate, + bit_size, + ); + assert!( + bit_size <= BRILLIG_INTEGER_ARITHMETIC_BIT_SIZE, + "tried to truncate to a bit size greater than allowed {bit_size}" + ); + + // The brillig VM performs all arithmetic operations modulo 2**bit_size + // So to truncate any value to a target bit size we can just issue a no-op arithmetic operation + // With bit size equal to target_bit_size + let zero_register = self.make_constant(Value::from(FieldElement::zero())); + self.binary_instruction( + value_to_truncate, + zero_register, + destination_of_truncated_value, + BrilligBinaryOp::Integer { op: BinaryIntOp::Add, bit_size }, + ); + self.deallocate_register(zero_register); } /// Emits a stop instruction @@ -679,36 +780,6 @@ impl BrilligContext { self.deallocate_register(scratch_register_j); } - /// Emits a modulo instruction against 2**target_bit_size - /// - /// Integer arithmetic in Brillig is currently constrained to 127 bit integers. - /// We restrict the cast operation, so that integer types over 127 bits - /// cannot be created. - pub(crate) fn cast_instruction( - &mut self, - destination: RegisterIndex, - source: RegisterIndex, - target_bit_size: u32, - ) { - self.debug_show.cast_instruction(destination, source, target_bit_size); - assert!( - target_bit_size <= BRILLIG_INTEGER_ARITHMETIC_BIT_SIZE, - "tried to cast to a bit size greater than allowed {target_bit_size}" - ); - - // The brillig VM performs all arithmetic operations modulo 2**bit_size - // So to cast any value to a target bit size we can just issue a no-op arithmetic operation - // With bit size equal to target_bit_size - let zero_register = self.make_constant(Value::from(FieldElement::zero())); - self.binary_instruction( - source, - zero_register, - destination, - BrilligBinaryOp::Integer { op: BinaryIntOp::Add, bit_size: target_bit_size }, - ); - self.deallocate_register(zero_register); - } - /// Adds a unresolved external `Call` instruction to the bytecode. /// This calls into another function compiled into this brillig artifact. pub(crate) fn add_external_call_instruction(&mut self, func_label: T) { @@ -725,14 +796,14 @@ impl BrilligContext { } /// Saves all of the registers that have been used up until this point. - fn save_registers_of_vars(&mut self, vars: &[RegisterOrMemory]) -> Vec { + fn save_registers_of_vars(&mut self, vars: &[BrilligVariable]) -> Vec { // Save all of the used registers at this point in memory // because the function call will/may overwrite them. // // Note that here it is important that the stack pointer register is at register 0, // as after the first register save we add to the pointer. let mut used_registers: Vec<_> = - vars.iter().flat_map(|var| extract_registers(*var)).collect(); + vars.iter().flat_map(|var| var.extract_registers()).collect(); // Also dump the previous stack pointer used_registers.push(ReservedRegisters::previous_stack_pointer()); @@ -811,7 +882,7 @@ impl BrilligContext { pub(crate) fn pre_call_save_registers_prep_args( &mut self, arguments: &[RegisterIndex], - variables_to_save: &[RegisterOrMemory], + variables_to_save: &[BrilligVariable], ) -> Vec { // Save all the registers we have used to the stack. let saved_registers = self.save_registers_of_vars(variables_to_save); @@ -852,9 +923,9 @@ impl BrilligContext { } /// Utility method to transform a HeapArray to a HeapVector by making a runtime constant with the size. 
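// With the BrilligArray/BrilligVector representation the conversion also carries the reference count across: the resulting vector reuses the array's pointer and rc registers and only materializes the compile-time size into a freshly allocated constant register.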
- pub(crate) fn array_to_vector(&mut self, array: &HeapArray) -> HeapVector { + pub(crate) fn array_to_vector(&mut self, array: &BrilligArray) -> BrilligVector { let size_register = self.make_constant(array.size.into()); - HeapVector { size: size_register, pointer: array.pointer } + BrilligVector { size: size_register, pointer: array.pointer, rc: array.rc } } /// Issues a blackbox operation. @@ -868,12 +939,13 @@ impl BrilligContext { pub(crate) fn radix_instruction( &mut self, source: RegisterIndex, - target_vector: HeapVector, + target_vector: BrilligVector, radix: RegisterIndex, limb_count: RegisterIndex, big_endian: bool, ) { self.mov_instruction(target_vector.size, limb_count); + self.const_instruction(target_vector.rc, 1_usize.into()); self.allocate_array_instruction(target_vector.pointer, target_vector.size); let shifted_register = self.allocate_register(); @@ -914,7 +986,7 @@ impl BrilligContext { } /// This instruction will reverse the order of the elements in a vector. - pub(crate) fn reverse_vector_in_place_instruction(&mut self, vector: HeapVector) { + pub(crate) fn reverse_vector_in_place_instruction(&mut self, vector: BrilligVector) { let iteration_count = self.allocate_register(); self.usize_op(vector.size, iteration_count, BinaryIntOp::UnsignedDiv, 2); @@ -949,51 +1021,12 @@ impl BrilligContext { self.deallocate_register(index_at_end_of_array); } - pub(crate) fn extract_heap_vector(&mut self, variable: RegisterOrMemory) -> HeapVector { - match variable { - RegisterOrMemory::HeapVector(vector) => vector, - RegisterOrMemory::HeapArray(array) => { - let size = self.allocate_register(); - self.const_instruction(size, array.size.into()); - HeapVector { pointer: array.pointer, size } - } - _ => unreachable!("ICE: Expected vector, got {variable:?}"), - } - } - /// Sets a current call stack that the next pushed opcodes will be associated with. pub(crate) fn set_call_stack(&mut self, call_stack: CallStack) { self.obj.set_call_stack(call_stack); } } -pub(crate) fn extract_register(variable: RegisterOrMemory) -> RegisterIndex { - match variable { - RegisterOrMemory::RegisterIndex(register_index) => register_index, - _ => unreachable!("ICE: Expected register, got {variable:?}"), - } -} - -pub(crate) fn extract_heap_array(variable: RegisterOrMemory) -> HeapArray { - match variable { - RegisterOrMemory::HeapArray(array) => array, - _ => unreachable!("ICE: Expected array, got {variable:?}"), - } -} - -/// Collects the registers that a given variable is stored in. 
-pub(crate) fn extract_registers(variable: RegisterOrMemory) -> Vec { - match variable { - RegisterOrMemory::RegisterIndex(register_index) => vec![register_index], - RegisterOrMemory::HeapArray(array) => { - vec![array.pointer] - } - RegisterOrMemory::HeapVector(vector) => { - vec![vector.pointer, vector.size] - } - } -} - /// Type to encapsulate the binary operation types in Brillig #[derive(Clone)] pub(crate) enum BrilligBinaryOp { diff --git a/compiler/noirc_evaluator/src/brillig/brillig_ir/brillig_variable.rs b/compiler/noirc_evaluator/src/brillig/brillig_ir/brillig_variable.rs new file mode 100644 index 00000000000..46c54d55ecb --- /dev/null +++ b/compiler/noirc_evaluator/src/brillig/brillig_ir/brillig_variable.rs @@ -0,0 +1,99 @@ +use acvm::brillig_vm::brillig::{HeapArray, HeapVector, RegisterIndex, RegisterOrMemory}; +use serde::{Deserialize, Serialize}; + +/// The representation of a noir array in the Brillig IR +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize, Copy)] +pub(crate) struct BrilligArray { + pub(crate) pointer: RegisterIndex, + pub(crate) size: usize, + pub(crate) rc: RegisterIndex, +} + +impl BrilligArray { + pub(crate) fn to_heap_array(self) -> HeapArray { + HeapArray { pointer: self.pointer, size: self.size } + } + + pub(crate) fn registers_count() -> usize { + 2 + } + + pub(crate) fn extract_registers(self) -> Vec { + vec![self.pointer, self.rc] + } +} + +/// The representation of a noir slice in the Brillig IR +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize, Copy)] +pub(crate) struct BrilligVector { + pub(crate) pointer: RegisterIndex, + pub(crate) size: RegisterIndex, + pub(crate) rc: RegisterIndex, +} + +impl BrilligVector { + pub(crate) fn to_heap_vector(self) -> HeapVector { + HeapVector { pointer: self.pointer, size: self.size } + } + + pub(crate) fn registers_count() -> usize { + 3 + } + + pub(crate) fn extract_registers(self) -> Vec { + vec![self.pointer, self.size, self.rc] + } +} + +/// The representation of a noir value in the Brillig IR +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize, Copy)] +pub(crate) enum BrilligVariable { + Simple(RegisterIndex), + BrilligArray(BrilligArray), + BrilligVector(BrilligVector), +} + +impl BrilligVariable { + pub(crate) fn extract_register(self) -> RegisterIndex { + match self { + BrilligVariable::Simple(register_index) => register_index, + _ => unreachable!("ICE: Expected register, got {self:?}"), + } + } + + pub(crate) fn extract_array(self) -> BrilligArray { + match self { + BrilligVariable::BrilligArray(array) => array, + _ => unreachable!("ICE: Expected array, got {self:?}"), + } + } + + pub(crate) fn extract_vector(self) -> BrilligVector { + match self { + BrilligVariable::BrilligVector(vector) => vector, + _ => unreachable!("ICE: Expected vector, got {self:?}"), + } + } + + pub(crate) fn extract_registers(self) -> Vec { + match self { + BrilligVariable::Simple(register_index) => vec![register_index], + BrilligVariable::BrilligArray(array) => array.extract_registers(), + BrilligVariable::BrilligVector(vector) => vector.extract_registers(), + } + } + + pub(crate) fn to_register_or_memory(self) -> RegisterOrMemory { + match self { + BrilligVariable::Simple(register_index) => { + RegisterOrMemory::RegisterIndex(register_index) + } + BrilligVariable::BrilligArray(array) => { + RegisterOrMemory::HeapArray(array.to_heap_array()) + } + BrilligVariable::BrilligVector(vector) => { + RegisterOrMemory::HeapVector(vector.to_heap_vector()) + } + } + } +} diff --git 
a/compiler/noirc_evaluator/src/brillig/brillig_ir/debug_show.rs b/compiler/noirc_evaluator/src/brillig/brillig_ir/debug_show.rs index 65db47dd2e0..74b24b280cd 100644 --- a/compiler/noirc_evaluator/src/brillig/brillig_ir/debug_show.rs +++ b/compiler/noirc_evaluator/src/brillig/brillig_ir/debug_show.rs @@ -326,7 +326,7 @@ impl DebugShow { } /// Debug function for cast_instruction - pub(crate) fn cast_instruction( + pub(crate) fn truncate_instruction( &self, destination: RegisterIndex, source: RegisterIndex, @@ -334,7 +334,7 @@ impl DebugShow { ) { debug_println!( self.enable_debug_trace, - " CAST {} FROM {} TO {} BITS", + " TRUNCATE {} FROM {} TO {} BITS", destination, source, target_bit_size @@ -353,14 +353,6 @@ impl DebugShow { BlackBoxOp::Blake2s { message, output } => { debug_println!(self.enable_debug_trace, " BLAKE2S {} -> {}", message, output); } - BlackBoxOp::HashToField128Security { message, output } => { - debug_println!( - self.enable_debug_trace, - " HASH_TO_FIELD_128_SECURITY {} -> {}", - message, - output - ); - } BlackBoxOp::EcdsaSecp256k1 { hashed_msg, public_key_x, diff --git a/compiler/noirc_evaluator/src/brillig/brillig_ir/entry_point.rs b/compiler/noirc_evaluator/src/brillig/brillig_ir/entry_point.rs index fb426ad6876..36ca414f38e 100644 --- a/compiler/noirc_evaluator/src/brillig/brillig_ir/entry_point.rs +++ b/compiler/noirc_evaluator/src/brillig/brillig_ir/entry_point.rs @@ -2,6 +2,7 @@ use crate::brillig::brillig_ir::ReservedRegisters; use super::{ artifact::{BrilligArtifact, BrilligParameter}, + brillig_variable::{BrilligArray, BrilligVariable}, debug_show::DebugShow, registers::BrilligRegistersContext, BrilligContext, @@ -20,6 +21,7 @@ impl BrilligContext { registers: BrilligRegistersContext::new(), context_label: String::default(), section_label: 0, + next_section: 1, debug_show: DebugShow::new(false), }; @@ -32,18 +34,39 @@ impl BrilligContext { } /// Adds the instructions needed to handle entry point parameters - /// - /// And sets the starting value of the reserved registers + /// The runtime will leave the parameters in the first `n` registers. + /// Arrays will be passed as pointers to the first element, with all the nested arrays flattened. + /// First, reserve the registers that contain the parameters. 
+ /// This function also sets the starting value of the reserved registers fn entry_point_instruction(&mut self, arguments: Vec) { - // Translate the inputs by the reserved registers offset - for i in (0..arguments.len()).rev() { - self.push_opcode(BrilligOpcode::Mov { - destination: ReservedRegisters::user_register_index(i), - source: RegisterIndex::from(i), - }); - // Make sure we don't overwrite the arguments - self.allocate_register(); - } + let preallocated_registers: Vec<_> = + arguments.iter().enumerate().map(|(i, _)| RegisterIndex::from(i)).collect(); + self.set_allocated_registers(preallocated_registers.clone()); + + // Then allocate and initialize the variables that will hold the parameters + let argument_variables: Vec<_> = arguments + .iter() + .zip(preallocated_registers) + .map(|(argument, param_register)| match argument { + BrilligParameter::Simple => { + let variable_register = self.allocate_register(); + self.mov_instruction(variable_register, param_register); + BrilligVariable::Simple(variable_register) + } + BrilligParameter::Array(item_types, item_count) => { + let pointer_register = self.allocate_register(); + let rc_register = self.allocate_register(); + self.mov_instruction(pointer_register, param_register); + self.const_instruction(rc_register, 1_usize.into()); + BrilligVariable::BrilligArray(BrilligArray { + pointer: pointer_register, + size: item_types.len() * item_count, + rc: rc_register, + }) + } + BrilligParameter::Slice(_) => unimplemented!("Unsupported slices as parameter"), + }) + .collect(); // Calculate the initial value for the stack pointer register let size_arguments_memory: usize = arguments @@ -65,16 +88,24 @@ impl BrilligContext { value: 0_usize.into(), }); - for (index, parameter) in arguments.iter().enumerate() { + // Deflatten the arrays + for (parameter, assigned_variable) in arguments.iter().zip(&argument_variables) { if let BrilligParameter::Array(item_type, item_count) = parameter { if item_type.iter().any(|param| !matches!(param, BrilligParameter::Simple)) { - let pointer_register = ReservedRegisters::user_register_index(index); + let pointer_register = assigned_variable.extract_array().pointer; let deflattened_register = self.deflatten_array(item_type, *item_count, pointer_register); self.mov_instruction(pointer_register, deflattened_register); } } } + + // Move the parameters to the first user defined registers, to follow function call convention. + for (i, register) in + argument_variables.into_iter().flat_map(|arg| arg.extract_registers()).enumerate() + { + self.mov_instruction(ReservedRegisters::user_register_index(i), register); + } } /// Computes the size of a parameter if it was flattened @@ -92,6 +123,7 @@ impl BrilligContext { } /// Deflatten an array by recursively allocating nested arrays and copying the plain values. + /// Returns the pointer to the deflattened items. 
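Editor's note: a rough picture of what the deflattening described above produces, using the values from the entry-point test further down in this diff (the addresses are the ones that test expects, not a general guarantee). For an item type of `([Field; 2], Field)` with two items, the flattened input `[1, 2, 3, 4, 5, 6]` is rebuilt so that each nested-array slot holds a reference cell storing a pointer and a reference count of 1.

    // Memory picture copied from the entry-point test below.
    fn main() {
        let memory: Vec<usize> = vec![
            1, 2, 3, 4, 5, 6, // 0..=5   the flattened input as passed by the caller
            12, 3,            // 6..=7   item 0: reference to its nested array, then the field 3
            16, 6,            // 8..=9   item 1: reference to its nested array, then the field 6
            1, 2,             // 10..=11 nested array of item 0
            10, 1,            // 12..=13 item 0's reference cell: (pointer = 10, rc = 1)
            4, 5,             // 14..=15 nested array of item 1
            14, 1,            // 16..=17 item 1's reference cell: (pointer = 14, rc = 1)
        ];
        // Following item 0's slot: it points at a reference cell, whose pointer in
        // turn points at the nested array [1, 2].
        let reference = memory[6];
        let nested_pointer = memory[reference];
        assert_eq!(&memory[nested_pointer..nested_pointer + 2], &[1, 2]);
        assert_eq!(memory[reference + 1], 1); // reference count initialised to 1
    }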
fn deflatten_array( &mut self, item_type: &[BrilligParameter], @@ -139,13 +171,25 @@ impl BrilligContext { *nested_array_item_count, nested_array_pointer, ); - self.array_set( - deflattened_array_pointer, - target_index, - deflattened_nested_array_pointer, + let reference = self.allocate_register(); + let rc = self.allocate_register(); + self.const_instruction(rc, 1_usize.into()); + + self.allocate_array_reference_instruction(reference); + self.store_variable_instruction( + reference, + BrilligVariable::BrilligArray(BrilligArray { + pointer: deflattened_nested_array_pointer, + size: nested_array_item_type.len() * nested_array_item_count, + rc, + }), ); + self.array_set(deflattened_array_pointer, target_index, reference); + self.deallocate_register(nested_array_pointer); + self.deallocate_register(reference); + self.deallocate_register(rc); source_offset += BrilligContext::flattened_size(subitem); } @@ -163,21 +207,36 @@ impl BrilligContext { } /// Adds the instructions needed to handle return parameters + /// The runtime expects the results in the first `n` registers. + /// Arrays are expected to be returned as pointers to the first element with all the nested arrays flattened. + /// However, the function called returns variables (that have extra data) and the returned arrays are deflattened. fn exit_point_instruction(&mut self, return_parameters: Vec) { - // Make sure we don't overwrite the return parameters - return_parameters.iter().for_each(|_| { - self.allocate_register(); - }); - - for (index, ret) in return_parameters.iter().enumerate() { - if let BrilligParameter::Array(item_type, item_count) = ret { + // First, we allocate the registers that hold the returned variables from the function call. + self.set_allocated_registers(vec![]); + let returned_variables: Vec<_> = return_parameters + .iter() + .map(|return_parameter| match return_parameter { + BrilligParameter::Simple => BrilligVariable::Simple(self.allocate_register()), + BrilligParameter::Array(item_types, item_count) => { + BrilligVariable::BrilligArray(BrilligArray { + pointer: self.allocate_register(), + size: item_types.len() * item_count, + rc: self.allocate_register(), + }) + } + BrilligParameter::Slice(..) => unreachable!("ICE: Cannot return slices"), + }) + .collect(); + // Now, we deflatten the returned arrays + for (return_param, returned_variable) in return_parameters.iter().zip(&returned_variables) { + if let BrilligParameter::Array(item_type, item_count) = return_param { if item_type.iter().any(|item| !matches!(item, BrilligParameter::Simple)) { - let returned_pointer = ReservedRegisters::user_register_index(index); + let returned_pointer = returned_variable.extract_array().pointer; let flattened_array_pointer = self.allocate_register(); self.allocate_fixed_length_array( flattened_array_pointer, - BrilligContext::flattened_size(ret), + BrilligContext::flattened_size(return_param), ); self.flatten_array( @@ -191,16 +250,18 @@ impl BrilligContext { } } } - // We want all functions to follow the calling convention of returning + // The VM expects us to follow the calling convention of returning // their results in the first `n` registers. So we to move the return values // to the first `n` registers once completed. 
// Move the results to registers 0..n - for i in 0..return_parameters.len() { - self.push_opcode(BrilligOpcode::Mov { - destination: i.into(), - source: ReservedRegisters::user_register_index(i), - }); + for (i, returned_variable) in returned_variables.into_iter().enumerate() { + let register = match returned_variable { + BrilligVariable::Simple(register) => register, + BrilligVariable::BrilligArray(array) => array.pointer, + BrilligVariable::BrilligVector(vector) => vector.pointer, + }; + self.push_opcode(BrilligOpcode::Mov { destination: i.into(), source: register }); } self.push_opcode(BrilligOpcode::Stop); } @@ -237,11 +298,22 @@ impl BrilligContext { target_offset += 1; } BrilligParameter::Array(nested_array_item_type, nested_array_item_count) => { - let nested_array_pointer = self.allocate_register(); + let nested_array_reference = self.allocate_register(); self.array_get( deflattened_array_pointer, source_index, - nested_array_pointer, + nested_array_reference, + ); + + let nested_array_variable = BrilligVariable::BrilligArray(BrilligArray { + pointer: self.allocate_register(), + size: nested_array_item_type.len() * nested_array_item_count, + rc: self.allocate_register(), + }); + + self.load_variable_instruction( + nested_array_variable, + nested_array_reference, ); let flattened_nested_array_pointer = self.allocate_register(); @@ -262,11 +334,15 @@ impl BrilligContext { nested_array_item_type, *nested_array_item_count, flattened_nested_array_pointer, - nested_array_pointer, + nested_array_variable.extract_array().pointer, ); - self.deallocate_register(nested_array_pointer); + self.deallocate_register(nested_array_reference); self.deallocate_register(flattened_nested_array_pointer); + nested_array_variable + .extract_registers() + .into_iter() + .for_each(|register| self.deallocate_register(register)); target_offset += BrilligContext::flattened_size(subitem); } @@ -288,6 +364,7 @@ mod tests { use crate::brillig::brillig_ir::{ artifact::BrilligParameter, + brillig_variable::BrilligArray, tests::{create_and_run_vm, create_context, create_entry_point_bytecode}, }; @@ -332,18 +409,24 @@ mod tests { Value::from(4_usize), Value::from(5_usize), Value::from(6_usize), - // The pointer to the nested array of the first item - Value::from(10_usize), - Value::from(3_usize), - // The pointer to the nested array of the second item + // The pointer to the nested reference of the first item Value::from(12_usize), + Value::from(3_usize), + // The pointer to the nested reference of the second item + Value::from(16_usize), Value::from(6_usize), // The nested array of the first item Value::from(1_usize), Value::from(2_usize), + // The nested reference of the first item + Value::from(10_usize), + Value::from(1_usize), // The nested array of the second item Value::from(4_usize), Value::from(5_usize), + // The nested reference of the second item + Value::from(14_usize), + Value::from(1_usize), ] ); } @@ -358,35 +441,31 @@ mod tests { Value::from(5_usize), Value::from(6_usize), ]; - let array_param = BrilligParameter::Array( vec![ - BrilligParameter::Simple, BrilligParameter::Array(vec![BrilligParameter::Simple], 2), + BrilligParameter::Simple, ], 2, ); - let arguments = vec![array_param.clone()]; let returns = vec![array_param]; let mut context = create_context(); // Allocate the parameter - let array_pointer = context.allocate_register(); + let brillig_array = BrilligArray { + pointer: context.allocate_register(), + size: 2, + rc: context.allocate_register(), + }; - 
context.return_instruction(&[array_pointer]); + context.return_instruction(&brillig_array.extract_registers()); let bytecode = create_entry_point_bytecode(context, arguments, returns).byte_code; let vm = create_and_run_vm(flattened_array.clone(), vec![Value::from(0_usize)], &bytecode); let memory = vm.get_memory(); - assert_eq!( - vm.get_registers().get(RegisterIndex(0)), - // The returned value will be past the original array and the deflattened array - Value::from(flattened_array.len() + (flattened_array.len() + 2)), - ); - assert_eq!( memory, &vec![ @@ -397,19 +476,25 @@ mod tests { Value::from(4_usize), Value::from(5_usize), Value::from(6_usize), - // The pointer to the nested array of the first item - Value::from(1_usize), - Value::from(10_usize), - // The pointer to the nested array of the second item - Value::from(4_usize), + // The pointer to the nested reference of the first item Value::from(12_usize), + Value::from(3_usize), + // The pointer to the nested reference of the second item + Value::from(16_usize), + Value::from(6_usize), // The nested array of the first item + Value::from(1_usize), Value::from(2_usize), - Value::from(3_usize), + // The nested reference of the first item + Value::from(10_usize), + Value::from(1_usize), // The nested array of the second item + Value::from(4_usize), Value::from(5_usize), - Value::from(6_usize), - // The values flattened again + // The nested reference of the second item + Value::from(14_usize), + Value::from(1_usize), + // The original flattened again Value::from(1_usize), Value::from(2_usize), Value::from(3_usize), @@ -418,5 +503,6 @@ mod tests { Value::from(6_usize), ] ); + assert_eq!(vm.get_registers().get(RegisterIndex(0)), 18_usize.into()); } } diff --git a/compiler/noirc_evaluator/src/errors.rs b/compiler/noirc_evaluator/src/errors.rs index 42672d4d0ad..33ecc794f76 100644 --- a/compiler/noirc_evaluator/src/errors.rs +++ b/compiler/noirc_evaluator/src/errors.rs @@ -26,7 +26,7 @@ pub enum RuntimeError { }, #[error(transparent)] InternalError(#[from] InternalError), - #[error("Index out of bounds, array has size {index:?}, but index was {array_size:?}")] + #[error("Index out of bounds, array has size {array_size}, but index was {index}")] IndexOutOfBounds { index: usize, array_size: usize, call_stack: CallStack }, #[error("Range constraint of {num_bits} bits is too large for the Field size")] InvalidRangeConstraint { num_bits: u32, call_stack: CallStack }, @@ -106,7 +106,7 @@ pub enum InternalError { #[error("ICE: Undeclared AcirVar")] UndeclaredAcirVar { call_stack: CallStack }, #[error("ICE: Expected {expected:?}, found {found:?}")] - UnExpected { expected: String, found: String, call_stack: CallStack }, + Unexpected { expected: String, found: String, call_stack: CallStack }, } impl RuntimeError { @@ -119,7 +119,7 @@ impl RuntimeError { | InternalError::MissingArg { call_stack, .. } | InternalError::NotAConstant { call_stack, .. } | InternalError::UndeclaredAcirVar { call_stack } - | InternalError::UnExpected { call_stack, .. }, + | InternalError::Unexpected { call_stack, .. }, ) | RuntimeError::FailedConstraint { call_stack, .. } | RuntimeError::IndexOutOfBounds { call_stack, .. 
} diff --git a/compiler/noirc_evaluator/src/lib.rs b/compiler/noirc_evaluator/src/lib.rs index b5b697e3b65..70751d3e541 100644 --- a/compiler/noirc_evaluator/src/lib.rs +++ b/compiler/noirc_evaluator/src/lib.rs @@ -11,5 +11,4 @@ pub mod ssa; pub mod brillig; -pub use ssa::abi_gen::into_abi_params; pub use ssa::create_circuit; diff --git a/compiler/noirc_evaluator/src/ssa.rs b/compiler/noirc_evaluator/src/ssa.rs index 24521b98bd1..deffe84baea 100644 --- a/compiler/noirc_evaluator/src/ssa.rs +++ b/compiler/noirc_evaluator/src/ssa.rs @@ -7,10 +7,7 @@ //! This module heavily borrows from Cranelift #![allow(dead_code)] -use std::{ - collections::{BTreeMap, BTreeSet}, - ops::Range, -}; +use std::collections::BTreeSet; use crate::{ brillig::Brillig, @@ -23,13 +20,13 @@ use acvm::acir::{ use noirc_errors::debug_info::DebugInfo; -use noirc_abi::Abi; - -use noirc_frontend::{hir::Context, monomorphization::ast::Program}; +use noirc_frontend::{ + hir_def::function::FunctionSignature, monomorphization::ast::Program, Visibility, +}; +use tracing::{span, Level}; -use self::{abi_gen::gen_abi, acir_gen::GeneratedAcir, ssa_gen::Ssa}; +use self::{acir_gen::GeneratedAcir, ssa_gen::Ssa}; -pub mod abi_gen; mod acir_gen; pub(super) mod function_builder; pub mod ir; @@ -46,6 +43,8 @@ pub(crate) fn optimize_into_acir( ) -> Result { let abi_distinctness = program.return_distinctness; + let ssa_gen_span = span!(Level::TRACE, "ssa_generation"); + let ssa_gen_span_guard = ssa_gen_span.enter(); let ssa_builder = SsaBuilder::new(program, print_ssa_passes)? .run_pass(Ssa::defunctionalize, "After Defunctionalization:") .run_pass(Ssa::inline_functions, "After Inlining:") @@ -73,20 +72,23 @@ pub(crate) fn optimize_into_acir( let ssa = ssa_builder .run_pass(Ssa::fill_internal_slices, "After Fill Internal Slice Dummy Data:") .finish(); + drop(ssa_gen_span_guard); let last_array_uses = ssa.find_last_array_uses(); + ssa.into_acir(brillig, abi_distinctness, &last_array_uses) } /// Compiles the [`Program`] into [`ACIR`][acvm::acir::circuit::Circuit]. /// /// The output ACIR is is backend-agnostic and so must go through a transformation pass before usage in proof generation. +#[allow(clippy::type_complexity)] +#[tracing::instrument(level = "trace", skip_all)] pub fn create_circuit( - context: &Context, program: Program, enable_ssa_logging: bool, enable_brillig_logging: bool, -) -> Result<(Circuit, DebugInfo, Abi, Vec), RuntimeError> { +) -> Result<(Circuit, DebugInfo, Vec, Vec, Vec), RuntimeError> { let func_sig = program.main_function_signature.clone(); let mut generated_acir = optimize_into_acir(program, enable_ssa_logging, enable_brillig_logging)?; @@ -101,15 +103,11 @@ pub fn create_circuit( .. 
} = generated_acir; - let abi = gen_abi(context, func_sig, input_witnesses, return_witnesses.clone()); - let public_abi = abi.clone().public_abi(); + let (public_parameter_witnesses, private_parameters) = + split_public_and_private_inputs(&func_sig, &input_witnesses); - let public_parameters = PublicInputs(tree_to_set(&public_abi.param_witnesses)); - - let all_parameters: BTreeSet = tree_to_set(&abi.param_witnesses); - let private_parameters = all_parameters.difference(&public_parameters.0).copied().collect(); - - let return_values = PublicInputs(return_witnesses.into_iter().collect()); + let public_parameters = PublicInputs(public_parameter_witnesses); + let return_values = PublicInputs(return_witnesses.iter().copied().collect()); let circuit = Circuit { current_witness_index, @@ -132,7 +130,41 @@ pub fn create_circuit( let (optimized_circuit, transformation_map) = acvm::compiler::optimize(circuit); debug_info.update_acir(transformation_map); - Ok((optimized_circuit, debug_info, abi, warnings)) + Ok((optimized_circuit, debug_info, input_witnesses, return_witnesses, warnings)) +} + +// Takes each function argument and partitions the circuit's inputs witnesses according to its visibility. +fn split_public_and_private_inputs( + func_sig: &FunctionSignature, + input_witnesses: &[Witness], +) -> (BTreeSet, BTreeSet) { + let mut idx = 0_usize; + if input_witnesses.is_empty() { + return (BTreeSet::new(), BTreeSet::new()); + } + + func_sig + .0 + .iter() + .map(|(_, typ, visibility)| { + let num_field_elements_needed = typ.field_count() as usize; + let witnesses = input_witnesses[idx..idx + num_field_elements_needed].to_vec(); + idx += num_field_elements_needed; + (visibility, witnesses) + }) + .fold((BTreeSet::new(), BTreeSet::new()), |mut acc, (vis, witnesses)| { + // Split witnesses into sets based on their visibility. + if *vis == Visibility::Public { + for witness in witnesses { + acc.0.insert(witness); + } + } else { + for witness in witnesses { + acc.1.insert(witness); + } + } + (acc.0, acc.1) + }) } // This is just a convenience object to bundle the ssa with `print_ssa_passes` for debug printing. @@ -178,15 +210,3 @@ impl SsaBuilder { self } } - -// Flatten the witnesses in the map into a BTreeSet -fn tree_to_set(input: &BTreeMap>>) -> BTreeSet { - let mut result = BTreeSet::new(); - for range in input.values().flatten() { - for i in range.start.witness_index()..range.end.witness_index() { - result.insert(Witness(i)); - } - } - - result -} diff --git a/compiler/noirc_evaluator/src/ssa/abi_gen/mod.rs b/compiler/noirc_evaluator/src/ssa/abi_gen/mod.rs deleted file mode 100644 index 948fb7d5263..00000000000 --- a/compiler/noirc_evaluator/src/ssa/abi_gen/mod.rs +++ /dev/null @@ -1,86 +0,0 @@ -use std::collections::BTreeMap; - -use acvm::acir::native_types::Witness; -use iter_extended::{btree_map, vecmap}; -use noirc_abi::{Abi, AbiParameter, AbiType}; -use noirc_frontend::{ - hir::Context, - hir_def::{ - function::{FunctionSignature, Param}, - stmt::HirPattern, - }, - node_interner::NodeInterner, -}; -use std::ops::Range; - -/// Attempts to retrieve the name of this parameter. Returns None -/// if this parameter is a tuple or struct pattern. 
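Editor's note: a minimal standalone sketch of the partition `split_public_and_private_inputs` performs above (the parameter representation here is a stand-in, not the frontend's `FunctionSignature`). Each parameter consumes as many consecutive input witnesses as its field count, and that slice lands in the public or the private set depending on the parameter's visibility.

    use std::collections::BTreeSet;

    enum Visibility { Public, Private }

    // (field_count, visibility) stands in for one function-signature parameter.
    fn split(params: &[(usize, Visibility)], witnesses: &[u32]) -> (BTreeSet<u32>, BTreeSet<u32>) {
        let mut public = BTreeSet::new();
        let mut private = BTreeSet::new();
        let mut idx = 0;
        for (field_count, vis) in params {
            let slice = &witnesses[idx..idx + field_count];
            idx += field_count;
            match vis {
                Visibility::Public => public.extend(slice.iter().copied()),
                Visibility::Private => private.extend(slice.iter().copied()),
            }
        }
        (public, private)
    }

    fn main() {
        // e.g. main(x: pub Field, xs: [Field; 3]) consuming witnesses 1..=4
        let (public, private) =
            split(&[(1, Visibility::Public), (3, Visibility::Private)], &[1, 2, 3, 4]);
        assert_eq!(public, BTreeSet::from([1]));
        assert_eq!(private, BTreeSet::from([2, 3, 4]));
    }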
-fn get_param_name<'a>(pattern: &HirPattern, interner: &'a NodeInterner) -> Option<&'a str> { - match pattern { - HirPattern::Identifier(ident) => Some(interner.definition_name(ident.id)), - HirPattern::Mutable(pattern, _) => get_param_name(pattern, interner), - HirPattern::Tuple(_, _) => None, - HirPattern::Struct(_, _, _) => None, - } -} - -pub fn into_abi_params(context: &Context, params: Vec) -> Vec { - vecmap(params, |(pattern, typ, vis)| { - let param_name = get_param_name(&pattern, &context.def_interner) - .expect("Abi for tuple and struct parameters is unimplemented") - .to_owned(); - let as_abi = AbiType::from_type(context, &typ); - AbiParameter { name: param_name, typ: as_abi, visibility: vis.into() } - }) -} - -/// Arranges a function signature and a generated circuit's return witnesses into a -/// `noirc_abi::Abi`. -pub(crate) fn gen_abi( - context: &Context, - func_sig: FunctionSignature, - input_witnesses: Vec>, - return_witnesses: Vec, -) -> Abi { - let (parameters, return_type) = func_sig; - let parameters = into_abi_params(context, parameters); - let return_type = return_type.map(|typ| AbiType::from_type(context, &typ)); - let param_witnesses = param_witnesses_from_abi_param(¶meters, input_witnesses); - Abi { parameters, return_type, param_witnesses, return_witnesses } -} - -// Takes each abi parameter and shallowly maps to the expected witness range in which the -// parameter's constituent values live. -fn param_witnesses_from_abi_param( - abi_params: &Vec, - input_witnesses: Vec>, -) -> BTreeMap>> { - let mut idx = 0_usize; - if input_witnesses.is_empty() { - return BTreeMap::new(); - } - let mut processed_range = input_witnesses[idx].start.witness_index(); - - btree_map(abi_params, |param| { - let num_field_elements_needed = param.typ.field_count(); - let mut wit = Vec::new(); - let mut processed_fields = 0; - while processed_fields < num_field_elements_needed { - let end = input_witnesses[idx].end.witness_index(); - if num_field_elements_needed <= end - processed_range { - wit.push( - Witness(processed_range)..Witness(processed_range + num_field_elements_needed), - ); - processed_range += num_field_elements_needed; - processed_fields += num_field_elements_needed; - } else { - // consume the current range - wit.push(Witness(processed_range)..input_witnesses[idx].end); - processed_fields += end - processed_range; - idx += 1; - processed_range = input_witnesses[idx].start.witness_index(); - } - } - (param.name.clone(), wit) - }) -} diff --git a/compiler/noirc_evaluator/src/ssa/acir_gen/acir_ir/acir_variable.rs b/compiler/noirc_evaluator/src/ssa/acir_gen/acir_ir/acir_variable.rs index 4ee70ed9b22..e7c55237259 100644 --- a/compiler/noirc_evaluator/src/ssa/acir_gen/acir_ir/acir_variable.rs +++ b/compiler/noirc_evaluator/src/ssa/acir_gen/acir_ir/acir_variable.rs @@ -23,7 +23,6 @@ use acvm::{BlackBoxFunctionSolver, BlackBoxResolutionError}; use fxhash::FxHashMap as HashMap; use iter_extended::{try_vecmap, vecmap}; use num_bigint::BigUint; -use std::ops::RangeInclusive; use std::{borrow::Cow, hash::Hash}; #[derive(Clone, Debug, PartialEq, Eq, Hash)] @@ -115,7 +114,7 @@ impl AcirContext { self.acir_ir.current_witness_index() } - pub(crate) fn extract_witness(&self, inputs: &[AcirValue]) -> Vec { + pub(crate) fn extract_witness(&self, inputs: &[AcirValue]) -> Vec { inputs .iter() .flat_map(|value| value.clone().flatten()) @@ -126,17 +125,23 @@ impl AcirContext { .to_expression() .to_witness() .expect("ICE - cannot extract a witness") - .0 }) .collect() } /// Adds a constant to 
the context and assigns a Variable to represent it - pub(crate) fn add_constant(&mut self, constant: FieldElement) -> AcirVar { - let constant_data = AcirVarData::Const(constant); + pub(crate) fn add_constant(&mut self, constant: impl Into) -> AcirVar { + let constant_data = AcirVarData::Const(constant.into()); self.add_data(constant_data) } + /// Returns the constant represented by the given variable. + /// + /// Panics: if the variable does not represent a constant. + pub(crate) fn constant(&self, var: AcirVar) -> FieldElement { + self.vars[&var].as_constant().expect("ICE - expected the variable to be a constant value") + } + /// Adds a Variable to the context, whose exact value is resolved at /// runtime. pub(crate) fn add_variable(&mut self) -> AcirVar { @@ -309,6 +314,7 @@ impl AcirContext { inverse_code, vec![AcirValue::Var(var, AcirType::field())], vec![AcirType::field()], + true, )?; let inverted_var = Self::expect_one_var(results); @@ -355,7 +361,7 @@ impl AcirContext { // Check to see if equality can be determined at compile-time. if diff_expr.is_const() { - return Ok(self.add_constant(diff_expr.is_zero().into())); + return Ok(self.add_constant(diff_expr.is_zero())); } let is_equal_witness = self.acir_ir.is_equal(&lhs_expr, &rhs_expr); @@ -370,9 +376,34 @@ impl AcirContext { rhs: AcirVar, typ: AcirType, ) -> Result { - let inputs = vec![AcirValue::Var(lhs, typ.clone()), AcirValue::Var(rhs, typ)]; - let outputs = self.black_box_function(BlackBoxFunc::XOR, inputs, 1)?; - Ok(outputs[0]) + let lhs_expr = self.var_to_expression(lhs)?; + let rhs_expr = self.var_to_expression(rhs)?; + + if lhs_expr == rhs_expr { + // x ^ x == 0 + let zero = self.add_constant(FieldElement::zero()); + return Ok(zero); + } else if lhs_expr.is_zero() { + // 0 ^ x == x + return Ok(rhs); + } else if rhs_expr.is_zero() { + // x ^ 0 == x + return Ok(lhs); + } + + let bit_size = typ.bit_size(); + if bit_size == 1 { + // Operands are booleans. + // + // a ^ b == a + b - 2*a*b + let sum = self.add_var(lhs, rhs)?; + let prod = self.mul_var(lhs, rhs)?; + self.add_mul_var(sum, -FieldElement::from(2_i128), prod) + } else { + let inputs = vec![AcirValue::Var(lhs, typ.clone()), AcirValue::Var(rhs, typ)]; + let outputs = self.black_box_function(BlackBoxFunc::XOR, inputs, 1)?; + Ok(outputs[0]) + } } /// Returns an `AcirVar` that is the AND result of `lhs` & `rhs`. @@ -382,9 +413,27 @@ impl AcirContext { rhs: AcirVar, typ: AcirType, ) -> Result { - let inputs = vec![AcirValue::Var(lhs, typ.clone()), AcirValue::Var(rhs, typ)]; - let outputs = self.black_box_function(BlackBoxFunc::AND, inputs, 1)?; - Ok(outputs[0]) + let lhs_expr = self.var_to_expression(lhs)?; + let rhs_expr = self.var_to_expression(rhs)?; + + if lhs_expr == rhs_expr { + // x & x == x + return Ok(lhs); + } else if lhs_expr.is_zero() || rhs_expr.is_zero() { + // x & 0 == 0 and 0 & x == 0 + let zero = self.add_constant(FieldElement::zero()); + return Ok(zero); + } + + let bit_size = typ.bit_size(); + if bit_size == 1 { + // Operands are booleans. + self.mul_var(lhs, rhs) + } else { + let inputs = vec![AcirValue::Var(lhs, typ.clone()), AcirValue::Var(rhs, typ)]; + let outputs = self.black_box_function(BlackBoxFunc::AND, inputs, 1)?; + Ok(outputs[0]) + } } /// Returns an `AcirVar` that is the OR result of `lhs` & `rhs`. 
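Editor's note: the boolean fast paths added above (and the OR rewrite in the next hunk) rely on textbook arithmetisation identities; here is a quick plain-integer check of them, separate from the `AcirContext` API. For single-bit values, AND is multiplication, XOR is `a + b - 2ab`, NOT is `1 - a`, and OR can be expressed either as `a + b - ab` or via De Morgan as `NOT(NOT a AND NOT b)`.

    // Boolean gates as arithmetic over {0, 1}; i128 stands in for field elements.
    fn and(a: i128, b: i128) -> i128 { a * b }
    fn xor(a: i128, b: i128) -> i128 { a + b - 2 * a * b }
    fn not(a: i128) -> i128 { 1 - a }
    // De Morgan form used by or_var for wider bit sizes: a | b == !( !a & !b ).
    fn or(a: i128, b: i128) -> i128 { not(and(not(a), not(b))) }

    fn main() {
        for a in 0..=1 {
            for b in 0..=1 {
                assert_eq!(and(a, b), a & b);
                assert_eq!(xor(a, b), a ^ b);
                assert_eq!(or(a, b), a | b);
                assert_eq!(or(a, b), a + b - a * b); // the 1-bit OR used by or_var
            }
        }
    }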
@@ -394,6 +443,16 @@ impl AcirContext { rhs: AcirVar, typ: AcirType, ) -> Result { + let lhs_expr = self.var_to_expression(lhs)?; + let rhs_expr = self.var_to_expression(rhs)?; + if lhs_expr.is_zero() { + // 0 | x == x + return Ok(rhs); + } else if rhs_expr.is_zero() { + // x | 0 == x + return Ok(lhs); + } + let bit_size = typ.bit_size(); if bit_size == 1 { // Operands are booleans @@ -403,15 +462,11 @@ impl AcirContext { self.sub_var(sum, mul) } else { // Implement OR in terms of AND - // max - ((max - a) AND (max -b)) - // Subtracting from max flips the bits, so this is effectively: - // (NOT a) NAND (NOT b) - let max = self.add_constant(FieldElement::from((1_u128 << bit_size) - 1)); - let a = self.sub_var(max, lhs)?; - let b = self.sub_var(max, rhs)?; - let inputs = vec![AcirValue::Var(a, typ.clone()), AcirValue::Var(b, typ)]; - let outputs = self.black_box_function(BlackBoxFunc::AND, inputs, 1)?; - self.sub_var(max, outputs[0]) + // (NOT a) NAND (NOT b) => a OR b + let a = self.not_var(lhs, typ.clone())?; + let b = self.not_var(rhs, typ.clone())?; + let a_and_b = self.and_var(a, b, typ.clone())?; + self.not_var(a_and_b, typ) } } @@ -483,8 +538,19 @@ impl AcirContext { let lhs_data = self.vars[&lhs].clone(); let rhs_data = self.vars[&rhs].clone(); let result = match (lhs_data, rhs_data) { + // (x * 1) == (1 * x) == x + (AcirVarData::Const(constant), _) if constant.is_one() => rhs, + (_, AcirVarData::Const(constant)) if constant.is_one() => lhs, + + // (x * 0) == (0 * x) == 0 + (AcirVarData::Const(constant), _) | (_, AcirVarData::Const(constant)) + if constant.is_zero() => + { + self.add_constant(FieldElement::zero()) + } + (AcirVarData::Const(lhs_constant), AcirVarData::Const(rhs_constant)) => { - self.add_data(AcirVarData::Const(lhs_constant * rhs_constant)) + self.add_constant(lhs_constant * rhs_constant) } (AcirVarData::Witness(witness), AcirVarData::Const(constant)) | (AcirVarData::Const(constant), AcirVarData::Witness(witness)) => { @@ -549,7 +615,7 @@ impl AcirContext { pub(crate) fn not_var(&mut self, x: AcirVar, typ: AcirType) -> Result { let bit_size = typ.bit_size(); // Subtracting from max flips the bits - let max = self.add_constant(FieldElement::from((1_u128 << bit_size) - 1)); + let max = self.add_constant((1_u128 << bit_size) - 1); self.sub_var(max, x) } @@ -576,8 +642,8 @@ impl AcirContext { let quotient = lhs_const.to_u128() / rhs_const.to_u128(); let remainder = lhs_const.to_u128() - quotient * rhs_const.to_u128(); - let quotient_var = self.add_constant(FieldElement::from(quotient)); - let remainder_var = self.add_constant(FieldElement::from(remainder)); + let quotient_var = self.add_constant(quotient); + let remainder_var = self.add_constant(remainder); return Ok((quotient_var, remainder_var)); } @@ -650,6 +716,7 @@ impl AcirContext { AcirValue::Var(rhs, AcirType::unsigned(bit_size)), ], vec![AcirType::unsigned(max_q_bits), AcirType::unsigned(max_rhs_bits)], + true, )? 
.try_into() .expect("quotient only returns two values"); @@ -774,7 +841,7 @@ impl AcirContext { // witness = lhs_offset + r assert!(bits + r_bit_size < FieldElement::max_num_bits()); //we need to ensure lhs_offset + r does not overflow - let r_var = self.add_constant(r.into()); + let r_var = self.add_constant(r); let aor = self.add_var(lhs_offset, r_var)?; // lhs_offset<=rhs_offset <=> lhs_offset + r < rhs_offset + r = 2^bit_size <=> witness < 2^bit_size self.range_constrain_var(aor, &NumericType::Unsigned { bit_size }, None)?; @@ -848,9 +915,7 @@ impl AcirContext { // Unsigned to signed: derive q and r from q1,r1 and the signs of lhs and rhs // Quotient sign is lhs sign * rhs sign, whose resulting sign bit is the XOR of the sign bits - let sign_sum = self.add_var(lhs_leading, rhs_leading)?; - let sign_prod = self.mul_var(lhs_leading, rhs_leading)?; - let q_sign = self.add_mul_var(sign_sum, -FieldElement::from(2_i128), sign_prod)?; + let q_sign = self.xor_var(lhs_leading, rhs_leading, AcirType::unsigned(1))?; let quotient = self.two_complement(q1, q_sign, bit_size)?; let remainder = self.two_complement(r1, lhs_leading, bit_size)?; @@ -873,7 +938,8 @@ impl AcirContext { /// Converts the `AcirVar` to a `Witness` if it hasn't been already, and appends it to the /// `GeneratedAcir`'s return witnesses. pub(crate) fn return_var(&mut self, acir_var: AcirVar) -> Result<(), InternalError> { - let witness = self.var_to_witness(acir_var)?; + let return_var = self.get_or_create_witness_var(acir_var)?; + let witness = self.var_to_witness(return_var)?; self.acir_ir.push_return_witness(witness); Ok(()) } @@ -922,7 +988,7 @@ impl AcirContext { ) -> Result { // 2^{rhs} let divisor = - self.add_constant(FieldElement::from(2_i128).pow(&FieldElement::from(rhs as i128))); + self.add_constant(FieldElement::from(2_u128).pow(&FieldElement::from(rhs as u128))); let one = self.add_constant(FieldElement::one()); // Computes lhs = 2^{rhs} * q + r @@ -931,14 +997,65 @@ impl AcirContext { Ok(remainder) } + /// Returns an 'AcirVar' containing the boolean value lhs diff<2^n, because the 2-complement representation keeps the ordering (e.g in 8 bits -1 is 255 > -2 = 254) + /// If not, lhs positive => diff > 2^n + /// and lhs negative => diff <= 2^n => diff < 2^n (because signs are not the same, so lhs != rhs and so diff != 2^n) + pub(crate) fn less_than_signed( + &mut self, + lhs: AcirVar, + rhs: AcirVar, + bit_count: u32, + ) -> Result { + let pow_last = self.add_constant(FieldElement::from(1_u128 << (bit_count - 1))); + let pow = self.add_constant(FieldElement::from(1_u128 << (bit_count))); + + // We check whether the inputs have same sign or not by computing the XOR of their bit sign + + // Predicate is always active as `pow_last` is known to be non-zero. 
+ let one = self.add_constant(1_u128); + let lhs_sign = self.div_var( + lhs, + pow_last, + AcirType::NumericType(NumericType::Unsigned { bit_size: bit_count }), + one, + )?; + let rhs_sign = self.div_var( + rhs, + pow_last, + AcirType::NumericType(NumericType::Unsigned { bit_size: bit_count }), + one, + )?; + let same_sign = self.xor_var( + lhs_sign, + rhs_sign, + AcirType::NumericType(NumericType::Signed { bit_size: 1 }), + )?; + + // We compute the input difference + let no_underflow = self.add_var(lhs, pow)?; + let diff = self.sub_var(no_underflow, rhs)?; + + // We check the 'bit sign' of the difference + let diff_sign = self.less_than_var(diff, pow, bit_count + 1)?; + + // Then the result is simply diff_sign XOR same_sign (can be checked with a truth table) + self.xor_var( + diff_sign, + same_sign, + AcirType::NumericType(NumericType::Signed { bit_size: 1 }), + ) + } + /// Returns an `AcirVar` which will be `1` if lhs >= rhs /// and `0` otherwise. - fn more_than_eq_var( + pub(crate) fn more_than_eq_var( &mut self, lhs: AcirVar, rhs: AcirVar, max_bits: u32, - predicate: AcirVar, ) -> Result { // Returns a `Witness` that is constrained to be: // - `1` if lhs >= rhs @@ -963,6 +1080,7 @@ impl AcirContext { // // TODO: perhaps this should be a user error, instead of an assert assert!(max_bits + 1 < FieldElement::max_num_bits()); + let two_max_bits = self .add_constant(FieldElement::from(2_i128).pow(&FieldElement::from(max_bits as i128))); let diff = self.sub_var(lhs, rhs)?; @@ -992,13 +1110,11 @@ impl AcirContext { // let k = b - a // - 2^{max_bits} - k == q * 2^{max_bits} + r // - This is only the case when q == 0 and r == 2^{max_bits} - k - // - let (q, _) = self.euclidean_division_var( - comparison_evaluation, - two_max_bits, - max_bits + 1, - predicate, - )?; + + // Predicate is always active as we know `two_max_bits` is always non-zero. + let one = self.add_constant(1_u128); + let (q, _) = + self.euclidean_division_var(comparison_evaluation, two_max_bits, max_bits + 1, one)?; Ok(q) } @@ -1009,11 +1125,10 @@ impl AcirContext { lhs: AcirVar, rhs: AcirVar, bit_size: u32, - predicate: AcirVar, ) -> Result { // Flip the result of calling more than equal method to // compute less than. 
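Editor's note: both comparison gadgets above can be sanity-checked with ordinary integers; a sketch assuming 8-bit operands (plain `u32` arithmetic, not field elements). Unsigned `lhs >= rhs` is the quotient of `2^bits + lhs - rhs` by `2^bits`, and signed `lhs < rhs` compares the two's-complement encodings as unsigned values, flipping the answer when the sign bits differ (the code above stores that XOR of sign bits in `same_sign`).

    const BITS: u32 = 8;
    const POW: u32 = 1 << BITS;             // 2^8
    const POW_LAST: u32 = 1 << (BITS - 1);  // 2^7, isolates the sign bit

    // Unsigned lhs >= rhs for operands below 2^BITS: no borrow happens exactly
    // when lhs >= rhs, so the quotient by 2^BITS is the comparison result.
    fn more_than_eq(lhs: u32, rhs: u32) -> u32 {
        (POW + lhs - rhs) / POW
    }

    // Signed lhs < rhs on two's-complement encodings, mirroring less_than_signed.
    fn less_than_signed(lhs: u32, rhs: u32) -> u32 {
        let signs_differ = (lhs / POW_LAST) ^ (rhs / POW_LAST);
        let diff = lhs + POW - rhs;
        let diff_sign = u32::from(diff < POW); // unsigned "lhs < rhs"
        diff_sign ^ signs_differ
    }

    fn main() {
        assert_eq!(more_than_eq(9, 5), 1);
        assert_eq!(more_than_eq(5, 9), 0);
        // -3 is encoded as 253: the signs differ, so the unsigned answer (0) is
        // flipped and -3 < 5 comes out as 1.
        assert_eq!(less_than_signed(253, 5), 1);
        assert_eq!(less_than_signed(5, 253), 0);
        // Both negative: -2 (254) < -1 (255) follows the unsigned order directly.
        assert_eq!(less_than_signed(254, 255), 1);
    }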
- let comparison = self.more_than_eq_var(lhs, rhs, bit_size, predicate)?; + let comparison = self.more_than_eq_var(lhs, rhs, bit_size)?; let one = self.add_constant(FieldElement::one()); self.sub_var(one, comparison) // comparison_negated @@ -1146,10 +1261,7 @@ impl AcirContext { // `Intrinsic::ToRadix` returns slices which are represented // by tuples with the structure (length, slice contents) Ok(vec![ - AcirValue::Var( - self.add_constant(FieldElement::from(limb_vars.len() as u128)), - AcirType::field(), - ), + AcirValue::Var(self.add_constant(limb_vars.len()), AcirType::field()), AcirValue::Array(limb_vars.into()), ]) } @@ -1162,7 +1274,7 @@ impl AcirContext { limb_count_var: AcirVar, result_element_type: AcirType, ) -> Result, RuntimeError> { - let two_var = self.add_constant(FieldElement::from(2_u128)); + let two_var = self.add_constant(2_u128); self.radix_decompose(endian, input_var, two_var, limb_count_var, result_element_type) } @@ -1182,27 +1294,10 @@ impl AcirContext { /// Terminates the context and takes the resulting `GeneratedAcir` pub(crate) fn finish( mut self, - inputs: Vec>, + inputs: Vec, warnings: Vec, ) -> GeneratedAcir { - let mut current_range = 0..0; - for range in inputs { - if current_range.end == *range.start() { - current_range.end = range.end() + 1; - } else { - if current_range.end != 0 { - self.acir_ir - .input_witnesses - .push(Witness(current_range.start)..Witness(current_range.end)); - } - current_range = *range.start()..range.end() + 1; - } - } - if current_range.end != 0 { - self.acir_ir - .input_witnesses - .push(Witness(current_range.start)..Witness(current_range.end)); - } + self.acir_ir.input_witnesses = inputs; self.acir_ir.warnings = warnings; self.acir_ir } @@ -1224,6 +1319,7 @@ impl AcirContext { generated_brillig: GeneratedBrillig, inputs: Vec, outputs: Vec, + attempt_execution: bool, ) -> Result, InternalError> { let b_inputs = try_vecmap(inputs, |i| match i { AcirValue::Var(var, _) => Ok(BrilligInputs::Single(self.var_to_expression(var)?)), @@ -1243,10 +1339,15 @@ impl AcirContext { // Optimistically try executing the brillig now, if we can complete execution they just return the results. // This is a temporary measure pending SSA optimizations being applied to Brillig which would remove constant-input opcodes (See #2066) - if let Some(brillig_outputs) = - self.execute_brillig(&generated_brillig.byte_code, &b_inputs, &outputs) - { - return Ok(brillig_outputs); + // + // We do _not_ want to do this in the situation where the `main` function is unconstrained, as if execution succeeds + // the entire program will be replaced with witness constraints to its outputs. + if attempt_execution { + if let Some(brillig_outputs) = + self.execute_brillig(&generated_brillig.byte_code, &b_inputs, &outputs) + { + return Ok(brillig_outputs); + } } // Otherwise we must generate ACIR for it and execute at runtime. @@ -1287,7 +1388,7 @@ impl AcirContext { AcirValue::DynamicArray(AcirDynamicArray { block_id, len, .. 
}) => { for i in 0..len { // We generate witnesses corresponding to the array values - let index_var = self.add_constant(FieldElement::from(i as u128)); + let index_var = self.add_constant(i); let value_read_var = self.read_from_memory(block_id, &index_var)?; let value_read = AcirValue::Var(value_read_var, AcirType::field()); @@ -1429,7 +1530,7 @@ impl AcirContext { bit_size: u32, predicate: AcirVar, ) -> Result<(), RuntimeError> { - let lhs_less_than_rhs = self.more_than_eq_var(rhs, lhs, bit_size, predicate)?; + let lhs_less_than_rhs = self.more_than_eq_var(rhs, lhs, bit_size)?; self.maybe_eq_predicate(lhs_less_than_rhs, predicate) } @@ -1441,7 +1542,8 @@ impl AcirContext { index: &AcirVar, ) -> Result { // Fetch the witness corresponding to the index - let index_witness = self.var_to_witness(*index)?; + let index_var = self.get_or_create_witness_var(*index)?; + let index_witness = self.var_to_witness(index_var)?; // Create a Variable to hold the result of the read and extract the corresponding Witness let value_read_var = self.add_variable(); @@ -1462,11 +1564,12 @@ impl AcirContext { value: &AcirVar, ) -> Result<(), InternalError> { // Fetch the witness corresponding to the index - // - let index_witness = self.var_to_witness(*index)?; + let index_var = self.get_or_create_witness_var(*index)?; + let index_witness = self.var_to_witness(index_var)?; // Fetch the witness corresponding to the value to be written - let value_write_witness = self.var_to_witness(*value)?; + let value_write_var = self.get_or_create_witness_var(*value)?; + let value_write_witness = self.var_to_witness(value_write_var)?; // Add the memory write operation to the list of opcodes let op = MemOp::write_to_mem_index(index_witness.into(), value_write_witness.into()); @@ -1508,6 +1611,7 @@ impl AcirContext { ) -> Result<(), InternalError> { match input { AcirValue::Var(var, _) => { + let var = self.get_or_create_witness_var(var)?; witnesses.push(self.var_to_witness(var)?); } AcirValue::Array(values) => { @@ -1617,28 +1721,40 @@ fn execute_brillig( _signature: &[u8], _message: &[u8], ) -> Result { - Err(BlackBoxResolutionError::Unsupported(BlackBoxFunc::SchnorrVerify)) + Err(BlackBoxResolutionError::Failed( + BlackBoxFunc::SchnorrVerify, + "SchnorrVerify is not supported".to_string(), + )) } fn pedersen_commitment( &self, _inputs: &[FieldElement], _domain_separator: u32, ) -> Result<(FieldElement, FieldElement), BlackBoxResolutionError> { - Err(BlackBoxResolutionError::Unsupported(BlackBoxFunc::PedersenCommitment)) + Err(BlackBoxResolutionError::Failed( + BlackBoxFunc::PedersenCommitment, + "PedersenCommitment is not supported".to_string(), + )) } fn pedersen_hash( &self, _inputs: &[FieldElement], _domain_separator: u32, ) -> Result { - Err(BlackBoxResolutionError::Unsupported(BlackBoxFunc::PedersenHash)) + Err(BlackBoxResolutionError::Failed( + BlackBoxFunc::PedersenHash, + "PedersenHash is not supported".to_string(), + )) } fn fixed_base_scalar_mul( &self, _low: &FieldElement, _high: &FieldElement, ) -> Result<(FieldElement, FieldElement), BlackBoxResolutionError> { - Err(BlackBoxResolutionError::Unsupported(BlackBoxFunc::FixedBaseScalarMul)) + Err(BlackBoxResolutionError::Failed( + BlackBoxFunc::FixedBaseScalarMul, + "FixedBaseScalarMul is not supported".to_string(), + )) } } diff --git a/compiler/noirc_evaluator/src/ssa/acir_gen/acir_ir/generated_acir.rs b/compiler/noirc_evaluator/src/ssa/acir_gen/acir_ir/generated_acir.rs index 9bd83096adf..2506af8c8c8 100644 --- 
a/compiler/noirc_evaluator/src/ssa/acir_gen/acir_ir/generated_acir.rs +++ b/compiler/noirc_evaluator/src/ssa/acir_gen/acir_ir/generated_acir.rs @@ -23,7 +23,6 @@ use acvm::{ }; use iter_extended::vecmap; use num_bigint::BigUint; -use std::ops::Range; #[derive(Debug, Default)] /// The output of the Acir-gen pass @@ -43,7 +42,7 @@ pub(crate) struct GeneratedAcir { pub(crate) return_witnesses: Vec, /// All witness indices which are inputs to the main function - pub(crate) input_witnesses: Vec>, + pub(crate) input_witnesses: Vec, /// Correspondence between an opcode index (in opcodes) and the source code call stack which generated it pub(crate) locations: BTreeMap, @@ -156,10 +155,7 @@ impl GeneratedAcir { BlackBoxFunc::Blake2s => { BlackBoxFuncCall::Blake2s { inputs: inputs[0].clone(), outputs } } - BlackBoxFunc::HashToField128Security => BlackBoxFuncCall::HashToField128Security { - inputs: inputs[0].clone(), - output: outputs[0], - }, + BlackBoxFunc::Blake3 => BlackBoxFuncCall::Blake3 { inputs: inputs[0].clone(), outputs }, BlackBoxFunc::SchnorrVerify => { BlackBoxFuncCall::SchnorrVerify { public_key_x: inputs[0][0], @@ -227,26 +223,15 @@ impl GeneratedAcir { outputs, } } - BlackBoxFunc::RecursiveAggregation => { - let has_previous_aggregation = self.opcodes.iter().any(|op| { - matches!( - op, - AcirOpcode::BlackBoxFuncCall(BlackBoxFuncCall::RecursiveAggregation { .. }) - ) - }); - - let input_aggregation_object = - if !has_previous_aggregation { None } else { Some(inputs[4].clone()) }; - - BlackBoxFuncCall::RecursiveAggregation { - verification_key: inputs[0].clone(), - proof: inputs[1].clone(), - public_inputs: inputs[2].clone(), - key_hash: inputs[3][0], - input_aggregation_object, - output_aggregation_object: outputs, - } + BlackBoxFunc::Keccakf1600 => { + BlackBoxFuncCall::Keccakf1600 { inputs: inputs[0].clone(), outputs } } + BlackBoxFunc::RecursiveAggregation => BlackBoxFuncCall::RecursiveAggregation { + verification_key: inputs[0].clone(), + proof: inputs[1].clone(), + public_inputs: inputs[2].clone(), + key_hash: inputs[3][0], + }, }; self.push_opcode(AcirOpcode::BlackBoxFuncCall(black_box_func_call)); @@ -373,7 +358,7 @@ impl GeneratedAcir { /// If `expr` is not zero, then the constraint system will /// fail upon verification. pub(crate) fn assert_is_zero(&mut self, expr: Expression) { - self.push_opcode(AcirOpcode::Arithmetic(expr)); + self.push_opcode(AcirOpcode::AssertZero(expr)); } /// Returns a `Witness` that is constrained to be: @@ -553,7 +538,7 @@ impl GeneratedAcir { // Constrain the network output to out_expr for (b, o) in b.iter().zip(out_expr) { - self.push_opcode(AcirOpcode::Arithmetic(b - o)); + self.push_opcode(AcirOpcode::AssertZero(b - o)); } Ok(()) } @@ -574,9 +559,11 @@ fn black_box_func_expected_input_size(name: BlackBoxFunc) -> Option { BlackBoxFunc::Keccak256 | BlackBoxFunc::SHA256 | BlackBoxFunc::Blake2s + | BlackBoxFunc::Blake3 | BlackBoxFunc::PedersenCommitment - | BlackBoxFunc::PedersenHash - | BlackBoxFunc::HashToField128Security => None, + | BlackBoxFunc::PedersenHash => None, + + BlackBoxFunc::Keccakf1600 => Some(25), // Can only apply a range constraint to one // witness at a time. @@ -603,9 +590,11 @@ fn black_box_expected_output_size(name: BlackBoxFunc) -> Option { // or the operation. 
BlackBoxFunc::AND | BlackBoxFunc::XOR => Some(1), // 32 byte hash algorithms - BlackBoxFunc::Keccak256 | BlackBoxFunc::SHA256 | BlackBoxFunc::Blake2s => Some(32), - // Hash to field returns a field element - BlackBoxFunc::HashToField128Security => Some(1), + BlackBoxFunc::Keccak256 + | BlackBoxFunc::SHA256 + | BlackBoxFunc::Blake2s + | BlackBoxFunc::Blake3 => Some(32), + BlackBoxFunc::Keccakf1600 => Some(25), // Pedersen commitment returns a point BlackBoxFunc::PedersenCommitment => Some(2), // Pedersen hash returns a field diff --git a/compiler/noirc_evaluator/src/ssa/acir_gen/mod.rs b/compiler/noirc_evaluator/src/ssa/acir_gen/mod.rs index bd5749ac1e8..03fb4db73c5 100644 --- a/compiler/noirc_evaluator/src/ssa/acir_gen/mod.rs +++ b/compiler/noirc_evaluator/src/ssa/acir_gen/mod.rs @@ -3,9 +3,9 @@ mod acir_ir; use std::collections::HashSet; use std::fmt::Debug; -use std::ops::RangeInclusive; use self::acir_ir::acir_variable::{AcirContext, AcirType, AcirVar}; +use super::function_builder::data_bus::DataBus; use super::ir::dfg::CallStack; use super::{ ir::{ @@ -26,6 +26,7 @@ use crate::brillig::{brillig_gen::brillig_fn::FunctionContext as BrilligFunction use crate::errors::{InternalError, InternalWarning, RuntimeError, SsaReport}; pub(crate) use acir_ir::generated_acir::GeneratedAcir; +use acvm::acir::native_types::Witness; use acvm::acir::BlackBoxFunc; use acvm::{ acir::{circuit::opcodes::BlockId, native_types::Expression}, @@ -90,6 +91,8 @@ struct Context { /// Maps SSA array values to their slice size and any nested slices internal to the parent slice. /// This enables us to maintain the slice structure of a slice when performing an array get. slice_sizes: HashMap, Vec>, + + data_bus: DataBus, } #[derive(Clone)] @@ -101,14 +104,16 @@ pub(crate) struct AcirDynamicArray { len: usize, /// Identification for the ACIR dynamic array /// inner element type sizes array - element_type_sizes: BlockId, + element_type_sizes: Option, } impl Debug for AcirDynamicArray { fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { write!( f, "id: {}, len: {}, element_type_sizes: {:?}", - self.block_id.0, self.len, self.element_type_sizes.0 + self.block_id.0, + self.len, + self.element_type_sizes.map(|block_id| block_id.0) ) } } @@ -131,6 +136,16 @@ impl AcirValue { } } + fn borrow_var(&self) -> Result { + match self { + AcirValue::Var(var, _) => Ok(*var), + AcirValue::DynamicArray(_) | AcirValue::Array(_) => Err(InternalError::General { + message: "Called AcirValue::borrow_var on an array".to_string(), + call_stack: CallStack::new(), + }), + } + } + fn flatten(self) -> Vec<(AcirVar, AcirType)> { match self { AcirValue::Var(var, typ) => vec![(var, typ)], @@ -141,6 +156,7 @@ impl AcirValue { } impl Ssa { + #[tracing::instrument(level = "trace", skip_all)] pub(crate) fn into_acir( self, brillig: Brillig, @@ -187,6 +203,7 @@ impl Context { internal_mem_block_lengths: HashMap::default(), max_block_id: 0, slice_sizes: HashMap::default(), + data_bus: DataBus::default(), } } @@ -214,6 +231,8 @@ impl Context { let dfg = &main_func.dfg; let entry_block = &dfg[main_func.entry_block()]; let input_witness = self.convert_ssa_block_params(entry_block.parameters(), dfg)?; + + self.data_bus = dfg.data_bus.to_owned(); let mut warnings = Vec::new(); for instruction_id in entry_block.instructions() { warnings.extend(self.convert_ssa_instruction( @@ -226,7 +245,7 @@ impl Context { } warnings.extend(self.convert_ssa_return(entry_block.unwrap_terminator(), dfg)?); - Ok(self.acir_context.finish(vec![input_witness], 
warnings)) + Ok(self.acir_context.finish(input_witness, warnings)) } fn convert_brillig_main( @@ -247,11 +266,14 @@ impl Context { let code = self.gen_brillig_for(main_func, &brillig)?; + // We specifically do not attempt execution of the brillig code being generated as this can result in it being + // replaced with constraints on witnesses to the program outputs. let output_values = self.acir_context.brillig( self.current_side_effects_enabled_var, code, inputs, outputs, + false, )?; let output_vars: Vec<_> = output_values .iter() @@ -262,8 +284,7 @@ impl Context { for acir_var in output_vars { self.acir_context.return_var(acir_var)?; } - let witnesses = vecmap(witness_inputs, |input| RangeInclusive::new(input, input)); - Ok(self.acir_context.finish(witnesses, Vec::new())) + Ok(self.acir_context.finish(witness_inputs, Vec::new())) } /// Adds and binds `AcirVar`s for each numeric block parameter or block parameter array element. @@ -271,7 +292,7 @@ impl Context { &mut self, params: &[ValueId], dfg: &DataFlowGraph, - ) -> Result, RuntimeError> { + ) -> Result, RuntimeError> { // The first witness (if any) is the next one let start_witness = self.acir_context.current_witness_index().0 + 1; for param_id in params { @@ -284,7 +305,7 @@ impl Context { let len = if matches!(typ, Type::Array(_, _)) { typ.flattened_size() } else { - return Err(InternalError::UnExpected { + return Err(InternalError::Unexpected { expected: "Block params should be an array".to_owned(), found: format!("Instead got {:?}", typ), call_stack: self.acir_context.get_call_stack(), @@ -300,7 +321,8 @@ impl Context { self.ssa_values.insert(*param_id, value); } let end_witness = self.acir_context.current_witness_index().0; - Ok(start_witness..=end_witness) + let witnesses = (start_witness..=end_witness).map(Witness::from).collect(); + Ok(witnesses) } fn convert_ssa_block_param(&mut self, param_type: &Type) -> Result { @@ -439,8 +461,7 @@ impl Context { let mut read_dynamic_array_index = |block_id: BlockId, array_index: usize| -> Result { - let index_var = - self.acir_context.add_constant(FieldElement::from(array_index as u128)); + let index_var = self.acir_context.add_constant(array_index); self.acir_context.read_from_memory(block_id, &index_var) }; @@ -451,9 +472,9 @@ impl Context { self.acir_context.assert_eq_var(lhs, rhs, assert_message.clone())?; } } - Instruction::Cast(value_id, typ) => { - let result_acir_var = self.convert_ssa_cast(value_id, typ, dfg)?; - self.define_result_var(dfg, instruction_id, result_acir_var); + Instruction::Cast(value_id, _) => { + let acir_var = self.convert_numeric_value(*value_id, dfg)?; + self.define_result_var(dfg, instruction_id, acir_var); } Instruction::Call { func, arguments } => { let result_ids = dfg.instruction_results(instruction_id); @@ -471,7 +492,7 @@ impl Context { let outputs: Vec = vecmap(result_ids, |result_id| dfg.type_of_value(*result_id).into()); - let output_values = self.acir_context.brillig(self.current_side_effects_enabled_var, code, inputs, outputs)?; + let output_values = self.acir_context.brillig(self.current_side_effects_enabled_var, code, inputs, outputs, true)?; // Compiler sanity check assert_eq!(result_ids.len(), output_values.len(), "ICE: The number of Brillig output values should match the result ids in SSA"); @@ -563,6 +584,9 @@ impl Context { Instruction::Load { .. } => { unreachable!("Expected all load instructions to be removed before acir_gen") } + Instruction::IncrementRc { .. } => { + // Do nothing. 
Only Brillig needs to worry about reference counted arrays + } Instruction::RangeCheck { value, max_bit_size, assert_message } => { let acir_var = self.convert_numeric_value(*value, dfg)?; self.acir_context.range_constrain_var( @@ -619,7 +643,7 @@ impl Context { Instruction::ArrayGet { array, index } => (array, index, None), Instruction::ArraySet { array, index, value, .. } => (array, index, Some(value)), _ => { - return Err(InternalError::UnExpected { + return Err(InternalError::Unexpected { expected: "Instruction should be an ArrayGet or ArraySet".to_owned(), found: format!("Instead got {:?}", dfg[instruction]), call_stack: self.acir_context.get_call_stack(), @@ -676,7 +700,7 @@ impl Context { match self.convert_value(array, dfg) { AcirValue::Var(acir_var, _) => { - return Err(RuntimeError::InternalError(InternalError::UnExpected { + return Err(RuntimeError::InternalError(InternalError::Unexpected { expected: "an array value".to_string(), found: format!("{acir_var:?}"), call_stack: self.acir_context.get_call_stack(), @@ -767,7 +791,7 @@ impl Context { let slice_sizes = if store_type.contains_slice_element() { self.compute_slice_sizes(store, None, dfg); self.slice_sizes.get(&store).cloned().ok_or_else(|| { - InternalError::UnExpected { + InternalError::Unexpected { expected: "Store value should have slice sizes computed".to_owned(), found: "Missing key in slice sizes map".to_owned(), call_stack: self.acir_context.get_call_stack(), @@ -847,7 +871,7 @@ impl Context { ); let values = try_vecmap(0..*len, |i| { - let index_var = self.acir_context.add_constant(FieldElement::from(i as u128)); + let index_var = self.acir_context.add_constant(i); let read = self.acir_context.read_from_memory(*block_id, &index_var)?; Ok::(AcirValue::Var(read, AcirType::field())) @@ -878,10 +902,26 @@ impl Context { dfg: &DataFlowGraph, ) -> Result { let (array_id, _, block_id) = self.check_array_is_initialized(array, dfg)?; - let results = dfg.instruction_results(instruction); let res_typ = dfg.type_of_value(results[0]); + // Get operations to call-data parameters are replaced by a get to the call-data-bus array + if let Some(call_data) = self.data_bus.call_data { + if self.data_bus.call_data_map.contains_key(&array_id) { + // TODO: the block_id of call-data must be notified to the backend + // TODO: should we do the same for return-data? + let type_size = res_typ.flattened_size(); + let type_size = + self.acir_context.add_constant(FieldElement::from(type_size as i128)); + let offset = self.acir_context.mul_var(var_index, type_size)?; + let bus_index = self.acir_context.add_constant(FieldElement::from( + self.data_bus.call_data_map[&array_id] as i128, + )); + let new_index = self.acir_context.add_var(offset, bus_index)?; + return self.array_get(instruction, call_data, new_index, dfg); + } + } + let value = if !res_typ.contains_slice_element() { self.array_get_value(&res_typ, block_id, &mut var_index, &[])? } else { @@ -892,7 +932,7 @@ impl Context { // The first max size is going to be the length of the parent slice // As we are fetching from the parent slice we just want its internal - // slize sizes. + // slice sizes. 
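Editor's note: the call-data rewrite above reduces to affine index arithmetic; a small sketch with made-up numbers (in the real code the offset comes from `data_bus.call_data_map` and the element size from the type's flattened size). An access at index `i` into a call-data array becomes an access at `i * flattened_element_size + array_offset` into the shared call-data block.

    // new_index = requested_index * flattened_element_size + offset_of_array_in_call_data
    fn call_data_index(requested_index: usize, element_size: usize, array_offset: usize) -> usize {
        requested_index * element_size + array_offset
    }

    fn main() {
        // Hypothetical layout: the array starts 7 cells into the call-data block
        // and each element flattens to 3 field elements.
        assert_eq!(call_data_index(0, 3, 7), 7);
        assert_eq!(call_data_index(2, 3, 7), 13);
    }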
let slice_sizes = slice_sizes[1..].to_vec(); let value = self.array_get_value(&res_typ, block_id, &mut var_index, &slice_sizes)?; @@ -921,7 +961,7 @@ impl Context { // Read the value from the array at the specified index let read = self.acir_context.read_from_memory(block_id, var_index)?; - // Incremement the var_index in case of a nested array + // Increment the var_index in case of a nested array *var_index = self.acir_context.add_var(*var_index, one)?; let typ = AcirType::NumericType(numeric_type); @@ -976,7 +1016,7 @@ impl Context { let array = match dfg[instruction] { Instruction::ArraySet { array, .. } => array, _ => { - return Err(InternalError::UnExpected { + return Err(InternalError::Unexpected { expected: "Instruction should be an ArraySet".to_owned(), found: format!("Instead got {:?}", dfg[instruction]), call_stack: self.acir_context.get_call_stack(), @@ -1017,7 +1057,7 @@ impl Context { self.copy_dynamic_array(block_id, result_block_id, array_len)?; } - self.array_set_value(store_value, result_block_id, &mut var_index)?; + self.array_set_value(&store_value, result_block_id, &mut var_index)?; // Set new resulting array to have the same slice sizes as the instruction input if let Type::Slice(element_types) = &array_typ { @@ -1036,8 +1076,11 @@ impl Context { } } - let element_type_sizes = - self.init_element_type_sizes_array(&array_typ, array_id, None, dfg)?; + let element_type_sizes = if !can_omit_element_sizes_array(&array_typ) { + Some(self.init_element_type_sizes_array(&array_typ, array_id, None, dfg)?) + } else { + None + }; let result_value = AcirValue::DynamicArray(AcirDynamicArray { block_id: result_block_id, len: array_len, @@ -1049,7 +1092,7 @@ impl Context { fn array_set_value( &mut self, - value: AcirValue, + value: &AcirValue, block_id: BlockId, var_index: &mut AcirVar, ) -> Result<(), RuntimeError> { @@ -1057,8 +1100,8 @@ impl Context { match value { AcirValue::Var(store_var, _) => { // Write the new value into the new array at the specified index - self.acir_context.write_to_memory(block_id, var_index, &store_var)?; - // Incremement the var_index in case of a nested array + self.acir_context.write_to_memory(block_id, var_index, store_var)?; + // Increment the var_index in case of a nested array *var_index = self.acir_context.add_var(*var_index, one)?; } AcirValue::Array(values) => { @@ -1067,13 +1110,13 @@ impl Context { } } AcirValue::DynamicArray(AcirDynamicArray { block_id: inner_block_id, len, .. }) => { - let values = try_vecmap(0..len, |i| { - let index_var = self.acir_context.add_constant(FieldElement::from(i as u128)); + let values = try_vecmap(0..*len, |i| { + let index_var = self.acir_context.add_constant(i); - let read = self.acir_context.read_from_memory(inner_block_id, &index_var)?; + let read = self.acir_context.read_from_memory(*inner_block_id, &index_var)?; Ok::(AcirValue::Var(read, AcirType::field())) })?; - self.array_set_value(AcirValue::Array(values.into()), block_id, var_index)?; + self.array_set_value(&AcirValue::Array(values.into()), block_id, var_index)?; } } Ok(()) @@ -1162,27 +1205,29 @@ impl Context { element_type_sizes: inner_elem_type_sizes, .. 
}) => { - if self.initialized_arrays.contains(&inner_elem_type_sizes) { - let type_sizes_array_len = self.internal_mem_block_lengths.get(&inner_elem_type_sizes).copied().ok_or_else(|| - InternalError::General { - message: format!("Array {array_id}'s inner element type sizes array does not have a tracked length"), + if let Some(inner_elem_type_sizes) = inner_elem_type_sizes { + if self.initialized_arrays.contains(&inner_elem_type_sizes) { + let type_sizes_array_len = self.internal_mem_block_lengths.get(&inner_elem_type_sizes).copied().ok_or_else(|| + InternalError::General { + message: format!("Array {array_id}'s inner element type sizes array does not have a tracked length"), + call_stack: self.acir_context.get_call_stack(), + } + )?; + self.copy_dynamic_array( + inner_elem_type_sizes, + element_type_sizes, + type_sizes_array_len, + )?; + self.internal_mem_block_lengths + .insert(element_type_sizes, type_sizes_array_len); + return Ok(element_type_sizes); + } else { + return Err(InternalError::General { + message: format!("Array {array_id}'s inner element type sizes array should be initialized"), call_stack: self.acir_context.get_call_stack(), } - )?; - self.copy_dynamic_array( - inner_elem_type_sizes, - element_type_sizes, - type_sizes_array_len, - )?; - self.internal_mem_block_lengths - .insert(element_type_sizes, type_sizes_array_len); - return Ok(element_type_sizes); - } else { - return Err(InternalError::General { - message: format!("Array {array_id}'s inner element type sizes array should be initialized"), - call_stack: self.acir_context.get_call_stack(), + .into()); } - .into()); } } AcirValue::Array(values) => { @@ -1193,7 +1238,7 @@ impl Context { } } _ => { - return Err(InternalError::UnExpected { + return Err(InternalError::Unexpected { expected: "AcirValue::DynamicArray or AcirValue::Array" .to_owned(), found: format!("{:?}", array_acir_value), @@ -1204,7 +1249,7 @@ impl Context { } } _ => { - return Err(InternalError::UnExpected { + return Err(InternalError::Unexpected { expected: "array or instruction".to_owned(), found: format!("{:?}", &dfg[array_id]), call_stack: self.acir_context.get_call_stack(), @@ -1214,7 +1259,7 @@ impl Context { }; } _ => { - return Err(InternalError::UnExpected { + return Err(InternalError::Unexpected { expected: "array or slice".to_owned(), found: array_typ.to_string(), call_stack: self.acir_context.get_call_stack(), @@ -1225,7 +1270,7 @@ impl Context { // The final array should will the flattened index at each outer array index let init_values = vecmap(flat_elem_type_sizes, |type_size| { - let var = self.acir_context.add_constant(FieldElement::from(type_size as u128)); + let var = self.acir_context.add_constant(type_size); AcirValue::Var(var, AcirType::field()) }); let element_type_sizes_len = init_values.len(); @@ -1282,7 +1327,7 @@ impl Context { array_len: usize, ) -> Result<(), RuntimeError> { let init_values = try_vecmap(0..array_len, |i| { - let index_var = self.acir_context.add_constant(FieldElement::from(i as u128)); + let index_var = self.acir_context.add_constant(i); let read = self.acir_context.read_from_memory(source, &index_var)?; Ok::(AcirValue::Var(read, AcirType::field())) @@ -1298,15 +1343,19 @@ impl Context { var_index: AcirVar, dfg: &DataFlowGraph, ) -> Result { - let element_type_sizes = - self.init_element_type_sizes_array(array_typ, array_id, None, dfg)?; + if !can_omit_element_sizes_array(array_typ) { + let element_type_sizes = + self.init_element_type_sizes_array(array_typ, array_id, None, dfg)?; - let predicate_index = - 
self.acir_context.mul_var(var_index, self.current_side_effects_enabled_var)?; - let flat_element_size_var = - self.acir_context.read_from_memory(element_type_sizes, &predicate_index)?; + let predicate_index = + self.acir_context.mul_var(var_index, self.current_side_effects_enabled_var)?; - Ok(flat_element_size_var) + self.acir_context + .read_from_memory(element_type_sizes, &predicate_index) + .map_err(RuntimeError::from) + } else { + Ok(var_index) + } } fn flattened_slice_size(&mut self, array_id: ValueId, dfg: &DataFlowGraph) -> usize { @@ -1467,12 +1516,12 @@ impl Context { ) -> Result { match self.convert_value(value_id, dfg) { AcirValue::Var(acir_var, _) => Ok(acir_var), - AcirValue::Array(array) => Err(InternalError::UnExpected { + AcirValue::Array(array) => Err(InternalError::Unexpected { expected: "a numeric value".to_string(), found: format!("{array:?}"), call_stack: self.acir_context.get_call_stack(), }), - AcirValue::DynamicArray(_) => Err(InternalError::UnExpected { + AcirValue::DynamicArray(_) => Err(InternalError::Unexpected { expected: "a numeric value".to_string(), found: "an array".to_string(), call_stack: self.acir_context.get_call_stack(), @@ -1524,12 +1573,12 @@ impl Context { // Note: that this produces unnecessary constraints when // this Eq instruction is being used for a constrain statement BinaryOp::Eq => self.acir_context.eq_var(lhs, rhs), - BinaryOp::Lt => self.acir_context.less_than_var( - lhs, - rhs, - bit_count, - self.current_side_effects_enabled_var, - ), + BinaryOp::Lt => match binary_type { + AcirType::NumericType(NumericType::Signed { .. }) => { + self.acir_context.less_than_signed(lhs, rhs, bit_count) + } + _ => self.acir_context.less_than_var(lhs, rhs, bit_count), + }, BinaryOp::Xor => self.acir_context.xor_var(lhs, rhs, binary_type), BinaryOp::And => self.acir_context.and_var(lhs, rhs, binary_type), BinaryOp::Or => self.acir_context.or_var(lhs, rhs, binary_type), @@ -1565,7 +1614,7 @@ impl Context { (_, Type::Function) | (Type::Function, _) => { unreachable!("all functions should be inlined") } - (_, Type::Reference) | (Type::Reference, _) => { + (_, Type::Reference(_)) | (Type::Reference(_), _) => { unreachable!("References are invalid in binary operations") } (_, Type::Array(..)) | (Type::Array(..), _) => { @@ -1587,41 +1636,6 @@ impl Context { } } - /// Returns an `AcirVar` that is constrained to fit in the target type by truncating the input. - /// If the target cast is to a `NativeField`, no truncation is required so the cast becomes a - /// no-op. - fn convert_ssa_cast( - &mut self, - value_id: &ValueId, - typ: &Type, - dfg: &DataFlowGraph, - ) -> Result { - let (variable, incoming_type) = match self.convert_value(*value_id, dfg) { - AcirValue::Var(variable, typ) => (variable, typ), - AcirValue::DynamicArray(_) | AcirValue::Array(_) => { - unreachable!("Cast is only applied to numerics") - } - }; - let target_numeric = match typ { - Type::Numeric(numeric) => numeric, - _ => unreachable!("Can only cast to a numeric"), - }; - match target_numeric { - NumericType::NativeField => { - // Casting into a Field as a no-op - Ok(variable) - } - NumericType::Unsigned { bit_size } | NumericType::Signed { bit_size } => { - let max_bit_size = incoming_type.bit_size(); - if max_bit_size <= *bit_size { - // Incoming variable already fits into target bit size - this is a no-op - return Ok(variable); - } - self.acir_context.truncate_var(variable, *bit_size, max_bit_size) - } - } - } - /// Returns an `AcirVar`that is constrained to be result of the truncation. 
fn convert_ssa_truncate( &mut self, @@ -1639,8 +1653,7 @@ impl Context { ) { // Subtractions must first have the integer modulus added before truncation can be // applied. This is done in order to prevent underflow. - let integer_modulus = - self.acir_context.add_constant(FieldElement::from(2_u128.pow(bit_size))); + let integer_modulus = self.acir_context.add_constant(2_u128.pow(bit_size)); var = self.acir_context.add_var(var, integer_modulus)?; } } @@ -1699,6 +1712,9 @@ impl Context { Ok(Self::convert_vars_to_values(vars, dfg, result_ids)) } + Intrinsic::ApplyRangeConstraint => { + unreachable!("ICE: `Intrinsic::ApplyRangeConstraint` calls should be transformed into an `Instruction::RangeCheck`"); + } Intrinsic::ToRadix(endian) => { let field = self.convert_value(arguments[0], dfg).into_var()?; let radix = self.convert_value(arguments[1], dfg).into_var()?; @@ -1746,197 +1762,570 @@ impl Context { Intrinsic::ArrayLen => { let len = match self.convert_value(arguments[0], dfg) { AcirValue::Var(_, _) => unreachable!("Non-array passed to array.len() method"), - AcirValue::Array(values) => (values.len() as u128).into(), - AcirValue::DynamicArray(array) => (array.len as u128).into(), + AcirValue::Array(values) => values.len(), + AcirValue::DynamicArray(array) => array.len, }; Ok(vec![AcirValue::Var(self.acir_context.add_constant(len), AcirType::field())]) } Intrinsic::SlicePushBack => { + // arguments = [slice_length, slice_contents, ...elements_to_push] let slice_length = self.convert_value(arguments[0], dfg).into_var()?; - let (array_id, array_typ, _) = + let (slice_contents, slice_typ, _) = self.check_array_is_initialized(arguments[1], dfg)?; - let slice = self.convert_value(arguments[1], dfg); + let slice = self.convert_value(slice_contents, dfg); - // TODO(#3364): make sure that we have handled nested struct inputs - let element = self.convert_value(arguments[2], dfg); - let one = self.acir_context.add_constant(FieldElement::one()); - let new_slice_length = self.acir_context.add_var(slice_length, one)?; + let mut new_elem_size = Self::flattened_value_size(&slice); - // We attach the element no matter what in case len == capacity, as otherwise we - // may get an out of bounds error. let mut new_slice = Vector::new(); - self.slice_intrinsic_input(&mut new_slice, slice.clone())?; - new_slice.push_back(element.clone()); + self.slice_intrinsic_input(&mut new_slice, slice)?; + + let elements_to_push = &arguments[2..]; + // We only fill internal slices for nested slices (a slice inside of a slice). + // So we must directly push back elements for slices which are not a nested slice. + if !slice_typ.is_nested_slice() { + for elem in elements_to_push { + let element = self.convert_value(*elem, dfg); + + new_elem_size += Self::flattened_value_size(&element); + new_slice.push_back(element); + } + } + + // Increase the slice length by one to enable accessing more elements in the slice. 
+ let one = self.acir_context.add_constant(FieldElement::one()); + let new_slice_length = self.acir_context.add_var(slice_length, one)?; - // TODO(#3364): This works for non-nested outputs - let len = Self::flattened_value_size(&slice); - let new_elem_size = Self::flattened_value_size(&element); let new_slice_val = AcirValue::Array(new_slice); let result_block_id = self.block_id(&result_ids[1]); - self.initialize_array( - result_block_id, - len + new_elem_size, - Some(new_slice_val.clone()), - )?; + self.initialize_array(result_block_id, new_elem_size, Some(new_slice_val.clone()))?; + // The previous slice length represents the index we want to write into. let mut var_index = slice_length; - self.array_set_value(element, result_block_id, &mut var_index)?; + // Dynamic arrays are represented as flat memory. We must flatten the user facing index + // to a flattened index that matches the complex slice structure. + if slice_typ.is_nested_slice() { + let element_size = slice_typ.element_size(); + + // Multiply the element size against the var index before fetching the flattened index + // This operation makes sure our user-facing slice index matches the strategy for indexing in SSA, + // which is how `get_flattened_index` expects its index input. + let element_size_var = self.acir_context.add_constant(element_size); + var_index = self.acir_context.mul_var(slice_length, element_size_var)?; + var_index = + self.get_flattened_index(&slice_typ, slice_contents, var_index, dfg)?; + } - let result = AcirValue::DynamicArray(AcirDynamicArray { - block_id: result_block_id, - len: len + new_elem_size, - element_type_sizes: self.init_element_type_sizes_array( - &array_typ, - array_id, + // Write the elements we wish to push back directly. + // The slice's underlying array value should already be filled with dummy data + // to enable this write to be within bounds. + // The dummy data is either attached during SSA gen or in this match case for non-nested slices. + // These values can then be accessed due to the increased dynamic slice length. + for elem in elements_to_push { + let element = self.convert_value(*elem, dfg); + self.array_set_value(&element, result_block_id, &mut var_index)?; + } + + let element_type_sizes = if !can_omit_element_sizes_array(&slice_typ) { + Some(self.init_element_type_sizes_array( + &slice_typ, + slice_contents, Some(new_slice_val), dfg, - )?, + )?) + } else { + None + }; + let result = AcirValue::DynamicArray(AcirDynamicArray { + block_id: result_block_id, + len: new_elem_size, + element_type_sizes, }); Ok(vec![AcirValue::Var(new_slice_length, AcirType::field()), result]) } Intrinsic::SlicePushFront => { + // arguments = [slice_length, slice_contents, ...elements_to_push] let slice_length = self.convert_value(arguments[0], dfg).into_var()?; - let slice: AcirValue = self.convert_value(arguments[1], dfg); - // TODO(#3364): make sure that we have handled nested struct inputs - let element = self.convert_value(arguments[2], dfg); + let (slice_contents, slice_typ, _) = + self.check_array_is_initialized(arguments[1], dfg)?; + let slice: AcirValue = self.convert_value(slice_contents, dfg); + + let mut new_slice_size = Self::flattened_value_size(&slice); + + // Increase the slice length by one to enable accessing more elements in the slice. 
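Aside, before the push-front lowering continues below: the push-back strategy above can be pictured on plain data. The flat memory behind the slice already carries dummy padding, so pushing writes over the first unused flattened slots and then bumps the dynamic length. This is only an illustrative sketch with made-up names (`FlatSlice`, plain `u64` values standing in for field elements), not the compiler's `AcirValue`/`AcirVar` machinery, and it assumes every element flattens to the same size.

// Illustrative only: a slice lowered to flat memory plus a dynamic length.
struct FlatSlice {
    memory: Vec<u64>, // flattened contents followed by dummy padding
    length: usize,    // number of logical elements currently valid
}

// Push one element (given as its flattened values) onto the back of the slice.
fn push_back(slice: &mut FlatSlice, element: &[u64]) {
    let element_size = element.len();
    // The previous length is the index we write into; flatten it to an offset.
    let start = slice.length * element_size;
    // The write lands on dummy padding, so it stays within the backing memory.
    slice.memory[start..start + element_size].copy_from_slice(element);
    // Increasing the length makes the new element reachable.
    slice.length += 1;
}

fn main() {
    // Room for three elements of flattened size 2; one element is in use.
    let mut slice = FlatSlice { memory: vec![1, 2, 0, 0, 0, 0], length: 1 };
    push_back(&mut slice, &[3, 4]);
    assert_eq!(slice.memory, vec![1, 2, 3, 4, 0, 0]);
    assert_eq!(slice.length, 2);
}

Push-front, handled next, works from the other end of the same flat representation.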
let one = self.acir_context.add_constant(FieldElement::one()); let new_slice_length = self.acir_context.add_var(slice_length, one)?; let mut new_slice = Vector::new(); self.slice_intrinsic_input(&mut new_slice, slice)?; - new_slice.push_front(element); - Ok(vec![ - AcirValue::Var(new_slice_length, AcirType::field()), - AcirValue::Array(new_slice), - ]) + let elements_to_push = &arguments[2..]; + let mut elem_size = 0; + // We only fill internal slices for nested slices (a slice inside of a slice). + // So we must directly push front elements for slices which are not a nested slice. + if !slice_typ.is_nested_slice() { + for elem in elements_to_push.iter().rev() { + let element = self.convert_value(*elem, dfg); + + elem_size += Self::flattened_value_size(&element); + new_slice.push_front(element); + } + new_slice_size += elem_size; + } else { + // We have already filled the appropriate dummy values for nested slice during SSA gen. + // We need to account for that we do not go out of bounds by removing dummy data as we + // push elements to the front of our slice. + // Using this strategy we are able to avoid dynamic writes like we do for a SlicePushBack. + for elem in elements_to_push.iter().rev() { + let element = self.convert_value(*elem, dfg); + + let elem_size = Self::flattened_value_size(&element); + // Have to pop based off of the flattened value size as we read the + // slice intrinsic as a flat list of AcirValue::Var + for _ in 0..elem_size { + new_slice.pop_back(); + } + new_slice.push_front(element); + } + } + + let new_slice_val = AcirValue::Array(new_slice.clone()); + + let result_block_id = self.block_id(&result_ids[1]); + self.initialize_array( + result_block_id, + new_slice_size, + Some(new_slice_val.clone()), + )?; + + let element_type_sizes = if !can_omit_element_sizes_array(&slice_typ) { + Some(self.init_element_type_sizes_array( + &slice_typ, + slice_contents, + Some(new_slice_val), + dfg, + )?) + } else { + None + }; + let result = AcirValue::DynamicArray(AcirDynamicArray { + block_id: result_block_id, + len: new_slice_size, + element_type_sizes, + }); + + Ok(vec![AcirValue::Var(new_slice_length, AcirType::field()), result]) } Intrinsic::SlicePopBack => { + // arguments = [slice_length, slice_contents] let slice_length = self.convert_value(arguments[0], dfg).into_var()?; - let slice = self.convert_value(arguments[1], dfg); let one = self.acir_context.add_constant(FieldElement::one()); let new_slice_length = self.acir_context.sub_var(slice_length, one)?; - - let (_, _, block_id) = self.check_array_is_initialized(arguments[1], dfg)?; + // For a pop back operation we want to fetch from the `length - 1` as this is the + // last valid index that can be accessed in a slice. After the pop back operation + // the elements stored at that index will no longer be able to be accessed. let mut var_index = new_slice_length; - let elem = self.array_get_value( - &dfg.type_of_value(result_ids[2]), - block_id, - &mut var_index, - &[], - )?; - // TODO(#3364): make sure that we have handled nested struct inputs + let (slice_contents, slice_typ, block_id) = + self.check_array_is_initialized(arguments[1], dfg)?; + let slice = self.convert_value(slice_contents, dfg); + + let element_size = slice_typ.element_size(); + + let mut popped_elements = Vec::new(); + // Fetch the values we are popping off of the slice. + // In the case of non-nested slice the logic is simple as we do not + // need to account for the internal slice sizes or flattening the index. 
+ // + // The pop back operation results are of the format [slice length, slice contents, popped elements]. + // Thus, we look at the result ids at index 2 and onwards to determine the type of each popped element. + if !slice_typ.is_nested_slice() { + for res in &result_ids[2..] { + let elem = self.array_get_value( + &dfg.type_of_value(*res), + block_id, + &mut var_index, + &[], + )?; + popped_elements.push(elem); + } + } else { + // Fetch the slice sizes of the nested slice. + let slice_sizes = self.slice_sizes.get(&slice_contents); + let mut slice_sizes = + slice_sizes.expect("ICE: should have slice sizes").clone(); + // We want to remove the parent size as we are fetching the child + slice_sizes.remove(0); + + // Multiply the element size against the var index before fetching the flattened index + // This operation makes sure our user-facing slice index matches the strategy for indexing in SSA, + // which is how `get_flattened_index` expects its index input. + let element_size_var = self.acir_context.add_constant(element_size); + // We want to use an index one less than the slice length + var_index = self.acir_context.mul_var(var_index, element_size_var)?; + var_index = + self.get_flattened_index(&slice_typ, slice_contents, var_index, dfg)?; + + for res in &result_ids[2..] { + let elem = self.array_get_value( + &dfg.type_of_value(*res), + block_id, + &mut var_index, + &slice_sizes, + )?; + popped_elements.push(elem); + } + } + let mut new_slice = Vector::new(); self.slice_intrinsic_input(&mut new_slice, slice)?; - Ok(vec![ + let mut results = vec![ AcirValue::Var(new_slice_length, AcirType::field()), AcirValue::Array(new_slice), - elem, - ]) + ]; + results.append(&mut popped_elements); + + Ok(results) } Intrinsic::SlicePopFront => { + // arguments = [slice_length, slice_contents] let slice_length = self.convert_value(arguments[0], dfg).into_var()?; - let slice = self.convert_value(arguments[1], dfg); + + let (slice_contents, slice_typ, block_id) = + self.check_array_is_initialized(arguments[1], dfg)?; + let slice = self.convert_value(slice_contents, dfg); let one = self.acir_context.add_constant(FieldElement::one()); let new_slice_length = self.acir_context.sub_var(slice_length, one)?; let mut new_slice = Vector::new(); self.slice_intrinsic_input(&mut new_slice, slice)?; - // TODO(#3364): make sure that we have handled nested struct inputs - let elem = new_slice - .pop_front() - .expect("There are no elements in this slice to be removed"); - Ok(vec![ - elem, - AcirValue::Var(new_slice_length, AcirType::field()), - AcirValue::Array(new_slice), - ]) + let element_size = slice_typ.element_size(); + + let mut popped_elements: Vec = Vec::new(); + let mut popped_elements_size = 0; + let mut var_index = self.acir_context.add_constant(FieldElement::zero()); + // Fetch the values we are popping off of the slice. + // In the case of non-nested slice the logic is simple as we do not + // need to account for the internal slice sizes or flattening the index. + // + // The pop front operation results are of the format [popped elements, slice length, slice contents]. + // Thus, we look at the result ids up to the element size to determine the type of each popped element. 
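As a plain-data sketch of the pop-front bookkeeping carried out by the branches below: the first `element_size` flattened values are read out as the popped element, the remaining memory becomes the new contents, and the dynamic length shrinks by one. The names and `u64` values here are illustrative assumptions, not the compiler's representation.

// Illustrative only: pop the front element (of flattened size `element_size`)
// off a slice stored as flat memory, returning the popped values and the rest.
fn pop_front(memory: &[u64], length: usize, element_size: usize) -> (Vec<u64>, Vec<u64>, usize) {
    assert!(length > 0, "pop_front on an empty slice");
    let popped = memory[..element_size].to_vec();
    // The new contents simply start after the popped element's flattened values;
    // nothing needs to be shifted because later reads use this shortened view.
    let rest = memory[element_size..].to_vec();
    (popped, rest, length - 1)
}

fn main() {
    // Two elements of flattened size 2: [(1, 2), (3, 4)].
    let (popped, rest, new_len) = pop_front(&[1, 2, 3, 4], 2, 2);
    assert_eq!(popped, vec![1, 2]);
    assert_eq!(rest, vec![3, 4]);
    assert_eq!(new_len, 1);
}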
+ if !slice_typ.is_nested_slice() { + for res in &result_ids[..element_size] { + let element = self.array_get_value( + &dfg.type_of_value(*res), + block_id, + &mut var_index, + &[], + )?; + let elem_size = Self::flattened_value_size(&element); + popped_elements_size += elem_size; + popped_elements.push(element); + } + } else { + let slice_sizes = self.slice_sizes.get(&slice_contents); + let mut slice_sizes = + slice_sizes.expect("ICE: should have slice sizes").clone(); + // We want to remove the parent size as we are fetching the child + slice_sizes.remove(0); + + for res in &result_ids[..element_size] { + let element = self.array_get_value( + &dfg.type_of_value(*res), + block_id, + &mut var_index, + &slice_sizes, + )?; + let elem_size = Self::flattened_value_size(&element); + popped_elements_size += elem_size; + popped_elements.push(element); + } + } + // It is expected that the `popped_elements_size` is the flattened size of the elements, + // as the input slice should be a dynamic array which is represented by flat memory. + new_slice = new_slice.slice(popped_elements_size..); + + popped_elements.push(AcirValue::Var(new_slice_length, AcirType::field())); + popped_elements.push(AcirValue::Array(new_slice)); + + Ok(popped_elements) } Intrinsic::SliceInsert => { - // Slice insert with a constant index + // arguments = [slice_length, slice_contents, insert_index, ...elements_to_insert] let slice_length = self.convert_value(arguments[0], dfg).into_var()?; - let slice = self.convert_value(arguments[1], dfg); - let index = self.convert_value(arguments[2], dfg).into_var()?; - let element = self.convert_value(arguments[3], dfg); + + let (slice_contents, slice_typ, block_id) = + self.check_array_is_initialized(arguments[1], dfg)?; + + let slice = self.convert_value(slice_contents, dfg); + let insert_index = self.convert_value(arguments[2], dfg).into_var()?; let one = self.acir_context.add_constant(FieldElement::one()); let new_slice_length = self.acir_context.add_var(slice_length, one)?; - // TODO(#2462): Slice insert is a little less obvious on how to implement due to the case - // of having a dynamic index - // The slice insert logic will need a more involved codegen - let index = self.acir_context.var_to_expression(index)?.to_const(); - let index = index - .expect("ICE: slice length should be fully tracked and constant by ACIR gen"); - let index = index.to_u128() as usize; + let slice_size = Self::flattened_value_size(&slice); + + // Fetch the flattened index from the user provided index argument. + let element_size = slice_typ.element_size(); + let element_size_var = self.acir_context.add_constant(element_size); + let flat_insert_index = + self.acir_context.mul_var(insert_index, element_size_var)?; + let flat_user_index = + self.get_flattened_index(&slice_typ, slice_contents, flat_insert_index, dfg)?; + + let elements_to_insert = &arguments[3..]; + // Determine the elements we need to write into our resulting dynamic array. + // We need to a fully flat list of AcirVar's as a dynamic array is represented with flat memory. 
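Because a dynamic array is just flat memory, every element to insert must be broken down into the individual field-sized values it occupies. The toy sketch below (a made-up `Val` enum, not the compiler's `AcirValue`) illustrates that flattening; the loop that follows collects the same kind of flat list, plus its total size, for the real values being inserted.

// Illustrative stand-in for a possibly nested value.
enum Val {
    Leaf(u64),       // a single field-sized value
    Group(Vec<Val>), // a composite value (array/struct-like)
}

// Flatten a value into the leaves it occupies in flat memory.
fn flatten(value: &Val, out: &mut Vec<u64>) {
    match value {
        Val::Leaf(x) => out.push(*x),
        Val::Group(values) => {
            for v in values {
                flatten(v, out);
            }
        }
    }
}

fn main() {
    // A composite element such as (1, [2, 3]) flattens to three values.
    let element = Val::Group(vec![Val::Leaf(1), Val::Group(vec![Val::Leaf(2), Val::Leaf(3)])]);
    let mut flat = Vec::new();
    flatten(&element, &mut flat);
    assert_eq!(flat, vec![1, 2, 3]);
    assert_eq!(flat.len(), 3); // this total is the element's "flattened size"
}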
+ let mut inner_elem_size_usize = 0; + let mut flattened_elements = Vec::new(); + for elem in elements_to_insert { + let element = self.convert_value(*elem, dfg); + let elem_size = Self::flattened_value_size(&element); + inner_elem_size_usize += elem_size; + let mut flat_elem = element.flatten().into_iter().map(|(var, _)| var).collect(); + flattened_elements.append(&mut flat_elem); + } + let inner_elem_size = self.acir_context.add_constant(inner_elem_size_usize); + // Set the maximum flattened index at which a new element should be inserted. + let max_flat_user_index = + self.acir_context.add_var(flat_user_index, inner_elem_size)?; + + // Go through the entire slice argument and determine what value should be written to the new slice. + // 1. If we are below the starting insertion index we should insert the value that was already + // in the original slice. + // 2. If we are above the starting insertion index but below the max insertion index we should insert + // the flattened element arguments. + // 3. If we are above the max insertion index we should insert the previous value from the original slice, + // as during an insertion we want to shift all elements after the insertion up an index. + let result_block_id = self.block_id(&result_ids[1]); + self.initialize_array(result_block_id, slice_size, None)?; + let mut current_insert_index = 0; + for i in 0..slice_size { + let current_index = self.acir_context.add_constant(i); + + // Check that we are above the lower bound of the insertion index + let greater_eq_than_idx = + self.acir_context.more_than_eq_var(current_index, flat_user_index, 64)?; + // Check that we are below the upper bound of the insertion index + let less_than_idx = + self.acir_context.less_than_var(current_index, max_flat_user_index, 64)?; + + // Read from the original slice the value we want to insert into our new slice. + // We need to make sure that we read the previous element when our current index is greater than insertion index. + // If the index for the previous element is out of the array bounds we can avoid the check for whether + // the current index is over the insertion index. + let shifted_index = if i < inner_elem_size_usize { + current_index + } else { + let index_minus_elem_size = + self.acir_context.add_constant(i - inner_elem_size_usize); - let mut new_slice = Vector::new(); - self.slice_intrinsic_input(&mut new_slice, slice)?; + let use_shifted_index_pred = self + .acir_context + .mul_var(index_minus_elem_size, greater_eq_than_idx)?; - // We do not return an index out of bounds error directly here - // as the length of the slice is dynamic, and length of `new_slice` - // represents the capacity of the slice, not the actual length. - // - // Constraints should be generated during SSA gen to tell the user - // they are attempting to insert at too large of an index. - // This check prevents a panic inside of the im::Vector insert method. - if index <= new_slice.len() { - // TODO(#3364): make sure that we have handled nested struct inputs - new_slice.insert(index, element); + let not_pred = self.acir_context.sub_var(one, greater_eq_than_idx)?; + let use_current_index_pred = + self.acir_context.mul_var(not_pred, current_index)?; + + self.acir_context.add_var(use_shifted_index_pred, use_current_index_pred)? 
+ }; + + let value_shifted_index = + self.acir_context.read_from_memory(block_id, &shifted_index)?; + + // Final predicate to determine whether we are within the insertion bounds + let should_insert_value_pred = + self.acir_context.mul_var(greater_eq_than_idx, less_than_idx)?; + let insert_value_pred = self.acir_context.mul_var( + flattened_elements[current_insert_index], + should_insert_value_pred, + )?; + + let not_pred = self.acir_context.sub_var(one, should_insert_value_pred)?; + let shifted_value_pred = + self.acir_context.mul_var(not_pred, value_shifted_index)?; + + let new_value = + self.acir_context.add_var(insert_value_pred, shifted_value_pred)?; + + self.acir_context.write_to_memory( + result_block_id, + ¤t_index, + &new_value, + )?; + + current_insert_index += 1; + if inner_elem_size_usize == current_insert_index { + current_insert_index = 0; + } } - Ok(vec![ - AcirValue::Var(new_slice_length, AcirType::field()), - AcirValue::Array(new_slice), - ]) + // let new_slice_val = AcirValue::Array(new_slice); + let element_type_sizes = if !can_omit_element_sizes_array(&slice_typ) { + Some(self.init_element_type_sizes_array( + &slice_typ, + slice_contents, + Some(slice), + dfg, + )?) + } else { + None + }; + let result = AcirValue::DynamicArray(AcirDynamicArray { + block_id: result_block_id, + len: slice_size, + element_type_sizes, + }); + + Ok(vec![AcirValue::Var(new_slice_length, AcirType::field()), result]) } Intrinsic::SliceRemove => { - // Slice insert with a constant index + // arguments = [slice_length, slice_contents, remove_index] let slice_length = self.convert_value(arguments[0], dfg).into_var()?; - let slice = self.convert_value(arguments[1], dfg); - let index = self.convert_value(arguments[2], dfg).into_var()?; + + let (slice_contents, slice_typ, block_id) = + self.check_array_is_initialized(arguments[1], dfg)?; + + let slice = self.convert_value(slice_contents, dfg); + let remove_index = self.convert_value(arguments[2], dfg).into_var()?; let one = self.acir_context.add_constant(FieldElement::one()); let new_slice_length = self.acir_context.sub_var(slice_length, one)?; - // TODO(#2462): allow slice remove with a constant index - // Slice remove is a little less obvious on how to implement due to the case - // of having a dynamic index - // The slice remove logic will need a more involved codegen - let index = self.acir_context.var_to_expression(index)?.to_const(); - let index = index - .expect("ICE: slice length should be fully tracked and constant by ACIR gen"); - let index = index.to_u128() as usize; + let slice_size = Self::flattened_value_size(&slice); let mut new_slice = Vector::new(); self.slice_intrinsic_input(&mut new_slice, slice)?; - // We do not return an index out of bounds error directly here - // as the length of the slice is dynamic, and length of `new_slice` - // represents the capacity of the slice, not the actual length. - // - // Constraints should be generated during SSA gen to tell the user - // they are attempting to remove at too large of an index. - // This check prevents a panic inside of the im::Vector remove method. - let removed_elem = if index < new_slice.len() { - // TODO(#3364): make sure that we have handled nested struct inputs - new_slice.remove(index) + // Compiler sanity check + assert_eq!( + new_slice.len(), + slice_size, + "ICE: The read flattened slice should match the computed size" + ); + + // Fetch the flattened index from the user provided index argument. 
+ let element_size = slice_typ.element_size(); + let element_size_var = self.acir_context.add_constant(element_size); + let flat_remove_index = + self.acir_context.mul_var(remove_index, element_size_var)?; + let flat_user_index = + self.get_flattened_index(&slice_typ, slice_contents, flat_remove_index, dfg)?; + + // Fetch the values we are remove from the slice. + // In the case of non-nested slice the logic is simple as we do not + // need to account for the internal slice sizes or flattening the index. + // As we fetch the values we can determine the size of the removed values + // which we will later use for writing the correct resulting slice. + let mut popped_elements = Vec::new(); + let mut popped_elements_size = 0; + // Set a temp index just for fetching from the original slice as `array_get_value` mutates + // the index internally. + let mut temp_index = flat_user_index; + if !slice_typ.is_nested_slice() { + for res in &result_ids[2..(2 + element_size)] { + let element = self.array_get_value( + &dfg.type_of_value(*res), + block_id, + &mut temp_index, + &[], + )?; + let elem_size = Self::flattened_value_size(&element); + popped_elements_size += elem_size; + popped_elements.push(element); + } + } else { + let slice_sizes = self.slice_sizes.get(&slice_contents); + let mut slice_sizes = + slice_sizes.expect("ICE: should have slice sizes").clone(); + // We want to remove the parent size as we are fetching the child + slice_sizes.remove(0); + + for res in &result_ids[2..(2 + element_size)] { + let element = self.array_get_value( + &dfg.type_of_value(*res), + block_id, + &mut temp_index, + &slice_sizes, + )?; + let elem_size = Self::flattened_value_size(&element); + popped_elements_size += elem_size; + popped_elements.push(element); + } + } + + // Go through the entire slice argument and determine what value should be written to the new slice. + // 1. If the current index is greater than the removal index we must write the next value + // from the original slice to the current index + // 2. At the end of the slice reading from the next value of the original slice + // can lead to a potential out of bounds error. In this case we just fetch from the original slice + // at the current index. As we are decreasing the slice in length, this is a safe operation. 
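The write loop that follows realizes the strategy just described with predicated ACIR arithmetic, since the removal index is a circuit variable rather than a compile-time constant. On plain data, with the comparison done directly, the same copy-down looks like this sketch (made-up names, `u64` values standing in for field elements):

// Illustrative only: remove `removed_size` flattened values starting at
// `remove_at` by copying every later value down; the tail keeps its old
// values, which is safe because the dynamic length is decremented.
fn remove_flat(memory: &[u64], remove_at: usize, removed_size: usize) -> Vec<u64> {
    let mut result = memory.to_vec();
    for i in 0..memory.len() {
        // Only shift when the source index stays within bounds.
        if i >= remove_at && i + removed_size < memory.len() {
            result[i] = memory[i + removed_size];
        }
    }
    result
}

fn main() {
    // Elements of flattened size 2; remove the element at user index 1 (flat offset 2).
    let memory = [10, 11, 20, 21, 30, 31];
    let shifted = remove_flat(&memory, 2, 2);
    // Logical contents are now [10, 11, 30, 31]; the last two slots are stale
    // but unreachable once the slice length is reduced.
    assert_eq!(shifted, vec![10, 11, 30, 31, 30, 31]);
}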
+ let result_block_id = self.block_id(&result_ids[1]); + self.initialize_array( + result_block_id, + slice_size, + Some(AcirValue::Array(new_slice.clone())), + )?; + for i in 0..slice_size { + let current_index = self.acir_context.add_constant(i); + + let value_current_index = &new_slice[i].borrow_var()?; + + if slice_size > (i + popped_elements_size) { + let shifted_index = + self.acir_context.add_constant(i + popped_elements_size); + + let value_shifted_index = + self.acir_context.read_from_memory(block_id, &shifted_index)?; + + let use_shifted_value = self.acir_context.more_than_eq_var( + current_index, + flat_user_index, + 64, + )?; + + let shifted_value_pred = + self.acir_context.mul_var(value_shifted_index, use_shifted_value)?; + let not_pred = self.acir_context.sub_var(one, use_shifted_value)?; + let current_value_pred = + self.acir_context.mul_var(not_pred, *value_current_index)?; + + let new_value = + self.acir_context.add_var(shifted_value_pred, current_value_pred)?; + + self.acir_context.write_to_memory( + result_block_id, + ¤t_index, + &new_value, + )?; + }; + } + + let new_slice_val = AcirValue::Array(new_slice); + let element_type_sizes = if !can_omit_element_sizes_array(&slice_typ) { + Some(self.init_element_type_sizes_array( + &slice_typ, + slice_contents, + Some(new_slice_val), + dfg, + )?) } else { - // This is a dummy value which should never be used if the appropriate - // slice access checks are generated before this slice remove call. - AcirValue::Var(slice_length, AcirType::field()) + None }; + let result = AcirValue::DynamicArray(AcirDynamicArray { + block_id: result_block_id, + len: slice_size, + element_type_sizes, + }); - Ok(vec![ - AcirValue::Var(new_slice_length, AcirType::field()), - AcirValue::Array(new_slice), - removed_elem, - ]) + let mut result = vec![AcirValue::Var(new_slice_length, AcirType::field()), result]; + result.append(&mut popped_elements); + + Ok(result) } _ => todo!("expected a black box function"), } @@ -1959,7 +2348,7 @@ impl Context { AcirValue::DynamicArray(AcirDynamicArray { block_id, len, .. }) => { for i in 0..len { // We generate witnesses corresponding to the array values - let index_var = self.acir_context.add_constant(FieldElement::from(i as u128)); + let index_var = self.acir_context.add_constant(i); let value_read_var = self.acir_context.read_from_memory(block_id, &index_var)?; @@ -2041,3 +2430,16 @@ impl Context { } } } + +// We can omit the element size array for arrays which have elements of size 1 and do not contain slices. +// TODO: remove restriction on size 1 elements. 
+fn can_omit_element_sizes_array(array_typ: &Type) -> bool { + if array_typ.contains_slice_element() { + return false; + } + let Type::Array(types, _) = array_typ else { + panic!("ICE: expected array type"); + }; + + types.len() == 1 && types[0].flattened_size() == 1 +} diff --git a/compiler/noirc_evaluator/src/ssa/function_builder/data_bus.rs b/compiler/noirc_evaluator/src/ssa/function_builder/data_bus.rs new file mode 100644 index 00000000000..cbaeb2477d6 --- /dev/null +++ b/compiler/noirc_evaluator/src/ssa/function_builder/data_bus.rs @@ -0,0 +1,146 @@ +use std::rc::Rc; + +use crate::ssa::ir::{types::Type, value::ValueId}; +use acvm::FieldElement; +use fxhash::FxHashMap as HashMap; +use noirc_frontend::hir_def::function::FunctionSignature; + +use super::FunctionBuilder; + +/// Used to create a data bus, which is an array of private inputs +/// replacing public inputs +pub(crate) struct DataBusBuilder { + pub(crate) values: im::Vector, + index: usize, + pub(crate) map: HashMap, + pub(crate) databus: Option, +} + +impl DataBusBuilder { + pub(crate) fn new() -> DataBusBuilder { + DataBusBuilder { + index: 0, + map: HashMap::default(), + databus: None, + values: im::Vector::new(), + } + } + + /// Generates a boolean vector telling which (ssa) parameter from the given function signature + /// are tagged with databus visibility + pub(crate) fn is_databus(main_signature: &FunctionSignature) -> Vec { + let mut params_is_databus = Vec::new(); + + for param in &main_signature.0 { + let is_databus = match param.2 { + noirc_frontend::Visibility::Public | noirc_frontend::Visibility::Private => false, + noirc_frontend::Visibility::DataBus => true, + }; + let len = param.1.field_count() as usize; + params_is_databus.extend(vec![is_databus; len]); + } + params_is_databus + } +} + +#[derive(Clone, Default, Debug)] +pub(crate) struct DataBus { + pub(crate) call_data: Option, + pub(crate) call_data_map: HashMap, + pub(crate) return_data: Option, +} + +impl DataBus { + /// Updates the databus values with the provided function + pub(crate) fn map_values(&self, mut f: impl FnMut(ValueId) -> ValueId) -> DataBus { + let mut call_data_map = HashMap::default(); + for (k, v) in self.call_data_map.iter() { + call_data_map.insert(f(*k), *v); + } + DataBus { + call_data: self.call_data.map(&mut f), + call_data_map, + return_data: self.return_data.map(&mut f), + } + } + + /// Construct a databus from call_data and return_data data bus builders + pub(crate) fn get_data_bus(call_data: DataBusBuilder, return_data: DataBusBuilder) -> DataBus { + DataBus { + call_data: call_data.databus, + call_data_map: call_data.map, + return_data: return_data.databus, + } + } +} + +impl FunctionBuilder { + /// Insert a value into a data bus builder + fn add_to_data_bus(&mut self, value: ValueId, databus: &mut DataBusBuilder) { + assert!(databus.databus.is_none(), "initializing finalized call data"); + let typ = self.current_function.dfg[value].get_type().clone(); + match typ { + Type::Numeric(_) => { + databus.values.push_back(value); + databus.index += 1; + } + Type::Array(typ, len) => { + assert!(typ.len() == 1, "unsupported composite type"); + databus.map.insert(value, databus.index); + for i in 0..len { + // load each element of the array + let index = self + .current_function + .dfg + .make_constant(FieldElement::from(i as i128), Type::field()); + let element = self.insert_array_get(value, index, typ[0].clone()); + self.add_to_data_bus(element, databus); + } + } + Type::Reference(_) => { + unreachable!("Attempted to add invalid 
type (reference) to databus") + } + Type::Slice(_) => unreachable!("Attempted to add invalid type (slice) to databus"), + Type::Function => unreachable!("Attempted to add invalid type (function) to databus"), + } + } + + /// Create a data bus builder from a list of values + pub(crate) fn initialize_data_bus( + &mut self, + values: &[ValueId], + mut databus: DataBusBuilder, + ) -> DataBusBuilder { + for value in values { + self.add_to_data_bus(*value, &mut databus); + } + let len = databus.values.len(); + + let array = if len > 0 { + let array = + self.array_constant(databus.values, Type::Array(Rc::new(vec![Type::field()]), len)); + Some(array) + } else { + None + }; + + DataBusBuilder { index: 0, map: databus.map, databus: array, values: im::Vector::new() } + } + + /// Generate the data bus for call-data, based on the parameters of the entry block + /// and a boolean vector telling which ones are call-data + pub(crate) fn call_data_bus(&mut self, is_params_databus: Vec) -> DataBusBuilder { + //filter parameters of the first block that have call-data visibility + let first_block = self.current_function.entry_block(); + let params = self.current_function.dfg[first_block].parameters(); + let mut databus_param = Vec::new(); + for (param, is_databus) in params.iter().zip(is_params_databus) { + if is_databus { + databus_param.push(param.to_owned()); + } + } + // create the call-data-bus from the filtered list + let call_data = DataBusBuilder::new(); + self.initialize_data_bus(&databus_param, call_data) + } +} diff --git a/compiler/noirc_evaluator/src/ssa/function_builder/mod.rs b/compiler/noirc_evaluator/src/ssa/function_builder/mod.rs index 058f91adacb..2871f149b41 100644 --- a/compiler/noirc_evaluator/src/ssa/function_builder/mod.rs +++ b/compiler/noirc_evaluator/src/ssa/function_builder/mod.rs @@ -1,3 +1,5 @@ +pub(crate) mod data_bus; + use std::{borrow::Cow, rc::Rc}; use acvm::FieldElement; @@ -170,8 +172,9 @@ impl FunctionBuilder { /// Insert an allocate instruction at the end of the current block, allocating the /// given amount of field elements. Returns the result of the allocate instruction, /// which is always a Reference to the allocated data. - pub(crate) fn insert_allocate(&mut self) -> ValueId { - self.insert_instruction(Instruction::Allocate, None).first() + pub(crate) fn insert_allocate(&mut self, element_type: Type) -> ValueId { + let reference_type = Type::Reference(Rc::new(element_type)); + self.insert_instruction(Instruction::Allocate, Some(vec![reference_type])).first() } pub(crate) fn set_location(&mut self, location: Location) -> &mut FunctionBuilder { @@ -252,6 +255,19 @@ impl FunctionBuilder { self.insert_instruction(Instruction::Constrain(lhs, rhs, assert_message), None); } + /// Insert a [`Instruction::RangeCheck`] instruction at the end of the current block. + pub(crate) fn insert_range_check( + &mut self, + value: ValueId, + max_bit_size: u32, + assert_message: Option, + ) { + self.insert_instruction( + Instruction::RangeCheck { value, max_bit_size, assert_message }, + None, + ); + } + /// Insert a call instruction at the end of the current block and return /// the results of the call. 
pub(crate) fn insert_call( @@ -260,87 +276,68 @@ impl FunctionBuilder { arguments: Vec, result_types: Vec, ) -> Cow<[ValueId]> { - if let Value::Intrinsic(intrinsic) = &self.current_function.dfg[func] { - if intrinsic == &Intrinsic::WrappingShiftLeft { - let result_type = self.current_function.dfg.type_of_value(arguments[0]); - let bit_size = match result_type { - Type::Numeric(NumericType::Signed { bit_size }) - | Type::Numeric(NumericType::Unsigned { bit_size }) => bit_size, - _ => { - unreachable!("ICE: Truncation attempted on non-integer"); - } - }; - return self - .insert_wrapping_shift_left(arguments[0], arguments[1], bit_size) - .results(); - } - } - self.insert_instruction(Instruction::Call { func, arguments }, Some(result_types)).results() } - /// Insert ssa instructions which computes lhs << rhs by doing lhs*2^rhs - pub(crate) fn insert_shift_left(&mut self, lhs: ValueId, rhs: ValueId) -> ValueId { - let base = self.field_constant(FieldElement::from(2_u128)); - let pow = self.pow(base, rhs); - let typ = self.current_function.dfg.type_of_value(lhs); - let pow = self.insert_cast(pow, typ); - self.insert_binary(lhs, BinaryOp::Mul, pow) - } - /// Insert ssa instructions which computes lhs << rhs by doing lhs*2^rhs /// and truncate the result to bit_size - fn insert_wrapping_shift_left( + pub(crate) fn insert_wrapping_shift_left( &mut self, lhs: ValueId, rhs: ValueId, bit_size: u32, - ) -> InsertInstructionResult { + ) -> ValueId { let base = self.field_constant(FieldElement::from(2_u128)); let typ = self.current_function.dfg.type_of_value(lhs); - let (max_bit, pow) = if let Some(rhs_constant) = - self.current_function.dfg.get_numeric_constant(rhs) - { - // Happy case is that we know precisely by how many bits the the integer will - // increase: lhs_bit_size + rhs - let (rhs_bit_size_pow_2, overflows) = - 2_u32.overflowing_pow(rhs_constant.to_u128() as u32); - if overflows { - let zero = self.numeric_constant(FieldElement::zero(), typ); - return InsertInstructionResult::SimplifiedTo(zero); - } - let pow = self.numeric_constant(FieldElement::from(rhs_bit_size_pow_2 as u128), typ); - (bit_size + (rhs_constant.to_u128() as u32), pow) - } else { - // we use a predicate to nullify the result in case of overflow - let bit_size_var = - self.numeric_constant(FieldElement::from(bit_size as u128), typ.clone()); - let overflow = self.insert_binary(rhs, BinaryOp::Lt, bit_size_var); - let one = self.numeric_constant(FieldElement::one(), Type::unsigned(1)); - let predicate = self.insert_binary(overflow, BinaryOp::Eq, one); - let predicate = self.insert_cast(predicate, typ.clone()); - - let pow = self.pow(base, rhs); - let pow = self.insert_cast(pow, typ); - (FieldElement::max_num_bits(), self.insert_binary(predicate, BinaryOp::Mul, pow)) - }; + let (max_bit, pow) = + if let Some(rhs_constant) = self.current_function.dfg.get_numeric_constant(rhs) { + // Happy case is that we know precisely by how many bits the the integer will + // increase: lhs_bit_size + rhs + let (rhs_bit_size_pow_2, overflows) = + 2_u128.overflowing_pow(rhs_constant.to_u128() as u32); + if overflows { + assert!(bit_size < 128, "ICE - shift left with big integers are not supported"); + if bit_size < 128 { + let zero = self.numeric_constant(FieldElement::zero(), typ); + return InsertInstructionResult::SimplifiedTo(zero).first(); + } + } + let pow = self.numeric_constant(FieldElement::from(rhs_bit_size_pow_2), typ); + (bit_size + (rhs_constant.to_u128() as u32), pow) + } else { + // we use a predicate to nullify the result in case 
of overflow + let bit_size_var = + self.numeric_constant(FieldElement::from(bit_size as u128), typ.clone()); + let overflow = self.insert_binary(rhs, BinaryOp::Lt, bit_size_var); + let one = self.numeric_constant(FieldElement::one(), Type::unsigned(1)); + let predicate = self.insert_binary(overflow, BinaryOp::Eq, one); + let predicate = self.insert_cast(predicate, typ.clone()); + // we can safely cast to unsigned because overflow_checks prevent bit-shift with a negative value + let rhs_unsigned = self.insert_cast(rhs, Type::unsigned(bit_size)); + let pow = self.pow(base, rhs_unsigned); + let pow = self.insert_cast(pow, typ); + (FieldElement::max_num_bits(), self.insert_binary(predicate, BinaryOp::Mul, pow)) + }; - let instruction = Instruction::Binary(Binary { lhs, rhs: pow, operator: BinaryOp::Mul }); if max_bit <= bit_size { - self.insert_instruction(instruction, None) + self.insert_binary(lhs, BinaryOp::Mul, pow) } else { - let result = self.insert_instruction(instruction, None).first(); - self.insert_instruction( - Instruction::Truncate { value: result, bit_size, max_bit_size: max_bit }, - None, - ) + let result = self.insert_binary(lhs, BinaryOp::Mul, pow); + self.insert_truncate(result, bit_size, max_bit) } } /// Insert ssa instructions which computes lhs >> rhs by doing lhs/2^rhs - pub(crate) fn insert_shift_right(&mut self, lhs: ValueId, rhs: ValueId) -> ValueId { + pub(crate) fn insert_shift_right( + &mut self, + lhs: ValueId, + rhs: ValueId, + bit_size: u32, + ) -> ValueId { let base = self.field_constant(FieldElement::from(2_u128)); - let pow = self.pow(base, rhs); + // we can safely cast to unsigned because overflow_checks prevent bit-shift with a negative value + let rhs_unsigned = self.insert_cast(rhs, Type::unsigned(bit_size)); + let pow = self.pow(base, rhs_unsigned); self.insert_binary(lhs, BinaryOp::Div, pow) } @@ -471,6 +468,27 @@ impl FunctionBuilder { _ => None, } } + + /// Insert instructions to increment the reference count of any array(s) stored + /// within the given value. If the given value is not an array and does not contain + /// any arrays, this does nothing. + pub(crate) fn increment_array_reference_count(&mut self, value: ValueId) { + match self.type_of_value(value) { + Type::Numeric(_) => (), + Type::Function => (), + Type::Reference(element) => { + if element.contains_an_array() { + let value = self.insert_load(value, element.as_ref().clone()); + self.increment_array_reference_count(value); + } + } + Type::Array(..) | Type::Slice(..) => { + self.insert_instruction(Instruction::IncrementRc { value }, None); + // If there are nested arrays or slices, we wait until ArrayGet + // is issued to increment the count of that array. + } + } + } } impl std::ops::Index for FunctionBuilder { diff --git a/compiler/noirc_evaluator/src/ssa/ir/dfg.rs b/compiler/noirc_evaluator/src/ssa/ir/dfg.rs index 3cb6736007d..9942a48a38a 100644 --- a/compiler/noirc_evaluator/src/ssa/ir/dfg.rs +++ b/compiler/noirc_evaluator/src/ssa/ir/dfg.rs @@ -1,6 +1,6 @@ use std::borrow::Cow; -use crate::ssa::ir::instruction::SimplifyResult; +use crate::ssa::{function_builder::data_bus::DataBus, ir::instruction::SimplifyResult}; use super::{ basic_block::{BasicBlock, BasicBlockId}, @@ -47,7 +47,7 @@ pub(crate) struct DataFlowGraph { constants: HashMap<(FieldElement, Type), ValueId>, /// Contains each function that has been imported into the current function. 
- /// Each function's Value::Function is uniqued here so any given FunctionId + /// A unique `ValueId` for each function's [`Value::Function`] is stored so any given FunctionId /// will always have the same ValueId within this function. functions: HashMap, @@ -57,7 +57,7 @@ pub(crate) struct DataFlowGraph { intrinsics: HashMap, /// Contains each foreign function that has been imported into the current function. - /// This map is used to ensure that the ValueId for any given foreign functôn is always + /// This map is used to ensure that the ValueId for any given foreign function is always /// represented by only 1 ValueId within this function. foreign_functions: HashMap, @@ -80,6 +80,8 @@ pub(crate) struct DataFlowGraph { /// Instructions inserted by internal SSA passes that don't correspond to user code /// may not have a corresponding location. locations: HashMap, + + pub(crate) data_bus: DataBus, } pub(crate) type CallStack = im::Vector; @@ -158,17 +160,37 @@ impl DataFlowGraph { call_stack: CallStack, ) -> InsertInstructionResult { use InsertInstructionResult::*; - match instruction.simplify(self, block, ctrl_typevars.clone()) { + match instruction.simplify(self, block, ctrl_typevars.clone(), &call_stack) { SimplifyResult::SimplifiedTo(simplification) => SimplifiedTo(simplification), SimplifyResult::SimplifiedToMultiple(simplification) => { SimplifiedToMultiple(simplification) } SimplifyResult::Remove => InstructionRemoved, - result @ (SimplifyResult::SimplifiedToInstruction(_) | SimplifyResult::None) => { - let instruction = result.instruction().unwrap_or(instruction); - let id = self.make_instruction(instruction, ctrl_typevars); - self.blocks[block].insert_instruction(id); - self.locations.insert(id, call_stack); + result @ (SimplifyResult::SimplifiedToInstruction(_) + | SimplifyResult::SimplifiedToInstructionMultiple(_) + | SimplifyResult::None) => { + let instructions = result.instructions().unwrap_or(vec![instruction]); + + if instructions.len() > 1 { + // There's currently no way to pass results from one instruction in `instructions` on to the next. + // We then restrict this to only support multiple instructions if they're all `Instruction::Constrain` + // as this instruction type does not have any results. + assert!( + instructions.iter().all(|instruction| matches!(instruction, Instruction::Constrain(..))), + "`SimplifyResult::SimplifiedToInstructionMultiple` only supports `Constrain` instructions" + ); + } + + let mut last_id = None; + + for instruction in instructions { + let id = self.make_instruction(instruction, ctrl_typevars.clone()); + self.blocks[block].insert_instruction(id); + self.locations.insert(id, call_stack.clone()); + last_id = Some(id); + } + + let id = last_id.expect("There should be at least 1 simplified instruction"); InsertInstructionResult::Results(id, self.instruction_results(id)) } } @@ -318,7 +340,7 @@ impl DataFlowGraph { /// True if the type of this value is Type::Reference. /// Using this method over type_of_value avoids cloning the value's type. pub(crate) fn value_is_reference(&self, value: ValueId) -> bool { - matches!(self.values[value].get_type(), Type::Reference) + matches!(self.values[value].get_type(), Type::Reference(_)) } /// Appends a result type to the instruction. 
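A small model of the invariant asserted in the insertion code above: when a simplification expands one instruction into several, returning "the results of the last inserted instruction" only stays meaningful if the extra instructions produce no results. The real check is stricter (it requires them all to be `Constrain`); the sketch below, with made-up stand-ins rather than the real `Instruction` type, captures the underlying reason.

// Illustrative stand-ins for instructions with and without results.
#[derive(Debug)]
enum Inst {
    Constrain(u32, u32), // produces no results
    Add(u32, u32),       // produces one result
}

fn has_results(inst: &Inst) -> bool {
    matches!(inst, Inst::Add(..))
}

// Insert all simplified instructions into a block, returning the index of the
// last one so its results (if any) can be reported back to the caller.
fn insert_simplified(block: &mut Vec<Inst>, instructions: Vec<Inst>) -> usize {
    assert!(!instructions.is_empty(), "there should be at least one simplified instruction");
    if instructions.len() > 1 {
        assert!(
            instructions.iter().all(|inst| !has_results(inst)),
            "only result-less instructions may be expanded into multiple instructions"
        );
    }
    for inst in instructions {
        block.push(inst);
    }
    block.len() - 1
}

fn main() {
    let mut block = vec![Inst::Add(1, 2)];
    let last = insert_simplified(&mut block, vec![Inst::Constrain(0, 0), Inst::Constrain(1, 1)]);
    assert_eq!(last, 2);
    assert_eq!(block.len(), 3);
}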
@@ -521,13 +543,13 @@ impl<'dfg> InsertInstructionResult<'dfg> { #[cfg(test)] mod tests { use super::DataFlowGraph; - use crate::ssa::ir::instruction::Instruction; + use crate::ssa::ir::{instruction::Instruction, types::Type}; #[test] fn make_instruction() { let mut dfg = DataFlowGraph::default(); let ins = Instruction::Allocate; - let ins_id = dfg.make_instruction(ins, None); + let ins_id = dfg.make_instruction(ins, Some(vec![Type::field()])); let results = dfg.instruction_results(ins_id); assert_eq!(results.len(), 1); diff --git a/compiler/noirc_evaluator/src/ssa/ir/instruction.rs b/compiler/noirc_evaluator/src/ssa/ir/instruction.rs index 71e773e3f70..457fe41de93 100644 --- a/compiler/noirc_evaluator/src/ssa/ir/instruction.rs +++ b/compiler/noirc_evaluator/src/ssa/ir/instruction.rs @@ -40,13 +40,13 @@ pub(crate) enum Intrinsic { SlicePopFront, SliceInsert, SliceRemove, + ApplyRangeConstraint, StrAsBytes, ToBits(Endian), ToRadix(Endian), BlackBox(BlackBoxFunc), FromField, AsField, - WrappingShiftLeft, } impl std::fmt::Display for Intrinsic { @@ -62,6 +62,7 @@ impl std::fmt::Display for Intrinsic { Intrinsic::SliceInsert => write!(f, "slice_insert"), Intrinsic::SliceRemove => write!(f, "slice_remove"), Intrinsic::StrAsBytes => write!(f, "str_as_bytes"), + Intrinsic::ApplyRangeConstraint => write!(f, "apply_range_constraint"), Intrinsic::ToBits(Endian::Big) => write!(f, "to_be_bits"), Intrinsic::ToBits(Endian::Little) => write!(f, "to_le_bits"), Intrinsic::ToRadix(Endian::Big) => write!(f, "to_be_radix"), @@ -69,7 +70,6 @@ impl std::fmt::Display for Intrinsic { Intrinsic::BlackBox(function) => write!(f, "{function}"), Intrinsic::FromField => write!(f, "from_field"), Intrinsic::AsField => write!(f, "as_field"), - Intrinsic::WrappingShiftLeft => write!(f, "wrapping_shift_left"), } } } @@ -80,7 +80,7 @@ impl Intrinsic { /// If there are no side effects then the `Intrinsic` can be removed if the result is unused. pub(crate) fn has_side_effects(&self) -> bool { match self { - Intrinsic::AssertConstant => true, + Intrinsic::AssertConstant | Intrinsic::ApplyRangeConstraint => true, Intrinsic::Sort | Intrinsic::ArrayLen @@ -94,8 +94,7 @@ impl Intrinsic { | Intrinsic::ToBits(_) | Intrinsic::ToRadix(_) | Intrinsic::FromField - | Intrinsic::AsField - | Intrinsic::WrappingShiftLeft => false, + | Intrinsic::AsField => false, // Some black box functions have side-effects Intrinsic::BlackBox(func) => matches!(func, BlackBoxFunc::RecursiveAggregation), @@ -109,6 +108,7 @@ impl Intrinsic { "arraysort" => Some(Intrinsic::Sort), "array_len" => Some(Intrinsic::ArrayLen), "assert_constant" => Some(Intrinsic::AssertConstant), + "apply_range_constraint" => Some(Intrinsic::ApplyRangeConstraint), "slice_push_back" => Some(Intrinsic::SlicePushBack), "slice_push_front" => Some(Intrinsic::SlicePushFront), "slice_pop_back" => Some(Intrinsic::SlicePopBack), @@ -122,7 +122,6 @@ impl Intrinsic { "to_be_bits" => Some(Intrinsic::ToBits(Endian::Big)), "from_field" => Some(Intrinsic::FromField), "as_field" => Some(Intrinsic::AsField), - "wrapping_shift_left" => Some(Intrinsic::WrappingShiftLeft), other => BlackBoxFunc::lookup(other).map(Intrinsic::BlackBox), } } @@ -186,6 +185,13 @@ pub(crate) enum Instruction { /// Creates a new array with the new value at the given index. All other elements are identical /// to those in the given array. This will not modify the original array. ArraySet { array: ValueId, index: ValueId, value: ValueId }, + + /// An instruction to increment the reference count of a value. 
+ /// + /// This currently only has an effect in Brillig code where array sharing and copy on write is + /// implemented via reference counting. In ACIR code this is done with im::Vector and these + /// IncrementRc instructions are ignored. + IncrementRc { value: ValueId }, } impl Instruction { @@ -199,18 +205,19 @@ impl Instruction { match self { Instruction::Binary(binary) => binary.result_type(), Instruction::Cast(_, typ) => InstructionResultType::Known(typ.clone()), - Instruction::Allocate { .. } => InstructionResultType::Known(Type::Reference), Instruction::Not(value) | Instruction::Truncate { value, .. } => { InstructionResultType::Operand(*value) } Instruction::ArraySet { array, .. } => InstructionResultType::Operand(*array), Instruction::Constrain(..) | Instruction::Store { .. } - | Instruction::EnableSideEffects { .. } - | Instruction::RangeCheck { .. } => InstructionResultType::None, - Instruction::Load { .. } | Instruction::ArrayGet { .. } | Instruction::Call { .. } => { - InstructionResultType::Unknown - } + | Instruction::IncrementRc { .. } + | Instruction::RangeCheck { .. } + | Instruction::EnableSideEffects { .. } => InstructionResultType::None, + Instruction::Allocate { .. } + | Instruction::Load { .. } + | Instruction::ArrayGet { .. } + | Instruction::Call { .. } => InstructionResultType::Unknown, } } @@ -228,10 +235,11 @@ impl Instruction { use Instruction::*; match self { - Binary(_) | Cast(_, _) | Not(_) | ArrayGet { .. } | ArraySet { .. } => true, - - // Unclear why this instruction causes problems. - Truncate { .. } => false, + Binary(bin) => { + // In ACIR, a division with a false predicate outputs (0,0), so it cannot replace another instruction unless they have the same predicate + bin.operator != BinaryOp::Div + } + Cast(_, _) | Truncate { .. } | Not(_) | ArrayGet { .. } | ArraySet { .. } => true, // These either have side-effects or interact with memory Constrain(..) @@ -239,6 +247,7 @@ impl Instruction { | Allocate | Load { .. } | Store { .. } + | IncrementRc { .. } | RangeCheck { .. } => false, Call { func, .. } => match dfg[*func] { @@ -270,7 +279,11 @@ impl Instruction { | ArrayGet { .. } | ArraySet { .. } => false, - Constrain(..) | Store { .. } | EnableSideEffects { .. } | RangeCheck { .. } => true, + Constrain(..) + | Store { .. } + | EnableSideEffects { .. } + | IncrementRc { .. } + | RangeCheck { .. } => true, // Some `Intrinsic`s have side effects so we must check what kind of `Call` this is. Call { func, .. } => match dfg[*func] { @@ -327,6 +340,7 @@ impl Instruction { Instruction::ArraySet { array, index, value } => { Instruction::ArraySet { array: f(*array), index: f(*index), value: f(*value) } } + Instruction::IncrementRc { value } => Instruction::IncrementRc { value: f(*value) }, Instruction::RangeCheck { value, max_bit_size, assert_message } => { Instruction::RangeCheck { value: f(*value), @@ -378,7 +392,7 @@ impl Instruction { Instruction::EnableSideEffects { condition } => { f(*condition); } - Instruction::RangeCheck { value, .. } => { + Instruction::IncrementRc { value } | Instruction::RangeCheck { value, .. } => { f(*value); } } @@ -394,6 +408,7 @@ impl Instruction { dfg: &mut DataFlowGraph, block: BasicBlockId, ctrl_typevars: Option>, + call_stack: &CallStack, ) -> SimplifyResult { use SimplifyResult::*; match self { @@ -419,12 +434,12 @@ impl Instruction { _ => None, } } - Instruction::Constrain(lhs, rhs, ..) 
=> { - if dfg.resolve(*lhs) == dfg.resolve(*rhs) { - // Remove trivial case `assert_eq(x, x)` - SimplifyResult::Remove + Instruction::Constrain(lhs, rhs, msg) => { + let constraints = decompose_constrain(*lhs, *rhs, msg.clone(), dfg); + if constraints.is_empty() { + Remove } else { - SimplifyResult::None + SimplifiedToInstructionMultiple(constraints) } } Instruction::ArrayGet { array, index } => { @@ -453,17 +468,56 @@ impl Instruction { } None } - Instruction::Truncate { value, bit_size, .. } => { + Instruction::Truncate { value, bit_size, max_bit_size } => { if let Some((numeric_constant, typ)) = dfg.get_numeric_constant_with_type(*value) { let integer_modulus = 2_u128.pow(*bit_size); let truncated = numeric_constant.to_u128() % integer_modulus; SimplifiedTo(dfg.make_constant(truncated.into(), typ)) + } else if let Value::Instruction { instruction, .. } = &dfg[dfg.resolve(*value)] { + match &dfg[*instruction] { + Instruction::Truncate { bit_size: src_bit_size, .. } => { + // If we're truncating the value to fit into the same or larger bit size then this is a noop. + if src_bit_size <= bit_size && src_bit_size <= max_bit_size { + SimplifiedTo(*value) + } else { + None + } + } + + Instruction::Binary(Binary { + lhs, rhs, operator: BinaryOp::Div, .. + }) if dfg.is_constant(*rhs) => { + // If we're truncating the result of a division by a constant denominator, we can + // reason about the maximum bit size of the result and whether a truncation is necessary. + + let numerator_type = dfg.type_of_value(*lhs); + let max_numerator_bits = numerator_type.bit_size(); + + let divisor = dfg + .get_numeric_constant(*rhs) + .expect("rhs is checked to be constant."); + let divisor_bits = divisor.num_bits(); + + // 2^{max_quotient_bits} = 2^{max_numerator_bits} / 2^{divisor_bits} + // => max_quotient_bits = max_numerator_bits - divisor_bits + // + // In order for the truncation to be a noop, we then require `max_quotient_bits < bit_size`. + let max_quotient_bits = max_numerator_bits - divisor_bits; + if max_quotient_bits < *bit_size { + SimplifiedTo(*value) + } else { + None + } + } + + _ => None, + } } else { None } } Instruction::Call { func, arguments } => { - simplify_call(*func, arguments, dfg, block, ctrl_typevars) + simplify_call(*func, arguments, dfg, block, ctrl_typevars, call_stack) } Instruction::EnableSideEffects { condition } => { if let Some(last) = dfg[block].instructions().last().copied() { @@ -478,6 +532,7 @@ impl Instruction { Instruction::Allocate { .. } => None, Instruction::Load { .. } => None, Instruction::Store { .. } => None, + Instruction::IncrementRc { .. } => None, Instruction::RangeCheck { value, max_bit_size, .. } => { if let Some(numeric_constant) = dfg.get_numeric_constant(*value) { if numeric_constant.num_bits() < *max_bit_size { @@ -494,6 +549,14 @@ impl Instruction { /// that value is returned. Otherwise None is returned. fn simplify_cast(value: ValueId, dst_typ: &Type, dfg: &mut DataFlowGraph) -> SimplifyResult { use SimplifyResult::*; + let value = dfg.resolve(value); + + if let Value::Instruction { instruction, .. 
} = &dfg[value] { + if let Instruction::Cast(original_value, _) = &dfg[*instruction] { + return SimplifiedToInstruction(Instruction::Cast(*original_value, dst_typ.clone())); + } + } + if let Some(constant) = dfg.get_numeric_constant(value) { let src_typ = dfg.type_of_value(value); match (src_typ, dst_typ) { @@ -532,6 +595,129 @@ fn simplify_cast(value: ValueId, dst_typ: &Type, dfg: &mut DataFlowGraph) -> Sim } } +/// Try to decompose this constrain instruction. This constraint will be broken down such that it instead constrains +/// all the values which are used to compute the values which were being constrained. +fn decompose_constrain( + lhs: ValueId, + rhs: ValueId, + msg: Option, + dfg: &mut DataFlowGraph, +) -> Vec { + let lhs = dfg.resolve(lhs); + let rhs = dfg.resolve(rhs); + + if lhs == rhs { + // Remove trivial case `assert_eq(x, x)` + Vec::new() + } else { + match (&dfg[lhs], &dfg[rhs]) { + (Value::NumericConstant { constant, typ }, Value::Instruction { instruction, .. }) + | (Value::Instruction { instruction, .. }, Value::NumericConstant { constant, typ }) + if *typ == Type::bool() => + { + match dfg[*instruction] { + Instruction::Binary(Binary { lhs, rhs, operator: BinaryOp::Eq }) + if constant.is_one() => + { + // Replace an explicit two step equality assertion + // + // v2 = eq v0, u32 v1 + // constrain v2 == u1 1 + // + // with a direct assertion of equality between the two values + // + // v2 = eq v0, u32 v1 + // constrain v0 == v1 + // + // Note that this doesn't remove the value `v2` as it may be used in other instructions, but it + // will likely be removed through dead instruction elimination. + + vec![Instruction::Constrain(lhs, rhs, msg)] + } + + Instruction::Binary(Binary { lhs, rhs, operator: BinaryOp::Mul }) + if constant.is_one() && dfg.type_of_value(lhs) == Type::bool() => + { + // Replace an equality assertion on a boolean multiplication + // + // v2 = mul v0, v1 + // constrain v2 == u1 1 + // + // with a direct assertion that each value is equal to 1 + // + // v2 = mul v0, v1 + // constrain v0 == 1 + // constrain v1 == 1 + // + // This is due to the fact that for `v2` to be 1 then both `v0` and `v1` are 1. + // + // Note that this doesn't remove the value `v2` as it may be used in other instructions, but it + // will likely be removed through dead instruction elimination. + let one = FieldElement::one(); + let one = dfg.make_constant(one, Type::bool()); + + [ + decompose_constrain(lhs, one, msg.clone(), dfg), + decompose_constrain(rhs, one, msg, dfg), + ] + .concat() + } + + Instruction::Binary(Binary { lhs, rhs, operator: BinaryOp::Or }) + if constant.is_zero() => + { + // Replace an equality assertion on an OR + // + // v2 = or v0, v1 + // constrain v2 == u1 0 + // + // with a direct assertion that each value is equal to 0 + // + // v2 = or v0, v1 + // constrain v0 == 0 + // constrain v1 == 0 + // + // This is due to the fact that for `v2` to be 0 then both `v0` and `v1` are 0. + // + // Note that this doesn't remove the value `v2` as it may be used in other instructions, but it + // will likely be removed through dead instruction elimination. 
+ let zero = FieldElement::zero(); + let zero = dfg.make_constant(zero, dfg.type_of_value(lhs)); + + [ + decompose_constrain(lhs, zero, msg.clone(), dfg), + decompose_constrain(rhs, zero, msg, dfg), + ] + .concat() + } + + Instruction::Not(value) => { + // Replace an assertion that a not instruction is truthy + // + // v1 = not v0 + // constrain v1 == u1 1 + // + // with an assertion that the not instruction input is falsy + // + // v1 = not v0 + // constrain v0 == u1 0 + // + // Note that this doesn't remove the value `v1` as it may be used in other instructions, but it + // will likely be removed through dead instruction elimination. + let reversed_constant = FieldElement::from(!constant.is_one()); + let reversed_constant = dfg.make_constant(reversed_constant, Type::bool()); + decompose_constrain(value, reversed_constant, msg, dfg) + } + + _ => vec![Instruction::Constrain(lhs, rhs, msg)], + } + } + + _ => vec![Instruction::Constrain(lhs, rhs, msg)], + } + } +} + /// The possible return values for Instruction::return_types pub(crate) enum InstructionResultType { /// The result type of this instruction matches that of this operand @@ -768,10 +954,19 @@ impl Binary { let zero = dfg.make_constant(FieldElement::zero(), Type::bool()); return SimplifyResult::SimplifiedTo(zero); } - if operand_type.is_unsigned() && rhs_is_zero { - // Unsigned values cannot be less than zero. - let zero = dfg.make_constant(FieldElement::zero(), Type::bool()); - return SimplifyResult::SimplifiedTo(zero); + if operand_type.is_unsigned() { + if rhs_is_zero { + // Unsigned values cannot be less than zero. + let zero = dfg.make_constant(FieldElement::zero(), Type::bool()); + return SimplifyResult::SimplifiedTo(zero); + } else if rhs_is_one { + let zero = dfg.make_constant(FieldElement::zero(), operand_type); + return SimplifyResult::SimplifiedToInstruction(Instruction::binary( + BinaryOp::Eq, + self.lhs, + zero, + )); + } } } BinaryOp::And => { @@ -1011,6 +1206,10 @@ pub(crate) enum SimplifyResult { /// Replace this function with an simpler but equivalent instruction. SimplifiedToInstruction(Instruction), + /// Replace this function with a set of simpler but equivalent instructions. + /// This is currently only to be used for [`Instruction::Constrain`]. 
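(Editor's aside, not part of the patch: a self-contained sketch of how the new `SimplifiedToInstructionMultiple` variant is meant to be consumed through the `instructions()` helper shown further below. The types here are simplified stand-ins for the real SSA types.)

    #[derive(Debug)]
    enum Instruction {
        Constrain(u32, u32),
    }

    #[allow(dead_code)]
    enum SimplifyResult {
        SimplifiedToInstruction(Instruction),
        SimplifiedToInstructionMultiple(Vec<Instruction>),
        Remove,
        None,
    }

    impl SimplifyResult {
        // Mirrors the updated helper: one or many replacement instructions are
        // handled uniformly by returning a Vec.
        fn instructions(self) -> Option<Vec<Instruction>> {
            match self {
                SimplifyResult::SimplifiedToInstruction(instruction) => Some(vec![instruction]),
                SimplifyResult::SimplifiedToInstructionMultiple(instructions) => Some(instructions),
                _ => None,
            }
        }
    }

    fn main() {
        // A decomposed `constrain v5 == 1` may come back as several simpler constraints;
        // a caller inserts each of them in place of the original instruction.
        let result = SimplifyResult::SimplifiedToInstructionMultiple(vec![
            Instruction::Constrain(0, 1),
            Instruction::Constrain(1, 1),
            Instruction::Constrain(2, 0),
        ]);
        for instruction in result.instructions().unwrap_or_default() {
            println!("insert {instruction:?}");
        }
    }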
+ SimplifiedToInstructionMultiple(Vec), + /// Remove the instruction, it is unnecessary Remove, @@ -1019,9 +1218,10 @@ pub(crate) enum SimplifyResult { } impl SimplifyResult { - pub(crate) fn instruction(self) -> Option { + pub(crate) fn instructions(self) -> Option> { match self { - SimplifyResult::SimplifiedToInstruction(instruction) => Some(instruction), + SimplifyResult::SimplifiedToInstruction(instruction) => Some(vec![instruction]), + SimplifyResult::SimplifiedToInstructionMultiple(instructions) => Some(instructions), _ => None, } } diff --git a/compiler/noirc_evaluator/src/ssa/ir/instruction/call.rs b/compiler/noirc_evaluator/src/ssa/ir/instruction/call.rs index da5544d7dc6..68c7177f066 100644 --- a/compiler/noirc_evaluator/src/ssa/ir/instruction/call.rs +++ b/compiler/noirc_evaluator/src/ssa/ir/instruction/call.rs @@ -31,6 +31,7 @@ pub(super) fn simplify_call( dfg: &mut DataFlowGraph, block: BasicBlockId, ctrl_typevars: Option>, + call_stack: &CallStack, ) -> SimplifyResult { let intrinsic = match &dfg[func] { Value::Intrinsic(intrinsic) => *intrinsic, @@ -232,6 +233,20 @@ pub(super) fn simplify_call( SimplifyResult::None } } + Intrinsic::ApplyRangeConstraint => { + let value = arguments[0]; + let max_bit_size = dfg.get_numeric_constant(arguments[1]); + if let Some(max_bit_size) = max_bit_size { + let max_bit_size = max_bit_size.to_u128() as u32; + SimplifyResult::SimplifiedToInstruction(Instruction::RangeCheck { + value, + max_bit_size, + assert_message: Some("call to assert_max_bit_size".to_owned()), + }) + } else { + SimplifyResult::None + } + } Intrinsic::BlackBox(bb_func) => simplify_black_box_func(bb_func, arguments, dfg), Intrinsic::Sort => simplify_sort(dfg, arguments), Intrinsic::AsField => { @@ -242,12 +257,26 @@ pub(super) fn simplify_call( SimplifyResult::SimplifiedToInstruction(instruction) } Intrinsic::FromField => { - let instruction = Instruction::Cast(arguments[0], ctrl_typevars.unwrap().remove(0)); + let incoming_type = Type::field(); + let target_type = ctrl_typevars.unwrap().remove(0); + + let truncate = Instruction::Truncate { + value: arguments[0], + bit_size: target_type.bit_size(), + max_bit_size: incoming_type.bit_size(), + }; + let truncated_value = dfg + .insert_instruction_and_results( + truncate, + block, + Some(vec![incoming_type]), + call_stack.clone(), + ) + .first(); + + let instruction = Instruction::Cast(truncated_value, target_type); SimplifyResult::SimplifiedToInstruction(instruction) } - Intrinsic::WrappingShiftLeft => { - unreachable!("ICE - wrapping shift left should have been proccessed before") - } } } @@ -377,6 +406,8 @@ fn simplify_black_box_func( match bb_func { BlackBoxFunc::SHA256 => simplify_hash(dfg, arguments, acvm::blackbox_solver::sha256), BlackBoxFunc::Blake2s => simplify_hash(dfg, arguments, acvm::blackbox_solver::blake2s), + BlackBoxFunc::Blake3 => simplify_hash(dfg, arguments, acvm::blackbox_solver::blake3), + BlackBoxFunc::Keccakf1600 => SimplifyResult::None, //TODO(Guillaume) BlackBoxFunc::Keccak256 => { match (dfg.get_array_constant(arguments[0]), dfg.get_numeric_constant(arguments[1])) { (Some((input, _)), Some(num_bytes)) if array_is_constant(dfg, &input) => { @@ -396,18 +427,6 @@ fn simplify_black_box_func( _ => SimplifyResult::None, } } - BlackBoxFunc::HashToField128Security => match dfg.get_array_constant(arguments[0]) { - Some((input, _)) if array_is_constant(dfg, &input) => { - let input_bytes: Vec = to_u8_vec(dfg, input); - - let field = acvm::blackbox_solver::hash_to_field_128_security(&input_bytes) - .expect("Rust 
solvable black box function should not fail"); - - let field_constant = dfg.make_constant(field, Type::field()); - SimplifyResult::SimplifiedTo(field_constant) - } - _ => SimplifyResult::None, - }, BlackBoxFunc::EcdsaSecp256k1 => { simplify_signature(dfg, arguments, acvm::blackbox_solver::ecdsa_secp256k1_verify) diff --git a/compiler/noirc_evaluator/src/ssa/ir/printer.rs b/compiler/noirc_evaluator/src/ssa/ir/printer.rs index c6b1f3c7528..2899b987c1d 100644 --- a/compiler/noirc_evaluator/src/ssa/ir/printer.rs +++ b/compiler/noirc_evaluator/src/ssa/ir/printer.rs @@ -172,6 +172,9 @@ pub(crate) fn display_instruction( show(*value) ) } + Instruction::IncrementRc { value } => { + writeln!(f, "inc_rc {}", show(*value)) + } Instruction::RangeCheck { value, max_bit_size, .. } => { writeln!(f, "range_check {} to {} bits", show(*value), *max_bit_size,) } diff --git a/compiler/noirc_evaluator/src/ssa/ir/types.rs b/compiler/noirc_evaluator/src/ssa/ir/types.rs index 7fe0713e748..ae53c7705c2 100644 --- a/compiler/noirc_evaluator/src/ssa/ir/types.rs +++ b/compiler/noirc_evaluator/src/ssa/ir/types.rs @@ -18,6 +18,28 @@ pub enum NumericType { NativeField, } +impl NumericType { + /// Returns the bit size of the provided numeric type. + pub(crate) fn bit_size(self: &NumericType) -> u32 { + match self { + NumericType::NativeField => FieldElement::max_num_bits(), + NumericType::Unsigned { bit_size } | NumericType::Signed { bit_size } => *bit_size, + } + } + + /// Returns true if the given Field value is within the numeric limits + /// for the current NumericType. + pub(crate) fn value_is_within_limits(self, field: FieldElement) -> bool { + match self { + NumericType::Signed { bit_size } | NumericType::Unsigned { bit_size } => { + let max = 2u128.pow(bit_size) - 1; + field <= max.into() + } + NumericType::NativeField => true, + } + } +} + /// All types representable in the IR. #[derive(Clone, Debug, PartialEq, Eq, Hash, Ord, PartialOrd)] pub(crate) enum Type { @@ -25,7 +47,7 @@ pub(crate) enum Type { Numeric(NumericType), /// A reference to some value, such as an array - Reference, + Reference(Rc), /// An immutable array value with the given element type and length Array(Rc, usize), @@ -68,6 +90,18 @@ impl Type { Type::Numeric(NumericType::NativeField) } + /// Returns the bit size of the provided numeric type. + /// + /// # Panics + /// + /// Panics if `self` is not a [`Type::Numeric`] + pub(crate) fn bit_size(&self) -> u32 { + match self { + Type::Numeric(numeric_type) => numeric_type.bit_size(), + other => panic!("bit_size: Expected numeric type, found {other}"), + } + } + /// Returns the size of the element type for this array/slice. /// The size of a type is defined as representing how many Fields are needed /// to represent the type. 
This is 1 for every primitive type, and is the number of fields @@ -86,43 +120,38 @@ impl Type { } Type::Slice(_) => true, Type::Numeric(_) => false, - Type::Reference => false, + Type::Reference(_) => false, Type::Function => false, } } /// Returns the flattened size of a Type pub(crate) fn flattened_size(&self) -> usize { - let mut size = 0; match self { Type::Array(elements, len) => { - size = elements.iter().fold(size, |sum, elem| sum + (elem.flattened_size() * len)); + elements.iter().fold(0, |sum, elem| sum + (elem.flattened_size() * len)) } Type::Slice(_) => { unimplemented!("ICE: cannot fetch flattened slice size"); } - _ => size += 1, + _ => 1, } - size } -} -impl NumericType { - /// Returns true if the given Field value is within the numeric limits - /// for the current NumericType. - pub(crate) fn value_is_within_limits(self, field: FieldElement) -> bool { + pub(crate) fn is_nested_slice(&self) -> bool { + if let Type::Slice(element_types) = self { + element_types.as_ref().iter().any(|typ| typ.contains_slice_element()) + } else { + false + } + } + + /// True if this type is an array (or slice) or internally contains an array (or slice) + pub(crate) fn contains_an_array(&self) -> bool { match self { - NumericType::Signed { bit_size } => { - let min = -(2i128.pow(bit_size - 1)); - let max = 2u128.pow(bit_size - 1) - 1; - // Signed integers are odd since they will overflow the field value - field <= max.into() || field >= min.into() - } - NumericType::Unsigned { bit_size } => { - let max = 2u128.pow(bit_size) - 1; - field <= max.into() - } - NumericType::NativeField => true, + Type::Numeric(_) | Type::Function => false, + Type::Array(_, _) | Type::Slice(_) => true, + Type::Reference(element) => element.contains_an_array(), } } } @@ -136,7 +165,7 @@ impl std::fmt::Display for Type { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { match self { Type::Numeric(numeric) => numeric.fmt(f), - Type::Reference => write!(f, "reference"), + Type::Reference(element) => write!(f, "&mut {element}"), Type::Array(element, length) => { let elements = vecmap(element.iter(), |element| element.to_string()); write!(f, "[{}; {length}]", elements.join(", ")) diff --git a/compiler/noirc_evaluator/src/ssa/opt/array_use.rs b/compiler/noirc_evaluator/src/ssa/opt/array_use.rs index cfa97cee551..0bb8b0112b6 100644 --- a/compiler/noirc_evaluator/src/ssa/opt/array_use.rs +++ b/compiler/noirc_evaluator/src/ssa/opt/array_use.rs @@ -14,6 +14,7 @@ impl Ssa { /// Map arrays with the last instruction that uses it /// For this we simply process all the instructions in execution order /// and update the map whenever there is a match + #[tracing::instrument(level = "trace", skip(self))] pub(crate) fn find_last_array_uses(&self) -> HashMap { let mut array_use = HashMap::default(); for func in self.functions.values() { diff --git a/compiler/noirc_evaluator/src/ssa/opt/assert_constant.rs b/compiler/noirc_evaluator/src/ssa/opt/assert_constant.rs index cd3a509a62e..a3608f89612 100644 --- a/compiler/noirc_evaluator/src/ssa/opt/assert_constant.rs +++ b/compiler/noirc_evaluator/src/ssa/opt/assert_constant.rs @@ -21,6 +21,7 @@ impl Ssa { /// seen by loop unrolling. Furthermore, this pass cannot be a part of loop unrolling /// since we must go through every instruction to find all references to `assert_constant` /// while loop unrolling only touches blocks with loops in them. 
+ #[tracing::instrument(level = "trace", skip(self))] pub(crate) fn evaluate_assert_constant(mut self) -> Result { for function in self.functions.values_mut() { for block in function.reachable_blocks() { diff --git a/compiler/noirc_evaluator/src/ssa/opt/constant_folding.rs b/compiler/noirc_evaluator/src/ssa/opt/constant_folding.rs index f48e6f2a129..addaee3ba8d 100644 --- a/compiler/noirc_evaluator/src/ssa/opt/constant_folding.rs +++ b/compiler/noirc_evaluator/src/ssa/opt/constant_folding.rs @@ -40,6 +40,7 @@ impl Ssa { /// Performs constant folding on each instruction. /// /// See [`constant_folding`][self] module for more information. + #[tracing::instrument(level = "trace", skip(self))] pub(crate) fn fold_constants(mut self) -> Ssa { for function in self.functions.values_mut() { constant_fold(function); @@ -183,7 +184,7 @@ mod test { function_builder::FunctionBuilder, ir::{ function::RuntimeType, - instruction::{BinaryOp, Instruction, TerminatorInstruction}, + instruction::{Binary, BinaryOp, Instruction, TerminatorInstruction}, map::Id, types::Type, value::{Value, ValueId}, @@ -246,6 +247,117 @@ mod test { } } + #[test] + fn redundant_truncation() { + // fn main f0 { + // b0(v0: u16, v1: u16): + // v2 = div v0, v1 + // v3 = truncate v2 to 8 bits, max_bit_size: 16 + // return v3 + // } + // + // After constructing this IR, we set the value of v1 to 2^8. + // The expected return afterwards should be v2. + let main_id = Id::test_new(0); + + // Compiling main + let mut builder = FunctionBuilder::new("main".into(), main_id, RuntimeType::Acir); + let v0 = builder.add_parameter(Type::unsigned(16)); + let v1 = builder.add_parameter(Type::unsigned(16)); + + // Note that this constant guarantees that `v0/constant < 2^8`. We then do not need to truncate the result. + let constant = 2_u128.pow(8); + let constant = builder.numeric_constant(constant, Type::field()); + + let v2 = builder.insert_binary(v0, BinaryOp::Div, v1); + let v3 = builder.insert_truncate(v2, 8, 16); + builder.terminate_with_return(vec![v3]); + + let mut ssa = builder.finish(); + let main = ssa.main_mut(); + let instructions = main.dfg[main.entry_block()].instructions(); + assert_eq!(instructions.len(), 2); // The final return is not counted + + // Expected output: + // + // fn main f0 { + // b0(Field 2: Field): + // return Field 9 + // } + main.dfg.set_value_from_id(v1, constant); + + let ssa = ssa.fold_constants(); + let main = ssa.main(); + + println!("{ssa}"); + + let instructions = main.dfg[main.entry_block()].instructions(); + assert_eq!(instructions.len(), 1); + let instruction = &main.dfg[instructions[0]]; + + assert_eq!( + instruction, + &Instruction::Binary(Binary { lhs: v0, operator: BinaryOp::Div, rhs: constant }) + ); + } + + #[test] + fn non_redundant_truncation() { + // fn main f0 { + // b0(v0: u16, v1: u16): + // v2 = div v0, v1 + // v3 = truncate v2 to 8 bits, max_bit_size: 16 + // return v3 + // } + // + // After constructing this IR, we set the value of v1 to 2^8 - 1. + // This should not result in the truncation being removed. + let main_id = Id::test_new(0); + + // Compiling main + let mut builder = FunctionBuilder::new("main".into(), main_id, RuntimeType::Acir); + let v0 = builder.add_parameter(Type::unsigned(16)); + let v1 = builder.add_parameter(Type::unsigned(16)); + + // Note that this constant does not guarantee that `v0/constant < 2^8`. We must then truncate the result. 
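(Editor's aside, not part of the patch: a worked instance of the `max_quotient_bits` reasoning used by the new `Truncate` simplification above, mirroring the two tests here. The helper below is a standalone sketch in plain integer arithmetic; the names are illustrative.)

    // max_quotient_bits = max_numerator_bits - divisor_bits, and the truncate is
    // redundant only when max_quotient_bits < target_bit_size.
    fn truncate_is_redundant(max_numerator_bits: u32, divisor: u128, target_bit_size: u32) -> bool {
        let divisor_bits = 128 - divisor.leading_zeros(); // bit length of the constant divisor
        let max_quotient_bits = max_numerator_bits - divisor_bits;
        max_quotient_bits < target_bit_size
    }

    fn main() {
        // u16 numerator divided by 2^8 = 256 (9 bits): 16 - 9 = 7 < 8, so the truncate is dropped.
        assert!(truncate_is_redundant(16, 256, 8));
        // u16 numerator divided by 2^8 - 1 = 255 (8 bits): 16 - 8 = 8, not < 8, so the truncate stays.
        assert!(!truncate_is_redundant(16, 255, 8));
    }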
+ let constant = 2_u128.pow(8) - 1; + let constant = builder.numeric_constant(constant, Type::field()); + + let v2 = builder.insert_binary(v0, BinaryOp::Div, v1); + let v3 = builder.insert_truncate(v2, 8, 16); + builder.terminate_with_return(vec![v3]); + + let mut ssa = builder.finish(); + let main = ssa.main_mut(); + let instructions = main.dfg[main.entry_block()].instructions(); + assert_eq!(instructions.len(), 2); // The final return is not counted + + // Expected output: + // + // fn main f0 { + // b0(v0: u16, Field 255: Field): + // v5 = div v0, Field 255 + // v6 = truncate v5 to 8 bits, max_bit_size: 16 + // return v6 + // } + main.dfg.set_value_from_id(v1, constant); + + let ssa = ssa.fold_constants(); + let main = ssa.main(); + + let instructions = main.dfg[main.entry_block()].instructions(); + assert_eq!(instructions.len(), 2); + + assert_eq!( + &main.dfg[instructions[0]], + &Instruction::Binary(Binary { lhs: v0, operator: BinaryOp::Div, rhs: constant }) + ); + assert_eq!( + &main.dfg[instructions[1]], + &Instruction::Truncate { value: ValueId::test_new(5), bit_size: 8, max_bit_size: 16 } + ); + } + #[test] fn arrays_elements_are_updated() { // fn main f0 { @@ -279,11 +391,11 @@ mod test { let return_value_id = match entry_block.unwrap_terminator() { TerminatorInstruction::Return { return_values, .. } => return_values[0], - _ => unreachable!(), + _ => unreachable!("Should have terminator instruction"), }; let return_element = match &main.dfg[return_value_id] { Value::Array { array, .. } => array[0], - _ => unreachable!(), + _ => unreachable!("Return type should be array"), }; // The return element is expected to refer to the new add instruction result. assert_eq!(main.dfg.resolve(new_add_instr_result), main.dfg.resolve(return_element)); @@ -292,7 +404,7 @@ mod test { #[test] fn instruction_deduplication() { // fn main f0 { - // b0(v0: Field): + // b0(v0: u16): // v1 = cast v0 as u32 // v2 = cast v0 as u32 // constrain v1 v2 @@ -307,7 +419,7 @@ mod test { // Compiling main let mut builder = FunctionBuilder::new("main".into(), main_id, RuntimeType::Acir); - let v0 = builder.add_parameter(Type::field()); + let v0 = builder.add_parameter(Type::unsigned(16)); let v1 = builder.insert_cast(v0, Type::unsigned(32)); let v2 = builder.insert_cast(v0, Type::unsigned(32)); @@ -321,7 +433,7 @@ mod test { // Expected output: // // fn main f0 { - // b0(v0: Field): + // b0(v0: u16): // v1 = cast v0 as u32 // } let ssa = ssa.fold_constants(); @@ -331,6 +443,69 @@ mod test { assert_eq!(instructions.len(), 1); let instruction = &main.dfg[instructions[0]]; - assert_eq!(instruction, &Instruction::Cast(ValueId::test_new(0), Type::unsigned(32))); + assert_eq!(instruction, &Instruction::Cast(v0, Type::unsigned(32))); + } + + #[test] + fn constraint_decomposition() { + // fn main f0 { + // b0(v0: u1, v1: u1, v2: u1): + // v3 = mul v0 v1 + // v4 = not v2 + // v5 = mul v3 v4 + // constrain v4 u1 1 + // } + // + // When constructing this IR, we should automatically decompose the constraint to be in terms of `v0`, `v1` and `v2`. + // + // The mul instructions are retained and will be removed in the dead instruction elimination pass. 
+ let main_id = Id::test_new(0); + + // Compiling main + let mut builder = FunctionBuilder::new("main".into(), main_id, RuntimeType::Acir); + let v0 = builder.add_parameter(Type::bool()); + let v1 = builder.add_parameter(Type::bool()); + let v2 = builder.add_parameter(Type::bool()); + + let v3 = builder.insert_binary(v0, BinaryOp::Mul, v1); + let v4 = builder.insert_not(v2); + let v5 = builder.insert_binary(v3, BinaryOp::Mul, v4); + + // This constraint is automatically decomposed when it is inserted. + let v_true = builder.numeric_constant(true, Type::bool()); + builder.insert_constrain(v5, v_true, None); + + let v_false = builder.numeric_constant(false, Type::bool()); + + // Expected output: + // + // fn main f0 { + // b0(v0: u1, v1: u1, v2: u1): + // v3 = mul v0 v1 + // v4 = not v2 + // v5 = mul v3 v4 + // constrain v0 u1 1 + // constrain v1 u1 1 + // constrain v2 u1 0 + // } + + let ssa = builder.finish(); + let main = ssa.main(); + let instructions = main.dfg[main.entry_block()].instructions(); + + assert_eq!(instructions.len(), 6); + + assert_eq!( + main.dfg[instructions[0]], + Instruction::Binary(Binary { lhs: v0, operator: BinaryOp::Mul, rhs: v1 }) + ); + assert_eq!(main.dfg[instructions[1]], Instruction::Not(v2)); + assert_eq!( + main.dfg[instructions[2]], + Instruction::Binary(Binary { lhs: v3, operator: BinaryOp::Mul, rhs: v4 }) + ); + assert_eq!(main.dfg[instructions[3]], Instruction::Constrain(v0, v_true, None)); + assert_eq!(main.dfg[instructions[4]], Instruction::Constrain(v1, v_true, None)); + assert_eq!(main.dfg[instructions[5]], Instruction::Constrain(v2, v_false, None)); } } diff --git a/compiler/noirc_evaluator/src/ssa/opt/defunctionalize.rs b/compiler/noirc_evaluator/src/ssa/opt/defunctionalize.rs index 62b335be1e2..b7f154397a6 100644 --- a/compiler/noirc_evaluator/src/ssa/opt/defunctionalize.rs +++ b/compiler/noirc_evaluator/src/ssa/opt/defunctionalize.rs @@ -52,6 +52,7 @@ struct DefunctionalizationContext { } impl Ssa { + #[tracing::instrument(level = "trace", skip(self))] pub(crate) fn defunctionalize(mut self) -> Ssa { // Find all functions used as value that share the same signature let variants = find_variants(&self); diff --git a/compiler/noirc_evaluator/src/ssa/opt/die.rs b/compiler/noirc_evaluator/src/ssa/opt/die.rs index 3db95f6ad99..f7d8adb5275 100644 --- a/compiler/noirc_evaluator/src/ssa/opt/die.rs +++ b/compiler/noirc_evaluator/src/ssa/opt/die.rs @@ -7,7 +7,7 @@ use crate::ssa::{ basic_block::{BasicBlock, BasicBlockId}, dfg::DataFlowGraph, function::Function, - instruction::InstructionId, + instruction::{Instruction, InstructionId}, post_order::PostOrder, value::{Value, ValueId}, }, @@ -17,6 +17,7 @@ use crate::ssa::{ impl Ssa { /// Performs Dead Instruction Elimination (DIE) to remove any instructions with /// unused results. + #[tracing::instrument(level = "trace", skip(self))] pub(crate) fn dead_instruction_elimination(mut self) -> Ssa { for function in self.functions.values_mut() { dead_instruction_elimination(function); @@ -33,11 +34,17 @@ impl Ssa { /// of its instructions are needed elsewhere. 
fn dead_instruction_elimination(function: &mut Function) { let mut context = Context::default(); + if let Some(call_data) = function.dfg.data_bus.call_data { + context.mark_used_instruction_results(&function.dfg, call_data); + } + let blocks = PostOrder::with_function(function); for block in blocks.as_slice() { context.remove_unused_instructions_in_block(function, *block); } + + context.remove_increment_rc_instructions(&mut function.dfg); } /// Per function context for tracking unused values and which instructions to remove. @@ -45,6 +52,11 @@ fn dead_instruction_elimination(function: &mut Function) { struct Context { used_values: HashSet, instructions_to_remove: HashSet, + + /// IncrementRc instructions must be revisited after the main DIE pass since + /// they technically contain side-effects but we still want to remove them if their + /// `value` parameter is not used elsewhere. + increment_rc_instructions: Vec<(InstructionId, BasicBlockId)>, } impl Context { @@ -67,14 +79,19 @@ impl Context { let block = &function.dfg[block_id]; self.mark_terminator_values_as_used(function, block); - for instruction in block.instructions().iter().rev() { - if self.is_unused(*instruction, function) { - self.instructions_to_remove.insert(*instruction); + for instruction_id in block.instructions().iter().rev() { + if self.is_unused(*instruction_id, function) { + self.instructions_to_remove.insert(*instruction_id); } else { - let instruction = &function.dfg[*instruction]; - instruction.for_each_value(|value| { - self.mark_used_instruction_results(&function.dfg, value); - }); + let instruction = &function.dfg[*instruction_id]; + + if let Instruction::IncrementRc { .. } = instruction { + self.increment_rc_instructions.push((*instruction_id, block_id)); + } else { + instruction.for_each_value(|value| { + self.mark_used_instruction_results(&function.dfg, value); + }); + } } } @@ -119,11 +136,28 @@ impl Context { self.mark_used_instruction_results(dfg, *elem); } } + Value::Param { .. 
} => { + self.used_values.insert(value_id); + } _ => { // Does not comprise of any instruction results } } } + + fn remove_increment_rc_instructions(self, dfg: &mut DataFlowGraph) { + for (increment_rc, block) in self.increment_rc_instructions { + let value = match &dfg[increment_rc] { + Instruction::IncrementRc { value } => *value, + other => unreachable!("Expected IncrementRc instruction, found {other:?}"), + }; + + // This could be more efficient if we have to remove multiple IncrementRcs in a single block + if !self.used_values.contains(&value) { + dfg[block].instructions_mut().retain(|instruction| *instruction != increment_rc); + } + } + } } #[cfg(test)] @@ -176,10 +210,10 @@ mod test { builder.switch_to_block(b1); let _v3 = builder.add_block_parameter(b1, Type::field()); - let v4 = builder.insert_allocate(); + let v4 = builder.insert_allocate(Type::field()); let _v5 = builder.insert_load(v4, Type::field()); - let v6 = builder.insert_allocate(); + let v6 = builder.insert_allocate(Type::field()); builder.insert_store(v6, one); let v7 = builder.insert_load(v6, Type::field()); let v8 = builder.insert_binary(v7, BinaryOp::Add, one); diff --git a/compiler/noirc_evaluator/src/ssa/opt/fill_internal_slices.rs b/compiler/noirc_evaluator/src/ssa/opt/fill_internal_slices.rs index d12b01b9196..5ee8e42fe3a 100644 --- a/compiler/noirc_evaluator/src/ssa/opt/fill_internal_slices.rs +++ b/compiler/noirc_evaluator/src/ssa/opt/fill_internal_slices.rs @@ -46,7 +46,7 @@ use crate::ssa::{ ir::{ basic_block::BasicBlockId, dfg::CallStack, - function::Function, + function::{Function, RuntimeType}, function_inserter::FunctionInserter, instruction::{Instruction, InstructionId, Intrinsic}, post_order::PostOrder, @@ -60,10 +60,20 @@ use acvm::FieldElement; use fxhash::FxHashMap as HashMap; impl Ssa { + #[tracing::instrument(level = "trace", skip(self))] pub(crate) fn fill_internal_slices(mut self) -> Ssa { for function in self.functions.values_mut() { - let mut context = Context::new(function); - context.process_blocks(); + // This pass is only necessary for generating ACIR and thus we should not + // process Brillig functions. + // The pass is also currently only setup to handle a function with a single flattened block. + // For complex Brillig functions we can expect this pass to panic. 
+ if function.runtime() == RuntimeType::Acir { + let databus = function.dfg.data_bus.clone(); + let mut context = Context::new(function); + context.process_blocks(); + // update the databus with the new array instructions + function.dfg.data_bus = databus.map_values(|t| context.inserter.resolve(t)); + } } self } @@ -168,6 +178,9 @@ impl<'f> Context<'f> { panic!("ICE: should have inner slice set for {slice_value}") }); slice_sizes.insert(results[0], inner_slice.clone()); + if slice_value != results[0] { + self.mapped_slice_values.insert(slice_value, results[0]); + } } } } @@ -192,17 +205,11 @@ impl<'f> Context<'f> { let inner_sizes = slice_sizes.get_mut(array).expect("ICE expected slice sizes"); inner_sizes.1.push(*value); - - let value_parent = self.resolve_slice_parent(*value); - if slice_values.contains(&value_parent) { - // Map the value parent to the current array in case nested slices - // from the current array are set to larger values later in the program - self.mapped_slice_values.insert(value_parent, *array); - } } if let Some(inner_sizes) = slice_sizes.get_mut(array) { let inner_sizes = inner_sizes.clone(); + slice_sizes.insert(results[0], inner_sizes); self.mapped_slice_values.insert(*array, results[0]); @@ -218,14 +225,27 @@ impl<'f> Context<'f> { | Intrinsic::SlicePopBack | Intrinsic::SliceInsert | Intrinsic::SliceRemove => (1, 1), - Intrinsic::SlicePopFront => (1, 2), + // `pop_front` returns the popped element, and then the respective slice. + // This means in the case of a slice with structs, the result index of the popped slice + // will change depending on the number of elements in the struct. + // For example, a slice with four elements will look as such in SSA: + // v3, v4, v5, v6, v7, v8 = call slice_pop_front(v1, v2) + // where v7 is the slice length and v8 is the popped slice itself. + Intrinsic::SlicePopFront => (1, results.len() - 1), _ => return, }; + let slice_contents = arguments[argument_index]; match intrinsic { Intrinsic::SlicePushBack | Intrinsic::SlicePushFront | Intrinsic::SliceInsert => { - let slice_contents = arguments[argument_index]; + for arg in &arguments[(argument_index + 1)..] { + let element_typ = self.inserter.function.dfg.type_of_value(*arg); + if element_typ.contains_slice_element() { + slice_values.push(*arg); + self.compute_slice_sizes(*arg, slice_sizes); + } + } if let Some(inner_sizes) = slice_sizes.get_mut(&slice_contents) { inner_sizes.0 += 1; @@ -234,12 +254,12 @@ impl<'f> Context<'f> { self.mapped_slice_values .insert(slice_contents, results[result_index]); + self.slice_parents.insert(results[result_index], slice_contents); } } Intrinsic::SlicePopBack - | Intrinsic::SlicePopFront - | Intrinsic::SliceRemove => { - let slice_contents = arguments[argument_index]; + | Intrinsic::SliceRemove + | Intrinsic::SlicePopFront => { // We do not decrement the size on intrinsics that could remove values from a slice. // This is because we could potentially go back to the smaller slice and not fill in dummies. 
// This pass should be tracking the potential max that a slice ***could be*** @@ -249,6 +269,7 @@ impl<'f> Context<'f> { self.mapped_slice_values .insert(slice_contents, results[result_index]); + self.slice_parents.insert(results[result_index], slice_contents); } } _ => {} @@ -271,7 +292,6 @@ impl<'f> Context<'f> { if slice_values.contains(array) { let (new_array_op_instr, call_stack) = self.get_updated_array_op_instr(*array, slice_sizes, instruction); - self.inserter.push_instruction_value( new_array_op_instr, instruction, @@ -282,6 +302,55 @@ impl<'f> Context<'f> { self.inserter.push_instruction(instruction, block); } } + Instruction::Call { func: _, arguments } => { + let mut args_to_replace = Vec::new(); + for (i, arg) in arguments.iter().enumerate() { + let element_typ = self.inserter.function.dfg.type_of_value(*arg); + if slice_values.contains(arg) && element_typ.contains_slice_element() { + args_to_replace.push((i, *arg)); + } + } + if args_to_replace.is_empty() { + self.inserter.push_instruction(instruction, block); + } else { + // Using the original slice is ok to do as during collection of slice information + // we guarantee that only the arguments to slice intrinsic calls can be replaced. + let slice_contents = arguments[1]; + + let element_typ = self.inserter.function.dfg.type_of_value(arguments[1]); + let elem_depth = Self::compute_nested_slice_depth(&element_typ); + + let mut max_sizes = Vec::new(); + max_sizes.resize(elem_depth, 0); + // We want the max for the parent of the argument + let parent = self.resolve_slice_parent(slice_contents); + self.compute_slice_max_sizes(parent, slice_sizes, &mut max_sizes, 0); + + for (index, arg) in args_to_replace { + let element_typ = self.inserter.function.dfg.type_of_value(arg); + max_sizes.remove(0); + let new_array = + self.attach_slice_dummies(&element_typ, Some(arg), false, &max_sizes); + + let instruction_id = instruction; + let (instruction, call_stack) = + self.inserter.map_instruction(instruction_id); + let new_call_instr = match instruction { + Instruction::Call { func, mut arguments } => { + arguments[index] = new_array; + Instruction::Call { func, arguments } + } + _ => panic!("Expected call instruction"), + }; + self.inserter.push_instruction_value( + new_call_instr, + instruction_id, + block, + call_stack, + ); + } + } + } _ => { self.inserter.push_instruction(instruction, block); } @@ -308,6 +377,7 @@ impl<'f> Context<'f> { let typ = self.inserter.function.dfg.type_of_value(array_id); let depth = Self::compute_nested_slice_depth(&typ); max_sizes.resize(depth, 0); + max_sizes[0] = *current_size; self.compute_slice_max_sizes(array_id, slice_sizes, &mut max_sizes, 1); @@ -364,9 +434,12 @@ impl<'f> Context<'f> { if let Some(value) = value { let mut slice = im::Vector::new(); - let array = match self.inserter.function.dfg[value].clone() { + let value = self.inserter.function.dfg[value].clone(); + let array = match value { Value::Array { array, .. 
} => array, - _ => panic!("Expected an array value"), + _ => { + panic!("Expected an array value"); + } }; if is_parent_slice { @@ -398,7 +471,7 @@ impl<'f> Context<'f> { self.inserter.function.dfg.make_array(slice, typ.clone()) } } - Type::Reference => { + Type::Reference(_) => { unreachable!("ICE: Generating dummy data for references is unsupported") } Type::Function => { @@ -481,7 +554,6 @@ impl<'f> Context<'f> { self.compute_slice_max_sizes(*inner_slice, slice_sizes, max_sizes, depth + 1); } - max_sizes[depth] = max; if max > max_sizes[depth] { max_sizes[depth] = max; } diff --git a/compiler/noirc_evaluator/src/ssa/opt/flatten_cfg.rs b/compiler/noirc_evaluator/src/ssa/opt/flatten_cfg.rs index d2ed21c60d7..6bdf2ab1c0a 100644 --- a/compiler/noirc_evaluator/src/ssa/opt/flatten_cfg.rs +++ b/compiler/noirc_evaluator/src/ssa/opt/flatten_cfg.rs @@ -163,6 +163,7 @@ impl Ssa { /// This pass will modify any instructions with side effects in particular, often multiplying /// them by jump conditions to maintain correctness even when all branches of a jmpif are inlined. /// For more information, see the module-level comment at the top of this file. + #[tracing::instrument(level = "trace", skip(self))] pub(crate) fn flatten_cfg(mut self) -> Ssa { flatten_function_cfg(self.main_mut()); self @@ -643,6 +644,9 @@ impl<'f> Context<'f> { // Condition needs to be cast to argument type in order to multiply them together. let argument_type = self.inserter.function.dfg.type_of_value(lhs); + // Sanity check that we're not constraining non-primitive types + assert!(matches!(argument_type, Type::Numeric(_))); + let casted_condition = self.insert_instruction( Instruction::Cast(condition, argument_type), call_stack.clone(), @@ -656,6 +660,7 @@ impl<'f> Context<'f> { Instruction::binary(BinaryOp::Mul, rhs, casted_condition), call_stack, ); + Instruction::Constrain(lhs, rhs, message) } Instruction::Store { address, value } => { @@ -817,7 +822,7 @@ mod test { #[test] fn merge_stores() { // fn main f0 { - // b0(v0: u1, v1: ref): + // b0(v0: u1, v1: &mut Field): // jmpif v0, then: b1, else: b2 // b1(): // store v1, Field 5 @@ -832,7 +837,7 @@ mod test { let b2 = builder.insert_block(); let v0 = builder.add_parameter(Type::bool()); - let v1 = builder.add_parameter(Type::Reference); + let v1 = builder.add_parameter(Type::Reference(Rc::new(Type::field()))); builder.terminate_with_jmpif(v0, b1, b2); @@ -894,7 +899,7 @@ mod test { let b3 = builder.insert_block(); let v0 = builder.add_parameter(Type::bool()); - let v1 = builder.add_parameter(Type::Reference); + let v1 = builder.add_parameter(Type::Reference(Rc::new(Type::field()))); builder.terminate_with_jmpif(v0, b1, b2); @@ -935,7 +940,6 @@ mod test { // } let ssa = ssa.flatten_cfg(); let main = ssa.main(); - println!("{ssa}"); assert_eq!(main.reachable_blocks().len(), 1); let store_count = count_instruction(main, |ins| matches!(ins, Instruction::Store { .. })); @@ -993,7 +997,7 @@ mod test { let c1 = builder.add_parameter(Type::bool()); let c4 = builder.add_parameter(Type::bool()); - let r1 = builder.insert_allocate(); + let r1 = builder.insert_allocate(Type::field()); let store_value = |builder: &mut FunctionBuilder, value: u128| { let value = builder.field_constant(value); @@ -1098,7 +1102,7 @@ mod test { let main = ssa.main(); let ret = match main.dfg[main.entry_block()].terminator() { Some(TerminatorInstruction::Return { return_values, .. 
}) => return_values[0], - _ => unreachable!(), + _ => unreachable!("Should have terminator instruction"), }; let merged_values = get_all_constants_reachable_from_instruction(&main.dfg, ret); @@ -1144,7 +1148,7 @@ mod test { builder.terminate_with_jmpif(v0, b1, b2); builder.switch_to_block(b1); - let v2 = builder.insert_allocate(); + let v2 = builder.insert_allocate(Type::field()); let zero = builder.field_constant(0u128); builder.insert_store(v2, zero); let _v4 = builder.insert_load(v2, Type::field()); @@ -1313,7 +1317,7 @@ mod test { let v8 = builder.insert_binary(v6, BinaryOp::Mod, i_two); let v9 = builder.insert_cast(v8, Type::bool()); - let v10 = builder.insert_allocate(); + let v10 = builder.insert_allocate(Type::field()); builder.insert_store(v10, zero); builder.terminate_with_jmpif(v9, b1, b2); @@ -1412,9 +1416,9 @@ mod test { let ten = builder.field_constant(10u128); let one_hundred = builder.field_constant(100u128); - let v0 = builder.insert_allocate(); + let v0 = builder.insert_allocate(Type::field()); builder.insert_store(v0, zero); - let v2 = builder.insert_allocate(); + let v2 = builder.insert_allocate(Type::field()); builder.insert_store(v2, two); let v4 = builder.insert_load(v2, Type::field()); let v5 = builder.insert_binary(v4, BinaryOp::Lt, two); @@ -1469,7 +1473,7 @@ mod test { None => unreachable!("Expected constant 200 for return value"), } } - _ => unreachable!(), + _ => unreachable!("Should have terminator instruction"), } } } diff --git a/compiler/noirc_evaluator/src/ssa/opt/flatten_cfg/value_merger.rs b/compiler/noirc_evaluator/src/ssa/opt/flatten_cfg/value_merger.rs index 32979f78632..446560f45f1 100644 --- a/compiler/noirc_evaluator/src/ssa/opt/flatten_cfg/value_merger.rs +++ b/compiler/noirc_evaluator/src/ssa/opt/flatten_cfg/value_merger.rs @@ -60,7 +60,7 @@ impl<'a> ValueMerger<'a> { typ @ Type::Slice(_) => { self.merge_slice_values(typ, then_condition, else_condition, then_value, else_value) } - Type::Reference => panic!("Cannot return references from an if expression"), + Type::Reference(_) => panic!("Cannot return references from an if expression"), Type::Function => panic!("Cannot return functions from an if expression"), } } @@ -333,7 +333,7 @@ impl<'a> ValueMerger<'a> { // to accurately construct dummy data unreachable!("ICE: Cannot return a slice of slices from an if expression") } - Type::Reference => { + Type::Reference(_) => { unreachable!("ICE: Merging references is unsupported") } Type::Function => { diff --git a/compiler/noirc_evaluator/src/ssa/opt/inlining.rs b/compiler/noirc_evaluator/src/ssa/opt/inlining.rs index ed2484febac..776f22b2877 100644 --- a/compiler/noirc_evaluator/src/ssa/opt/inlining.rs +++ b/compiler/noirc_evaluator/src/ssa/opt/inlining.rs @@ -36,6 +36,7 @@ impl Ssa { /// changes. This is because if the function's id later becomes known by a later /// pass, we would need to re-run all of inlining anyway to inline it, so we might /// as well save the work for later instead of performing it twice. 
+ #[tracing::instrument(level = "trace", skip(self))] pub(crate) fn inline_functions(mut self) -> Ssa { self.functions = btree_map(get_entry_point_functions(&self), |entry_point| { let new_function = InlineContext::new(&self, entry_point).inline_all(&self); @@ -139,11 +140,15 @@ impl InlineContext { context.blocks.insert(context.source_function.entry_block(), entry_block); context.inline_blocks(ssa); + // translate databus values + let databus = entry_point.dfg.data_bus.map_values(|t| context.translate_value(t)); // Finally, we should have 1 function left representing the inlined version of the target function. let mut new_ssa = self.builder.finish(); assert_eq!(new_ssa.functions.len(), 1); - new_ssa.functions.pop_first().unwrap().1 + let mut new_func = new_ssa.functions.pop_first().unwrap().1; + new_func.dfg.data_bus = databus; + new_func } /// Inlines a function into the current function and returns the translated return values diff --git a/compiler/noirc_evaluator/src/ssa/opt/mem2reg.rs b/compiler/noirc_evaluator/src/ssa/opt/mem2reg.rs index e5fffaccdd0..0a49ca4ecca 100644 --- a/compiler/noirc_evaluator/src/ssa/opt/mem2reg.rs +++ b/compiler/noirc_evaluator/src/ssa/opt/mem2reg.rs @@ -86,11 +86,13 @@ use self::block::{Block, Expression}; impl Ssa { /// Attempts to remove any load instructions that recover values that are already available in /// scope, and attempts to remove stores that are subsequently redundant. + #[tracing::instrument(level = "trace", skip(self))] pub(crate) fn mem2reg(mut self) -> Ssa { for function in self.functions.values_mut() { let mut context = PerFunctionContext::new(function); context.mem2reg(); context.remove_instructions(); + context.update_data_bus(); } self } @@ -326,7 +328,7 @@ impl<'f> PerFunctionContext<'f> { match typ { Type::Numeric(_) => false, Type::Function => false, - Type::Reference => true, + Type::Reference(_) => true, Type::Array(elements, _) | Type::Slice(elements) => { elements.iter().any(Self::contains_references) } @@ -362,6 +364,11 @@ impl<'f> PerFunctionContext<'f> { } } + fn update_data_bus(&mut self) { + let databus = self.inserter.function.dfg.data_bus.clone(); + self.inserter.function.dfg.data_bus = databus.map_values(|t| self.inserter.resolve(t)); + } + fn handle_terminator(&mut self, block: BasicBlockId, references: &mut Block) { self.inserter.map_terminator_in_place(block); @@ -427,7 +434,7 @@ mod tests { let func_id = Id::test_new(0); let mut builder = FunctionBuilder::new("func".into(), func_id, RuntimeType::Acir); - let v0 = builder.insert_allocate(); + let v0 = builder.insert_allocate(Type::Array(Rc::new(vec![Type::field()]), 2)); let one = builder.field_constant(FieldElement::one()); let two = builder.field_constant(FieldElement::one()); @@ -468,7 +475,7 @@ mod tests { let func_id = Id::test_new(0); let mut builder = FunctionBuilder::new("func".into(), func_id, RuntimeType::Acir); - let v0 = builder.insert_allocate(); + let v0 = builder.insert_allocate(Type::field()); let one = builder.field_constant(FieldElement::one()); builder.insert_store(v0, one); let v1 = builder.insert_load(v0, Type::field()); @@ -502,7 +509,7 @@ mod tests { let func_id = Id::test_new(0); let mut builder = FunctionBuilder::new("func".into(), func_id, RuntimeType::Acir); - let v0 = builder.insert_allocate(); + let v0 = builder.insert_allocate(Type::field()); let const_one = builder.field_constant(FieldElement::one()); builder.insert_store(v0, const_one); builder.terminate_with_return(vec![v0]); @@ -562,7 +569,7 @@ mod tests { let main_id = 
Id::test_new(0); let mut builder = FunctionBuilder::new("main".into(), main_id, RuntimeType::Acir); - let v0 = builder.insert_allocate(); + let v0 = builder.insert_allocate(Type::field()); let five = builder.field_constant(5u128); builder.insert_store(v0, five); @@ -642,12 +649,12 @@ mod tests { let main_id = Id::test_new(0); let mut builder = FunctionBuilder::new("main".into(), main_id, RuntimeType::Acir); - let v0 = builder.insert_allocate(); + let v0 = builder.insert_allocate(Type::field()); let zero = builder.field_constant(0u128); builder.insert_store(v0, zero); - let v2 = builder.insert_allocate(); + let v2 = builder.insert_allocate(Type::Reference(Rc::new(Type::field()))); builder.insert_store(v2, v0); let v3 = builder.insert_load(v2, Type::field()); diff --git a/compiler/noirc_evaluator/src/ssa/opt/mem2reg/block.rs b/compiler/noirc_evaluator/src/ssa/opt/mem2reg/block.rs index 22c5705b723..532785d2928 100644 --- a/compiler/noirc_evaluator/src/ssa/opt/mem2reg/block.rs +++ b/compiler/noirc_evaluator/src/ssa/opt/mem2reg/block.rs @@ -1,4 +1,4 @@ -use std::{borrow::Cow, collections::BTreeMap}; +use std::borrow::Cow; use crate::ssa::ir::{ function::Function, @@ -19,27 +19,27 @@ pub(super) struct Block { /// Maps a ValueId to the Expression it represents. /// Multiple ValueIds can map to the same Expression, e.g. /// dereferences to the same allocation. - pub(super) expressions: BTreeMap, + pub(super) expressions: im::OrdMap, /// Each expression is tracked as to how many aliases it /// may have. If there is only 1, we can attempt to optimize /// out any known loads to that alias. Note that "alias" here /// includes the original reference as well. - pub(super) aliases: BTreeMap, + pub(super) aliases: im::OrdMap, /// Each allocate instruction result (and some reference block parameters) /// will map to a Reference value which tracks whether the last value stored /// to the reference is known. - pub(super) references: BTreeMap, + pub(super) references: im::OrdMap, /// The last instance of a `Store` instruction to each address in this block - pub(super) last_stores: BTreeMap, + pub(super) last_stores: im::OrdMap, } /// An `Expression` here is used to represent a canonical key /// into the aliases map since otherwise two dereferences of the /// same address will be given different ValueIds. -#[derive(Debug, Clone, PartialOrd, Ord, PartialEq, Eq)] +#[derive(Debug, Clone, PartialOrd, Ord, PartialEq, Eq, Hash)] pub(super) enum Expression { Dereference(Box), ArrayElement(Box), @@ -111,10 +111,7 @@ impl Block { } fn invalidate_all_references(&mut self) { - for reference_value in self.references.values_mut() { - *reference_value = ReferenceValue::Unknown; - } - + self.references.clear(); self.last_stores.clear(); } @@ -137,7 +134,7 @@ impl Block { } // Keep only the references present in both maps. - let mut intersection = BTreeMap::new(); + let mut intersection = im::OrdMap::new(); for (value_id, reference) in &other.references { if let Some(existing) = self.references.get(value_id) { intersection.insert(*value_id, existing.unify(*reference)); diff --git a/compiler/noirc_evaluator/src/ssa/opt/simplify_cfg.rs b/compiler/noirc_evaluator/src/ssa/opt/simplify_cfg.rs index d491afc3d26..a31def8fd98 100644 --- a/compiler/noirc_evaluator/src/ssa/opt/simplify_cfg.rs +++ b/compiler/noirc_evaluator/src/ssa/opt/simplify_cfg.rs @@ -29,6 +29,7 @@ impl Ssa { /// only 1 successor then (2) also will be applied. /// /// Currently, 1 and 4 are unimplemented. 
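(Editor's aside: throughout this patch, SSA passes gain `#[tracing::instrument(level = "trace", skip(self))]` spans. The snippet below is a minimal, self-contained sketch of how a driver might surface those spans; it assumes the `tracing` and `tracing-subscriber` crates as dependencies, and the initialization shown is illustrative rather than what the compiler actually does.)

    use tracing::Level;

    fn init_tracing() {
        // Emit every span and event up to TRACE level on stderr. In a real driver this
        // would typically be gated behind a CLI flag or an environment variable.
        tracing_subscriber::fmt()
            .with_max_level(Level::TRACE)
            .with_writer(std::io::stderr)
            .init();
    }

    fn main() {
        init_tracing();
        // Any subsequent call into an instrumented pass (e.g. `Ssa::simplify_cfg`)
        // now produces an enter/exit span named after that function.
        tracing::trace!("tracing initialized");
    }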
+ #[tracing::instrument(level = "trace", skip(self))] pub(crate) fn simplify_cfg(mut self) -> Self { for function in self.functions.values_mut() { simplify_function(function); diff --git a/compiler/noirc_evaluator/src/ssa/opt/unrolling.rs b/compiler/noirc_evaluator/src/ssa/opt/unrolling.rs index 50c2f5b1524..645adb6b350 100644 --- a/compiler/noirc_evaluator/src/ssa/opt/unrolling.rs +++ b/compiler/noirc_evaluator/src/ssa/opt/unrolling.rs @@ -7,7 +7,7 @@ //! b. If we have previously modified any of the blocks in the loop, //! restart from step 1 to refresh the context. //! c. If not, try to unroll the loop. If successful, remember the modified -//! blocks. If unsuccessfuly either error if the abort_on_error flag is set, +//! blocks. If unsuccessfully either error if the abort_on_error flag is set, //! or otherwise remember that the loop failed to unroll and leave it unmodified. //! //! Note that this pass also often creates superfluous jmp instructions in the @@ -36,6 +36,7 @@ use fxhash::FxHashMap as HashMap; impl Ssa { /// Unroll all loops in each SSA function. /// If any loop cannot be unrolled, it is left as-is or in a partially unrolled state. + #[tracing::instrument(level = "trace", skip(self))] pub(crate) fn unroll_loops(mut self) -> Result { for function in self.functions.values_mut() { // Loop unrolling in brillig can lead to a code explosion currently. This can diff --git a/compiler/noirc_evaluator/src/ssa/ssa_gen/context.rs b/compiler/noirc_evaluator/src/ssa/ssa_gen/context.rs index 4879facd780..b34b667c31a 100644 --- a/compiler/noirc_evaluator/src/ssa/ssa_gen/context.rs +++ b/compiler/noirc_evaluator/src/ssa/ssa_gen/context.rs @@ -152,7 +152,8 @@ impl<'a> FunctionContext<'a> { /// Allocate a single slot of memory and store into it the given initial value of the variable. /// Always returns a Value::Mutable wrapping the allocate instruction. pub(super) fn new_mutable_variable(&mut self, value_to_store: ValueId) -> Value { - let alloc = self.builder.insert_allocate(); + let element_type = self.builder.current_function.dfg.type_of_value(value_to_store); + let alloc = self.builder.insert_allocate(element_type); self.builder.insert_store(alloc, value_to_store); let typ = self.builder.type_of_value(value_to_store); Value::Mutable(alloc, typ) @@ -177,7 +178,7 @@ impl<'a> FunctionContext<'a> { // A mutable reference wraps each element into a reference. // This can be multiple values if the element type is a tuple. 
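(Editor's aside, illustrating the comment above: a parameter of Noir type `&mut (Field, u32)` is flattened into two SSA values, and with element-typed references those values are now typed `&mut Field` and `&mut u32` respectively, whereas previously both carried the single opaque `reference` type.)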
ast::Type::MutableReference(element) => { - Self::map_type_helper(element, &mut |_| f(Type::Reference)) + Self::map_type_helper(element, &mut |typ| f(Type::Reference(Rc::new(typ)))) } ast::Type::FmtString(len, fields) => { // A format string is represented by multiple values @@ -231,8 +232,8 @@ impl<'a> FunctionContext<'a> { ast::Type::Slice(_) => panic!("convert_non_tuple_type called on a slice: {typ}"), ast::Type::MutableReference(element) => { // Recursive call to panic if element is a tuple - Self::convert_non_tuple_type(element); - Type::Reference + let element = Self::convert_non_tuple_type(element); + Type::Reference(Rc::new(element)) } } } @@ -274,16 +275,13 @@ impl<'a> FunctionContext<'a> { let bit_width = self.builder.numeric_constant(FieldElement::from(2_i128.pow(bit_size)), Type::field()); let sign_not = self.builder.insert_binary(one, BinaryOp::Sub, sign); - let as_field = - self.builder.insert_instruction(Instruction::Cast(input, Type::field()), None).first(); - let sign_field = - self.builder.insert_instruction(Instruction::Cast(sign, Type::field()), None).first(); + + // We use unsafe casts here, this is fine as we're casting to a `field` type. + let as_field = self.builder.insert_cast(input, Type::field()); + let sign_field = self.builder.insert_cast(sign, Type::field()); let positive_predicate = self.builder.insert_binary(sign_field, BinaryOp::Mul, as_field); let two_complement = self.builder.insert_binary(bit_width, BinaryOp::Sub, as_field); - let sign_not_field = self - .builder - .insert_instruction(Instruction::Cast(sign_not, Type::field()), None) - .first(); + let sign_not_field = self.builder.insert_cast(sign_not, Type::field()); let negative_predicate = self.builder.insert_binary(sign_not_field, BinaryOp::Mul, two_complement); self.builder.insert_binary(positive_predicate, BinaryOp::Add, negative_predicate) @@ -314,43 +312,24 @@ impl<'a> FunctionContext<'a> { match operator { BinaryOpKind::Add | BinaryOpKind::Subtract => { // Result is computed modulo the bit size - let mut result = self - .builder - .insert_instruction( - Instruction::Truncate { - value: result, - bit_size, - max_bit_size: bit_size + 1, - }, - None, - ) - .first(); - result = self.builder.insert_cast(result, Type::unsigned(bit_size)); + let result = self.builder.insert_truncate(result, bit_size, bit_size + 1); + let result = + self.insert_safe_cast(result, Type::unsigned(bit_size), location); self.check_signed_overflow(result, lhs, rhs, operator, bit_size, location); - self.builder.insert_cast(result, result_type) + self.insert_safe_cast(result, result_type, location) } BinaryOpKind::Multiply => { // Result is computed modulo the bit size let mut result = self.builder.insert_cast(result, Type::unsigned(2 * bit_size)); - result = self - .builder - .insert_instruction( - Instruction::Truncate { - value: result, - bit_size, - max_bit_size: 2 * bit_size, - }, - None, - ) - .first(); + result = self.builder.insert_truncate(result, bit_size, 2 * bit_size); self.check_signed_overflow(result, lhs, rhs, operator, bit_size, location); - self.builder.insert_cast(result, result_type) + self.insert_safe_cast(result, result_type, location) } - BinaryOpKind::ShiftLeft => { - unreachable!("shift is not supported for signed integer") + BinaryOpKind::ShiftLeft | BinaryOpKind::ShiftRight => { + self.check_shift_overflow(result, rhs, bit_size, location, true) } _ => unreachable!("operator {} should not overflow", operator), } @@ -363,22 +342,20 @@ impl<'a> FunctionContext<'a> { BinaryOpKind::ShiftLeft => "left 
shift", _ => unreachable!("operator {} should not overflow", operator), }; - let message = format!("attempt to {} with overflow", op_name); - let range_constraint = Instruction::RangeCheck { - value: result, - max_bit_size: bit_size, - assert_message: Some(message), - }; - self.builder.set_location(location).insert_instruction(range_constraint, None); - if operator == BinaryOpKind::ShiftLeft { - match result_type { - Type::Numeric(NumericType::Signed { bit_size }) - | Type::Numeric(NumericType::Unsigned { bit_size }) => { - self.builder.insert_truncate(result, bit_size, bit_size + 1) - } - _ => result, - } + + if operator == BinaryOpKind::Multiply && bit_size == 1 { + result + } else if operator == BinaryOpKind::ShiftLeft + || operator == BinaryOpKind::ShiftRight + { + self.check_shift_overflow(result, rhs, bit_size, location, false) } else { + let message = format!("attempt to {} with overflow", op_name); + self.builder.set_location(location).insert_range_check( + result, + bit_size, + Some(message), + ); result } } @@ -386,6 +363,51 @@ impl<'a> FunctionContext<'a> { } } + /// Overflow checks for bit-shift + /// We use Rust behavior for bit-shift: + /// If rhs is greater than or equal to the bit size, then we overflow + /// If not, we do not overflow, and bits shifted past the bit size are dropped (zeros are shifted in) + fn check_shift_overflow( + &mut self, + result: ValueId, + rhs: ValueId, + bit_size: u32, + location: Location, + is_signed: bool, + ) -> ValueId { + let one = self.builder.numeric_constant(FieldElement::one(), Type::bool()); + let rhs = if is_signed { + self.insert_safe_cast(rhs, Type::unsigned(bit_size), location) + } else { + rhs + }; + // Bit-shift with a negative number is an overflow + if is_signed { + // We compute the sign of rhs. + let half_width = self.builder.numeric_constant( + FieldElement::from(2_i128.pow(bit_size - 1)), + Type::unsigned(bit_size), + ); + let sign = self.builder.insert_binary(rhs, BinaryOp::Lt, half_width); + self.builder.set_location(location).insert_constrain( + sign, + one, + Some("attempt to bit-shift with overflow".to_string()), + ); + } + + let max = self + .builder + .numeric_constant(FieldElement::from(bit_size as i128), Type::unsigned(bit_size)); + let overflow = self.builder.insert_binary(rhs, BinaryOp::Lt, max); + self.builder.set_location(location).insert_constrain( + overflow, + one, + Some("attempt to bit-shift with overflow".to_owned()), + ); + self.builder.insert_truncate(result, bit_size, bit_size + 1) + } + /// Insert constraints ensuring that the operation does not overflow the bit size of the result /// We assume that: /// lhs and rhs are signed integers of bit size bit_size /// @@ -414,8 +436,8 @@ impl<'a> FunctionContext<'a> { Type::unsigned(bit_size), ); // We compute the sign of the operands.
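For reference, the rule that `check_shift_overflow` encodes as constraints can be modelled on plain Rust integers: a shift amount greater than or equal to the bit width is an overflow (as is a negative amount for signed operands), and otherwise the bits shifted past the width are simply dropped. A standalone sketch of the unsigned case under those assumptions; `checked_shl` is an illustrative helper, not the SSA builder API.

```rust
/// Mirrors the documented rule for an unsigned `bit_size`-bit left shift:
/// errors when `rhs >= bit_size`, otherwise keeps only the low `bit_size` bits.
fn checked_shl(lhs: u128, rhs: u32, bit_size: u32) -> Result<u128, &'static str> {
    assert!(bit_size <= 64, "sketch only handles widths whose results fit in u128 math");
    if rhs >= bit_size {
        return Err("attempt to bit-shift with overflow");
    }
    let mask = (1u128 << bit_size) - 1; // truncate back to bit_size bits
    Ok((lhs << rhs) & mask)
}

fn main() {
    // 8-bit example: 0b1100_0001 << 2 wraps to 0b0000_0100.
    assert_eq!(checked_shl(0b1100_0001, 2, 8), Ok(0b0000_0100));
    // Shifting by the full width (or more) is an overflow, as in the pass above.
    assert_eq!(checked_shl(1, 8, 8), Err("attempt to bit-shift with overflow"));
    println!("shift model behaves as documented");
}
```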
The overflow checks for signed integers depends on these signs - let lhs_as_unsigned = self.builder.insert_cast(lhs, Type::unsigned(bit_size)); - let rhs_as_unsigned = self.builder.insert_cast(rhs, Type::unsigned(bit_size)); + let lhs_as_unsigned = self.insert_safe_cast(lhs, Type::unsigned(bit_size), location); + let rhs_as_unsigned = self.insert_safe_cast(rhs, Type::unsigned(bit_size), location); let lhs_sign = self.builder.insert_binary(lhs_as_unsigned, BinaryOp::Lt, half_width); let mut rhs_sign = self.builder.insert_binary(rhs_as_unsigned, BinaryOp::Lt, half_width); let message = if is_sub { @@ -446,30 +468,27 @@ impl<'a> FunctionContext<'a> { let product_field = self.builder.insert_binary(lhs_abs, BinaryOp::Mul, rhs_abs); // It must not already overflow the bit_size let message = "attempt to multiply with overflow".to_string(); - let size_overflow = Instruction::RangeCheck { - value: product_field, - max_bit_size: bit_size, - assert_message: Some(message.clone()), - }; - self.builder.set_location(location).insert_instruction(size_overflow, None); + self.builder.set_location(location).insert_range_check( + product_field, + bit_size, + Some(message.clone()), + ); let product = self.builder.insert_cast(product_field, Type::unsigned(bit_size)); // Then we check the signed product fits in a signed integer of bit_size-bits let not_same = self.builder.insert_binary(one, BinaryOp::Sub, same_sign); - let not_same_sign_field = self - .builder - .insert_instruction(Instruction::Cast(not_same, Type::unsigned(bit_size)), None) - .first(); + let not_same_sign_field = + self.insert_safe_cast(not_same, Type::unsigned(bit_size), location); let positive_maximum_with_offset = self.builder.insert_binary(half_width, BinaryOp::Add, not_same_sign_field); let product_overflow_check = self.builder.insert_binary(product, BinaryOp::Lt, positive_maximum_with_offset); - self.builder.set_location(location).insert_instruction( - Instruction::Constrain(product_overflow_check, one, Some(message)), - None, + self.builder.set_location(location).insert_constrain( + product_overflow_check, + one, + Some(message), ); } - BinaryOpKind::ShiftLeft => unreachable!("shift is not supported for signed integer"), _ => unreachable!("operator {} should not overflow", operator), } } @@ -485,11 +504,26 @@ impl<'a> FunctionContext<'a> { mut rhs: ValueId, location: Location, ) -> Values { + let result_type = self.builder.type_of_value(lhs); let mut result = match operator { - BinaryOpKind::ShiftLeft => self.builder.insert_shift_left(lhs, rhs), - BinaryOpKind::ShiftRight => self.builder.insert_shift_right(lhs, rhs), + BinaryOpKind::ShiftLeft => { + let bit_size = match result_type { + Type::Numeric(NumericType::Signed { bit_size }) + | Type::Numeric(NumericType::Unsigned { bit_size }) => bit_size, + _ => unreachable!("ICE: left-shift attempted on non-integer"), + }; + self.builder.insert_wrapping_shift_left(lhs, rhs, bit_size) + } + BinaryOpKind::ShiftRight => { + let bit_size = match result_type { + Type::Numeric(NumericType::Signed { bit_size }) + | Type::Numeric(NumericType::Unsigned { bit_size }) => bit_size, + _ => unreachable!("ICE: right-shift attempted on non-integer"), + }; + self.builder.insert_shift_right(lhs, rhs, bit_size) + } BinaryOpKind::Equal | BinaryOpKind::NotEqual - if matches!(self.builder.type_of_value(lhs), Type::Array(..)) => + if matches!(result_type, Type::Array(..)) => { return self.insert_array_equality(lhs, operator, rhs, location) } @@ -573,7 +607,7 @@ impl<'a> FunctionContext<'a> { let loop_end = 
self.builder.insert_block(); // pre-loop - let result_alloc = self.builder.set_location(location).insert_allocate(); + let result_alloc = self.builder.set_location(location).insert_allocate(Type::bool()); let true_value = self.builder.numeric_constant(1u128, Type::bool()); self.builder.insert_store(result_alloc, true_value); let zero = self.builder.field_constant(0u128); @@ -634,6 +668,29 @@ impl<'a> FunctionContext<'a> { reshaped_return_values } + /// Inserts a cast instruction at the end of the current block and returns the results + /// of the cast. + /// + /// Compared to `self.builder.insert_cast`, this version will automatically truncate `value` to be a valid `typ`. + pub(super) fn insert_safe_cast( + &mut self, + mut value: ValueId, + typ: Type, + location: Location, + ) -> ValueId { + self.builder.set_location(location); + + // To ensure that `value` is a valid `typ`, we insert an `Instruction::Truncate` instruction beforehand if + // we're narrowing the type size. + let incoming_type_size = self.builder.type_of_value(value).bit_size(); + let target_type_size = typ.bit_size(); + if target_type_size < incoming_type_size { + value = self.builder.insert_truncate(value, target_type_size, incoming_type_size); + } + + self.builder.insert_cast(value, typ) + } + /// Create a const offset of an address for an array load or store pub(super) fn make_offset(&mut self, mut address: ValueId, offset: u128) -> ValueId { if offset != 0 { diff --git a/compiler/noirc_evaluator/src/ssa/ssa_gen/mod.rs b/compiler/noirc_evaluator/src/ssa/ssa_gen/mod.rs index 7677f5669cb..0b8c3a37ef9 100644 --- a/compiler/noirc_evaluator/src/ssa/ssa_gen/mod.rs +++ b/compiler/noirc_evaluator/src/ssa/ssa_gen/mod.rs @@ -1,4 +1,4 @@ -mod context; +pub(crate) mod context; mod program; mod value; @@ -8,13 +8,16 @@ use context::SharedContext; use iter_extended::{try_vecmap, vecmap}; use noirc_errors::Location; use noirc_frontend::{ - monomorphization::ast::{self, Binary, Expression, Program}, - BinaryOpKind, + monomorphization::ast::{self, Expression, Program}, + Visibility, }; use crate::{ errors::RuntimeError, - ssa::ir::{instruction::Intrinsic, types::NumericType}, + ssa::{ + function_builder::data_bus::DataBusBuilder, + ir::{instruction::Intrinsic, types::NumericType}, + }, }; use self::{ @@ -22,17 +25,25 @@ use self::{ value::{Tree, Values}, }; -use super::ir::{ - function::RuntimeType, - instruction::{BinaryOp, TerminatorInstruction}, - types::Type, - value::ValueId, +use super::{ + function_builder::data_bus::DataBus, + ir::{ + function::RuntimeType, + instruction::{BinaryOp, TerminatorInstruction}, + types::Type, + value::ValueId, + }, }; /// Generates SSA for the given monomorphized program. /// /// This function will generate the SSA but does not perform any optimizations on it. pub(crate) fn generate_ssa(program: Program) -> Result { + // see which parameter has call_data/return_data attribute + let is_databus = DataBusBuilder::is_databus(&program.main_function_signature); + + let is_return_data = matches!(program.return_visibility, Visibility::DataBus); + let return_location = program.return_location; let context = SharedContext::new(program); @@ -48,20 +59,41 @@ pub(crate) fn generate_ssa(program: Program) -> Result { if main.unconstrained { RuntimeType::Brillig } else { RuntimeType::Acir }, &context, ); + + // Generate the call_data bus from the relevant parameters. 
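The `insert_safe_cast` helper introduced above truncates before casting whenever the target type is narrower, so the cast always produces a valid value of the target bit size. Below is a standalone model of that truncate-then-cast order on ordinary integers; `safe_cast_to_width` is a hypothetical illustration, not the `FunctionBuilder` API.

```rust
/// Truncate `value` to `target_bits` before "casting", mimicking the
/// narrowing behaviour described for insert_safe_cast.
fn safe_cast_to_width(value: u128, source_bits: u32, target_bits: u32) -> u128 {
    assert!(source_bits <= 127 && target_bits <= 127, "sketch keeps the math in u128");
    if target_bits < source_bits {
        value & ((1u128 << target_bits) - 1) // drop the high bits first
    } else {
        value // widening (or same-width) casts need no truncation
    }
}

fn main() {
    // A 16-bit value narrowed to 8 bits keeps only its low byte.
    assert_eq!(safe_cast_to_width(0xABCD, 16, 8), 0xCD);
    // Widening leaves the value untouched.
    assert_eq!(safe_cast_to_width(0x2A, 8, 32), 0x2A);
}
```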
We create it *before* processing the function body + let call_data = function_context.builder.call_data_bus(is_databus); + function_context.codegen_function_body(&main.body)?; + let mut return_data = DataBusBuilder::new(); if let Some(return_location) = return_location { let block = function_context.builder.current_block(); - if function_context.builder.current_function.dfg[block].terminator().is_some() { - let return_instruction = - function_context.builder.current_function.dfg[block].unwrap_terminator_mut(); - match return_instruction { - TerminatorInstruction::Return { call_stack, .. } => { - call_stack.clear(); - call_stack.push_back(return_location); + if function_context.builder.current_function.dfg[block].terminator().is_some() + && is_return_data + { + // initialize the return_data bus from the return values + let return_data_values = + match function_context.builder.current_function.dfg[block].unwrap_terminator() { + TerminatorInstruction::Return { return_values, .. } => return_values.to_owned(), + _ => unreachable!("ICE - expect return on the last block"), + }; + + return_data = + function_context.builder.initialize_data_bus(&return_data_values, return_data); + } + let return_instruction = + function_context.builder.current_function.dfg[block].unwrap_terminator_mut(); + match return_instruction { + TerminatorInstruction::Return { return_values, call_stack } => { + call_stack.clear(); + call_stack.push_back(return_location); + // replace the returned values with the return data array + if let Some(return_data_bus) = return_data.databus { + return_values.clear(); + return_values.push(return_data_bus); } - _ => unreachable!("ICE - expect return on the last block"), } + _ => unreachable!("ICE - expect return on the last block"), } } @@ -74,6 +106,9 @@ pub(crate) fn generate_ssa(program: Program) -> Result { function_context.new_function(dest_id, function); function_context.codegen_function_body(&function.body)?; } + // we save the data bus inside the dfg + function_context.builder.current_function.dfg.data_bus = + DataBus::get_data_bus(call_data, return_data); Ok(function_context.builder.finish()) } @@ -189,8 +224,9 @@ impl<'a> FunctionContext<'a> { } fn codegen_string(&mut self, string: &str) -> Values { - let elements = - vecmap(string.as_bytes(), |byte| self.builder.field_constant(*byte as u128).into()); + let elements = vecmap(string.as_bytes(), |byte| { + self.builder.numeric_constant(*byte as u128, Type::unsigned(8)).into() + }); let typ = Self::convert_non_tuple_type(&ast::Type::String(elements.len() as u64)); self.codegen_array(elements, typ) } @@ -210,6 +246,13 @@ impl<'a> FunctionContext<'a> { for element in elements { element.for_each(|element| { let element = element.eval(self); + + // If we're referencing a sub-array in a larger nested array we need to + // increase the reference count of the sub array. This maintains a + // pessimistic reference count (since some are likely moved rather than shared) + // which is important for Brillig's copy on write optimization. This has no + // effect in ACIR code. 
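The reference-count increments added throughout this hunk keep a pessimistic count of how many places may alias an array, which is what lets Brillig copy only when a shared value is about to be mutated. `Rc::make_mut` from the Rust standard library gives a self-contained analogy for that copy-on-write behaviour; this is an analogy only, not how the SSA reference-count instructions are implemented.

```rust
use std::rc::Rc;

fn main() {
    let original = Rc::new(vec![1, 2, 3]);

    // "Incrementing the reference count": the array is now (possibly) shared.
    let mut aliased = Rc::clone(&original);

    // Copy-on-write: because the count is > 1, make_mut clones before mutating,
    // leaving the original untouched. With a count of exactly 1 it would mutate
    // in place and skip the copy entirely.
    Rc::make_mut(&mut aliased).push(4);

    assert_eq!(*original, vec![1, 2, 3]);
    assert_eq!(*aliased, vec![1, 2, 3, 4]);
    println!("shared value was copied before mutation");
}
```

An over-counted (pessimistic) reference only costs an unnecessary copy; an under-counted one would let a mutation leak into another owner, which is why the pass increments eagerly on construction, indexing, let bindings, and call arguments.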
+ self.builder.increment_array_reference_count(element); array.push_back(element); }); } @@ -248,11 +291,12 @@ impl<'a> FunctionContext<'a> { Ok(self.codegen_reference(&unary.rhs)?.map(|rhs| { match rhs { value::Value::Normal(value) => { - let alloc = self.builder.insert_allocate(); + let rhs_type = self.builder.current_function.dfg.type_of_value(value); + let alloc = self.builder.insert_allocate(rhs_type); self.builder.insert_store(alloc, value); Tree::Leaf(value::Value::Normal(alloc)) } - // NOTE: The `.into()` here converts the Value::Mutable into + // The `.into()` here converts the Value::Mutable into // a Value::Normal so it is no longer automatically dereferenced. value::Value::Mutable(reference, _) => reference.into(), } @@ -300,6 +344,7 @@ impl<'a> FunctionContext<'a> { } else { (array_or_slice[0], None) }; + self.codegen_array_index( array, index_value, @@ -344,7 +389,13 @@ impl<'a> FunctionContext<'a> { } _ => unreachable!("must have array or slice but got {array_type}"), } - self.builder.insert_array_get(array, offset, typ).into() + + // Reference counting in brillig relies on us incrementing reference + // counts when nested arrays/slices are constructed or indexed. This + // has no effect in ACIR code. + let result = self.builder.insert_array_get(array, offset, typ); + self.builder.increment_array_reference_count(result); + result.into() })) } @@ -383,8 +434,8 @@ impl<'a> FunctionContext<'a> { fn codegen_cast(&mut self, cast: &ast::Cast) -> Result { let lhs = self.codegen_non_tuple_expression(&cast.lhs)?; let typ = Self::convert_non_tuple_type(&cast.r#type); - self.builder.set_location(cast.location); - Ok(self.builder.insert_cast(lhs, typ).into()) + + Ok(self.insert_safe_cast(lhs, typ, cast.location).into()) } /// Codegens a for loop, creating three new blocks in the process. @@ -534,8 +585,12 @@ impl<'a> FunctionContext<'a> { arguments.append(&mut values); } - self.codegen_intrinsic_call_checks(function, &arguments, call.location); + // If an array is passed as an argument we increase its reference count + for argument in &arguments { + self.builder.increment_array_reference_count(*argument); + } + self.codegen_intrinsic_call_checks(function, &arguments, call.location); Ok(self.insert_call(function, arguments, &call.return_type, call.location)) } @@ -575,12 +630,18 @@ impl<'a> FunctionContext<'a> { fn codegen_let(&mut self, let_expr: &ast::Let) -> Result { let mut values = self.codegen_expression(&let_expr.expression)?; - if let_expr.mutable { - values = values.map(|value| { - let value = value.eval(self); - Tree::Leaf(self.new_mutable_variable(value)) - }); - } + values = values.map(|value| { + let value = value.eval(self); + + // Make sure to increment array reference counts on each let binding + self.builder.increment_array_reference_count(value); + + Tree::Leaf(if let_expr.mutable { + self.new_mutable_variable(value) + } else { + value::Value::Normal(value) + }) + }); self.define(let_expr.id, values); Ok(Self::unit_value()) @@ -592,24 +653,10 @@ impl<'a> FunctionContext<'a> { location: Location, assert_message: Option, ) -> Result { - match expr { - // If we're constraining an equality to be true then constrain the two sides directly. - Expression::Binary(Binary { lhs, operator: BinaryOpKind::Equal, rhs, .. 
}) => { - let lhs = self.codegen_non_tuple_expression(lhs)?; - let rhs = self.codegen_non_tuple_expression(rhs)?; - self.builder.set_location(location).insert_constrain(lhs, rhs, assert_message); - } + let expr = self.codegen_non_tuple_expression(expr)?; + let true_literal = self.builder.numeric_constant(true, Type::bool()); + self.builder.set_location(location).insert_constrain(expr, true_literal, assert_message); - _ => { - let expr = self.codegen_non_tuple_expression(expr)?; - let true_literal = self.builder.numeric_constant(true, Type::bool()); - self.builder.set_location(location).insert_constrain( - expr, - true_literal, - assert_message, - ); - } - } Ok(Self::unit_value()) } diff --git a/compiler/noirc_frontend/Cargo.toml b/compiler/noirc_frontend/Cargo.toml index 246d6617c94..80d767f7f2c 100644 --- a/compiler/noirc_frontend/Cargo.toml +++ b/compiler/noirc_frontend/Cargo.toml @@ -22,10 +22,9 @@ serde.workspace = true rustc-hash = "1.1.0" small-ord-set = "0.1.3" regex = "1.9.1" +tracing.workspace = true [dev-dependencies] strum = "0.24" strum_macros = "0.24" - -[features] -aztec = [] +tempfile.workspace = true diff --git a/compiler/noirc_frontend/src/ast/expression.rs b/compiler/noirc_frontend/src/ast/expression.rs index d29e1670944..c78deaf6dbb 100644 --- a/compiler/noirc_frontend/src/ast/expression.rs +++ b/compiler/noirc_frontend/src/ast/expression.rs @@ -50,7 +50,13 @@ impl ExpressionKind { } pub fn prefix(operator: UnaryOp, rhs: Expression) -> ExpressionKind { - ExpressionKind::Prefix(Box::new(PrefixExpression { operator, rhs })) + match (operator, &rhs) { + ( + UnaryOp::Minus, + Expression { kind: ExpressionKind::Literal(Literal::Integer(field, sign)), .. }, + ) => ExpressionKind::Literal(Literal::Integer(*field, !sign)), + _ => ExpressionKind::Prefix(Box::new(PrefixExpression { operator, rhs })), + } } pub fn array(contents: Vec) -> ExpressionKind { @@ -65,7 +71,7 @@ impl ExpressionKind { } pub fn integer(contents: FieldElement) -> ExpressionKind { - ExpressionKind::Literal(Literal::Integer(contents)) + ExpressionKind::Literal(Literal::Integer(contents, false)) } pub fn boolean(contents: bool) -> ExpressionKind { @@ -76,6 +82,10 @@ impl ExpressionKind { ExpressionKind::Literal(Literal::Str(contents)) } + pub fn raw_string(contents: String, hashes: u8) -> ExpressionKind { + ExpressionKind::Literal(Literal::RawStr(contents, hashes)) + } + pub fn format_string(contents: String) -> ExpressionKind { ExpressionKind::Literal(Literal::FmtStr(contents)) } @@ -96,7 +106,7 @@ impl ExpressionKind { }; match literal { - Literal::Integer(integer) => Some(*integer), + Literal::Integer(integer, _) => Some(*integer), _ => None, } } @@ -310,8 +320,9 @@ impl UnaryOp { pub enum Literal { Array(ArrayLiteral), Bool(bool), - Integer(FieldElement), + Integer(FieldElement, /*sign*/ bool), // false for positive integer and true for negative Str(String), + RawStr(String, u8), FmtStr(String), Unit, } @@ -505,8 +516,19 @@ impl Display for Literal { write!(f, "[{repeated_element}; {length}]") } Literal::Bool(boolean) => write!(f, "{}", if *boolean { "true" } else { "false" }), - Literal::Integer(integer) => write!(f, "{}", integer.to_u128()), + Literal::Integer(integer, sign) => { + if *sign { + write!(f, "-{}", integer.to_u128()) + } else { + write!(f, "{}", integer.to_u128()) + } + } Literal::Str(string) => write!(f, "\"{string}\""), + Literal::RawStr(string, num_hashes) => { + let hashes: String = + std::iter::once('#').cycle().take(*num_hashes as usize).collect(); + write!(f, 
"r{hashes}\"{string}\"{hashes}") + } Literal::FmtStr(string) => write!(f, "f\"{string}\""), Literal::Unit => write!(f, "()"), } diff --git a/compiler/noirc_frontend/src/ast/mod.rs b/compiler/noirc_frontend/src/ast/mod.rs index 7ce01f461ed..d9af5893024 100644 --- a/compiler/noirc_frontend/src/ast/mod.rs +++ b/compiler/noirc_frontend/src/ast/mod.rs @@ -15,6 +15,7 @@ pub use expression::*; pub use function::*; use noirc_errors::Span; +use serde::{Deserialize, Serialize}; pub use statement::*; pub use structure::*; pub use traits::*; @@ -41,6 +42,8 @@ pub enum UnresolvedTypeData { FormatString(UnresolvedTypeExpression, Box), Unit, + Parenthesized(Box), + /// A Named UnresolvedType can be a struct type or a type variable Named(Path, Vec), @@ -152,6 +155,7 @@ impl std::fmt::Display for UnresolvedTypeData { Unit => write!(f, "()"), Error => write!(f, "error"), Unspecified => write!(f, "unspecified"), + Parenthesized(typ) => write!(f, "({typ})"), } } } @@ -230,10 +234,13 @@ impl UnresolvedTypeExpression { fn from_expr_helper(expr: Expression) -> Result { match expr.kind { - ExpressionKind::Literal(Literal::Integer(int)) => match int.try_to_u64() { - Some(int) => Ok(UnresolvedTypeExpression::Constant(int, expr.span)), - None => Err(expr), - }, + ExpressionKind::Literal(Literal::Integer(int, sign)) => { + assert!(!sign, "Negative literal is not allowed here"); + match int.try_to_u64() { + Some(int) => Ok(UnresolvedTypeExpression::Constant(int, expr.span)), + None => Err(expr), + } + } ExpressionKind::Variable(path) => Ok(UnresolvedTypeExpression::Variable(path)), ExpressionKind::Prefix(prefix) if prefix.operator == UnaryOp::Minus => { let lhs = Box::new(UnresolvedTypeExpression::Constant(0, expr.span)); @@ -250,7 +257,20 @@ impl UnresolvedTypeExpression { BinaryOpKind::Multiply => BinaryTypeOperator::Multiplication, BinaryOpKind::Divide => BinaryTypeOperator::Division, BinaryOpKind::Modulo => BinaryTypeOperator::Modulo, - _ => unreachable!(), // impossible via operator_allowed check + + BinaryOpKind::Equal + | BinaryOpKind::NotEqual + | BinaryOpKind::Less + | BinaryOpKind::LessEqual + | BinaryOpKind::Greater + | BinaryOpKind::GreaterEqual + | BinaryOpKind::And + | BinaryOpKind::Or + | BinaryOpKind::Xor + | BinaryOpKind::ShiftRight + | BinaryOpKind::ShiftLeft => { + unreachable!("impossible via `operator_allowed` check") + } }; Ok(UnresolvedTypeExpression::BinaryOperation(lhs, op, rhs, expr.span)) } @@ -278,13 +298,16 @@ pub enum FunctionVisibility { PublicCrate, } -#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)] +#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Serialize, Deserialize)] /// Represents whether the parameter is public or known only to the prover. pub enum Visibility { Public, // Constants are not allowed in the ABI for main at the moment. // Constant, Private, + /// DataBus is public input handled as private input. We use the fact that return values are properly computed by the program to avoid having them as public inputs + /// it is useful for recursion and is handled by the proving system. 
+ DataBus, } impl std::fmt::Display for Visibility { @@ -292,6 +315,7 @@ impl std::fmt::Display for Visibility { match self { Self::Public => write!(f, "pub"), Self::Private => write!(f, "priv"), + Self::DataBus => write!(f, "databus"), } } } diff --git a/compiler/noirc_frontend/src/ast/statement.rs b/compiler/noirc_frontend/src/ast/statement.rs index 1ca4d3101a9..73b1f68778d 100644 --- a/compiler/noirc_frontend/src/ast/statement.rs +++ b/compiler/noirc_frontend/src/ast/statement.rs @@ -198,7 +198,11 @@ impl From for Expression { fn from(i: Ident) -> Expression { Expression { span: i.0.span(), - kind: ExpressionKind::Variable(Path { segments: vec![i], kind: PathKind::Plain }), + kind: ExpressionKind::Variable(Path { + span: i.span(), + segments: vec![i], + kind: PathKind::Plain, + }), } } } @@ -311,6 +315,7 @@ impl UseTree { pub struct Path { pub segments: Vec, pub kind: PathKind, + pub span: Span, } impl Path { @@ -330,18 +335,11 @@ impl Path { } pub fn from_ident(name: Ident) -> Path { - Path { segments: vec![name], kind: PathKind::Plain } + Path { span: name.span(), segments: vec![name], kind: PathKind::Plain } } pub fn span(&self) -> Span { - let mut segments = self.segments.iter(); - let first_segment = segments.next().expect("ice : cannot have an empty path"); - let mut span = first_segment.0.span(); - - for segment in segments { - span = span.merge(segment.0.span()); - } - span + self.span } pub fn last_segment(&self) -> Ident { @@ -545,8 +543,11 @@ impl ForRange { // array.len() let segments = vec![array_ident]; - let array_ident = - ExpressionKind::Variable(Path { segments, kind: PathKind::Plain }); + let array_ident = ExpressionKind::Variable(Path { + segments, + kind: PathKind::Plain, + span: array_span, + }); let end_range = ExpressionKind::MethodCall(Box::new(MethodCallExpression { object: Expression::new(array_ident.clone(), array_span), @@ -561,8 +562,11 @@ impl ForRange { // array[i] let segments = vec![Ident::new(index_name, array_span)]; - let index_ident = - ExpressionKind::Variable(Path { segments, kind: PathKind::Plain }); + let index_ident = ExpressionKind::Variable(Path { + segments, + kind: PathKind::Plain, + span: array_span, + }); let loop_element = ExpressionKind::Index(Box::new(IndexExpression { collection: Expression::new(array_ident, array_span), diff --git a/compiler/noirc_frontend/src/hir/aztec_library.rs b/compiler/noirc_frontend/src/hir/aztec_library.rs deleted file mode 100644 index 3b4703dc60f..00000000000 --- a/compiler/noirc_frontend/src/hir/aztec_library.rs +++ /dev/null @@ -1,1001 +0,0 @@ -use acvm::FieldElement; -use iter_extended::vecmap; -use noirc_errors::Span; - -use crate::graph::CrateId; -use crate::hir::def_collector::errors::DefCollectorErrorKind; -use crate::hir_def::expr::{HirExpression, HirLiteral}; -use crate::hir_def::stmt::HirStatement; -use crate::node_interner::{NodeInterner, StructId}; -use crate::parser::SortedModule; -use crate::token::SecondaryAttribute; -use crate::{ - hir::Context, BlockExpression, CallExpression, CastExpression, Distinctness, Expression, - ExpressionKind, FunctionReturnType, Ident, IndexExpression, LetStatement, Literal, - MemberAccessExpression, MethodCallExpression, NoirFunction, Path, PathKind, Pattern, Statement, - UnresolvedType, UnresolvedTypeData, Visibility, -}; -use crate::{ - ForLoopStatement, ForRange, FunctionDefinition, FunctionVisibility, ImportStatement, - NoirStruct, Param, PrefixExpression, Signedness, StatementKind, StructType, Type, TypeImpl, - UnaryOp, -}; -use fm::FileId; - -use 
super::def_map::ModuleDefId; - -// -// Helper macros for creating noir ast nodes -// -fn ident(name: &str) -> Ident { - Ident::new(name.to_string(), Span::default()) -} - -fn ident_path(name: &str) -> Path { - Path::from_ident(ident(name)) -} - -fn path(ident: Ident) -> Path { - Path::from_ident(ident) -} - -fn expression(kind: ExpressionKind) -> Expression { - Expression::new(kind, Span::default()) -} - -fn variable(name: &str) -> Expression { - expression(ExpressionKind::Variable(ident_path(name))) -} - -fn variable_ident(identifier: Ident) -> Expression { - expression(ExpressionKind::Variable(path(identifier))) -} - -fn variable_path(path: Path) -> Expression { - expression(ExpressionKind::Variable(path)) -} - -fn method_call(object: Expression, method_name: &str, arguments: Vec) -> Expression { - expression(ExpressionKind::MethodCall(Box::new(MethodCallExpression { - object, - method_name: ident(method_name), - arguments, - }))) -} - -fn call(func: Expression, arguments: Vec) -> Expression { - expression(ExpressionKind::Call(Box::new(CallExpression { func: Box::new(func), arguments }))) -} - -fn pattern(name: &str) -> Pattern { - Pattern::Identifier(ident(name)) -} - -fn mutable(name: &str) -> Pattern { - Pattern::Mutable(Box::new(pattern(name)), Span::default()) -} - -fn mutable_assignment(name: &str, assigned_to: Expression) -> Statement { - make_statement(StatementKind::Let(LetStatement { - pattern: mutable(name), - r#type: make_type(UnresolvedTypeData::Unspecified), - expression: assigned_to, - })) -} - -fn mutable_reference(variable_name: &str) -> Expression { - expression(ExpressionKind::Prefix(Box::new(PrefixExpression { - operator: UnaryOp::MutableReference, - rhs: variable(variable_name), - }))) -} - -fn assignment(name: &str, assigned_to: Expression) -> Statement { - make_statement(StatementKind::Let(LetStatement { - pattern: pattern(name), - r#type: make_type(UnresolvedTypeData::Unspecified), - expression: assigned_to, - })) -} - -fn member_access(lhs: &str, rhs: &str) -> Expression { - expression(ExpressionKind::MemberAccess(Box::new(MemberAccessExpression { - lhs: variable(lhs), - rhs: ident(rhs), - }))) -} - -macro_rules! chained_path { - ( $base:expr $(, $tail:expr)* ) => { - { - let mut base_path = ident_path($base); - $( - base_path.segments.push(ident($tail)); - )* - base_path - } - } -} - -macro_rules! 
chained_dep { - ( $base:expr $(, $tail:expr)* ) => { - { - let mut base_path = ident_path($base); - base_path.kind = PathKind::Dep; - $( - base_path.segments.push(ident($tail)); - )* - base_path - } - } -} - -fn cast(lhs: Expression, ty: UnresolvedTypeData) -> Expression { - expression(ExpressionKind::Cast(Box::new(CastExpression { lhs, r#type: make_type(ty) }))) -} - -fn make_type(typ: UnresolvedTypeData) -> UnresolvedType { - UnresolvedType { typ, span: None } -} - -fn index_array(array: Ident, index: &str) -> Expression { - expression(ExpressionKind::Index(Box::new(IndexExpression { - collection: variable_path(path(array)), - index: variable(index), - }))) -} - -fn index_array_variable(array: Expression, index: &str) -> Expression { - expression(ExpressionKind::Index(Box::new(IndexExpression { - collection: array, - index: variable(index), - }))) -} - -fn import(path: Path) -> ImportStatement { - ImportStatement { path, alias: None } -} - -// -// Create AST Nodes for Aztec -// - -/// Traverses every function in the ast, calling `transform_function` which -/// determines if further processing is required -pub(crate) fn transform( - mut ast: SortedModule, - crate_id: &CrateId, - context: &Context, -) -> Result { - // Usage -> mut ast -> aztec_library::transform(&mut ast) - - // Covers all functions in the ast - for submodule in ast.submodules.iter_mut().filter(|submodule| submodule.is_contract) { - if transform_module(&mut submodule.contents, crate_id, context)? { - check_for_aztec_dependency(crate_id, context)?; - include_relevant_imports(&mut submodule.contents); - } - } - Ok(ast) -} - -// -// Transform Hir Nodes for Aztec -// - -/// Completes the Hir with data gathered from type resolution -pub(crate) fn transform_hir(crate_id: &CrateId, context: &mut Context) { - transform_events(crate_id, context); -} - -/// Includes an import to the aztec library if it has not been included yet -fn include_relevant_imports(ast: &mut SortedModule) { - // Create the aztec import path using the assumed chained_dep! 
macro - let aztec_import_path = import(chained_dep!("aztec")); - - // Check if the aztec import already exists - let is_aztec_imported = - ast.imports.iter().any(|existing_import| existing_import.path == aztec_import_path.path); - - // If aztec is not imported, add the import at the beginning - if !is_aztec_imported { - ast.imports.insert(0, aztec_import_path); - } -} - -/// Creates an error alerting the user that they have not downloaded the Aztec-noir library -fn check_for_aztec_dependency( - crate_id: &CrateId, - context: &Context, -) -> Result<(), (DefCollectorErrorKind, FileId)> { - let crate_graph = &context.crate_graph[crate_id]; - let has_aztec_dependency = crate_graph.dependencies.iter().any(|dep| dep.as_name() == "aztec"); - if has_aztec_dependency { - Ok(()) - } else { - Err((DefCollectorErrorKind::AztecNotFound {}, crate_graph.root_file_id)) - } -} - -// Check to see if the user has defined a storage struct -fn check_for_storage_definition(module: &SortedModule) -> bool { - module.types.iter().any(|r#struct| r#struct.name.0.contents == "Storage") -} - -// Check if "compute_note_hash_and_nullifier(Field,Field,Field,[Field; N]) -> [Field; 4]" is defined -fn check_for_compute_note_hash_and_nullifier_definition(module: &SortedModule) -> bool { - module.functions.iter().any(|func| { - func.def.name.0.contents == "compute_note_hash_and_nullifier" - && func.def.parameters.len() == 4 - && func.def.parameters[0].typ.typ == UnresolvedTypeData::FieldElement - && func.def.parameters[1].typ.typ == UnresolvedTypeData::FieldElement - && func.def.parameters[2].typ.typ == UnresolvedTypeData::FieldElement - // checks if the 4th parameter is an array and the Box in - // Array(Option, Box) contains only fields - && match &func.def.parameters[3].typ.typ { - UnresolvedTypeData::Array(_, inner_type) => { - match inner_type.typ { - UnresolvedTypeData::FieldElement => true, - _ => false, - } - }, - _ => false, - } - // We check the return type the same way as we did the 4th parameter - && match &func.def.return_type { - FunctionReturnType::Default(_) => false, - FunctionReturnType::Ty(unresolved_type) => { - match &unresolved_type.typ { - UnresolvedTypeData::Array(_, inner_type) => { - match inner_type.typ { - UnresolvedTypeData::FieldElement => true, - _ => false, - } - }, - _ => false, - } - } - } - }) -} - -/// Checks if an attribute is a custom attribute with a specific name -fn is_custom_attribute(attr: &SecondaryAttribute, attribute_name: &str) -> bool { - if let SecondaryAttribute::Custom(custom_attr) = attr { - custom_attr.as_str() == attribute_name - } else { - false - } -} - -/// Determines if ast nodes are annotated with aztec attributes. -/// For annotated functions it calls the `transform` function which will perform the required transformations. 
-/// Returns true if an annotated node is found, false otherwise -fn transform_module( - module: &mut SortedModule, - crate_id: &CrateId, - context: &Context, -) -> Result { - let mut has_transformed_module = false; - - // Check for a user defined storage struct - let storage_defined = check_for_storage_definition(&module); - - if storage_defined && !check_for_compute_note_hash_and_nullifier_definition(&module) { - let crate_graph = &context.crate_graph[crate_id]; - return Err(( - DefCollectorErrorKind::AztecComputeNoteHashAndNullifierNotFound { - span: Span::default(), // Add a default span so we know which contract file the error originates from - }, - crate_graph.root_file_id, - )); - } - - for structure in module.types.iter() { - if structure.attributes.iter().any(|attr| matches!(attr, SecondaryAttribute::Event)) { - module.impls.push(generate_selector_impl(structure)); - has_transformed_module = true; - } - } - - for func in module.functions.iter_mut() { - for secondary_attribute in func.def.attributes.secondary.clone() { - if is_custom_attribute(&secondary_attribute, "aztec(private)") { - transform_function("Private", func, storage_defined); - has_transformed_module = true; - } else if is_custom_attribute(&secondary_attribute, "aztec(public)") { - transform_function("Public", func, storage_defined); - has_transformed_module = true; - } - } - // Add the storage struct to the beginning of the function if it is unconstrained in an aztec contract - if storage_defined && func.def.is_unconstrained { - transform_unconstrained(func); - has_transformed_module = true; - } - } - Ok(has_transformed_module) -} - -/// If it does, it will insert the following things: -/// - A new Input that is provided for a kernel app circuit, named: {Public/Private}ContextInputs -/// - Hashes all of the function input variables -/// - This instantiates a helper function -fn transform_function(ty: &str, func: &mut NoirFunction, storage_defined: bool) { - let context_name = format!("{}Context", ty); - let inputs_name = format!("{}ContextInputs", ty); - let return_type_name = format!("{}CircuitPublicInputs", ty); - - // Add access to the storage struct - if storage_defined { - let storage_def = abstract_storage(&ty.to_lowercase(), false); - func.def.body.0.insert(0, storage_def); - } - - // Insert the context creation as the first action - let create_context = create_context(&context_name, &func.def.parameters); - func.def.body.0.splice(0..0, (create_context).iter().cloned()); - - // Add the inputs to the params - let input = create_inputs(&inputs_name); - func.def.parameters.insert(0, input); - - // Abstract return types such that they get added to the kernel's return_values - if let Some(return_values) = abstract_return_values(func) { - func.def.body.0.push(return_values); - } - - // Push the finish method call to the end of the function - let finish_def = create_context_finish(); - func.def.body.0.push(finish_def); - - let return_type = create_return_type(&return_type_name); - func.def.return_type = return_type; - func.def.return_visibility = Visibility::Public; - - // Distinct return types are only required for private functions - // Public functions should have open auto-inferred - match ty { - "Private" => func.def.return_distinctness = Distinctness::Distinct, - "Public" => func.def.is_open = true, - _ => (), - } -} - -/// Transform Unconstrained -/// -/// Inserts the following code at the beginning of an unconstrained function -/// ```noir -/// let storage = Storage::init(Context::none()); -/// ``` -/// -/// 
This will allow developers to access their contract' storage struct in unconstrained functions -fn transform_unconstrained(func: &mut NoirFunction) { - func.def.body.0.insert(0, abstract_storage("Unconstrained", true)); -} - -fn collect_crate_structs(crate_id: &CrateId, context: &Context) -> Vec { - context - .def_map(crate_id) - .expect("ICE: Missing crate in def_map") - .modules() - .iter() - .flat_map(|(_, module)| { - module.type_definitions().filter_map(|typ| { - if let ModuleDefId::TypeId(struct_id) = typ { - Some(struct_id) - } else { - None - } - }) - }) - .collect() -} - -/// Substitutes the signature literal that was introduced in the selector method previously with the actual signature. -fn transform_event(struct_id: StructId, interner: &mut NodeInterner) { - let struct_type = interner.get_struct(struct_id); - let selector_id = interner - .lookup_method(&Type::Struct(struct_type, vec![]), struct_id, "selector", false) - .expect("Selector method not found"); - let selector_function = interner.function(&selector_id); - - let compute_selector_statement = interner.statement( - selector_function - .block(interner) - .statements() - .first() - .expect("Compute selector statement not found"), - ); - - let compute_selector_expression = match compute_selector_statement { - HirStatement::Expression(expression_id) => match interner.expression(&expression_id) { - HirExpression::Call(hir_call_expression) => Some(hir_call_expression), - _ => None, - }, - _ => None, - } - .expect("Compute selector statement is not a call expression"); - - let first_arg_id = compute_selector_expression - .arguments - .first() - .expect("Missing argument for compute selector"); - - match interner.expression(first_arg_id) { - HirExpression::Literal(HirLiteral::Str(signature)) - if signature == SIGNATURE_PLACEHOLDER => - { - let selector_literal_id = first_arg_id; - - let structure = interner.get_struct(struct_id); - let signature = event_signature(&structure.borrow()); - interner.update_expression(*selector_literal_id, |expr| { - *expr = HirExpression::Literal(HirLiteral::Str(signature.clone())); - }); - - // Also update the type! It might have a different length now than the placeholder. - interner.push_expr_type( - selector_literal_id, - Type::String(Box::new(Type::Constant(signature.len() as u64))), - ); - } - _ => unreachable!("Signature placeholder literal does not match"), - } -} - -fn transform_events(crate_id: &CrateId, context: &mut Context) { - for struct_id in collect_crate_structs(crate_id, context) { - let attributes = context.def_interner.struct_attributes(&struct_id); - if attributes.iter().any(|attr| matches!(attr, SecondaryAttribute::Event)) { - transform_event(struct_id, &mut context.def_interner); - } - } -} - -const SIGNATURE_PLACEHOLDER: &str = "SIGNATURE_PLACEHOLDER"; - -/// Generates the impl for an event selector -/// -/// Inserts the following code: -/// ```noir -/// impl SomeStruct { -/// fn selector() -> Field { -/// aztec::oracle::compute_selector::compute_selector("SIGNATURE_PLACEHOLDER") -/// } -/// } -/// ``` -/// -/// This allows developers to emit events without having to write the signature of the event every time they emit it. -/// The signature cannot be known at this point since types are not resolved yet, so we use a signature placeholder. -/// It'll get resolved after by transforming the HIR. 
-fn generate_selector_impl(structure: &NoirStruct) -> TypeImpl { - let struct_type = make_type(UnresolvedTypeData::Named(path(structure.name.clone()), vec![])); - - let selector_fun_body = BlockExpression(vec![make_statement(StatementKind::Expression(call( - variable_path(chained_path!("aztec", "selector", "compute_selector")), - vec![expression(ExpressionKind::Literal(Literal::Str(SIGNATURE_PLACEHOLDER.to_string())))], - )))]); - - let mut selector_fn_def = FunctionDefinition::normal( - &ident("selector"), - &vec![], - &[], - &selector_fun_body, - &[], - &FunctionReturnType::Ty(make_type(UnresolvedTypeData::FieldElement)), - ); - - selector_fn_def.visibility = FunctionVisibility::Public; - - // Seems to be necessary on contract modules - selector_fn_def.return_visibility = Visibility::Public; - - TypeImpl { - object_type: struct_type, - type_span: structure.span, - generics: vec![], - methods: vec![NoirFunction::normal(selector_fn_def)], - } -} - -/// Helper function that returns what the private context would look like in the ast -/// This should make it available to be consumed within aztec private annotated functions. -/// -/// The replaced code: -/// ```noir -/// /// Before -/// fn foo(inputs: PrivateContextInputs) { -/// // ... -/// } -/// -/// /// After -/// #[aztec(private)] -/// fn foo() { -/// // ... -/// } -pub(crate) fn create_inputs(ty: &str) -> Param { - let context_ident = ident("inputs"); - let context_pattern = Pattern::Identifier(context_ident); - let type_path = chained_path!("aztec", "abi", ty); - let context_type = make_type(UnresolvedTypeData::Named(type_path, vec![])); - let visibility = Visibility::Private; - - Param { pattern: context_pattern, typ: context_type, visibility, span: Span::default() } -} - -/// Creates the private context object to be accessed within the function, the parameters need to be extracted to be -/// appended into the args hash object. 
-/// -/// The replaced code: -/// ```noir -/// #[aztec(private)] -/// fn foo(structInput: SomeStruct, arrayInput: [u8; 10], fieldInput: Field) -> Field { -/// // Create the hasher object -/// let mut hasher = Hasher::new(); -/// -/// // struct inputs call serialize on them to add an array of fields -/// hasher.add_multiple(structInput.serialize()); -/// -/// // Array inputs are iterated over and each element is added to the hasher (as a field) -/// for i in 0..arrayInput.len() { -/// hasher.add(arrayInput[i] as Field); -/// } -/// // Field inputs are added to the hasher -/// hasher.add({ident}); -/// -/// // Create the context -/// // The inputs (injected by this `create_inputs`) and completed hash object are passed to the context -/// let mut context = PrivateContext::new(inputs, hasher.hash()); -/// } -/// ``` -fn create_context(ty: &str, params: &[Param]) -> Vec { - let mut injected_expressions: Vec = vec![]; - - // `let mut hasher = Hasher::new();` - let let_hasher = mutable_assignment( - "hasher", // Assigned to - call( - variable_path(chained_path!("aztec", "abi", "Hasher", "new")), // Path - vec![], // args - ), - ); - - // Completes: `let mut hasher = Hasher::new();` - injected_expressions.push(let_hasher); - - // Iterate over each of the function parameters, adding to them to the hasher - params.iter().for_each(|Param { pattern, typ, span: _, visibility: _ }| { - match pattern { - Pattern::Identifier(identifier) => { - // Match the type to determine the padding to do - let unresolved_type = &typ.typ; - let expression = match unresolved_type { - // `hasher.add_multiple({ident}.serialize())` - UnresolvedTypeData::Named(..) => add_struct_to_hasher(identifier), - UnresolvedTypeData::Array(_, arr_type) => { - add_array_to_hasher(identifier, &arr_type) - } - // `hasher.add({ident})` - UnresolvedTypeData::FieldElement => add_field_to_hasher(identifier), - // Add the integer to the hasher, casted to a field - // `hasher.add({ident} as Field)` - UnresolvedTypeData::Integer(..) | UnresolvedTypeData::Bool => { - add_cast_to_hasher(identifier) - } - _ => unreachable!("[Aztec Noir] Provided parameter type is not supported"), - }; - injected_expressions.push(expression); - } - _ => todo!(), // Maybe unreachable? - } - }); - - // Create the inputs to the context - let inputs_expression = variable("inputs"); - // `hasher.hash()` - let hash_call = method_call( - variable("hasher"), // variable - "hash", // method name - vec![], // args - ); - - // let mut context = {ty}::new(inputs, hash); - let let_context = mutable_assignment( - "context", // Assigned to - call( - variable_path(chained_path!("aztec", "context", ty, "new")), // Path - vec![inputs_expression, hash_call], // args - ), - ); - injected_expressions.push(let_context); - - // Return all expressions that will be injected by the hasher - injected_expressions -} - -/// Abstract Return Type -/// -/// This function intercepts the function's current return type and replaces it with pushes -/// To the kernel -/// -/// The replaced code: -/// ```noir -/// /// Before -/// #[aztec(private)] -/// fn foo() -> abi::PrivateCircuitPublicInputs { -/// // ... -/// let my_return_value: Field = 10; -/// context.return_values.push(my_return_value); -/// } -/// -/// /// After -/// #[aztec(private)] -/// fn foo() -> Field { -/// // ... -/// let my_return_value: Field = 10; -/// my_return_value -/// } -/// ``` -/// Similarly; Structs will be pushed to the context, after serialize() is called on them. 
-/// Arrays will be iterated over and each element will be pushed to the context. -/// Any primitive type that can be cast will be casted to a field and pushed to the context. -fn abstract_return_values(func: &NoirFunction) -> Option { - let current_return_type = func.return_type().typ; - let len = func.def.body.len(); - let last_statement = &func.def.body.0[len - 1]; - - // TODO: (length, type) => We can limit the size of the array returned to be limited by kernel size - // Doesn't need done until we have settled on a kernel size - // TODO: support tuples here and in inputs -> convert into an issue - - // Check if the return type is an expression, if it is, we can handle it - match last_statement { - Statement { kind: StatementKind::Expression(expression), .. } => { - match current_return_type { - // Call serialize on structs, push the whole array, calling push_array - UnresolvedTypeData::Named(..) => Some(make_struct_return_type(expression.clone())), - UnresolvedTypeData::Array(..) => Some(make_array_return_type(expression.clone())), - // Cast these types to a field before pushing - UnresolvedTypeData::Bool | UnresolvedTypeData::Integer(..) => { - Some(make_castable_return_type(expression.clone())) - } - UnresolvedTypeData::FieldElement => Some(make_return_push(expression.clone())), - _ => None, - } - } - _ => None, - } -} - -/// Abstract storage -/// -/// For private functions: -/// ```noir -/// #[aztec(private)] -/// fn lol() { -/// let storage = Storage::init(Context::private(context)); -/// } -/// ``` -/// -/// For public functions: -/// ```noir -/// #[aztec(public)] -/// fn lol() { -/// let storage = Storage::init(Context::public(context)); -/// } -/// ``` -/// -/// For unconstrained functions: -/// ```noir -/// unconstrained fn lol() { -/// let storage = Storage::init(Context::none()); -/// } -fn abstract_storage(typ: &str, unconstrained: bool) -> Statement { - let init_context_call = if unconstrained { - call( - variable_path(chained_path!("aztec", "context", "Context", "none")), // Path - vec![], // args - ) - } else { - call( - variable_path(chained_path!("aztec", "context", "Context", typ)), // Path - vec![mutable_reference("context")], // args - ) - }; - - assignment( - "storage", // Assigned to - call( - variable_path(chained_path!("Storage", "init")), // Path - vec![init_context_call], // args - ), - ) -} - -/// Context Return Values -/// -/// Creates an instance to the context return values -/// ```noir -/// `context.return_values` -/// ``` -fn context_return_values() -> Expression { - member_access("context", "return_values") -} - -fn make_statement(kind: StatementKind) -> Statement { - Statement { span: Span::default(), kind } -} - -/// Make return Push -/// -/// Translates to: -/// `context.return_values.push({push_value})` -fn make_return_push(push_value: Expression) -> Statement { - make_statement(StatementKind::Semi(method_call( - context_return_values(), - "push", - vec![push_value], - ))) -} - -/// Make Return push array -/// -/// Translates to: -/// `context.return_values.push_array({push_value})` -fn make_return_push_array(push_value: Expression) -> Statement { - make_statement(StatementKind::Semi(method_call( - context_return_values(), - "push_array", - vec![push_value], - ))) -} - -/// Make struct return type -/// -/// Translates to: -/// ```noir -/// `context.return_values.push_array({push_value}.serialize())` -fn make_struct_return_type(expression: Expression) -> Statement { - let serialized_call = method_call( - expression, // variable - "serialize", // 
method name - vec![], // args - ); - make_return_push_array(serialized_call) -} - -/// Make array return type -/// -/// Translates to: -/// ```noir -/// for i in 0..{ident}.len() { -/// context.return_values.push({ident}[i] as Field) -/// } -/// ``` -fn make_array_return_type(expression: Expression) -> Statement { - let inner_cast_expression = - cast(index_array_variable(expression.clone(), "i"), UnresolvedTypeData::FieldElement); - let assignment = make_statement(StatementKind::Semi(method_call( - context_return_values(), // variable - "push", // method name - vec![inner_cast_expression], - ))); - - create_loop_over(expression, vec![assignment]) -} - -/// Castable return type -/// -/// Translates to: -/// ```noir -/// context.return_values.push({ident} as Field) -/// ``` -fn make_castable_return_type(expression: Expression) -> Statement { - // Cast these types to a field before pushing - let cast_expression = cast(expression, UnresolvedTypeData::FieldElement); - make_return_push(cast_expression) -} - -/// Create Return Type -/// -/// Public functions return abi::PublicCircuitPublicInputs while -/// private functions return abi::PrivateCircuitPublicInputs -/// -/// This call constructs an ast token referencing the above types -/// The name is set in the function above `transform`, hence the -/// whole token name is passed in -/// -/// The replaced code: -/// ```noir -/// -/// /// Before -/// fn foo() -> abi::PrivateCircuitPublicInputs { -/// // ... -/// } -/// -/// /// After -/// #[aztec(private)] -/// fn foo() { -/// // ... -/// } -pub(crate) fn create_return_type(ty: &str) -> FunctionReturnType { - let return_path = chained_path!("aztec", "abi", ty); - - let ty = make_type(UnresolvedTypeData::Named(return_path, vec![])); - FunctionReturnType::Ty(ty) -} - -/// Create Context Finish -/// -/// Each aztec function calls `context.finish()` at the end of a function -/// to return values required by the kernel. -/// -/// The replaced code: -/// ```noir -/// /// Before -/// fn foo() -> abi::PrivateCircuitPublicInputs { -/// // ... -/// context.finish() -/// } -/// -/// /// After -/// #[aztec(private)] -/// fn foo() { -/// // ... 
-/// } -pub(crate) fn create_context_finish() -> Statement { - let method_call = method_call( - variable("context"), // variable - "finish", // method name - vec![], // args - ); - make_statement(StatementKind::Expression(method_call)) -} - -// -// Methods to create hasher inputs -// - -fn add_struct_to_hasher(identifier: &Ident) -> Statement { - // If this is a struct, we call serialize and add the array to the hasher - let serialized_call = method_call( - variable_path(path(identifier.clone())), // variable - "serialize", // method name - vec![], // args - ); - - make_statement(StatementKind::Semi(method_call( - variable("hasher"), // variable - "add_multiple", // method name - vec![serialized_call], // args - ))) -} - -fn create_loop_over(var: Expression, loop_body: Vec) -> Statement { - // If this is an array of primitive types (integers / fields) we can add them each to the hasher - // casted to a field - let span = var.span.clone(); - - // `array.len()` - let end_range_expression = method_call( - var, // variable - "len", // method name - vec![], // args - ); - - // What will be looped over - // - `hasher.add({ident}[i] as Field)` - let for_loop_block = expression(ExpressionKind::Block(BlockExpression(loop_body))); - - // `for i in 0..{ident}.len()` - make_statement(StatementKind::For(ForLoopStatement { - range: ForRange::Range( - expression(ExpressionKind::Literal(Literal::Integer(FieldElement::from(i128::from( - 0, - ))))), - end_range_expression, - ), - identifier: ident("i"), - block: for_loop_block, - span, - })) -} - -fn add_array_to_hasher(identifier: &Ident, arr_type: &UnresolvedType) -> Statement { - // If this is an array of primitive types (integers / fields) we can add them each to the hasher - // casted to a field - - // Wrap in the semi thing - does that mean ended with semi colon? - // `hasher.add({ident}[i] as Field)` - - let arr_index = index_array(identifier.clone(), "i"); - let (add_expression, hasher_method_name) = match arr_type.typ { - UnresolvedTypeData::Named(..) => { - let hasher_method_name = "add_multiple".to_owned(); - let call = method_call( - // All serialise on each element - arr_index, // variable - "serialize", // method name - vec![], // args - ); - (call, hasher_method_name) - } - _ => { - let hasher_method_name = "add".to_owned(); - let call = cast( - arr_index, // lhs - `ident[i]` - UnresolvedTypeData::FieldElement, // cast to - `as Field` - ); - (call, hasher_method_name) - } - }; - - let block_statement = make_statement(StatementKind::Semi(method_call( - variable("hasher"), // variable - &hasher_method_name, // method name - vec![add_expression], - ))); - - create_loop_over(variable_ident(identifier.clone()), vec![block_statement]) -} - -fn add_field_to_hasher(identifier: &Ident) -> Statement { - // `hasher.add({ident})` - let ident = variable_path(path(identifier.clone())); - make_statement(StatementKind::Semi(method_call( - variable("hasher"), // variable - "add", // method name - vec![ident], // args - ))) -} - -fn add_cast_to_hasher(identifier: &Ident) -> Statement { - // `hasher.add({ident} as Field)` - // `{ident} as Field` - let cast_operation = cast( - variable_path(path(identifier.clone())), // lhs - UnresolvedTypeData::FieldElement, // rhs - ); - - // `hasher.add({ident} as Field)` - make_statement(StatementKind::Semi(method_call( - variable("hasher"), // variable - "add", // method name - vec![cast_operation], // args - ))) -} - -/// Computes the aztec signature for a resolved type. 
-fn signature_of_type(typ: &Type) -> String { - match typ { - Type::Integer(Signedness::Signed, bit_size) => format!("i{}", bit_size), - Type::Integer(Signedness::Unsigned, bit_size) => format!("u{}", bit_size), - Type::FieldElement => "Field".to_owned(), - Type::Bool => "bool".to_owned(), - Type::Array(len, typ) => { - if let Type::Constant(len) = **len { - format!("[{};{len}]", signature_of_type(typ)) - } else { - unimplemented!("Cannot generate signature for array with length type {:?}", typ) - } - } - Type::Struct(def, args) => { - let fields = def.borrow().get_fields(args); - let fields = vecmap(fields, |(_, typ)| signature_of_type(&typ)); - format!("({})", fields.join(",")) - } - Type::Tuple(types) => { - let fields = vecmap(types, signature_of_type); - format!("({})", fields.join(",")) - } - _ => unimplemented!("Cannot generate signature for type {:?}", typ), - } -} - -/// Computes the signature for a resolved event type. -/// It has the form 'EventName(Field,(Field),[u8;2])' -fn event_signature(event: &StructType) -> String { - let fields = vecmap(event.get_fields(&[]), |(_, typ)| signature_of_type(&typ)); - format!("{}({})", event.name.0.contents, fields.join(",")) -} diff --git a/compiler/noirc_frontend/src/hir/def_collector/dc_crate.rs b/compiler/noirc_frontend/src/hir/def_collector/dc_crate.rs index ce1cf675a07..b1e559895b8 100644 --- a/compiler/noirc_frontend/src/hir/def_collector/dc_crate.rs +++ b/compiler/noirc_frontend/src/hir/def_collector/dc_crate.rs @@ -1,33 +1,33 @@ use super::dc_mod::collect_defs; use super::errors::{DefCollectorErrorKind, DuplicateType}; use crate::graph::CrateId; -use crate::hir::def_map::{CrateDefMap, LocalModuleId, ModuleData, ModuleDefId, ModuleId}; +use crate::hir::def_map::{CrateDefMap, LocalModuleId, ModuleId}; use crate::hir::resolution::errors::ResolverError; -use crate::hir::resolution::import::PathResolutionError; -use crate::hir::resolution::path_resolver::PathResolver; + +use crate::hir::resolution::import::{resolve_import, ImportDirective}; use crate::hir::resolution::resolver::Resolver; use crate::hir::resolution::{ - import::{resolve_imports, ImportDirective}, - path_resolver::StandardPathResolver, + collect_impls, collect_trait_impls, path_resolver, resolve_free_functions, resolve_globals, + resolve_impls, resolve_structs, resolve_trait_by_path, resolve_trait_impls, resolve_traits, + resolve_type_aliases, }; use crate::hir::type_check::{type_check_func, TypeCheckError, TypeChecker}; use crate::hir::Context; -use crate::hir_def::traits::{Trait, TraitConstant, TraitFunction, TraitImpl, TraitType}; -use crate::node_interner::{ - FuncId, NodeInterner, StmtId, StructId, TraitId, TraitImplId, TypeAliasId, -}; + +use crate::macros_api::MacroProcessor; +use crate::node_interner::{FuncId, NodeInterner, StmtId, StructId, TraitId, TypeAliasId}; use crate::parser::{ParserError, SortedModule}; use crate::{ - ExpressionKind, Generics, Ident, LetStatement, Literal, NoirFunction, NoirStruct, NoirTrait, - NoirTypeAlias, Path, Shared, StructType, TraitItem, Type, TypeBinding, TypeVariableKind, - UnresolvedGenerics, UnresolvedTraitConstraint, UnresolvedType, + ExpressionKind, Ident, LetStatement, Literal, NoirFunction, NoirStruct, NoirTrait, + NoirTypeAlias, Path, PathKind, Type, UnresolvedGenerics, UnresolvedTraitConstraint, + UnresolvedType, }; use fm::FileId; use iter_extended::vecmap; use noirc_errors::{CustomDiagnostic, Span}; -use std::collections::{BTreeMap, HashMap, HashSet}; -use std::rc::Rc; +use std::collections::{BTreeMap, HashMap}; + use 
std::vec; /// Stores all of the unresolved functions in a particular file/mod @@ -82,6 +82,7 @@ pub struct UnresolvedTrait { pub module_id: LocalModuleId, pub crate_id: CrateId, pub trait_def: NoirTrait, + pub method_ids: HashMap, pub fns_with_default_impl: UnresolvedFunctions, } @@ -124,6 +125,16 @@ pub struct DefCollector { pub(crate) collected_traits_impls: Vec, } +/// Maps the type and the module id in which the impl is defined to the functions contained in that +/// impl along with the generics declared on the impl itself. This also contains the Span +/// of the object_type of the impl, used to issue an error if the object type fails to resolve. +/// +/// Note that because these are keyed by unresolved types, the impl map is one of the few instances +/// of HashMap rather than BTreeMap. For this reason, we should be careful not to iterate over it +/// since it would be non-deterministic. +pub(crate) type ImplMap = + HashMap<(UnresolvedType, LocalModuleId), Vec<(UnresolvedGenerics, Span, UnresolvedFunctions)>>; + #[derive(Debug, Clone)] pub enum CompilationError { ParseError(ParserError), @@ -166,16 +177,6 @@ impl From for CompilationError { } } -/// Maps the type and the module id in which the impl is defined to the functions contained in that -/// impl along with the generics declared on the impl itself. This also contains the Span -/// of the object_type of the impl, used to issue an error if the object type fails to resolve. -/// -/// Note that because these are keyed by unresolved types, the impl map is one of the few instances -/// of HashMap rather than BTreeMap. For this reason, we should be careful not to iterate over it -/// since it would be non-deterministic. -type ImplMap = - HashMap<(UnresolvedType, LocalModuleId), Vec<(UnresolvedGenerics, Span, UnresolvedFunctions)>>; - impl DefCollector { fn new(def_map: CrateDefMap) -> DefCollector { DefCollector { @@ -199,6 +200,7 @@ impl DefCollector { context: &mut Context, ast: SortedModule, root_file_id: FileId, + macro_processors: Vec<&dyn MacroProcessor>, ) -> Vec<(CompilationError, FileId)> { let mut errors: Vec<(CompilationError, FileId)> = vec![]; let crate_id = def_map.krate; @@ -211,7 +213,11 @@ impl DefCollector { let crate_graph = &context.crate_graph[crate_id]; for dep in crate_graph.dependencies.clone() { - errors.extend(CrateDefMap::collect_defs(dep.crate_id, context)); + errors.extend(CrateDefMap::collect_defs( + dep.crate_id, + context, + macro_processors.clone(), + )); let dep_def_root = context.def_map(&dep.crate_id).expect("ice: def map was just created").root; @@ -239,37 +245,47 @@ impl DefCollector { context, )); + let submodules = vecmap(def_collector.def_map.modules().iter(), |(index, _)| index); // Add the current crate to the collection of DefMaps context.def_maps.insert(crate_id, def_collector.def_map); - // Resolve unresolved imports collected from the crate - let (resolved, unresolved_imports) = - resolve_imports(crate_id, def_collector.collected_imports, &context.def_maps); + inject_prelude(crate_id, context, crate_root, &mut def_collector.collected_imports); + for submodule in submodules { + inject_prelude( + crate_id, + context, + LocalModuleId(submodule), + &mut def_collector.collected_imports, + ); + } - { - let current_def_map = context.def_maps.get(&crate_id).unwrap(); - errors.extend(vecmap(unresolved_imports, |(error, module_id)| { - let file_id = current_def_map.file_id(module_id); - let error = DefCollectorErrorKind::PathResolutionError(error); - (error.into(), file_id) - })); - }; - - // 
Populate module namespaces according to the imports used - let current_def_map = context.def_maps.get_mut(&crate_id).unwrap(); - for resolved_import in resolved { - let name = resolved_import.name; - for ns in resolved_import.resolved_namespace.iter_defs() { - let result = current_def_map.modules[resolved_import.module_scope.0] - .import(name.clone(), ns); - - if let Err((first_def, second_def)) = result { - let err = DefCollectorErrorKind::Duplicate { - typ: DuplicateType::Import, - first_def, - second_def, - }; - errors.push((err.into(), root_file_id)); + // Resolve unresolved imports collected from the crate, one by one. + for collected_import in def_collector.collected_imports { + match resolve_import(crate_id, collected_import, &context.def_maps) { + Ok(resolved_import) => { + // Populate module namespaces according to the imports used + let current_def_map = context.def_maps.get_mut(&crate_id).unwrap(); + + let name = resolved_import.name; + for ns in resolved_import.resolved_namespace.iter_defs() { + let result = current_def_map.modules[resolved_import.module_scope.0] + .import(name.clone(), ns, resolved_import.is_prelude); + + if let Err((first_def, second_def)) = result { + let err = DefCollectorErrorKind::Duplicate { + typ: DuplicateType::Import, + first_def, + second_def, + }; + errors.push((err.into(), root_file_id)); + } + } + } + Err((error, module_id)) => { + let current_def_map = context.def_maps.get(&crate_id).unwrap(); + let file_id = current_def_map.file_id(module_id); + let error = DefCollectorErrorKind::PathResolutionError(error); + errors.push((error.into(), file_id)); } } } @@ -341,10 +357,9 @@ impl DefCollector { errors.extend(resolved_globals.errors); - // We run hir transformations before type checks - #[cfg(feature = "aztec")] - crate::hir::aztec_library::transform_hir(&crate_id, context); - + for macro_processor in macro_processors { + macro_processor.process_typed_ast(&crate_id, context); + } errors.extend(type_check_globals(&mut context.def_interner, resolved_globals.globals)); // Type check all of the functions in the crate @@ -355,262 +370,45 @@ impl DefCollector { } } -/// Go through the list of impls and add each function within to the scope -/// of the module defined by its type. -fn collect_impls( - context: &mut Context, +fn inject_prelude( crate_id: CrateId, - collected_impls: &ImplMap, -) -> Vec<(CompilationError, FileId)> { - let interner = &mut context.def_interner; - let def_maps = &mut context.def_maps; - let mut errors: Vec<(CompilationError, FileId)> = vec![]; - - for ((unresolved_type, module_id), methods) in collected_impls { - let path_resolver = - StandardPathResolver::new(ModuleId { local_id: *module_id, krate: crate_id }); - - let file = def_maps[&crate_id].file_id(*module_id); - - for (generics, span, unresolved) in methods { - let mut resolver = Resolver::new(interner, &path_resolver, def_maps, file); - resolver.add_generics(generics); - let typ = resolver.resolve_type(unresolved_type.clone()); - - errors.extend(take_errors(unresolved.file_id, resolver)); - - if let Some(struct_type) = get_struct_type(&typ) { - let struct_type = struct_type.borrow(); - - // `impl`s are only allowed on types defined within the current crate - if struct_type.id.krate() != crate_id { - let span = *span; - let type_name = struct_type.name.to_string(); - let error = DefCollectorErrorKind::ForeignImpl { span, type_name }; - errors.push((error.into(), unresolved.file_id)); - continue; - } - - // Grab the module defined by the struct type. 
Note that impls are a case - // where the module the methods are added to is not the same as the module - // they are resolved in. - let module = get_module_mut(def_maps, struct_type.id.module_id()); - - for (_, method_id, method) in &unresolved.functions { - // If this method was already declared, remove it from the module so it cannot - // be accessed with the `TypeName::method` syntax. We'll check later whether the - // object types in each method overlap or not. If they do, we issue an error. - // If not, that is specialization which is allowed. - if module.declare_function(method.name_ident().clone(), *method_id).is_err() { - module.remove_function(method.name_ident()); - } - } - // Prohibit defining impls for primitive types if we're not in the stdlib - } else if typ != Type::Error && !crate_id.is_stdlib() { - let span = *span; - let error = DefCollectorErrorKind::NonStructTypeInImpl { span }; - errors.push((error.into(), unresolved.file_id)); - } - } - } - errors -} - -fn get_module_mut( - def_maps: &mut BTreeMap, - module: ModuleId, -) -> &mut ModuleData { - &mut def_maps.get_mut(&module.krate).unwrap().modules[module.local_id.0] -} - -fn collect_trait_impl_methods( - interner: &mut NodeInterner, - def_maps: &BTreeMap, - crate_id: CrateId, - trait_id: TraitId, - trait_impl: &mut UnresolvedTraitImpl, -) -> Vec<(CompilationError, FileId)> { - // In this Vec methods[i] corresponds to trait.methods[i]. If the impl has no implementation - // for a particular method, the default implementation will be added at that slot. - let mut ordered_methods = Vec::new(); - - let the_trait = interner.get_trait(trait_id); - - // check whether the trait implementation is in the same crate as either the trait or the type - let mut errors = - check_trait_impl_crate_coherence(interner, &the_trait, trait_impl, crate_id, def_maps); - // set of function ids that have a corresponding method in the trait - let mut func_ids_in_trait = HashSet::new(); - - for method in &the_trait.methods { - let overrides: Vec<_> = trait_impl - .methods - .functions - .iter() - .filter(|(_, _, f)| f.name() == method.name.0.contents) - .collect(); - - if overrides.is_empty() { - if let Some(default_impl) = &method.default_impl { - let func_id = interner.push_empty_fn(); - let module = ModuleId { local_id: trait_impl.module_id, krate: crate_id }; - interner.push_function(func_id, &default_impl.def, module); - func_ids_in_trait.insert(func_id); - ordered_methods.push(( - method.default_impl_module_id, - func_id, - *default_impl.clone(), - )); - } else { - let error = DefCollectorErrorKind::TraitMissingMethod { - trait_name: the_trait.name.clone(), - method_name: method.name.clone(), - trait_impl_span: trait_impl.object_type.span.expect("type must have a span"), - }; - errors.push((error.into(), trait_impl.file_id)); - } - } else { - for (_, func_id, _) in &overrides { - func_ids_in_trait.insert(*func_id); - } - - if overrides.len() > 1 { - let error = DefCollectorErrorKind::Duplicate { - typ: DuplicateType::TraitAssociatedFunction, - first_def: overrides[0].2.name_ident().clone(), - second_def: overrides[1].2.name_ident().clone(), - }; - errors.push((error.into(), trait_impl.file_id)); - } - - ordered_methods.push(overrides[0].clone()); - } - } - - // Emit MethodNotInTrait error for methods in the impl block that - // don't have a corresponding method signature defined in the trait - for (_, func_id, func) in &trait_impl.methods.functions { - if !func_ids_in_trait.contains(func_id) { - let error = 
DefCollectorErrorKind::MethodNotInTrait { - trait_name: the_trait.name.clone(), - impl_method: func.name_ident().clone(), - }; - errors.push((error.into(), trait_impl.file_id)); - } - } - - trait_impl.methods.functions = ordered_methods; - trait_impl.methods.trait_id = Some(trait_id); - errors -} + context: &Context, + crate_root: LocalModuleId, + collected_imports: &mut Vec, +) { + let segments: Vec<_> = "std::prelude" + .split("::") + .map(|segment| crate::Ident::new(segment.into(), Span::default())) + .collect(); -fn collect_trait_impl( - context: &mut Context, - crate_id: CrateId, - trait_impl: &mut UnresolvedTraitImpl, -) -> Vec<(CompilationError, FileId)> { - let interner = &mut context.def_interner; - let def_maps = &mut context.def_maps; - let mut errors: Vec<(CompilationError, FileId)> = vec![]; - let unresolved_type = trait_impl.object_type.clone(); - let module = ModuleId { local_id: trait_impl.module_id, krate: crate_id }; - trait_impl.trait_id = - match resolve_trait_by_path(def_maps, module, trait_impl.trait_path.clone()) { - Ok(trait_id) => Some(trait_id), - Err(error) => { - errors.push((error.into(), trait_impl.file_id)); - None - } - }; + let path = + Path { segments: segments.clone(), kind: crate::PathKind::Dep, span: Span::default() }; - if let Some(trait_id) = trait_impl.trait_id { - errors - .extend(collect_trait_impl_methods(interner, def_maps, crate_id, trait_id, trait_impl)); - - let path_resolver = StandardPathResolver::new(module); - let file = def_maps[&crate_id].file_id(trait_impl.module_id); - let mut resolver = Resolver::new(interner, &path_resolver, def_maps, file); - resolver.add_generics(&trait_impl.generics); - let typ = resolver.resolve_type(unresolved_type); - errors.extend(take_errors(trait_impl.file_id, resolver)); - - if let Some(struct_type) = get_struct_type(&typ) { - let struct_type = struct_type.borrow(); - let module = get_module_mut(def_maps, struct_type.id.module_id()); - - for (_, method_id, method) in &trait_impl.methods.functions { - // If this method was already declared, remove it from the module so it cannot - // be accessed with the `TypeName::method` syntax. We'll check later whether the - // object types in each method overlap or not. If they do, we issue an error. - // If not, that is specialization which is allowed. 
- if module.declare_function(method.name_ident().clone(), *method_id).is_err() { - module.remove_function(method.name_ident()); - } + if !crate_id.is_stdlib() { + if let Ok(module_def) = path_resolver::resolve_path( + &context.def_maps, + ModuleId { krate: crate_id, local_id: crate_root }, + path, + ) { + let module_id = module_def.as_module().expect("std::prelude should be a module"); + let prelude = context.module(module_id).scope().names(); + + for path in prelude { + let mut segments = segments.clone(); + segments.push(Ident::new(path.to_string(), Span::default())); + + collected_imports.insert( + 0, + ImportDirective { + module_id: crate_root, + path: Path { segments, kind: PathKind::Dep, span: Span::default() }, + alias: None, + is_prelude: true, + }, + ); + } } } - errors -} - -fn collect_trait_impls( - context: &mut Context, - crate_id: CrateId, - collected_impls: &mut [UnresolvedTraitImpl], -) -> Vec<(CompilationError, FileId)> { - collected_impls - .iter_mut() - .flat_map(|trait_impl| collect_trait_impl(context, crate_id, trait_impl)) - .collect() -} - -fn check_trait_impl_crate_coherence( - interner: &mut NodeInterner, - the_trait: &Trait, - trait_impl: &UnresolvedTraitImpl, - current_crate: CrateId, - def_maps: &BTreeMap, -) -> Vec<(CompilationError, FileId)> { - let mut errors: Vec<(CompilationError, FileId)> = vec![]; - - let module = ModuleId { krate: current_crate, local_id: trait_impl.module_id }; - let file = def_maps[&current_crate].file_id(trait_impl.module_id); - let path_resolver = StandardPathResolver::new(module); - let mut resolver = Resolver::new(interner, &path_resolver, def_maps, file); - - let object_crate = match resolver.resolve_type(trait_impl.object_type.clone()) { - Type::Struct(struct_type, _) => struct_type.borrow().id.krate(), - _ => CrateId::Dummy, - }; - - if current_crate != the_trait.crate_id && current_crate != object_crate { - let error = DefCollectorErrorKind::TraitImplOrphaned { - span: trait_impl.object_type.span.expect("object type must have a span"), - }; - errors.push((error.into(), trait_impl.file_id)); - } - - errors -} - -fn resolve_trait_by_path( - def_maps: &BTreeMap, - module: ModuleId, - path: Path, -) -> Result { - let path_resolver = StandardPathResolver::new(module); - - match path_resolver.resolve(def_maps, path.clone()) { - Ok(ModuleDefId::TraitId(trait_id)) => Ok(trait_id), - Ok(_) => Err(DefCollectorErrorKind::NotATrait { not_a_trait_name: path }), - Err(_) => Err(DefCollectorErrorKind::TraitNotFound { trait_path: path }), - } -} - -fn get_struct_type(typ: &Type) -> Option<&Shared> { - match typ { - Type::Struct(definition, _) => Some(definition), - _ => None, - } -} } /// Separate the globals Vec into two.
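As an aside on the prelude wiring above: `inject_prelude` pushes one synthetic `ImportDirective` per `std::prelude` item to the front of the import list with `is_prelude: true`, so a later user-written import of the same name replaces the prelude entry instead of triggering a duplicate-definition error. A minimal, self-contained sketch of that intended shadowing behaviour (simplified stand-in types only, not the compiler's `ItemScope`):

```rust
use std::collections::HashMap;

// Stand-in scope: name -> (definition path, came_from_prelude).
fn add_item(
    scope: &mut HashMap<String, (String, bool)>,
    name: &str,
    def: &str,
    is_prelude: bool,
) -> Result<(), String> {
    match scope.get_mut(name) {
        // An existing prelude entry may be shadowed silently.
        Some(entry) if entry.1 => {
            *entry = (def.to_string(), is_prelude);
            Ok(())
        }
        // Any other collision is still a duplicate definition.
        Some(_) => Err(format!("duplicate definition of `{name}`")),
        None => {
            scope.insert(name.to_string(), (def.to_string(), is_prelude));
            Ok(())
        }
    }
}

fn main() {
    let mut scope = HashMap::new();
    // Prelude names are injected first...
    add_item(&mut scope, "Option", "std::prelude::Option", true).unwrap();
    // ...so a user-written `use` of the same name wins without an error,
    add_item(&mut scope, "Option", "my_crate::option::Option", false).unwrap();
    // while a second user-level import of that name is still rejected.
    assert!(add_item(&mut scope, "Option", "other::Option", false).is_err());
}
```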
The first element in the tuple will be the @@ -625,50 +423,6 @@ fn filter_literal_globals( }) } -pub struct ResolvedGlobals { - pub globals: Vec<(FileId, StmtId)>, - pub errors: Vec<(CompilationError, FileId)>, -} - -impl ResolvedGlobals { - pub fn extend(&mut self, oth: Self) { - self.globals.extend(oth.globals); - self.errors.extend(oth.errors); - } -} - -fn resolve_globals( - context: &mut Context, - globals: Vec, - crate_id: CrateId, -) -> ResolvedGlobals { - let mut errors: Vec<(CompilationError, FileId)> = vec![]; - let globals = vecmap(globals, |global| { - let module_id = ModuleId { local_id: global.module_id, krate: crate_id }; - let path_resolver = StandardPathResolver::new(module_id); - let storage_slot = context.next_storage_slot(module_id); - - let mut resolver = Resolver::new( - &mut context.def_interner, - &path_resolver, - &context.def_maps, - global.file_id, - ); - - let name = global.stmt_def.pattern.name_ident().clone(); - - let hir_stmt = resolver.resolve_global_let(global.stmt_def); - errors.extend(take_errors(global.file_id, resolver)); - - context.def_interner.update_global(global.stmt_id, hir_stmt); - - context.def_interner.push_global(global.stmt_id, name, global.module_id, storage_slot); - - (global.file_id, global.stmt_id) - }); - ResolvedGlobals { globals, errors } -} - fn type_check_globals( interner: &mut NodeInterner, global_ids: Vec<(FileId, StmtId)>, @@ -701,406 +455,60 @@ fn type_check_functions( .collect() } -/// Create the mappings from TypeId -> StructType -/// so that expressions can access the fields of structs -fn resolve_structs( - context: &mut Context, - structs: BTreeMap, - crate_id: CrateId, -) -> Vec<(CompilationError, FileId)> { - let mut errors: Vec<(CompilationError, FileId)> = vec![]; - // Resolve each field in each struct. - // Each struct should already be present in the NodeInterner after def collection. 
- for (type_id, typ) in structs { - let file_id = typ.file_id; - let (generics, fields, resolver_errors) = resolve_struct_fields(context, crate_id, typ); - errors.extend(vecmap(resolver_errors, |err| (err.into(), file_id))); - context.def_interner.update_struct(type_id, |struct_def| { - struct_def.set_fields(fields); - struct_def.generics = generics; - }); - } - errors -} - -fn resolve_trait_types( - _context: &mut Context, - _crate_id: CrateId, - _unresolved_trait: &UnresolvedTrait, -) -> (Vec, Vec<(CompilationError, FileId)>) { - // TODO - (vec![], vec![]) -} -fn resolve_trait_constants( - _context: &mut Context, - _crate_id: CrateId, - _unresolved_trait: &UnresolvedTrait, -) -> (Vec, Vec<(CompilationError, FileId)>) { - // TODO - (vec![], vec![]) -} - -fn resolve_trait_methods( - context: &mut Context, - trait_id: TraitId, - crate_id: CrateId, - unresolved_trait: &UnresolvedTrait, -) -> (Vec, Vec<(CompilationError, FileId)>) { - let interner = &mut context.def_interner; - let def_maps = &mut context.def_maps; - - let path_resolver = StandardPathResolver::new(ModuleId { - local_id: unresolved_trait.module_id, - krate: crate_id, - }); - let file = def_maps[&crate_id].file_id(unresolved_trait.module_id); - - let mut res = vec![]; - let mut resolver_errors = vec![]; - for item in &unresolved_trait.trait_def.items { - if let TraitItem::Function { - name, - generics, - parameters, - return_type, - where_clause: _, - body: _, - } = item - { - let the_trait = interner.get_trait(trait_id); - let self_type = - Type::TypeVariable(the_trait.self_type_typevar.clone(), TypeVariableKind::Normal); - - let mut resolver = Resolver::new(interner, &path_resolver, def_maps, file); - resolver.add_generics(generics); - resolver.set_self_type(Some(self_type)); - - let arguments = vecmap(parameters, |param| resolver.resolve_type(param.1.clone())); - let resolved_return_type = resolver.resolve_type(return_type.get_type().into_owned()); - let generics = resolver.get_generics().to_vec(); - - let name = name.clone(); - let span: Span = name.span(); - let default_impl_list: Vec<_> = unresolved_trait - .fns_with_default_impl - .functions - .iter() - .filter(|(_, _, q)| q.name() == name.0.contents) - .collect(); - let default_impl = if default_impl_list.len() == 1 { - Some(Box::new(default_impl_list[0].2.clone())) - } else { - None - }; - - let f = TraitFunction { - name, - generics, - arguments, - return_type: resolved_return_type, - span, - default_impl, - default_impl_file_id: unresolved_trait.file_id, - default_impl_module_id: unresolved_trait.module_id, - }; - res.push(f); - resolver_errors.extend(take_errors_filter_self_not_resolved(file, resolver)); - } - } - (res, resolver_errors) -} - -fn take_errors_filter_self_not_resolved( - file_id: FileId, - resolver: Resolver<'_>, -) -> Vec<(CompilationError, FileId)> { - resolver - .take_errors() - .iter() - .filter(|resolution_error| match resolution_error { - ResolverError::PathResolutionError(PathResolutionError::Unresolved(ident)) => { - &ident.0.contents != "Self" - } - _ => true, - }) - .cloned() - .map(|resolution_error| (resolution_error.into(), file_id)) - .collect() -} - -fn take_errors(file_id: FileId, resolver: Resolver<'_>) -> Vec<(CompilationError, FileId)> { - vecmap(resolver.take_errors(), |e| (e.into(), file_id)) -} - -/// Create the mappings from TypeId -> TraitType -/// so that expressions can access the elements of traits -fn resolve_traits( - context: &mut Context, - traits: BTreeMap, - crate_id: CrateId, -) -> Vec<(CompilationError, FileId)> { - 
for (trait_id, unresolved_trait) in &traits { - context.def_interner.push_empty_trait(*trait_id, unresolved_trait); - } - let mut res: Vec<(CompilationError, FileId)> = vec![]; - for (trait_id, unresolved_trait) in traits { - // Resolve order - // 1. Trait Types ( Trait constants can have a trait type, therefore types before constants) - let _ = resolve_trait_types(context, crate_id, &unresolved_trait); - // 2. Trait Constants ( Trait's methods can use trait types & constants, therefore they should be after) - let _ = resolve_trait_constants(context, crate_id, &unresolved_trait); - // 3. Trait Methods - let (methods, errors) = - resolve_trait_methods(context, trait_id, crate_id, &unresolved_trait); - res.extend(errors); - context.def_interner.update_trait(trait_id, |trait_def| { - trait_def.set_methods(methods); - }); - } - res -} - -fn resolve_struct_fields( - context: &mut Context, - krate: CrateId, - unresolved: UnresolvedStruct, -) -> (Generics, Vec<(Ident, Type)>, Vec) { - let path_resolver = - StandardPathResolver::new(ModuleId { local_id: unresolved.module_id, krate }); - let file_id = unresolved.file_id; - let (generics, fields, errors) = - Resolver::new(&mut context.def_interner, &path_resolver, &context.def_maps, file_id) - .resolve_struct_fields(unresolved.struct_def); - (generics, fields, errors) -} - -fn resolve_type_aliases( - context: &mut Context, - type_aliases: BTreeMap, - crate_id: CrateId, -) -> Vec<(CompilationError, FileId)> { - let mut errors: Vec<(CompilationError, FileId)> = vec![]; - for (type_id, unresolved_typ) in type_aliases { - let path_resolver = StandardPathResolver::new(ModuleId { - local_id: unresolved_typ.module_id, - krate: crate_id, - }); - let file = unresolved_typ.file_id; - let (typ, generics, resolver_errors) = - Resolver::new(&mut context.def_interner, &path_resolver, &context.def_maps, file) - .resolve_type_aliases(unresolved_typ.type_alias_def); - errors.extend(resolver_errors.iter().cloned().map(|e| (e.into(), file))); - context.def_interner.set_type_alias(type_id, typ, generics); - } - errors -} - -fn resolve_impls( - interner: &mut NodeInterner, - crate_id: CrateId, - def_maps: &BTreeMap, - collected_impls: ImplMap, - errors: &mut Vec<(CompilationError, FileId)>, -) -> Vec<(FileId, FuncId)> { - let mut file_method_ids = Vec::new(); - - for ((unresolved_type, module_id), methods) in collected_impls { - let path_resolver = - StandardPathResolver::new(ModuleId { local_id: module_id, krate: crate_id }); - - let file = def_maps[&crate_id].file_id(module_id); - - for (generics, _, functions) in methods { - let mut resolver = Resolver::new(interner, &path_resolver, def_maps, file); - resolver.add_generics(&generics); - let generics = resolver.get_generics().to_vec(); - let self_type = resolver.resolve_type(unresolved_type.clone()); - - let mut file_func_ids = resolve_function_set( - interner, - crate_id, - def_maps, - functions, - Some(self_type.clone()), - None, - generics, - errors, - ); - if self_type != Type::Error { - for (file_id, method_id) in &file_func_ids { - let method_name = interner.function_name(method_id).to_owned(); - - if let Some(first_fn) = - interner.add_method(&self_type, method_name.clone(), *method_id, false) - { - let error = ResolverError::DuplicateDefinition { - name: method_name, - first_span: interner.function_ident(&first_fn).span(), - second_span: interner.function_ident(method_id).span(), - }; - errors.push((error.into(), *file_id)); - } - } - } - file_method_ids.append(&mut file_func_ids); - } - } - - file_method_ids 
-} - -fn resolve_trait_impls( - context: &mut Context, - traits: Vec, - crate_id: CrateId, - errors: &mut Vec<(CompilationError, FileId)>, -) -> Vec<(FileId, FuncId)> { - let interner = &mut context.def_interner; - let mut methods = Vec::<(FileId, FuncId)>::new(); - - for trait_impl in traits { - let unresolved_type = trait_impl.object_type; - let local_mod_id = trait_impl.module_id; - let module_id = ModuleId { krate: crate_id, local_id: local_mod_id }; - let path_resolver = StandardPathResolver::new(module_id); - - let self_type_span = unresolved_type.span; - - let mut resolver = - Resolver::new(interner, &path_resolver, &context.def_maps, trait_impl.file_id); - resolver.add_generics(&trait_impl.generics); - let self_type = resolver.resolve_type(unresolved_type.clone()); - let generics = resolver.get_generics().to_vec(); - - let impl_id = interner.next_trait_impl_id(); - - let mut impl_methods = resolve_function_set( - interner, - crate_id, - &context.def_maps, - trait_impl.methods.clone(), - Some(self_type.clone()), - Some(impl_id), - generics.clone(), - errors, - ); - - let maybe_trait_id = trait_impl.trait_id; - if let Some(trait_id) = maybe_trait_id { - for (_, func) in &impl_methods { - interner.set_function_trait(*func, self_type.clone(), trait_id); - } - } - - if matches!(self_type, Type::MutableReference(_)) { - let span = self_type_span.unwrap_or_else(|| trait_impl.trait_path.span()); - let error = DefCollectorErrorKind::MutableReferenceInTraitImpl { span }; - errors.push((error.into(), trait_impl.file_id)); - } - - let mut new_resolver = - Resolver::new(interner, &path_resolver, &context.def_maps, trait_impl.file_id); - - new_resolver.set_generics(generics); - new_resolver.set_self_type(Some(self_type.clone())); - - if let Some(trait_id) = maybe_trait_id { - check_methods_signatures( - &mut new_resolver, - &impl_methods, - trait_id, - trait_impl.generics.len(), - errors, - ); - - let where_clause = trait_impl - .where_clause - .into_iter() - .flat_map(|item| new_resolver.resolve_trait_constraint(item)) - .collect(); - - let resolved_trait_impl = Shared::new(TraitImpl { - ident: trait_impl.trait_path.last_segment().clone(), - typ: self_type.clone(), - trait_id, - file: trait_impl.file_id, - where_clause, - methods: vecmap(&impl_methods, |(_, func_id)| *func_id), - }); - - if let Err((prev_span, prev_file)) = interner.add_trait_implementation( - self_type.clone(), - trait_id, - impl_id, - resolved_trait_impl, - ) { - let error = DefCollectorErrorKind::OverlappingImpl { - typ: self_type.clone(), - span: self_type_span.unwrap_or_else(|| trait_impl.trait_path.span()), - }; - errors.push((error.into(), trait_impl.file_id)); - - // The 'previous impl defined here' note must be a separate error currently - // since it may be in a different file and all errors have the same file id. 
- let error = DefCollectorErrorKind::OverlappingImplNote { span: prev_span }; - errors.push((error.into(), prev_file)); - } - - methods.append(&mut impl_methods); - } - } - - methods -} - // TODO(vitkov): Move this out of here and into type_check -fn check_methods_signatures( +pub(crate) fn check_methods_signatures( resolver: &mut Resolver, impl_methods: &Vec<(FileId, FuncId)>, trait_id: TraitId, trait_impl_generic_count: usize, errors: &mut Vec<(CompilationError, FileId)>, ) { - let the_trait = resolver.interner.get_trait(trait_id); - - let self_type = resolver.get_self_type().expect("trait impl must have a Self type"); + let self_type = resolver.get_self_type().expect("trait impl must have a Self type").clone(); // Temporarily bind the trait's Self type to self_type so we can type check - let _ = the_trait.self_type_typevar.borrow_mut().bind_to(self_type.clone(), the_trait.span); + let the_trait = resolver.interner.get_trait_mut(trait_id); + the_trait.self_type_typevar.bind(self_type); + + // Temporarily take the trait's methods so we can use both them and a mutable reference + // to the interner within the loop. + let trait_methods = std::mem::take(&mut the_trait.methods); for (file_id, func_id) in impl_methods { - let impl_method = resolver.interner.function_meta(func_id); let func_name = resolver.interner.function_name(func_id).to_owned(); - let mut typecheck_errors = Vec::new(); - // This is None in the case where the impl block has a method that's not part of the trait. // If that's the case, a `MethodNotInTrait` error has already been thrown, and we can ignore // the impl method, since there's nothing in the trait to match its signature against. if let Some(trait_method) = - the_trait.methods.iter().find(|method| method.name.0.contents == func_name) + trait_methods.iter().find(|method| method.name.0.contents == func_name) { - let impl_function_type = impl_method.typ.instantiate(resolver.interner); + let mut typecheck_errors = Vec::new(); + let impl_method = resolver.interner.function_meta(func_id); + + let (impl_function_type, _) = impl_method.typ.instantiate(resolver.interner); let impl_method_generic_count = impl_method.typ.generic_count() - trait_impl_generic_count; - let trait_method_generic_count = trait_method.generics.len(); + + // We subtract 1 here to account for the implicit generic `Self` type that is on all + // traits (and thus trait methods) but is not required (or allowed) for users to specify. 
+ let trait_method_generic_count = trait_method.generics().len() - 1; if impl_method_generic_count != trait_method_generic_count { let error = DefCollectorErrorKind::MismatchTraitImplementationNumGenerics { impl_method_generic_count, trait_method_generic_count, - trait_name: the_trait.name.to_string(), + trait_name: resolver.interner.get_trait(trait_id).name.to_string(), method_name: func_name.to_string(), span: impl_method.location.span, }; errors.push((error.into(), *file_id)); } - if let Type::Function(impl_params, _, _) = impl_function_type.0 { - if trait_method.arguments.len() == impl_params.len() { + if let Type::Function(impl_params, _, _) = impl_function_type { + if trait_method.arguments().len() == impl_params.len() { // Check the parameters of the impl method against the parameters of the trait method - let args = trait_method.arguments.iter(); + let args = trait_method.arguments().iter(); let args_and_params = args.zip(&impl_params).zip(&impl_method.parameters.0); for (parameter_index, ((expected, actual), (hir_pattern, _, _))) in @@ -1119,8 +527,8 @@ fn check_methods_signatures( } else { let error = DefCollectorErrorKind::MismatchTraitImplementationNumParameters { actual_num_parameters: impl_method.parameters.0.len(), - expected_num_parameters: trait_method.arguments.len(), - trait_name: the_trait.name.to_string(), + expected_num_parameters: trait_method.arguments().len(), + trait_name: resolver.interner.get_trait(trait_id).name.to_string(), method_name: func_name.to_string(), span: impl_method.location.span, }; @@ -1132,11 +540,13 @@ fn check_methods_signatures( let resolved_return_type = resolver.resolve_type(impl_method.return_type.get_type().into_owned()); - trait_method.return_type.unify(&resolved_return_type, &mut typecheck_errors, || { + // TODO: This is not right since it may bind generic return types + trait_method.return_type().unify(&resolved_return_type, &mut typecheck_errors, || { + let impl_method = resolver.interner.function_meta(func_id); let ret_type_span = impl_method.return_type.get_type().span; let expr_span = ret_type_span.expect("return type must always have a span"); - let expected_typ = trait_method.return_type.to_string(); + let expected_typ = trait_method.return_type().to_string(); let expr_typ = impl_method.return_type().to_string(); TypeCheckError::TypeMismatch { expr_typ, expected_typ, expr_span } }); @@ -1145,74 +555,7 @@ fn check_methods_signatures( } } - the_trait.self_type_typevar.borrow_mut().unbind(the_trait.self_type_typevar_id); -} - -fn resolve_free_functions( - interner: &mut NodeInterner, - crate_id: CrateId, - def_maps: &BTreeMap, - collected_functions: Vec, - self_type: Option, - errors: &mut Vec<(CompilationError, FileId)>, -) -> Vec<(FileId, FuncId)> { - // Lower each function in the crate. 
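To make the `- 1` adjustment above concrete: the trait side counts the implicit `Self` type variable among its generics, while an impl method only writes the user-visible ones. A tiny worked count under that assumption (the trait-impl's own generics, which the impl side also subtracts, are left out for brevity):

```rust
fn main() {
    // For `trait Eq2 { fn eq2<T>(self, other: T) -> bool; }`
    // the trait method's generic list is [Self, T] ...
    let trait_method_generics = ["Self", "T"];
    let trait_method_generic_count = trait_method_generics.len() - 1; // drop implicit Self

    // ... while `fn eq2<T>(self, other: T) -> bool` in an impl declares only [T].
    let impl_method_generics = ["T"];

    assert_eq!(impl_method_generics.len(), trait_method_generic_count);
}
```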
This is now possible since imports have been resolved - collected_functions - .into_iter() - .flat_map(|unresolved_functions| { - resolve_function_set( - interner, - crate_id, - def_maps, - unresolved_functions, - self_type.clone(), - None, - vec![], // no impl generics - errors, - ) - }) - .collect() -} - -#[allow(clippy::too_many_arguments)] -fn resolve_function_set( - interner: &mut NodeInterner, - crate_id: CrateId, - def_maps: &BTreeMap, - mut unresolved_functions: UnresolvedFunctions, - self_type: Option, - trait_impl_id: Option, - impl_generics: Vec<(Rc, Shared, Span)>, - errors: &mut Vec<(CompilationError, FileId)>, -) -> Vec<(FileId, FuncId)> { - let file_id = unresolved_functions.file_id; - - let where_clause_errors = - unresolved_functions.resolve_trait_bounds_trait_ids(def_maps, crate_id); - errors.extend(where_clause_errors.iter().cloned().map(|e| (e.into(), file_id))); - - vecmap(unresolved_functions.functions, |(mod_id, func_id, func)| { - let module_id = ModuleId { krate: crate_id, local_id: mod_id }; - let path_resolver = StandardPathResolver::new(module_id); - - let mut resolver = Resolver::new(interner, &path_resolver, def_maps, file_id); - // Must use set_generics here to ensure we re-use the same generics from when - // the impl was originally collected. Otherwise the function will be using different - // TypeVariables for the same generic, causing it to instantiate incorrectly. - resolver.set_generics(impl_generics.clone()); - resolver.set_self_type(self_type.clone()); - resolver.set_trait_id(unresolved_functions.trait_id); - resolver.set_trait_impl_id(trait_impl_id); - - // Without this, impl methods can accidentally be placed in contracts. See #3254 - if self_type.is_some() { - resolver.set_in_contract(false); - } - - let (hir_func, func_meta, errs) = resolver.resolve_function(func, func_id); - interner.push_fn_meta(func_meta, func_id); - interner.update_fn(func_id, hir_func); - errors.extend(errs.iter().cloned().map(|e| (e.into(), file_id))); - (file_id, func_id) - }) + let the_trait = resolver.interner.get_trait_mut(trait_id); + the_trait.set_methods(trait_methods); + the_trait.self_type_typevar.unbind(the_trait.self_type_typevar_id); } diff --git a/compiler/noirc_frontend/src/hir/def_collector/dc_mod.rs b/compiler/noirc_frontend/src/hir/def_collector/dc_mod.rs index 17aa5e9951f..04791b11b2a 100644 --- a/compiler/noirc_frontend/src/hir/def_collector/dc_mod.rs +++ b/compiler/noirc_frontend/src/hir/def_collector/dc_mod.rs @@ -1,7 +1,7 @@ -use std::vec; +use std::{collections::HashMap, path::Path, vec}; use acvm::acir::acir_field::FieldOptions; -use fm::FileId; +use fm::{FileId, FileManager, FILE_EXTENSION}; use noirc_errors::Location; use crate::{ @@ -44,6 +44,7 @@ pub fn collect_defs( ) -> Vec<(CompilationError, FileId)> { let mut collector = ModCollector { def_collector, file_id, module_id }; let mut errors: Vec<(CompilationError, FileId)> = vec![]; + // First resolve the module declarations for decl in ast.module_decls { errors.extend(collector.parse_module_declaration(context, &decl, crate_id)); @@ -57,6 +58,7 @@ pub fn collect_defs( module_id: collector.module_id, path: import.path, alias: import.alias, + is_prelude: false, }); } @@ -126,7 +128,8 @@ impl<'a> ModCollector<'a> { for method in r#impl.methods { let func_id = context.def_interner.push_empty_fn(); - context.def_interner.push_function(func_id, &method.def, module_id); + let location = Location::new(method.span(), self.file_id); + context.def_interner.push_function(func_id, &method.def, module_id, 
location); unresolved_functions.push_fn(self.module_id, func_id, method); } @@ -152,7 +155,8 @@ impl<'a> ModCollector<'a> { for (_, func_id, noir_function) in &mut unresolved_functions.functions { noir_function.def.where_clause.append(&mut trait_impl.where_clause.clone()); - context.def_interner.push_function(*func_id, &noir_function.def, module); + let location = Location::new(noir_function.def.span, self.file_id); + context.def_interner.push_function(*func_id, &noir_function.def, module, location); } let unresolved_trait_impl = UnresolvedTraitImpl { @@ -185,7 +189,8 @@ impl<'a> ModCollector<'a> { for item in &trait_impl.items { if let TraitImplItem::Function(impl_method) = item { let func_id = context.def_interner.push_empty_fn(); - context.def_interner.push_function(func_id, &impl_method.def, module); + let location = Location::new(impl_method.span(), self.file_id); + context.def_interner.push_function(func_id, &impl_method.def, module, location); unresolved_functions.push_fn(self.module_id, func_id, impl_method.clone()); } } @@ -218,7 +223,8 @@ impl<'a> ModCollector<'a> { // First create dummy function in the DefInterner // So that we can get a FuncId - context.def_interner.push_function(func_id, &function.def, module); + let location = Location::new(function.span(), self.file_id); + context.def_interner.push_function(func_id, &function.def, module, location); // Now link this func_id to a crate level map with the noir function and the module id // Encountering a NoirFunction, we retrieve it's module_data to get the namespace @@ -266,7 +272,9 @@ impl<'a> ModCollector<'a> { // Create the corresponding module for the struct namespace let id = match self.push_child_module(&name, self.file_id, false, false) { - Ok(local_id) => context.def_interner.new_struct(&unresolved, krate, local_id), + Ok(local_id) => { + context.def_interner.new_struct(&unresolved, krate, local_id, self.file_id) + } Err(error) => { definition_errors.push((error.into(), self.file_id)); continue; @@ -343,7 +351,7 @@ impl<'a> ModCollector<'a> { let name = trait_definition.name.clone(); // Create the corresponding module for the trait namespace - let id = match self.push_child_module(&name, self.file_id, false, false) { + let trait_id = match self.push_child_module(&name, self.file_id, false, false) { Ok(local_id) => TraitId(ModuleId { krate, local_id }), Err(error) => { errors.push((error.into(), self.file_id)); @@ -353,7 +361,7 @@ impl<'a> ModCollector<'a> { // Add the trait to scope so its path can be looked up later let result = - self.def_collector.def_map.modules[self.module_id.0].declare_trait(name, id); + self.def_collector.def_map.modules[self.module_id.0].declare_trait(name, trait_id); if let Err((first_def, second_def)) = result { let error = DefCollectorErrorKind::Duplicate { @@ -370,6 +378,8 @@ impl<'a> ModCollector<'a> { functions: Vec::new(), trait_id: None, }; + + let mut method_ids = HashMap::new(); for trait_item in &trait_definition.items { match trait_item { TraitItem::Function { @@ -381,6 +391,8 @@ impl<'a> ModCollector<'a> { body, } => { let func_id = context.def_interner.push_empty_fn(); + method_ids.insert(name.to_string(), func_id); + let modifiers = FunctionModifiers { name: name.to_string(), visibility: crate::FunctionVisibility::Public, @@ -391,9 +403,12 @@ impl<'a> ModCollector<'a> { is_internal: None, }; - context.def_interner.push_function_definition(func_id, modifiers, id.0); + let location = Location::new(name.span(), self.file_id); + context + .def_interner + 
.push_function_definition(func_id, modifiers, trait_id.0, location); - match self.def_collector.def_map.modules[id.0.local_id.0] + match self.def_collector.def_map.modules[trait_id.0.local_id.0] .declare_function(name.clone(), func_id) { Ok(()) => { @@ -428,7 +443,7 @@ impl<'a> ModCollector<'a> { let stmt_id = context.def_interner.push_empty_global(); if let Err((first_def, second_def)) = self.def_collector.def_map.modules - [id.0.local_id.0] + [trait_id.0.local_id.0] .declare_global(name.clone(), stmt_id) { let error = DefCollectorErrorKind::Duplicate { @@ -442,7 +457,7 @@ impl<'a> ModCollector<'a> { TraitItem::Type { name } => { // TODO(nickysn or alexvitkov): implement context.def_interner.push_empty_type_alias and get an id, instead of using TypeAliasId::dummy_id() if let Err((first_def, second_def)) = self.def_collector.def_map.modules - [id.0.local_id.0] + [trait_id.0.local_id.0] .declare_type_alias(name.clone(), TypeAliasId::dummy_id()) { let error = DefCollectorErrorKind::Duplicate { @@ -462,9 +477,10 @@ impl<'a> ModCollector<'a> { module_id: self.module_id, crate_id: krate, trait_def: trait_definition, + method_ids, fns_with_default_impl: unresolved_functions, }; - self.def_collector.collected_traits.insert(id, unresolved); + self.def_collector.collected_traits.insert(trait_id, unresolved); } errors } @@ -508,7 +524,7 @@ impl<'a> ModCollector<'a> { ) -> Vec<(CompilationError, FileId)> { let mut errors: Vec<(CompilationError, FileId)> = vec![]; let child_file_id = - match context.file_manager.find_module(self.file_id, &mod_name.0.contents) { + match find_module(&context.file_manager, self.file_id, &mod_name.0.contents) { Ok(child_file_id) => child_file_id, Err(expected_path) => { let mod_name = mod_name.clone(); @@ -528,11 +544,11 @@ impl<'a> ModCollector<'a> { }; errors.push((error.into(), location.file)); - let error2 = DefCollectorErrorKind::ModuleOriginallyDefined { + let error = DefCollectorErrorKind::ModuleOriginallyDefined { mod_name: mod_name.clone(), span: old_location.span, }; - errors.push((error2.into(), old_location.file)); + errors.push((error.into(), old_location.file)); return errors; } @@ -612,3 +628,116 @@ impl<'a> ModCollector<'a> { Ok(LocalModuleId(module_id)) } } + +fn find_module( + file_manager: &FileManager, + anchor: FileId, + mod_name: &str, +) -> Result { + let anchor_path = file_manager.path(anchor).with_extension(""); + let anchor_dir = anchor_path.parent().unwrap(); + + // if `anchor` is a `main.nr`, `lib.nr`, `mod.nr` or `{mod_name}.nr`, we check siblings of + // the anchor at `base/mod_name.nr`. + let candidate = if should_check_siblings_for_module(&anchor_path, anchor_dir) { + anchor_dir.join(format!("{mod_name}.{FILE_EXTENSION}")) + } else { + // Otherwise, we check for children of the anchor at `base/anchor/mod_name.nr` + anchor_path.join(format!("{mod_name}.{FILE_EXTENSION}")) + }; + + file_manager + .name_to_id(candidate.clone()) + .ok_or_else(|| candidate.as_os_str().to_string_lossy().to_string()) +} + +/// Returns true if a module's child modules are expected to be in the same directory. +/// Returns false if they are expected to be in a subdirectory matching the name of the module. +fn should_check_siblings_for_module(module_path: &Path, parent_path: &Path) -> bool { + if let Some(filename) = module_path.file_stem() { + // This check also means a `main.nr` or `lib.nr` file outside of the crate root would + // check its same directory for child modules instead of a subdirectory. 
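A small, self-contained sketch of the sibling-vs-child candidate selection implemented by `find_module`/`should_check_siblings_for_module` above (the `.nr` extension is hard-coded here and the helper is illustrative, not the compiler's code):

```rust
use std::path::{Path, PathBuf};

// Where would `mod {mod_name};` written inside `anchor` be looked up?
fn candidate(anchor: &str, mod_name: &str) -> PathBuf {
    let anchor_path = Path::new(anchor).with_extension("");
    let anchor_dir = anchor_path.parent().unwrap();
    let stem = anchor_path.file_stem().and_then(|s| s.to_str());
    let check_siblings = matches!(stem, Some("main" | "lib" | "mod"))
        || anchor_path.file_stem() == anchor_dir.file_stem();
    if check_siblings {
        anchor_dir.join(format!("{mod_name}.nr")) // e.g. src/foo.nr
    } else {
        anchor_path.join(format!("{mod_name}.nr")) // e.g. src/bar/foo.nr
    }
}

fn main() {
    // `mod foo;` in the crate root resolves to a sibling file...
    assert_eq!(candidate("src/lib.nr", "foo").as_path(), Path::new("src/foo.nr"));
    // ...while `mod baz;` in src/foo.nr resolves inside a matching subdirectory.
    assert_eq!(candidate("src/foo.nr", "baz").as_path(), Path::new("src/foo/baz.nr"));
}
```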
Should we prohibit + `main.nr` and `lib.nr` files outside of the crate root? + filename == "main" + || filename == "lib" + || filename == "mod" + || Some(filename) == parent_path.file_stem() + } else { + // If there's no filename, we arbitrarily return true. + // Alternatively, we could panic, but this is left to a different step where we + // ideally have some source location to issue an error. + true + } +} + +#[cfg(test)] +mod tests { + use super::*; + + use std::path::PathBuf; + use tempfile::{tempdir, TempDir}; + + // Returns the absolute path to the file + fn create_dummy_file(dir: &TempDir, file_name: &Path) -> PathBuf { + let file_path = dir.path().join(file_name); + let _file = std::fs::File::create(&file_path).unwrap(); + file_path + } + + #[test] + fn path_resolve_file_module() { + let dir = tempdir().unwrap(); + + let entry_file_name = Path::new("my_dummy_file.nr"); + create_dummy_file(&dir, entry_file_name); + + let mut fm = FileManager::new(dir.path()); + + let file_id = fm.add_file_with_source(entry_file_name, "fn foo() {}".to_string()).unwrap(); + + let dep_file_name = Path::new("foo.nr"); + create_dummy_file(&dir, dep_file_name); + find_module(&fm, file_id, "foo").unwrap_err(); + } + + #[test] + fn path_resolve_sub_module() { + let dir = tempdir().unwrap(); + let mut fm = FileManager::new(dir.path()); + + // Create a lib.nr file at the root. + // we now have dir/lib.nr + let lib_nr_path = create_dummy_file(&dir, Path::new("lib.nr")); + let file_id = fm + .add_file_with_source(lib_nr_path.as_path(), "fn foo() {}".to_string()) + .expect("could not add file to file manager and obtain a FileId"); + + // Create a subdirectory + // we now have: + // - dir/lib.nr + // - dir/sub_dir + let sub_dir = TempDir::new_in(&dir).unwrap(); + let sub_dir_name = sub_dir.path().file_name().unwrap().to_str().unwrap(); + + // Add foo.nr to the subdirectory + // we now have: + // - dir/lib.nr + // - dir/sub_dir/foo.nr + let foo_nr_path = create_dummy_file(&sub_dir, Path::new("foo.nr")); + fm.add_file_with_source(foo_nr_path.as_path(), "fn foo() {}".to_string()); + + // Add a parent module for the sub_dir + // we now have: + // - dir/lib.nr + // - dir/sub_dir.nr + // - dir/sub_dir/foo.nr + let sub_dir_nr_path = create_dummy_file(&dir, Path::new(&format!("{sub_dir_name}.nr"))); + fm.add_file_with_source(sub_dir_nr_path.as_path(), "fn foo() {}".to_string()); + + // First check for the sub_dir.nr file and add it to the FileManager + let sub_dir_file_id = find_module(&fm, file_id, sub_dir_name).unwrap(); + + // Now check for files in its subdirectory + find_module(&fm, sub_dir_file_id, "foo").unwrap(); + } +} diff --git a/compiler/noirc_frontend/src/hir/def_collector/errors.rs b/compiler/noirc_frontend/src/hir/def_collector/errors.rs index edb39fe68d7..2b91c4b36c5 100644 --- a/compiler/noirc_frontend/src/hir/def_collector/errors.rs +++ b/compiler/noirc_frontend/src/hir/def_collector/errors.rs @@ -73,15 +73,16 @@ pub enum DefCollectorErrorKind { "Either the type or the trait must be from the same crate as the trait implementation" )] TraitImplOrphaned { span: Span }, + #[error("macro error : {0:?}")] + MacroError(MacroError), +} - // Aztec feature flag errors - // TODO(benesjan): https://github.com/AztecProtocol/aztec-packages/issues/2905 - #[cfg(feature = "aztec")] - #[error("Aztec dependency not found. Please add aztec as a dependency in your Cargo.toml")] - AztecNotFound {}, - #[cfg(feature = "aztec")] - #[error("compute_note_hash_and_nullifier function not found. 
Define it in your contract.")] - AztecComputeNoteHashAndNullifierNotFound { span: Span }, +/// An error struct that macro processors can return. +#[derive(Debug, Clone)] +pub struct MacroError { + pub primary_message: String, + pub secondary_message: Option, + pub span: Option, } impl DefCollectorErrorKind { @@ -245,16 +246,9 @@ impl From for Diagnostic { "Either the type or the trait must be from the same crate as the trait implementation".into(), span, ), - #[cfg(feature = "aztec")] - DefCollectorErrorKind::AztecNotFound {} => Diagnostic::from_message( - "Aztec dependency not found. Please add aztec as a dependency in your Cargo.toml", - ), - #[cfg(feature = "aztec")] - DefCollectorErrorKind::AztecComputeNoteHashAndNullifierNotFound {span} => Diagnostic::simple_error( - "compute_note_hash_and_nullifier function not found. Define it in your contract.".into(), - "".into(), - span - ), + DefCollectorErrorKind::MacroError(macro_error) => { + Diagnostic::simple_error(macro_error.primary_message, macro_error.secondary_message.unwrap_or_default(), macro_error.span.unwrap_or_default()) + }, } } } diff --git a/compiler/noirc_frontend/src/hir/def_map/item_scope.rs b/compiler/noirc_frontend/src/hir/def_map/item_scope.rs index 42cca550651..523def89518 100644 --- a/compiler/noirc_frontend/src/hir/def_map/item_scope.rs +++ b/compiler/noirc_frontend/src/hir/def_map/item_scope.rs @@ -5,6 +5,8 @@ use crate::{ }; use std::collections::{hash_map::Entry, HashMap}; +type Scope = HashMap, (ModuleDefId, Visibility, bool /*is_prelude*/)>; + #[derive(Debug, PartialEq, Eq, Copy, Clone)] pub enum Visibility { Public, @@ -12,8 +14,8 @@ pub enum Visibility { #[derive(Default, Debug, PartialEq, Eq)] pub struct ItemScope { - types: HashMap, (ModuleDefId, Visibility)>>, - values: HashMap, (ModuleDefId, Visibility)>>, + types: HashMap, + values: HashMap, defs: Vec, } @@ -25,7 +27,7 @@ impl ItemScope { mod_def: ModuleDefId, trait_id: Option, ) -> Result<(), (Ident, Ident)> { - self.add_item_to_namespace(name, mod_def, trait_id)?; + self.add_item_to_namespace(name, mod_def, trait_id, false)?; self.defs.push(mod_def); Ok(()) } @@ -38,25 +40,31 @@ impl ItemScope { name: Ident, mod_def: ModuleDefId, trait_id: Option, + is_prelude: bool, ) -> Result<(), (Ident, Ident)> { - let add_item = - |map: &mut HashMap, (ModuleDefId, Visibility)>>| { - if let Entry::Occupied(mut o) = map.entry(name.clone()) { - let trait_hashmap = o.get_mut(); - if let Entry::Occupied(_) = trait_hashmap.entry(trait_id) { - let old_ident = o.key(); - Err((old_ident.clone(), name)) - } else { - trait_hashmap.insert(trait_id, (mod_def, Visibility::Public)); + let add_item = |map: &mut HashMap| { + if let Entry::Occupied(mut o) = map.entry(name.clone()) { + let trait_hashmap = o.get_mut(); + if let Entry::Occupied(mut n) = trait_hashmap.entry(trait_id) { + let is_prelude = std::mem::replace(&mut n.get_mut().2, is_prelude); + let old_ident = o.key(); + + if is_prelude { Ok(()) + } else { + Err((old_ident.clone(), name)) } } else { - let mut trait_hashmap = HashMap::new(); - trait_hashmap.insert(trait_id, (mod_def, Visibility::Public)); - map.insert(name, trait_hashmap); + trait_hashmap.insert(trait_id, (mod_def, Visibility::Public, is_prelude)); Ok(()) } - }; + } else { + let mut trait_hashmap = HashMap::new(); + trait_hashmap.insert(trait_id, (mod_def, Visibility::Public, is_prelude)); + map.insert(name, trait_hashmap); + Ok(()) + } + }; match mod_def { ModuleDefId::ModuleId(_) => add_item(&mut self.types), @@ -69,7 +77,7 @@ impl ItemScope { } pub fn 
find_module_with_name(&self, mod_name: &Ident) -> Option<&ModuleId> { - let (module_def, _) = self.types.get(mod_name)?.get(&None)?; + let (module_def, _, _) = self.types.get(mod_name)?.get(&None)?; match module_def { ModuleDefId::ModuleId(id) => Some(id), _ => None, @@ -81,13 +89,13 @@ impl ItemScope { // methods introduced without trait take priority and hide methods with the same name that come from a trait let a = trait_hashmap.get(&None); match a { - Some((module_def, _)) => match module_def { + Some((module_def, _, _)) => match module_def { ModuleDefId::FunctionId(id) => Some(*id), _ => None, }, None => { if trait_hashmap.len() == 1 { - let (module_def, _) = trait_hashmap.get(trait_hashmap.keys().last()?)?; + let (module_def, _, _) = trait_hashmap.get(trait_hashmap.keys().last()?)?; match module_def { ModuleDefId::FunctionId(id) => Some(*id), _ => None, @@ -105,7 +113,7 @@ impl ItemScope { func_name: &Ident, trait_id: &Option, ) -> Option { - let (module_def, _) = self.values.get(func_name)?.get(trait_id)?; + let (module_def, _, _) = self.values.get(func_name)?.get(trait_id)?; match module_def { ModuleDefId::FunctionId(id) => Some(*id), _ => None, @@ -115,20 +123,19 @@ impl ItemScope { pub fn find_name(&self, name: &Ident) -> PerNs { // Names, not associated with traits are searched first. If not found, we search for name, coming from a trait. // If we find only one name from trait, we return it. If there are multiple traits, providing the same name, we return None. - let find_name_in = - |a: &HashMap, (ModuleDefId, Visibility)>>| { - if let Some(t) = a.get(name) { - if let Some(tt) = t.get(&None) { - Some(*tt) - } else if t.len() == 1 { - t.values().last().cloned() - } else { - None - } + let find_name_in = |a: &HashMap| { + if let Some(t) = a.get(name) { + if let Some(tt) = t.get(&None) { + Some(*tt) + } else if t.len() == 1 { + t.values().last().cloned() } else { None } - }; + } else { + None + } + }; PerNs { types: find_name_in(&self.types), values: find_name_in(&self.values) } } @@ -144,15 +151,19 @@ impl ItemScope { } } + pub fn names(&self) -> impl Iterator { + self.types.keys().chain(self.values.keys()) + } + pub fn definitions(&self) -> Vec { self.defs.clone() } - pub fn types(&self) -> &HashMap, (ModuleDefId, Visibility)>> { + pub fn types(&self) -> &HashMap { &self.types } - pub fn values(&self) -> &HashMap, (ModuleDefId, Visibility)>> { + pub fn values(&self) -> &HashMap { &self.values } diff --git a/compiler/noirc_frontend/src/hir/def_map/mod.rs b/compiler/noirc_frontend/src/hir/def_map/mod.rs index 345e5447bf5..fbf94468c4b 100644 --- a/compiler/noirc_frontend/src/hir/def_map/mod.rs +++ b/compiler/noirc_frontend/src/hir/def_map/mod.rs @@ -1,6 +1,7 @@ use crate::graph::CrateId; use crate::hir::def_collector::dc_crate::{CompilationError, DefCollector}; use crate::hir::Context; +use crate::macros_api::MacroProcessor; use crate::node_interner::{FuncId, NodeInterner, StructId}; use crate::parser::{parse_program, ParsedModule, ParserError}; use crate::token::{FunctionAttribute, SecondaryAttribute, TestScope}; @@ -17,6 +18,8 @@ pub use module_data::*; mod namespace; pub use namespace::*; +use super::def_collector::errors::DefCollectorErrorKind; + /// The name that is used for a non-contract program's entry-point function. pub const MAIN_FUNCTION: &str = "main"; @@ -61,6 +64,7 @@ pub struct CrateDefMap { pub(crate) krate: CrateId, + /// Maps an external dependency's name to its root module id. 
pub(crate) extern_prelude: BTreeMap, } @@ -69,6 +73,7 @@ impl CrateDefMap { pub fn collect_defs( crate_id: CrateId, context: &mut Context, + macro_processors: Vec<&dyn MacroProcessor>, ) -> Vec<(CompilationError, FileId)> { // Check if this Crate has already been compiled // XXX: There is probably a better alternative for this. @@ -83,16 +88,18 @@ impl CrateDefMap { // First parse the root file. let root_file_id = context.crate_graph[crate_id].root_file_id; let (ast, parsing_errors) = parse_file(&context.file_manager, root_file_id); - let ast = ast.into_sorted(); - - #[cfg(feature = "aztec")] - let ast = match super::aztec_library::transform(ast, &crate_id, context) { - Ok(ast) => ast, - Err((error, file_id)) => { - errors.push((error.into(), file_id)); - return errors; - } - }; + let mut ast = ast.into_sorted(); + + for macro_processor in &macro_processors { + ast = match macro_processor.process_untyped_ast(ast, &crate_id, context) { + Ok(ast) => ast, + Err((error, file_id)) => { + let def_error = DefCollectorErrorKind::MacroError(error); + errors.push((def_error.into(), file_id)); + return errors; + } + }; + } // Allocate a default Module for the root, giving it a ModuleId let mut modules: Arena = Arena::default(); @@ -107,7 +114,13 @@ impl CrateDefMap { }; // Now we want to populate the CrateDefMap using the DefCollector - errors.extend(DefCollector::collect(def_map, context, ast, root_file_id)); + errors.extend(DefCollector::collect( + def_map, + context, + ast, + root_file_id, + macro_processors.clone(), + )); errors.extend( parsing_errors.iter().map(|e| (e.clone().into(), root_file_id)).collect::>(), @@ -162,6 +175,28 @@ impl CrateDefMap { }) } + /// Go through all modules in this crate, and find all functions in + /// each module with the #[export] attribute + pub fn get_all_exported_functions<'a>( + &'a self, + interner: &'a NodeInterner, + ) -> impl Iterator + 'a { + self.modules.iter().flat_map(|(_, module)| { + module.value_definitions().filter_map(|id| { + if let Some(func_id) = id.as_function() { + let attributes = interner.function_attributes(&func_id); + if attributes.secondary.contains(&SecondaryAttribute::Export) { + Some(func_id) + } else { + None + } + } else { + None + } + }) + }) + } + /// Go through all modules in this crate, find all `contract ... { ... }` declarations, /// and collect them all into a Vec. 
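For orientation, a hedged sketch of how a tool driving the compiler might consume the new `#[export]` collection through the `Context` helper added further below (the module paths and the reporting are assumptions for illustration, not part of this diff):

```rust
use noirc_frontend::graph::CrateId;
use noirc_frontend::hir::Context;

// List every #[export]-annotated function in the local crate, assuming `context`
// has already been populated and type-checked by an earlier compilation step.
fn report_exports(context: &Context<'_>, crate_id: &CrateId) {
    for (name, _func_id) in context.get_all_exported_functions_in_crate(crate_id) {
        println!("exported fn: {name}");
    }
}
```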
pub fn get_all_contracts(&self, interner: &NodeInterner) -> Vec { @@ -259,8 +294,8 @@ pub struct Contract { /// Given a FileId, fetch the File, from the FileManager and parse it's content pub fn parse_file(fm: &FileManager, file_id: FileId) -> (ParsedModule, Vec) { - let file = fm.fetch_file(file_id); - parse_program(file.source()) + let file_source = fm.fetch_file(file_id); + parse_program(file_source) } impl std::ops::Index for CrateDefMap { diff --git a/compiler/noirc_frontend/src/hir/def_map/module_data.rs b/compiler/noirc_frontend/src/hir/def_map/module_data.rs index 29b11e92c01..fbb5e5cf741 100644 --- a/compiler/noirc_frontend/src/hir/def_map/module_data.rs +++ b/compiler/noirc_frontend/src/hir/def_map/module_data.rs @@ -41,6 +41,10 @@ impl ModuleData { } } + pub(crate) fn scope(&self) -> &ItemScope { + &self.scope + } + fn declare( &mut self, name: Ident, @@ -104,8 +108,13 @@ impl ModuleData { self.scope.find_func_with_name(name) } - pub fn import(&mut self, name: Ident, id: ModuleDefId) -> Result<(), (Ident, Ident)> { - self.scope.add_item_to_namespace(name, id, None) + pub fn import( + &mut self, + name: Ident, + id: ModuleDefId, + is_prelude: bool, + ) -> Result<(), (Ident, Ident)> { + self.scope.add_item_to_namespace(name, id, None, is_prelude) } pub fn find_name(&self, name: &Ident) -> PerNs { @@ -113,12 +122,12 @@ impl ModuleData { } pub fn type_definitions(&self) -> impl Iterator + '_ { - self.definitions.types().values().flat_map(|a| a.values().map(|(id, _)| *id)) + self.definitions.types().values().flat_map(|a| a.values().map(|(id, _, _)| *id)) } /// Return an iterator over all definitions defined within this module, /// excluding any type definitions. pub fn value_definitions(&self) -> impl Iterator + '_ { - self.definitions.values().values().flat_map(|a| a.values().map(|(id, _)| *id)) + self.definitions.values().values().flat_map(|a| a.values().map(|(id, _, _)| *id)) } } diff --git a/compiler/noirc_frontend/src/hir/def_map/module_def.rs b/compiler/noirc_frontend/src/hir/def_map/module_def.rs index 659d7712ab4..3e5629639fa 100644 --- a/compiler/noirc_frontend/src/hir/def_map/module_def.rs +++ b/compiler/noirc_frontend/src/hir/def_map/module_def.rs @@ -61,6 +61,13 @@ impl ModuleDefId { ModuleDefId::GlobalId(_) => "global", } } + + pub fn as_module(&self) -> Option { + match self { + Self::ModuleId(v) => Some(*v), + _ => None, + } + } } impl From for ModuleDefId { diff --git a/compiler/noirc_frontend/src/hir/def_map/namespace.rs b/compiler/noirc_frontend/src/hir/def_map/namespace.rs index 9221b389d84..ca14d9f8617 100644 --- a/compiler/noirc_frontend/src/hir/def_map/namespace.rs +++ b/compiler/noirc_frontend/src/hir/def_map/namespace.rs @@ -3,13 +3,13 @@ use super::{item_scope::Visibility, ModuleDefId}; // This works exactly the same as in r-a, just simplified #[derive(Debug, PartialEq, Eq, Copy, Clone)] pub struct PerNs { - pub types: Option<(ModuleDefId, Visibility)>, - pub values: Option<(ModuleDefId, Visibility)>, + pub types: Option<(ModuleDefId, Visibility, bool)>, + pub values: Option<(ModuleDefId, Visibility, bool)>, } impl PerNs { pub fn types(t: ModuleDefId) -> PerNs { - PerNs { types: Some((t, Visibility::Public)), values: None } + PerNs { types: Some((t, Visibility::Public, false)), values: None } } pub fn take_types(self) -> Option { @@ -24,7 +24,7 @@ impl PerNs { self.types.map(|it| it.0).into_iter().chain(self.values.map(|it| it.0)) } - pub fn iter_items(self) -> impl Iterator { + pub fn iter_items(self) -> impl Iterator { 
self.types.into_iter().chain(self.values) } diff --git a/compiler/noirc_frontend/src/hir/mod.rs b/compiler/noirc_frontend/src/hir/mod.rs index 5a28d7b779a..c62f357167f 100644 --- a/compiler/noirc_frontend/src/hir/mod.rs +++ b/compiler/noirc_frontend/src/hir/mod.rs @@ -4,15 +4,13 @@ pub mod resolution; pub mod scope; pub mod type_check; -#[cfg(feature = "aztec")] -pub(crate) mod aztec_library; - use crate::graph::{CrateGraph, CrateId}; use crate::hir_def::function::FuncMeta; use crate::node_interner::{FuncId, NodeInterner, StructId}; use def_map::{Contract, CrateDefMap}; use fm::FileManager; use noirc_errors::Location; +use std::borrow::Cow; use std::collections::BTreeMap; use self::def_map::TestFunction; @@ -20,19 +18,18 @@ use self::def_map::TestFunction; /// Helper object which groups together several useful context objects used /// during name resolution. Once name resolution is finished, only the /// def_interner is required for type inference and monomorphization. -pub struct Context { +pub struct Context<'file_manager> { pub def_interner: NodeInterner, pub crate_graph: CrateGraph, pub(crate) def_maps: BTreeMap, - pub file_manager: FileManager, + // In the WASM context, we take ownership of the file manager, + // which is why this needs to be a Cow. In all use-cases, the file manager + // is read-only however, once it has been passed to the Context. + pub file_manager: Cow<'file_manager, FileManager>, /// A map of each file that already has been visited from a prior `mod foo;` declaration. /// This is used to issue an error if a second `mod foo;` is declared to the same file. pub visited_files: BTreeMap, - - /// Maps a given (contract) module id to the next available storage slot - /// for that contract. - pub storage_slots: BTreeMap, } #[derive(Debug, Copy, Clone)] @@ -42,17 +39,24 @@ pub enum FunctionNameMatch<'a> { Contains(&'a str), } -pub type StorageSlot = u32; +impl Context<'_> { + pub fn new(file_manager: FileManager) -> Context<'static> { + Context { + def_interner: NodeInterner::default(), + def_maps: BTreeMap::new(), + visited_files: BTreeMap::new(), + crate_graph: CrateGraph::default(), + file_manager: Cow::Owned(file_manager), + } + } -impl Context { - pub fn new(file_manager: FileManager, crate_graph: CrateGraph) -> Context { + pub fn from_ref_file_manager(file_manager: &FileManager) -> Context<'_> { Context { def_interner: NodeInterner::default(), def_maps: BTreeMap::new(), visited_files: BTreeMap::new(), - crate_graph, - file_manager, - storage_slots: BTreeMap::new(), + crate_graph: CrateGraph::default(), + file_manager: Cow::Borrowed(file_manager), } } @@ -148,7 +152,7 @@ impl Context { None } - pub fn function_meta(&self, func_id: &FuncId) -> FuncMeta { + pub fn function_meta(&self, func_id: &FuncId) -> &FuncMeta { self.def_interner.function_meta(func_id) } @@ -190,6 +194,19 @@ impl Context { .collect() } + pub fn get_all_exported_functions_in_crate(&self, crate_id: &CrateId) -> Vec<(String, FuncId)> { + let interner = &self.def_interner; + let def_map = self.def_map(crate_id).expect("The local crate should be analyzed already"); + + def_map + .get_all_exported_functions(interner) + .map(|function_id| { + let function_name = self.function_name(&function_id).to_owned(); + (function_name, function_id) + }) + .collect() + } + /// Return a Vec of all `contract` declarations in the source code and the functions they contain pub fn get_all_contracts(&self, crate_id: &CrateId) -> Vec { self.def_map(crate_id) @@ -200,16 +217,4 @@ impl Context { fn module(&self, module_id: 
def_map::ModuleId) -> &def_map::ModuleData { module_id.module(&self.def_maps) } - - /// Returns the next available storage slot in the given module. - /// Returns None if the given module is not a contract module. - fn next_storage_slot(&mut self, module_id: def_map::ModuleId) -> Option { - let module = self.module(module_id); - - module.is_contract.then(|| { - let next_slot = self.storage_slots.entry(module_id).or_insert(0); - *next_slot += 1; - *next_slot - }) - } } diff --git a/compiler/noirc_frontend/src/hir/resolution/functions.rs b/compiler/noirc_frontend/src/hir/resolution/functions.rs new file mode 100644 index 00000000000..e63de9b9173 --- /dev/null +++ b/compiler/noirc_frontend/src/hir/resolution/functions.rs @@ -0,0 +1,85 @@ +use std::{collections::BTreeMap, rc::Rc}; + +use fm::FileId; +use iter_extended::vecmap; +use noirc_errors::Span; + +use crate::{ + graph::CrateId, + hir::{ + def_collector::dc_crate::{CompilationError, UnresolvedFunctions}, + def_map::{CrateDefMap, ModuleId}, + }, + node_interner::{FuncId, NodeInterner, TraitImplId}, + Type, TypeVariable, +}; + +use super::{path_resolver::StandardPathResolver, resolver::Resolver}; + +#[allow(clippy::too_many_arguments)] +pub(crate) fn resolve_function_set( + interner: &mut NodeInterner, + crate_id: CrateId, + def_maps: &BTreeMap, + mut unresolved_functions: UnresolvedFunctions, + self_type: Option, + trait_impl_id: Option, + impl_generics: Vec<(Rc, TypeVariable, Span)>, + errors: &mut Vec<(CompilationError, FileId)>, +) -> Vec<(FileId, FuncId)> { + let file_id = unresolved_functions.file_id; + + let where_clause_errors = + unresolved_functions.resolve_trait_bounds_trait_ids(def_maps, crate_id); + errors.extend(where_clause_errors.iter().cloned().map(|e| (e.into(), file_id))); + + vecmap(unresolved_functions.functions, |(mod_id, func_id, func)| { + let module_id = ModuleId { krate: crate_id, local_id: mod_id }; + let path_resolver = StandardPathResolver::new(module_id); + + let mut resolver = Resolver::new(interner, &path_resolver, def_maps, file_id); + // Must use set_generics here to ensure we re-use the same generics from when + // the impl was originally collected. Otherwise the function will be using different + // TypeVariables for the same generic, causing it to instantiate incorrectly. + resolver.set_generics(impl_generics.clone()); + resolver.set_self_type(self_type.clone()); + resolver.set_trait_id(unresolved_functions.trait_id); + resolver.set_trait_impl_id(trait_impl_id); + + // Without this, impl methods can accidentally be placed in contracts. 
See #3254 + if self_type.is_some() { + resolver.set_in_contract(false); + } + + let (hir_func, func_meta, errs) = resolver.resolve_function(func, func_id); + interner.push_fn_meta(func_meta, func_id); + interner.update_fn(func_id, hir_func); + errors.extend(errs.iter().cloned().map(|e| (e.into(), file_id))); + (file_id, func_id) + }) +} + +pub(crate) fn resolve_free_functions( + interner: &mut NodeInterner, + crate_id: CrateId, + def_maps: &BTreeMap, + collected_functions: Vec, + self_type: Option, + errors: &mut Vec<(CompilationError, FileId)>, +) -> Vec<(FileId, FuncId)> { + collected_functions + .into_iter() + .flat_map(|unresolved_functions| { + resolve_function_set( + interner, + crate_id, + def_maps, + unresolved_functions, + self_type.clone(), + None, + vec![], // no impl generics + errors, + ) + }) + .collect() +} diff --git a/compiler/noirc_frontend/src/hir/resolution/globals.rs b/compiler/noirc_frontend/src/hir/resolution/globals.rs new file mode 100644 index 00000000000..b5aec212dbf --- /dev/null +++ b/compiler/noirc_frontend/src/hir/resolution/globals.rs @@ -0,0 +1,55 @@ +use super::{path_resolver::StandardPathResolver, resolver::Resolver, take_errors}; +use crate::{ + graph::CrateId, + hir::{ + def_collector::dc_crate::{CompilationError, UnresolvedGlobal}, + def_map::ModuleId, + Context, + }, + node_interner::StmtId, +}; +use fm::FileId; +use iter_extended::vecmap; + +pub(crate) struct ResolvedGlobals { + pub(crate) globals: Vec<(FileId, StmtId)>, + pub(crate) errors: Vec<(CompilationError, FileId)>, +} + +impl ResolvedGlobals { + pub(crate) fn extend(&mut self, oth: Self) { + self.globals.extend(oth.globals); + self.errors.extend(oth.errors); + } +} + +pub(crate) fn resolve_globals( + context: &mut Context, + globals: Vec, + crate_id: CrateId, +) -> ResolvedGlobals { + let mut errors: Vec<(CompilationError, FileId)> = vec![]; + let globals = vecmap(globals, |global| { + let module_id = ModuleId { local_id: global.module_id, krate: crate_id }; + let path_resolver = StandardPathResolver::new(module_id); + + let mut resolver = Resolver::new( + &mut context.def_interner, + &path_resolver, + &context.def_maps, + global.file_id, + ); + + let name = global.stmt_def.pattern.name_ident().clone(); + + let hir_stmt = resolver.resolve_global_let(global.stmt_def); + errors.extend(take_errors(global.file_id, resolver)); + + context.def_interner.update_global(global.stmt_id, hir_stmt); + + context.def_interner.push_global(global.stmt_id, name, global.module_id); + + (global.file_id, global.stmt_id) + }); + ResolvedGlobals { globals, errors } +} diff --git a/compiler/noirc_frontend/src/hir/resolution/impls.rs b/compiler/noirc_frontend/src/hir/resolution/impls.rs new file mode 100644 index 00000000000..4aa70f00cfc --- /dev/null +++ b/compiler/noirc_frontend/src/hir/resolution/impls.rs @@ -0,0 +1,137 @@ +use std::collections::BTreeMap; + +use fm::FileId; + +use crate::{ + graph::CrateId, + hir::{ + def_collector::{ + dc_crate::{CompilationError, ImplMap}, + errors::DefCollectorErrorKind, + }, + def_map::{CrateDefMap, ModuleId}, + Context, + }, + node_interner::{FuncId, NodeInterner}, + Type, +}; + +use super::{ + errors::ResolverError, functions, get_module_mut, get_struct_type, + path_resolver::StandardPathResolver, resolver::Resolver, take_errors, +}; + +/// Go through the list of impls and add each function within to the scope +/// of the module defined by its type. 
+pub(crate) fn collect_impls( + context: &mut Context, + crate_id: CrateId, + collected_impls: &ImplMap, +) -> Vec<(CompilationError, FileId)> { + let interner = &mut context.def_interner; + let def_maps = &mut context.def_maps; + let mut errors: Vec<(CompilationError, FileId)> = vec![]; + + for ((unresolved_type, module_id), methods) in collected_impls { + let path_resolver = + StandardPathResolver::new(ModuleId { local_id: *module_id, krate: crate_id }); + + let file = def_maps[&crate_id].file_id(*module_id); + + for (generics, span, unresolved) in methods { + let mut resolver = Resolver::new(interner, &path_resolver, def_maps, file); + resolver.add_generics(generics); + let typ = resolver.resolve_type(unresolved_type.clone()); + + errors.extend(take_errors(unresolved.file_id, resolver)); + + if let Some(struct_type) = get_struct_type(&typ) { + let struct_type = struct_type.borrow(); + + // `impl`s are only allowed on types defined within the current crate + if struct_type.id.krate() != crate_id { + let span = *span; + let type_name = struct_type.name.to_string(); + let error = DefCollectorErrorKind::ForeignImpl { span, type_name }; + errors.push((error.into(), unresolved.file_id)); + continue; + } + + // Grab the module defined by the struct type. Note that impls are a case + // where the module the methods are added to is not the same as the module + // they are resolved in. + let module = get_module_mut(def_maps, struct_type.id.module_id()); + + for (_, method_id, method) in &unresolved.functions { + // If this method was already declared, remove it from the module so it cannot + // be accessed with the `TypeName::method` syntax. We'll check later whether the + // object types in each method overlap or not. If they do, we issue an error. + // If not, that is specialization which is allowed. 
+ if module.declare_function(method.name_ident().clone(), *method_id).is_err() { + module.remove_function(method.name_ident()); + } + } + // Prohibit defining impls for primitive types if we're not in the stdlib + } else if typ != Type::Error && !crate_id.is_stdlib() { + let span = *span; + let error = DefCollectorErrorKind::NonStructTypeInImpl { span }; + errors.push((error.into(), unresolved.file_id)); + } + } + } + errors +} + +pub(crate) fn resolve_impls( + interner: &mut NodeInterner, + crate_id: CrateId, + def_maps: &BTreeMap, + collected_impls: ImplMap, + errors: &mut Vec<(CompilationError, FileId)>, +) -> Vec<(FileId, FuncId)> { + let mut file_method_ids = Vec::new(); + + for ((unresolved_type, module_id), methods) in collected_impls { + let path_resolver = + StandardPathResolver::new(ModuleId { local_id: module_id, krate: crate_id }); + + let file = def_maps[&crate_id].file_id(module_id); + + for (generics, _, functions) in methods { + let mut resolver = Resolver::new(interner, &path_resolver, def_maps, file); + resolver.add_generics(&generics); + let generics = resolver.get_generics().to_vec(); + let self_type = resolver.resolve_type(unresolved_type.clone()); + + let mut file_func_ids = functions::resolve_function_set( + interner, + crate_id, + def_maps, + functions, + Some(self_type.clone()), + None, + generics, + errors, + ); + if self_type != Type::Error { + for (file_id, method_id) in &file_func_ids { + let method_name = interner.function_name(method_id).to_owned(); + + if let Some(first_fn) = + interner.add_method(&self_type, method_name.clone(), *method_id, false) + { + let error = ResolverError::DuplicateDefinition { + name: method_name, + first_span: interner.function_ident(&first_fn).span(), + second_span: interner.function_ident(method_id).span(), + }; + errors.push((error.into(), *file_id)); + } + } + } + file_method_ids.append(&mut file_func_ids); + } + } + + file_method_ids +} diff --git a/compiler/noirc_frontend/src/hir/resolution/import.rs b/compiler/noirc_frontend/src/hir/resolution/import.rs index 6f3140a65d4..e6ac33053a0 100644 --- a/compiler/noirc_frontend/src/hir/resolution/import.rs +++ b/compiler/noirc_frontend/src/hir/resolution/import.rs @@ -1,5 +1,4 @@ -use iter_extended::partition_results; -use noirc_errors::CustomDiagnostic; +use noirc_errors::{CustomDiagnostic, Span}; use crate::graph::CrateId; use std::collections::BTreeMap; @@ -12,6 +11,7 @@ pub struct ImportDirective { pub module_id: LocalModuleId, pub path: Path, pub alias: Option, + pub is_prelude: bool, } pub type PathResolution = Result; @@ -30,6 +30,7 @@ pub struct ResolvedImport { pub resolved_namespace: PerNs, // The module which we must add the resolved namespace to pub module_scope: LocalModuleId, + pub is_prelude: bool, } impl From for CustomDiagnostic { @@ -49,24 +50,27 @@ impl From for CustomDiagnostic { } } -pub fn resolve_imports( +pub fn resolve_import( crate_id: CrateId, - imports_to_resolve: Vec, + import_directive: ImportDirective, def_maps: &BTreeMap, -) -> (Vec, Vec<(PathResolutionError, LocalModuleId)>) { +) -> Result { let def_map = &def_maps[&crate_id]; - partition_results(imports_to_resolve, |import_directive| { - let allow_contracts = - allow_referencing_contracts(def_maps, crate_id, import_directive.module_id); + let allow_contracts = + allow_referencing_contracts(def_maps, crate_id, import_directive.module_id); - let module_scope = import_directive.module_id; - let resolved_namespace = - resolve_path_to_ns(&import_directive, def_map, def_maps, allow_contracts) - 
.map_err(|error| (error, module_scope))?; + let module_scope = import_directive.module_id; + let resolved_namespace = + resolve_path_to_ns(&import_directive, def_map, def_maps, allow_contracts) + .map_err(|error| (error, module_scope))?; - let name = resolve_path_name(&import_directive); - Ok(ResolvedImport { name, resolved_namespace, module_scope }) + let name = resolve_path_name(&import_directive); + Ok(ResolvedImport { + name, + resolved_namespace, + module_scope, + is_prelude: import_directive.is_prelude, }) } @@ -202,9 +206,17 @@ fn resolve_external_dep( // Create an import directive for the dependency crate let path_without_crate_name = &path[1..]; // XXX: This will panic if the path is of the form `use dep::std` Ideal algorithm will not distinguish between crate and module - let path = Path { segments: path_without_crate_name.to_vec(), kind: PathKind::Plain }; - let dep_directive = - ImportDirective { module_id: dep_module.local_id, path, alias: directive.alias.clone() }; + let path = Path { + segments: path_without_crate_name.to_vec(), + kind: PathKind::Plain, + span: Span::default(), + }; + let dep_directive = ImportDirective { + module_id: dep_module.local_id, + path, + alias: directive.alias.clone(), + is_prelude: false, + }; let dep_def_map = def_maps.get(&dep_module.krate).unwrap(); diff --git a/compiler/noirc_frontend/src/hir/resolution/mod.rs b/compiler/noirc_frontend/src/hir/resolution/mod.rs index 601e78015ca..8c16a9cca80 100644 --- a/compiler/noirc_frontend/src/hir/resolution/mod.rs +++ b/compiler/noirc_frontend/src/hir/resolution/mod.rs @@ -9,3 +9,50 @@ pub mod errors; pub mod import; pub mod path_resolver; pub mod resolver; + +mod functions; +mod globals; +mod impls; +mod structs; +mod traits; +mod type_aliases; + +pub(crate) use functions::resolve_free_functions; +pub(crate) use globals::resolve_globals; +pub(crate) use impls::{collect_impls, resolve_impls}; +pub(crate) use structs::resolve_structs; +pub(crate) use traits::{ + collect_trait_impls, resolve_trait_by_path, resolve_trait_impls, resolve_traits, +}; +pub(crate) use type_aliases::resolve_type_aliases; + +use crate::{ + graph::CrateId, + hir::{ + def_collector::dc_crate::CompilationError, + def_map::{CrateDefMap, ModuleData, ModuleId}, + }, + Shared, StructType, Type, +}; +use fm::FileId; +use iter_extended::vecmap; +use resolver::Resolver; +use std::collections::BTreeMap; + +fn take_errors(file_id: FileId, resolver: Resolver<'_>) -> Vec<(CompilationError, FileId)> { + vecmap(resolver.take_errors(), |e| (e.into(), file_id)) +} + +fn get_module_mut( + def_maps: &mut BTreeMap, + module: ModuleId, +) -> &mut ModuleData { + &mut def_maps.get_mut(&module.krate).unwrap().modules[module.local_id.0] +} + +fn get_struct_type(typ: &Type) -> Option<&Shared> { + match typ { + Type::Struct(definition, _) => Some(definition), + _ => None, + } +} diff --git a/compiler/noirc_frontend/src/hir/resolution/path_resolver.rs b/compiler/noirc_frontend/src/hir/resolution/path_resolver.rs index 4c16edd56f1..4c5fa3bceef 100644 --- a/compiler/noirc_frontend/src/hir/resolution/path_resolver.rs +++ b/compiler/noirc_frontend/src/hir/resolution/path_resolver.rs @@ -57,14 +57,15 @@ pub fn resolve_path( path: Path, ) -> Result { // lets package up the path into an ImportDirective and resolve it using that - let import = ImportDirective { module_id: module_id.local_id, path, alias: None }; + let import = + ImportDirective { module_id: module_id.local_id, path, alias: None, is_prelude: false }; let allow_referencing_contracts = 
allow_referencing_contracts(def_maps, module_id.krate, module_id.local_id); let def_map = &def_maps[&module_id.krate]; let ns = resolve_path_to_ns(&import, def_map, def_maps, allow_referencing_contracts)?; - let function = ns.values.map(|(id, _)| id); - let id = function.or_else(|| ns.types.map(|(id, _)| id)); + let function = ns.values.map(|(id, _, _)| id); + let id = function.or_else(|| ns.types.map(|(id, _, _)| id)); Ok(id.expect("Found empty namespace")) } diff --git a/compiler/noirc_frontend/src/hir/resolution/resolver.rs b/compiler/noirc_frontend/src/hir/resolution/resolver.rs index 4b829932b76..b9770f34e1e 100644 --- a/compiler/noirc_frontend/src/hir/resolution/resolver.rs +++ b/compiler/noirc_frontend/src/hir/resolution/resolver.rs @@ -19,7 +19,7 @@ use crate::hir_def::expr::{ }; use crate::hir_def::traits::{Trait, TraitConstraint}; -use crate::token::FunctionAttribute; +use crate::token::{Attributes, FunctionAttribute}; use regex::Regex; use std::collections::{BTreeMap, HashSet}; use std::rc::Rc; @@ -37,11 +37,11 @@ use crate::{ StatementKind, }; use crate::{ - ArrayLiteral, ContractFunctionType, Distinctness, ForRange, FunctionVisibility, Generics, - LValue, NoirStruct, NoirTypeAlias, Param, Path, PathKind, Pattern, Shared, StructType, Type, - TypeAliasType, TypeBinding, TypeVariable, UnaryOp, UnresolvedGenerics, - UnresolvedTraitConstraint, UnresolvedType, UnresolvedTypeData, UnresolvedTypeExpression, - Visibility, ERROR_IDENT, + ArrayLiteral, ContractFunctionType, Distinctness, ForRange, FunctionDefinition, + FunctionReturnType, FunctionVisibility, Generics, LValue, NoirStruct, NoirTypeAlias, Param, + Path, PathKind, Pattern, Shared, StructType, Type, TypeAliasType, TypeBinding, TypeVariable, + UnaryOp, UnresolvedGenerics, UnresolvedTraitConstraint, UnresolvedType, UnresolvedTypeData, + UnresolvedTypeExpression, Visibility, ERROR_IDENT, }; use fm::FileId; use iter_extended::vecmap; @@ -95,7 +95,7 @@ pub struct Resolver<'a> { /// True if the current module is a contract. /// This is usually determined by self.path_resolver.module_id(), but it can - /// be overriden for impls. Impls are an odd case since the methods within resolve + /// be overridden for impls. Impls are an odd case since the methods within resolve /// as if they're in the parent module, but should be placed in a child module. /// Since they should be within a child module, in_contract is manually set to false /// for these so we can still resolve them in the parent module without them being in a contract. 
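A standalone Rust sketch (separate from the patch itself) of the rule the new `is_prelude` flag threaded through `ImportDirective`, `ResolvedImport`, and the `PerNs` tuples appears to encode: names injected by an implicit prelude import should lose to explicit definitions or imports instead of raising a duplicate-definition error. `Scope`, `DefId`, and `add_item` below are invented for this sketch and are not the compiler's actual `ItemScope` API, and the exact shadowing behaviour is an assumption based on the new call sites rather than code shown in this diff.

use std::collections::HashMap;

#[derive(Clone, Copy, PartialEq, Eq, Debug)]
struct DefId(u32);

/// One namespace of a module scope: name -> (definition, came_from_prelude).
#[derive(Default)]
struct Scope {
    items: HashMap<String, (DefId, bool)>,
}

impl Scope {
    /// Sketch of the intent behind `add_item_to_namespace(.., is_prelude)`:
    /// an explicit item may silently shadow a prelude-injected one, while two
    /// explicit items with the same name remain a duplicate-definition error.
    fn add_item(&mut self, name: &str, id: DefId, is_prelude: bool) -> Result<(), String> {
        let existing = self.items.get(name).copied();
        match existing {
            None => {
                self.items.insert(name.to_string(), (id, is_prelude));
                Ok(())
            }
            // The existing entry came from the prelude: the explicit item wins.
            Some((_, true)) if !is_prelude => {
                self.items.insert(name.to_string(), (id, false));
                Ok(())
            }
            // A prelude import never conflicts with an already-present item.
            Some(_) if is_prelude => Ok(()),
            Some(_) => Err(format!("duplicate definition of `{name}`")),
        }
    }
}

fn main() {
    let mut scope = Scope::default();
    scope.add_item("Option", DefId(0), true).unwrap(); // injected by the prelude
    scope.add_item("Option", DefId(1), false).unwrap(); // an explicit `use` shadows it
    assert_eq!(scope.items["Option"].0, DefId(1));
}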
@@ -200,6 +200,51 @@ impl<'a> Resolver<'a> { (hir_func, func_meta, self.errors) } + pub fn resolve_trait_function( + &mut self, + name: &Ident, + parameters: &[(Ident, UnresolvedType)], + return_type: &FunctionReturnType, + where_clause: &[UnresolvedTraitConstraint], + func_id: FuncId, + ) -> (HirFunction, FuncMeta) { + self.scopes.start_function(); + + // Check whether the function has globals in the local module and add them to the scope + self.resolve_local_globals(); + + self.trait_bounds = where_clause.to_vec(); + + let kind = FunctionKind::Normal; + let def = FunctionDefinition { + name: name.clone(), + attributes: Attributes::empty(), + is_open: false, + is_internal: false, + is_unconstrained: false, + visibility: FunctionVisibility::Public, // Trait functions are always public + generics: Vec::new(), // self.generics should already be set + parameters: vecmap(parameters, |(name, typ)| Param { + visibility: Visibility::Private, + pattern: Pattern::Identifier(name.clone()), + typ: typ.clone(), + span: name.span(), + }), + body: BlockExpression(Vec::new()), + span: name.span(), + where_clause: where_clause.to_vec(), + return_type: return_type.clone(), + return_visibility: Visibility::Private, + return_distinctness: Distinctness::DuplicationAllowed, + }; + + let (hir_func, func_meta) = self.intern_function(NoirFunction { kind, def }, func_id); + let _ = self.scopes.end_function(); + // Don't check the scope tree for unused variables, they can't be used in a declaration anyway. + self.trait_bounds.clear(); + (hir_func, func_meta) + } + fn check_for_unused_variables_in_scope_tree(&mut self, scope_decls: ScopeTree) { let mut unused_vars = Vec::new(); for scope in scope_decls.0.into_iter() { @@ -256,8 +301,9 @@ impl<'a> Resolver<'a> { return self.add_global_variable_decl(name, definition); } - let id = self.interner.push_definition(name.0.contents.clone(), mutable, definition); let location = Location::new(name.span(), self.file); + let id = + self.interner.push_definition(name.0.contents.clone(), mutable, definition, location); let ident = HirIdent { location, id }; let resolver_meta = ResolverMeta { num_times_used: 0, ident, warn_if_unused }; @@ -300,8 +346,9 @@ impl<'a> Resolver<'a> { ident = hir_let_stmt.ident(); resolver_meta = ResolverMeta { num_times_used: 0, ident, warn_if_unused: true }; } else { - let id = self.interner.push_definition(name.0.contents.clone(), false, definition); let location = Location::new(name.span(), self.file); + let id = + self.interner.push_definition(name.0.contents.clone(), false, definition, location); ident = HirIdent { location, id }; resolver_meta = ResolverMeta { num_times_used: 0, ident, warn_if_unused: true }; } @@ -441,6 +488,7 @@ impl<'a> Resolver<'a> { MutableReference(element) => { Type::MutableReference(Box::new(self.resolve_type_inner(*element, new_variables))) } + Parenthesized(typ) => self.resolve_type_inner(*typ, new_variables), } } @@ -520,7 +568,7 @@ impl<'a> Resolver<'a> { _new_variables: &mut Generics, ) -> Type { if let Some(t) = self.lookup_trait_or_error(path) { - Type::TraitAsType(t) + Type::TraitAsType(t.id, Rc::new(t.name.to_string())) } else { Type::Error } @@ -571,7 +619,7 @@ impl<'a> Resolver<'a> { match length { None => { let id = self.interner.next_type_variable_id(); - let typevar = Shared::new(TypeBinding::Unbound(id)); + let typevar = TypeVariable::unbound(id); new_variables.push((id, typevar.clone())); // 'Named'Generic is a bit of a misnomer here, we want a type variable that @@ -681,7 +729,7 @@ impl<'a> Resolver<'a> { 
vecmap(generics, |generic| { // Map the generic to a fresh type variable let id = self.interner.next_type_variable_id(); - let typevar = Shared::new(TypeBinding::Unbound(id)); + let typevar = TypeVariable::unbound(id); let span = generic.0.span(); // Check for name collisions of this generic @@ -790,10 +838,10 @@ impl<'a> Resolver<'a> { }); } - // 'pub_allowed' also implies 'pub' is required on return types - if self.pub_allowed(func) + // 'pub' is required on return types for entry point functions + if self.is_entry_point_function(func) && return_type.as_ref() != &Type::Unit - && func.def.return_visibility != Visibility::Public + && func.def.return_visibility == Visibility::Private { self.push_err(ResolverError::NecessaryPub { ident: func.name_ident().clone() }); } @@ -846,12 +894,9 @@ impl<'a> Resolver<'a> { } /// True if the 'pub' keyword is allowed on parameters in this function + /// 'pub' on function parameters is only allowed for entry point functions fn pub_allowed(&self, func: &NoirFunction) -> bool { - if self.in_contract { - !func.def.is_unconstrained - } else { - func.name() == MAIN_FUNCTION - } + self.is_entry_point_function(func) } fn is_entry_point_function(&self, func: &NoirFunction) -> bool { @@ -927,10 +972,7 @@ impl<'a> Resolver<'a> { found.into_iter().collect() } - fn find_numeric_generics_in_type( - typ: &Type, - found: &mut BTreeMap>, - ) { + fn find_numeric_generics_in_type(typ: &Type, found: &mut BTreeMap) { match typ { Type::FieldElement | Type::Integer(_, _) @@ -941,7 +983,7 @@ impl<'a> Resolver<'a> { | Type::Constant(_) | Type::NamedGeneric(_, _) | Type::NotConstant - | Type::TraitAsType(_) + | Type::TraitAsType(..) | Type::Forall(_, _) => (), Type::Array(length, element_type) => { @@ -1201,8 +1243,9 @@ impl<'a> Resolver<'a> { HirLiteral::Array(HirArrayLiteral::Repeated { repeated_element, length }) } - Literal::Integer(integer) => HirLiteral::Integer(integer), + Literal::Integer(integer, sign) => HirLiteral::Integer(integer, sign), Literal::Str(str) => HirLiteral::Str(str), + Literal::RawStr(str, _) => HirLiteral::Str(str), Literal::FmtStr(str) => self.resolve_fmt_str_literal(str, expr.span), Literal::Unit => HirLiteral::Unit, }), @@ -1210,8 +1253,10 @@ impl<'a> Resolver<'a> { if let Some((hir_expr, object_type)) = self.resolve_trait_generic_path(&path) { let expr_id = self.interner.push_expr(hir_expr); self.interner.push_expr_location(expr_id, expr.span, self.file); - self.interner - .select_impl_for_ident(expr_id, TraitImplKind::Assumed { object_type }); + self.interner.select_impl_for_expression( + expr_id, + TraitImplKind::Assumed { object_type }, + ); return expr_id; } else { // If the Path is being used as an Expression, then it is referring to a global from a separate module @@ -1270,10 +1315,12 @@ impl<'a> Resolver<'a> { ExpressionKind::Infix(infix) => { let lhs = self.resolve_expression(infix.lhs); let rhs = self.resolve_expression(infix.rhs); + let trait_id = self.interner.get_operator_trait_method(infix.operator.contents); HirExpression::Infix(HirInfixExpression { lhs, operator: HirBinaryOp::new(infix.operator, self.file), + trait_method_id: trait_id, rhs, }) } @@ -1500,8 +1547,8 @@ impl<'a> Resolver<'a> { self.interner.get_struct(type_id) } - pub fn get_trait(&self, trait_id: TraitId) -> Trait { - self.interner.get_trait(trait_id) + pub fn get_trait_mut(&mut self, trait_id: TraitId) -> &mut Trait { + self.interner.get_trait_mut(trait_id) } fn lookup(&mut self, path: Path) -> Result { @@ -1544,9 +1591,9 @@ impl<'a> Resolver<'a> { } /// Lookup a given 
trait by name/path. - fn lookup_trait_or_error(&mut self, path: Path) -> Option { + fn lookup_trait_or_error(&mut self, path: Path) -> Option<&mut Trait> { match self.lookup(path) { - Ok(trait_id) => Some(self.get_trait(trait_id)), + Ok(trait_id) => Some(self.get_trait_mut(trait_id)), Err(error) => { self.push_err(error); None @@ -1586,27 +1633,39 @@ impl<'a> Resolver<'a> { &mut self, path: &Path, ) -> Option<(HirExpression, Type)> { - if let Some(trait_id) = self.trait_id { - if path.kind == PathKind::Plain && path.segments.len() == 2 { - let name = &path.segments[0].0.contents; - let method = &path.segments[1]; + let trait_id = self.trait_id?; - if name == SELF_TYPE_NAME { - let the_trait = self.interner.get_trait(trait_id); + if path.kind == PathKind::Plain && path.segments.len() == 2 { + let name = &path.segments[0].0.contents; + let method = &path.segments[1]; - if let Some(method) = the_trait.find_method(method.clone()) { - let self_type = Type::TypeVariable( - the_trait.self_type_typevar, - crate::TypeVariableKind::Normal, - ); - return Some((HirExpression::TraitMethodReference(method), self_type)); - } - } + if name == SELF_TYPE_NAME { + let the_trait = self.interner.get_trait(trait_id); + let method = the_trait.find_method(method.0.contents.as_str())?; + let self_type = self.self_type.clone()?; + return Some((HirExpression::TraitMethodReference(method), self_type)); } } None } + // this resolves TraitName::some_static_method + fn resolve_trait_static_method(&mut self, path: &Path) -> Option<(HirExpression, Type)> { + if path.kind == PathKind::Plain && path.segments.len() == 2 { + let method = &path.segments[1]; + + let mut trait_path = path.clone(); + trait_path.pop(); + let trait_id = self.lookup(trait_path).ok()?; + let the_trait = self.interner.get_trait(trait_id); + + let method = the_trait.find_method(method.0.contents.as_str())?; + let self_type = Type::type_variable(the_trait.self_type_typevar_id); + return Some((HirExpression::TraitMethodReference(method), self_type)); + } + None + } + // this resolves a static trait method T::trait_method by iterating over the where clause fn resolve_trait_method_by_named_generic( &mut self, @@ -1630,7 +1689,7 @@ impl<'a> Resolver<'a> { { let the_trait = self.interner.get_trait(trait_id); if let Some(method) = - the_trait.find_method(path.segments.last().unwrap().clone()) + the_trait.find_method(path.segments.last().unwrap().0.contents.as_str()) { let self_type = self.resolve_type(typ.clone()); return Some((HirExpression::TraitMethodReference(method), self_type)); @@ -1643,6 +1702,7 @@ impl<'a> Resolver<'a> { fn resolve_trait_generic_path(&mut self, path: &Path) -> Option<(HirExpression, Type)> { self.resolve_trait_static_method_by_self(path) + .or_else(|| self.resolve_trait_static_method(path)) .or_else(|| self.resolve_trait_method_by_named_generic(path)) } @@ -1664,10 +1724,7 @@ impl<'a> Resolver<'a> { fn eval_global_as_array_length(&mut self, global: StmtId) -> u64 { let stmt = match self.interner.statement(&global) { HirStatement::Let(let_expr) => let_expr, - other => { - dbg!(other); - return 0; - } + _ => return 0, }; let length = stmt.expression; @@ -1689,7 +1746,7 @@ impl<'a> Resolver<'a> { span: Span, ) -> Result> { match self.interner.expression(&rhs) { - HirExpression::Literal(HirLiteral::Integer(int)) => { + HirExpression::Literal(HirLiteral::Integer(int, false)) => { int.try_into_u128().ok_or(Some(ResolverError::IntegerTooLarge { span })) } _other => Err(Some(ResolverError::InvalidArrayLengthExpr { span })), @@ -1786,6 
+1843,7 @@ impl<'a> Resolver<'a> { self.verify_type_valid_for_program_input(element); } } + UnresolvedTypeData::Parenthesized(typ) => self.verify_type_valid_for_program_input(typ), } } diff --git a/compiler/noirc_frontend/src/hir/resolution/structs.rs b/compiler/noirc_frontend/src/hir/resolution/structs.rs new file mode 100644 index 00000000000..72a7b736436 --- /dev/null +++ b/compiler/noirc_frontend/src/hir/resolution/structs.rs @@ -0,0 +1,53 @@ +use std::collections::BTreeMap; + +use fm::FileId; +use iter_extended::vecmap; + +use crate::{ + graph::CrateId, + hir::{ + def_collector::dc_crate::{CompilationError, UnresolvedStruct}, + def_map::ModuleId, + Context, + }, + node_interner::StructId, + Generics, Ident, Type, +}; + +use super::{errors::ResolverError, path_resolver::StandardPathResolver, resolver::Resolver}; + +/// Create the mappings from TypeId -> StructType +/// so that expressions can access the fields of structs +pub(crate) fn resolve_structs( + context: &mut Context, + structs: BTreeMap, + crate_id: CrateId, +) -> Vec<(CompilationError, FileId)> { + let mut errors: Vec<(CompilationError, FileId)> = vec![]; + // Resolve each field in each struct. + // Each struct should already be present in the NodeInterner after def collection. + for (type_id, typ) in structs { + let file_id = typ.file_id; + let (generics, fields, resolver_errors) = resolve_struct_fields(context, crate_id, typ); + errors.extend(vecmap(resolver_errors, |err| (err.into(), file_id))); + context.def_interner.update_struct(type_id, |struct_def| { + struct_def.set_fields(fields); + struct_def.generics = generics; + }); + } + errors +} + +fn resolve_struct_fields( + context: &mut Context, + krate: CrateId, + unresolved: UnresolvedStruct, +) -> (Generics, Vec<(Ident, Type)>, Vec) { + let path_resolver = + StandardPathResolver::new(ModuleId { local_id: unresolved.module_id, krate }); + let file_id = unresolved.file_id; + let (generics, fields, errors) = + Resolver::new(&mut context.def_interner, &path_resolver, &context.def_maps, file_id) + .resolve_struct_fields(unresolved.struct_def); + (generics, fields, errors) +} diff --git a/compiler/noirc_frontend/src/hir/resolution/traits.rs b/compiler/noirc_frontend/src/hir/resolution/traits.rs new file mode 100644 index 00000000000..545a46fd8e4 --- /dev/null +++ b/compiler/noirc_frontend/src/hir/resolution/traits.rs @@ -0,0 +1,467 @@ +use std::collections::{BTreeMap, HashSet}; + +use fm::FileId; +use iter_extended::vecmap; +use noirc_errors::Location; + +use crate::{ + graph::CrateId, + hir::{ + def_collector::{ + dc_crate::{ + check_methods_signatures, CompilationError, UnresolvedTrait, UnresolvedTraitImpl, + }, + errors::{DefCollectorErrorKind, DuplicateType}, + }, + def_map::{CrateDefMap, ModuleDefId, ModuleId}, + Context, + }, + hir_def::traits::{TraitConstant, TraitFunction, TraitImpl, TraitType}, + node_interner::{FuncId, NodeInterner, TraitId}, + Path, Shared, TraitItem, Type, TypeBinding, TypeVariableKind, +}; + +use super::{ + functions, get_module_mut, get_struct_type, + path_resolver::{PathResolver, StandardPathResolver}, + resolver::Resolver, + take_errors, +}; + +/// Create the mappings from TypeId -> TraitType +/// so that expressions can access the elements of traits +pub(crate) fn resolve_traits( + context: &mut Context, + traits: BTreeMap, + crate_id: CrateId, +) -> Vec<(CompilationError, FileId)> { + for (trait_id, unresolved_trait) in &traits { + context.def_interner.push_empty_trait(*trait_id, unresolved_trait); + } + let mut res: Vec<(CompilationError, 
FileId)> = vec![]; + for (trait_id, unresolved_trait) in traits { + // Resolve order + // 1. Trait Types ( Trait constants can have a trait type, therefore types before constants) + let _ = resolve_trait_types(context, crate_id, &unresolved_trait); + // 2. Trait Constants ( Trait's methods can use trait types & constants, therefore they should be after) + let _ = resolve_trait_constants(context, crate_id, &unresolved_trait); + // 3. Trait Methods + let (methods, errors) = + resolve_trait_methods(context, trait_id, crate_id, &unresolved_trait); + res.extend(errors); + context.def_interner.update_trait(trait_id, |trait_def| { + trait_def.set_methods(methods); + }); + + // This check needs to be after the trait's methods are set since + // the interner may set `interner.ordering_type` based on the result type + // of the Cmp trait, if this is it. + if crate_id.is_stdlib() { + context.def_interner.try_add_operator_trait(trait_id); + } + } + res +} + +fn resolve_trait_types( + _context: &mut Context, + _crate_id: CrateId, + _unresolved_trait: &UnresolvedTrait, +) -> (Vec, Vec<(CompilationError, FileId)>) { + // TODO + (vec![], vec![]) +} +fn resolve_trait_constants( + _context: &mut Context, + _crate_id: CrateId, + _unresolved_trait: &UnresolvedTrait, +) -> (Vec, Vec<(CompilationError, FileId)>) { + // TODO + (vec![], vec![]) +} + +fn resolve_trait_methods( + context: &mut Context, + trait_id: TraitId, + crate_id: CrateId, + unresolved_trait: &UnresolvedTrait, +) -> (Vec, Vec<(CompilationError, FileId)>) { + let interner = &mut context.def_interner; + let def_maps = &mut context.def_maps; + + let path_resolver = StandardPathResolver::new(ModuleId { + local_id: unresolved_trait.module_id, + krate: crate_id, + }); + let file = def_maps[&crate_id].file_id(unresolved_trait.module_id); + + let mut functions = vec![]; + let mut resolver_errors = vec![]; + + for item in &unresolved_trait.trait_def.items { + if let TraitItem::Function { + name, + generics, + parameters, + return_type, + where_clause, + body: _, + } = item + { + let the_trait = interner.get_trait(trait_id); + let self_type = + Type::TypeVariable(the_trait.self_type_typevar.clone(), TypeVariableKind::Normal); + + let mut resolver = Resolver::new(interner, &path_resolver, def_maps, file); + resolver.add_generics(generics); + resolver.set_self_type(Some(self_type)); + + let func_id = unresolved_trait.method_ids[&name.0.contents]; + let (_, func_meta) = resolver.resolve_trait_function( + name, + parameters, + return_type, + where_clause, + func_id, + ); + resolver.interner.push_fn_meta(func_meta, func_id); + + let arguments = vecmap(parameters, |param| resolver.resolve_type(param.1.clone())); + let return_type = resolver.resolve_type(return_type.get_type().into_owned()); + + let mut generics = vecmap(resolver.get_generics(), |(_, type_var, _)| match &*type_var + .borrow() + { + TypeBinding::Unbound(id) => (*id, type_var.clone()), + TypeBinding::Bound(binding) => unreachable!("Trait generic was bound to {binding}"), + }); + + // Ensure the trait is generic over the Self type as well + let the_trait = resolver.interner.get_trait(trait_id); + generics.push((the_trait.self_type_typevar_id, the_trait.self_type_typevar.clone())); + + let default_impl_list: Vec<_> = unresolved_trait + .fns_with_default_impl + .functions + .iter() + .filter(|(_, _, q)| q.name() == name.0.contents) + .collect(); + + let default_impl = if default_impl_list.len() == 1 { + Some(Box::new(default_impl_list[0].2.clone())) + } else { + None + }; + + let no_environment = 
Box::new(Type::Unit); + let function_type = Type::Function(arguments, Box::new(return_type), no_environment); + + functions.push(TraitFunction { + name: name.clone(), + typ: Type::Forall(generics, Box::new(function_type)), + location: Location::new(name.span(), unresolved_trait.file_id), + default_impl, + default_impl_module_id: unresolved_trait.module_id, + }); + + let errors = resolver.take_errors().into_iter(); + resolver_errors.extend(errors.map(|resolution_error| (resolution_error.into(), file))); + } + } + (functions, resolver_errors) +} + +fn collect_trait_impl_methods( + interner: &mut NodeInterner, + def_maps: &BTreeMap, + crate_id: CrateId, + trait_id: TraitId, + trait_impl: &mut UnresolvedTraitImpl, +) -> Vec<(CompilationError, FileId)> { + // In this Vec methods[i] corresponds to trait.methods[i]. If the impl has no implementation + // for a particular method, the default implementation will be added at that slot. + let mut ordered_methods = Vec::new(); + + // check whether the trait implementation is in the same crate as either the trait or the type + let mut errors = + check_trait_impl_crate_coherence(interner, trait_id, trait_impl, crate_id, def_maps); + // set of function ids that have a corresponding method in the trait + let mut func_ids_in_trait = HashSet::new(); + + // Temporarily take ownership of the trait's methods so we can iterate over them + // while also mutating the interner + let the_trait = interner.get_trait_mut(trait_id); + let methods = std::mem::take(&mut the_trait.methods); + + for method in &methods { + let overrides: Vec<_> = trait_impl + .methods + .functions + .iter() + .filter(|(_, _, f)| f.name() == method.name.0.contents) + .collect(); + + if overrides.is_empty() { + if let Some(default_impl) = &method.default_impl { + let func_id = interner.push_empty_fn(); + let module = ModuleId { local_id: trait_impl.module_id, krate: crate_id }; + let location = Location::new(default_impl.def.span, trait_impl.file_id); + interner.push_function(func_id, &default_impl.def, module, location); + func_ids_in_trait.insert(func_id); + ordered_methods.push(( + method.default_impl_module_id, + func_id, + *default_impl.clone(), + )); + } else { + let error = DefCollectorErrorKind::TraitMissingMethod { + trait_name: interner.get_trait(trait_id).name.clone(), + method_name: method.name.clone(), + trait_impl_span: trait_impl.object_type.span.expect("type must have a span"), + }; + errors.push((error.into(), trait_impl.file_id)); + } + } else { + for (_, func_id, _) in &overrides { + func_ids_in_trait.insert(*func_id); + } + + if overrides.len() > 1 { + let error = DefCollectorErrorKind::Duplicate { + typ: DuplicateType::TraitAssociatedFunction, + first_def: overrides[0].2.name_ident().clone(), + second_def: overrides[1].2.name_ident().clone(), + }; + errors.push((error.into(), trait_impl.file_id)); + } + + ordered_methods.push(overrides[0].clone()); + } + } + + // Restore the methods that were taken before the for loop + let the_trait = interner.get_trait_mut(trait_id); + the_trait.set_methods(methods); + + // Emit MethodNotInTrait error for methods in the impl block that + // don't have a corresponding method signature defined in the trait + for (_, func_id, func) in &trait_impl.methods.functions { + if !func_ids_in_trait.contains(func_id) { + let error = DefCollectorErrorKind::MethodNotInTrait { + trait_name: the_trait.name.clone(), + impl_method: func.name_ident().clone(), + }; + errors.push((error.into(), trait_impl.file_id)); + } + } + + trait_impl.methods.functions = 
ordered_methods; + trait_impl.methods.trait_id = Some(trait_id); + errors +} + +fn collect_trait_impl( + context: &mut Context, + crate_id: CrateId, + trait_impl: &mut UnresolvedTraitImpl, +) -> Vec<(CompilationError, FileId)> { + let interner = &mut context.def_interner; + let def_maps = &mut context.def_maps; + let mut errors: Vec<(CompilationError, FileId)> = vec![]; + let unresolved_type = trait_impl.object_type.clone(); + let module = ModuleId { local_id: trait_impl.module_id, krate: crate_id }; + trait_impl.trait_id = + match resolve_trait_by_path(def_maps, module, trait_impl.trait_path.clone()) { + Ok(trait_id) => Some(trait_id), + Err(error) => { + errors.push((error.into(), trait_impl.file_id)); + None + } + }; + + if let Some(trait_id) = trait_impl.trait_id { + errors + .extend(collect_trait_impl_methods(interner, def_maps, crate_id, trait_id, trait_impl)); + + let path_resolver = StandardPathResolver::new(module); + let file = def_maps[&crate_id].file_id(trait_impl.module_id); + let mut resolver = Resolver::new(interner, &path_resolver, def_maps, file); + resolver.add_generics(&trait_impl.generics); + let typ = resolver.resolve_type(unresolved_type); + errors.extend(take_errors(trait_impl.file_id, resolver)); + + if let Some(struct_type) = get_struct_type(&typ) { + let struct_type = struct_type.borrow(); + let module = get_module_mut(def_maps, struct_type.id.module_id()); + + for (_, method_id, method) in &trait_impl.methods.functions { + // If this method was already declared, remove it from the module so it cannot + // be accessed with the `TypeName::method` syntax. We'll check later whether the + // object types in each method overlap or not. If they do, we issue an error. + // If not, that is specialization which is allowed. + if module.declare_function(method.name_ident().clone(), *method_id).is_err() { + module.remove_function(method.name_ident()); + } + } + } + } + errors +} + +pub(crate) fn collect_trait_impls( + context: &mut Context, + crate_id: CrateId, + collected_impls: &mut [UnresolvedTraitImpl], +) -> Vec<(CompilationError, FileId)> { + collected_impls + .iter_mut() + .flat_map(|trait_impl| collect_trait_impl(context, crate_id, trait_impl)) + .collect() +} + +fn check_trait_impl_crate_coherence( + interner: &mut NodeInterner, + trait_id: TraitId, + trait_impl: &UnresolvedTraitImpl, + current_crate: CrateId, + def_maps: &BTreeMap<CrateId, CrateDefMap>, +) -> Vec<(CompilationError, FileId)> { + let mut errors: Vec<(CompilationError, FileId)> = vec![]; + + let module = ModuleId { krate: current_crate, local_id: trait_impl.module_id }; + let file = def_maps[&current_crate].file_id(trait_impl.module_id); + let path_resolver = StandardPathResolver::new(module); + let mut resolver = Resolver::new(interner, &path_resolver, def_maps, file); + + let object_crate = match resolver.resolve_type(trait_impl.object_type.clone()) { + Type::Struct(struct_type, _) => struct_type.borrow().id.krate(), + _ => CrateId::Dummy, + }; + + let the_trait = interner.get_trait(trait_id); + if current_crate != the_trait.crate_id && current_crate != object_crate { + let error = DefCollectorErrorKind::TraitImplOrphaned { + span: trait_impl.object_type.span.expect("object type must have a span"), + }; + errors.push((error.into(), trait_impl.file_id)); + } + + errors +} + +pub(crate) fn resolve_trait_by_path( + def_maps: &BTreeMap<CrateId, CrateDefMap>, + module: ModuleId, + path: Path, +) -> Result<TraitId, DefCollectorErrorKind> { + let path_resolver = StandardPathResolver::new(module); + + match path_resolver.resolve(def_maps, path.clone()) {
Ok(ModuleDefId::TraitId(trait_id)) => Ok(trait_id), + Ok(_) => Err(DefCollectorErrorKind::NotATrait { not_a_trait_name: path }), + Err(_) => Err(DefCollectorErrorKind::TraitNotFound { trait_path: path }), + } +} +pub(crate) fn resolve_trait_impls( + context: &mut Context, + traits: Vec, + crate_id: CrateId, + errors: &mut Vec<(CompilationError, FileId)>, +) -> Vec<(FileId, FuncId)> { + let interner = &mut context.def_interner; + let mut methods = Vec::<(FileId, FuncId)>::new(); + + for trait_impl in traits { + let unresolved_type = trait_impl.object_type; + let local_mod_id = trait_impl.module_id; + let module_id = ModuleId { krate: crate_id, local_id: local_mod_id }; + let path_resolver = StandardPathResolver::new(module_id); + + let self_type_span = unresolved_type.span; + + let mut resolver = + Resolver::new(interner, &path_resolver, &context.def_maps, trait_impl.file_id); + resolver.add_generics(&trait_impl.generics); + let self_type = resolver.resolve_type(unresolved_type.clone()); + let generics = resolver.get_generics().to_vec(); + + let impl_id = interner.next_trait_impl_id(); + + let mut impl_methods = functions::resolve_function_set( + interner, + crate_id, + &context.def_maps, + trait_impl.methods.clone(), + Some(self_type.clone()), + Some(impl_id), + generics.clone(), + errors, + ); + + let maybe_trait_id = trait_impl.trait_id; + if let Some(trait_id) = maybe_trait_id { + for (_, func) in &impl_methods { + interner.set_function_trait(*func, self_type.clone(), trait_id); + } + } + + if matches!(self_type, Type::MutableReference(_)) { + let span = self_type_span.unwrap_or_else(|| trait_impl.trait_path.span()); + let error = DefCollectorErrorKind::MutableReferenceInTraitImpl { span }; + errors.push((error.into(), trait_impl.file_id)); + } + + let mut new_resolver = + Resolver::new(interner, &path_resolver, &context.def_maps, trait_impl.file_id); + + new_resolver.set_generics(generics); + new_resolver.set_self_type(Some(self_type.clone())); + + if let Some(trait_id) = maybe_trait_id { + check_methods_signatures( + &mut new_resolver, + &impl_methods, + trait_id, + trait_impl.generics.len(), + errors, + ); + + let where_clause = trait_impl + .where_clause + .into_iter() + .flat_map(|item| new_resolver.resolve_trait_constraint(item)) + .collect(); + + let resolved_trait_impl = Shared::new(TraitImpl { + ident: trait_impl.trait_path.last_segment().clone(), + typ: self_type.clone(), + trait_id, + file: trait_impl.file_id, + where_clause, + methods: vecmap(&impl_methods, |(_, func_id)| *func_id), + }); + + if let Err((prev_span, prev_file)) = interner.add_trait_implementation( + self_type.clone(), + trait_id, + impl_id, + resolved_trait_impl, + ) { + let error = DefCollectorErrorKind::OverlappingImpl { + typ: self_type.clone(), + span: self_type_span.unwrap_or_else(|| trait_impl.trait_path.span()), + }; + errors.push((error.into(), trait_impl.file_id)); + + // The 'previous impl defined here' note must be a separate error currently + // since it may be in a different file and all errors have the same file id. 
+ let error = DefCollectorErrorKind::OverlappingImplNote { span: prev_span }; + errors.push((error.into(), prev_file)); + } + + methods.append(&mut impl_methods); + } + } + + methods +} diff --git a/compiler/noirc_frontend/src/hir/resolution/type_aliases.rs b/compiler/noirc_frontend/src/hir/resolution/type_aliases.rs new file mode 100644 index 00000000000..f66f6c8dfa7 --- /dev/null +++ b/compiler/noirc_frontend/src/hir/resolution/type_aliases.rs @@ -0,0 +1,33 @@ +use super::{path_resolver::StandardPathResolver, resolver::Resolver}; +use crate::{ + graph::CrateId, + hir::{ + def_collector::dc_crate::{CompilationError, UnresolvedTypeAlias}, + def_map::ModuleId, + Context, + }, + node_interner::TypeAliasId, +}; +use fm::FileId; +use std::collections::BTreeMap; + +pub(crate) fn resolve_type_aliases( + context: &mut Context, + type_aliases: BTreeMap, + crate_id: CrateId, +) -> Vec<(CompilationError, FileId)> { + let mut errors: Vec<(CompilationError, FileId)> = vec![]; + for (type_id, unresolved_typ) in type_aliases { + let path_resolver = StandardPathResolver::new(ModuleId { + local_id: unresolved_typ.module_id, + krate: crate_id, + }); + let file = unresolved_typ.file_id; + let (typ, generics, resolver_errors) = + Resolver::new(&mut context.def_interner, &path_resolver, &context.def_maps, file) + .resolve_type_aliases(unresolved_typ.type_alias_def); + errors.extend(resolver_errors.iter().cloned().map(|e| (e.into(), file))); + context.def_interner.set_type_alias(type_id, typ, generics); + } + errors +} diff --git a/compiler/noirc_frontend/src/hir/type_check/expr.rs b/compiler/noirc_frontend/src/hir/type_check/expr.rs index 8222f212190..50ed98a794a 100644 --- a/compiler/noirc_frontend/src/hir/type_check/expr.rs +++ b/compiler/noirc_frontend/src/hir/type_check/expr.rs @@ -10,8 +10,8 @@ use crate::{ }, types::Type, }, - node_interner::{DefinitionKind, ExprId, FuncId, TraitId, TraitMethodId}, - BinaryOpKind, Signedness, TypeBinding, TypeVariableKind, UnaryOp, + node_interner::{DefinitionKind, ExprId, FuncId, TraitId, TraitImplKind, TraitMethodId}, + BinaryOpKind, TypeBinding, TypeBindings, TypeVariableKind, UnaryOp, }; use super::{errors::TypeCheckError, TypeChecker}; @@ -114,7 +114,7 @@ impl<'interner> TypeChecker<'interner> { Type::Array(Box::new(length), Box::new(elem_type)) } HirLiteral::Bool(_) => Type::Bool, - HirLiteral::Integer(_) => Type::polymorphic_integer(self.interner), + HirLiteral::Integer(_, _) => Type::polymorphic_integer(self.interner), HirLiteral::Str(string) => { let len = Type::Constant(string.len() as u64); Type::String(Box::new(len)) @@ -136,11 +136,21 @@ impl<'interner> TypeChecker<'interner> { let rhs_span = self.interner.expr_span(&infix_expr.rhs); let span = lhs_span.merge(rhs_span); - self.infix_operand_type_rules(&lhs_type, &infix_expr.operator, &rhs_type, span) - .unwrap_or_else(|error| { + let operator = &infix_expr.operator; + match self.infix_operand_type_rules(&lhs_type, operator, &rhs_type, span) { + Ok((typ, use_impl)) => { + if use_impl { + let id = infix_expr.trait_method_id; + self.verify_trait_constraint(&lhs_type, id.trait_id, *expr_id, span); + self.typecheck_operator_method(*expr_id, id, &lhs_type, span); + } + typ + } + Err(error) => { self.errors.push(error); Type::Error - }) + } + } } HirExpression::Index(index_expr) => self.check_index_expression(expr_id, index_expr), HirExpression::Call(call_expr) => { @@ -180,10 +190,11 @@ impl<'interner> TypeChecker<'interner> { // Automatically add `&mut` if the method expects a mutable reference and // the object 
is not already one. if *func_id != FuncId::dummy_id() { - let func_meta = self.interner.function_meta(func_id); + let function_type = + self.interner.function_meta(func_id).typ.clone(); self.try_add_mutable_reference_to_object( &mut method_call, - &func_meta.typ, + &function_type, &mut args, ); } @@ -289,15 +300,25 @@ impl<'interner> TypeChecker<'interner> { } HirExpression::TraitMethodReference(method) => { let the_trait = self.interner.get_trait(method.trait_id); - let method = &the_trait.methods[method.method_index]; - - let typ = Type::Function( - method.arguments.clone(), - Box::new(method.return_type.clone()), - Box::new(Type::Unit), - ); + let typ2 = &the_trait.methods[method.method_index].typ; + let (typ, mut bindings) = typ2.instantiate(self.interner); + + // We must also remember to apply these substitutions to the object_type + // referenced by the selected trait impl, if one has yet to be selected. + let impl_kind = self.interner.get_selected_impl_for_expression(*expr_id); + if let Some(TraitImplKind::Assumed { object_type }) = impl_kind { + let the_trait = self.interner.get_trait(method.trait_id); + let object_type = object_type.substitute(&bindings); + bindings.insert( + the_trait.self_type_typevar_id, + (the_trait.self_type_typevar.clone(), object_type.clone()), + ); + self.interner.select_impl_for_expression( + *expr_id, + TraitImplKind::Assumed { object_type }, + ); + } - let (typ, bindings) = typ.instantiate(self.interner); self.interner.store_instantiation_bindings(*expr_id, bindings); typ } @@ -315,14 +336,21 @@ impl<'interner> TypeChecker<'interner> { span: Span, ) { match self.interner.lookup_trait_implementation(object_type, trait_id) { - Ok(impl_kind) => self.interner.select_impl_for_ident(function_ident_id, impl_kind), + Ok(impl_kind) => self.interner.select_impl_for_expression(function_ident_id, impl_kind), Err(erroring_constraints) => { - let constraints = vecmap(erroring_constraints, |constraint| { - let r#trait = self.interner.get_trait(constraint.trait_id); - (constraint.typ, r#trait.name.to_string()) - }); + // Don't show any errors where try_get_trait returns None. + // This can happen if a trait is used that was never declared. + let constraints = erroring_constraints + .into_iter() + .map(|constraint| { + let r#trait = self.interner.try_get_trait(constraint.trait_id)?; + Some((constraint.typ, r#trait.name.to_string())) + }) + .collect::>>(); - self.errors.push(TypeCheckError::NoMatchingImplFound { constraints, span }); + if let Some(constraints) = constraints { + self.errors.push(TypeCheckError::NoMatchingImplFound { constraints, span }); + } } } } @@ -534,12 +562,12 @@ impl<'interner> TypeChecker<'interner> { let func_meta = self.interner.function_meta(&func_id); let param_len = func_meta.parameters.len(); - (func_meta.typ, param_len) + (func_meta.typ.clone(), param_len) } HirMethodReference::TraitMethodId(method) => { let the_trait = self.interner.get_trait(method.trait_id); let method = &the_trait.methods[method.method_index]; - (method.get_type(), method.arguments.len()) + (method.typ.clone(), method.arguments().len()) } }; @@ -738,19 +766,22 @@ impl<'interner> TypeChecker<'interner> { None } + // Given a binary comparison operator and another type. This method will produce the output type + // and a boolean indicating whether to use the trait impl corresponding to the operator + // or not. A value of false indicates the caller to use a primitive operation for this + // operator, while a true value indicates a user-provided trait impl is required. 
fn comparator_operand_type_rules( &mut self, lhs_type: &Type, rhs_type: &Type, op: &HirBinaryOp, span: Span, - ) -> Result { - use crate::BinaryOpKind::{Equal, NotEqual}; + ) -> Result<(Type, bool), TypeCheckError> { use Type::*; match (lhs_type, rhs_type) { // Avoid reporting errors multiple times - (Error, _) | (_, Error) => Ok(Bool), + (Error, _) | (_, Error) => Ok((Bool, false)), // Matches on TypeVariable must be first to follow any type // bindings. @@ -771,8 +802,12 @@ impl<'interner> TypeChecker<'interner> { })); } - if other.try_bind_to_polymorphic_int(int).is_ok() || other == &Type::Error { - Ok(Bool) + let mut bindings = TypeBindings::new(); + if other.try_bind_to_polymorphic_int(int, &mut bindings).is_ok() + || other == &Type::Error + { + Type::apply_type_bindings(bindings); + Ok((Bool, false)) } else { Err(TypeCheckError::TypeMismatchWithSource { expected: lhs_type.clone(), @@ -797,36 +832,23 @@ impl<'interner> TypeChecker<'interner> { span, }); } - Ok(Bool) - } - (Integer(..), FieldElement) | (FieldElement, Integer(..)) => { - Err(TypeCheckError::IntegerAndFieldBinaryOperation { span }) - } - (Integer(..), typ) | (typ, Integer(..)) => { - Err(TypeCheckError::IntegerTypeMismatch { typ: typ.clone(), span }) + Ok((Bool, false)) } (FieldElement, FieldElement) => { if op.kind.is_valid_for_field_type() { - Ok(Bool) + Ok((Bool, false)) } else { Err(TypeCheckError::FieldComparison { span }) } } // <= and friends are technically valid for booleans, just not very useful - (Bool, Bool) => Ok(Bool), + (Bool, Bool) => Ok((Bool, false)), // Special-case == and != for arrays (Array(x_size, x_type), Array(y_size, y_type)) - if matches!(op.kind, Equal | NotEqual) => + if matches!(op.kind, BinaryOpKind::Equal | BinaryOpKind::NotEqual) => { - self.unify(x_type, y_type, || TypeCheckError::TypeMismatchWithSource { - expected: lhs_type.clone(), - actual: rhs_type.clone(), - source: Source::ArrayElements, - span: op.location.span, - }); - self.unify(x_size, y_size, || TypeCheckError::TypeMismatchWithSource { expected: lhs_type.clone(), actual: rhs_type.clone(), @@ -834,19 +856,9 @@ impl<'interner> TypeChecker<'interner> { span: op.location.span, }); - Ok(Bool) - } - (lhs @ NamedGeneric(binding_a, _), rhs @ NamedGeneric(binding_b, _)) => { - if binding_a == binding_b { - return Ok(Bool); - } - Err(TypeCheckError::TypeMismatchWithSource { - expected: lhs.clone(), - actual: rhs.clone(), - source: Source::Comparison, - span, - }) + self.comparator_operand_type_rules(x_type, y_type, op, span) } + (String(x_size), String(y_size)) => { self.unify(x_size, y_size, || TypeCheckError::TypeMismatchWithSource { expected: *x_size.clone(), @@ -855,14 +867,17 @@ impl<'interner> TypeChecker<'interner> { source: Source::StringLen, }); - Ok(Bool) + Ok((Bool, false)) + } + (lhs, rhs) => { + self.unify(lhs, rhs, || TypeCheckError::TypeMismatchWithSource { + expected: lhs.clone(), + actual: rhs.clone(), + span: op.location.span, + source: Source::Binary, + }); + Ok((Bool, true)) } - (lhs, rhs) => Err(TypeCheckError::TypeMismatchWithSource { - expected: lhs.clone(), - actual: rhs.clone(), - source: Source::Comparison, - span, - }), } } @@ -887,7 +902,9 @@ impl<'interner> TypeChecker<'interner> { } } } - Type::TraitAsType(_trait) => { + // TODO: We should allow method calls on `impl Trait`s eventually. + // For now it is fine since they are only allowed on return types. + Type::TraitAsType(..) 
=> { self.errors.push(TypeCheckError::UnresolvedMethodCall { method_name: method_name.to_string(), object_type: object_type.clone(), @@ -900,15 +917,17 @@ impl<'interner> TypeChecker<'interner> { &self.current_function.expect("unexpected method outside a function"), ); - for constraint in func_meta.trait_constraints { + for constraint in &func_meta.trait_constraints { if *object_type == constraint.typ { - let the_trait = self.interner.get_trait(constraint.trait_id); - - for (method_index, method) in the_trait.methods.iter().enumerate() { - if method.name.0.contents == method_name { - let trait_method = - TraitMethodId { trait_id: constraint.trait_id, method_index }; - return Some(HirMethodReference::TraitMethodId(trait_method)); + if let Some(the_trait) = self.interner.try_get_trait(constraint.trait_id) { + for (method_index, method) in the_trait.methods.iter().enumerate() { + if method.name.0.contents == method_name { + let trait_method = TraitMethodId { + trait_id: constraint.trait_id, + method_index, + }; + return Some(HirMethodReference::TraitMethodId(trait_method)); + } } } } @@ -1000,7 +1019,7 @@ impl<'interner> TypeChecker<'interner> { let env_type = self.interner.next_type_variable(); let expected = Type::Function(args, Box::new(ret.clone()), Box::new(env_type)); - if let Err(error) = binding.borrow_mut().bind_to(expected, span) { + if let Err(error) = binding.try_bind(expected, span) { self.errors.push(error); } ret @@ -1018,13 +1037,16 @@ impl<'interner> TypeChecker<'interner> { } // Given a binary operator and another type. This method will produce the output type + // and a boolean indicating whether to use the trait impl corresponding to the operator + // or not. A value of false indicates the caller to use a primitive operation for this + // operator, while a true value indicates a user-provided trait impl is required. fn infix_operand_type_rules( &mut self, lhs_type: &Type, op: &HirBinaryOp, rhs_type: &Type, span: Span, - ) -> Result { + ) -> Result<(Type, bool), TypeCheckError> { if op.kind.is_comparator() { return self.comparator_operand_type_rules(lhs_type, rhs_type, op, span); } @@ -1032,7 +1054,7 @@ impl<'interner> TypeChecker<'interner> { use Type::*; match (lhs_type, rhs_type) { // An error type on either side will always return an error - (Error, _) | (_, Error) => Ok(Error), + (Error, _) | (_, Error) => Ok((Error, false)), // Matches on TypeVariable must be first so that we follow any type // bindings. 
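Editor's note (illustrative, not part of this patch): the hunk above also changes how a method call on a generic receiver is resolved: the enclosing function's trait constraints are scanned, and when the receiver's type matches a constraint, the method name is looked up among that trait's methods. The sketch below mimics that lookup with invented stand-ins (`TraitDef`, `Constraint`, `find_trait_method`); it is a simplified model of the idea, not the interner-based code.

#[derive(Clone, Debug, PartialEq)]
struct TraitDef {
    name: &'static str,
    methods: Vec<&'static str>,
}

#[derive(Clone, Debug)]
struct Constraint {
    typ: &'static str, // e.g. the generic "T" the constraint applies to
    trait_def: TraitDef,
}

// Returns (trait name, method index) for the first constraint on `object_type`
// whose trait declares a method called `method_name`.
fn find_trait_method(
    constraints: &[Constraint],
    object_type: &str,
    method_name: &str,
) -> Option<(&'static str, usize)> {
    constraints.iter().find_map(|constraint| {
        if constraint.typ != object_type {
            return None;
        }
        let index = constraint.trait_def.methods.iter().position(|m| *m == method_name)?;
        Some((constraint.trait_def.name, index))
    })
}

fn main() {
    let constraints = vec![Constraint {
        typ: "T",
        trait_def: TraitDef { name: "Eq", methods: vec!["eq"] },
    }];
    assert_eq!(find_trait_method(&constraints, "T", "eq"), Some(("Eq", 0)));
    assert_eq!(find_trait_method(&constraints, "T", "hash"), None);
}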
@@ -1068,8 +1090,12 @@ impl<'interner> TypeChecker<'interner> { })); } - if other.try_bind_to_polymorphic_int(int).is_ok() || other == &Type::Error { - Ok(other.clone()) + let mut bindings = TypeBindings::new(); + if other.try_bind_to_polymorphic_int(int, &mut bindings).is_ok() + || other == &Type::Error + { + Type::apply_type_bindings(bindings); + Ok((other.clone(), false)) } else { Err(TypeCheckError::TypeMismatchWithSource { expected: lhs_type.clone(), @@ -1094,31 +1120,8 @@ impl<'interner> TypeChecker<'interner> { span, }); } - if op.is_bit_shift() - && (*sign_x == Signedness::Signed || *sign_y == Signedness::Signed) - { - Err(TypeCheckError::InvalidInfixOp { kind: "Signed integer", span }) - } else { - Ok(Integer(*sign_x, *bit_width_x)) - } - } - (Integer(..), FieldElement) | (FieldElement, Integer(..)) => { - Err(TypeCheckError::IntegerAndFieldBinaryOperation { span }) - } - (Integer(..), typ) | (typ, Integer(..)) => { - Err(TypeCheckError::IntegerTypeMismatch { typ: typ.clone(), span }) - } - // These types are not supported in binary operations - (Array(..), _) | (_, Array(..)) => { - Err(TypeCheckError::InvalidInfixOp { kind: "Arrays", span }) - } - (Struct(..), _) | (_, Struct(..)) => { - Err(TypeCheckError::InvalidInfixOp { kind: "Structs", span }) + Ok((Integer(*sign_x, *bit_width_x), false)) } - (Tuple(_), _) | (_, Tuple(_)) => { - Err(TypeCheckError::InvalidInfixOp { kind: "Tuples", span }) - } - // The result of two Fields is always a witness (FieldElement, FieldElement) => { if op.is_bitwise() { @@ -1127,17 +1130,20 @@ impl<'interner> TypeChecker<'interner> { if op.is_modulo() { return Err(TypeCheckError::FieldModulo { span }); } - Ok(FieldElement) + Ok((FieldElement, false)) } - (Bool, Bool) => Ok(Bool), + (Bool, Bool) => Ok((Bool, false)), - (lhs, rhs) => Err(TypeCheckError::TypeMismatchWithSource { - expected: lhs.clone(), - actual: rhs.clone(), - source: Source::BinOp(op.kind), - span, - }), + (lhs, rhs) => { + self.unify(lhs, rhs, || TypeCheckError::TypeMismatchWithSource { + expected: lhs.clone(), + actual: rhs.clone(), + span: op.location.span, + source: Source::Binary, + }); + Ok((lhs.clone(), true)) + } } } @@ -1158,6 +1164,10 @@ impl<'interner> TypeChecker<'interner> { match op { crate::UnaryOp::Minus => { + if rhs_type.is_unsigned() { + self.errors + .push(TypeCheckError::InvalidUnaryOp { kind: rhs_type.to_string(), span }); + } let expected = Type::polymorphic_integer(self.interner); rhs_type.unify(&expected, &mut self.errors, || TypeCheckError::InvalidUnaryOp { kind: rhs_type.to_string(), @@ -1185,6 +1195,57 @@ impl<'interner> TypeChecker<'interner> { } } } + + /// Prerequisite: verify_trait_constraint of the operator's trait constraint. + /// + /// Although by this point the operator is expected to already have a trait impl, + /// we still need to match the operator's type against the method's instantiated type + /// to ensure the instantiation bindings are correct and the monomorphizer can + /// re-apply the needed bindings. 
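Editor's note (illustrative, not part of this patch): the doc comment above describes `typecheck_operator_method` (defined next), which matches the operand's type against only the first parameter of the operator method's instantiated type, since operator traits take the operand as `self`. Below is a minimal stand-alone sketch of that check; `Ty` and `check_operator_object_type` are invented names, not the compiler's API.

#[derive(Debug, Clone, PartialEq)]
enum Ty {
    Field,
    Bool,
    Function(Vec<Ty>, Box<Ty>),
}

// Errors when the operand does not match the operator method's `self` parameter.
fn check_operator_object_type(method_type: &Ty, object_type: &Ty) -> Result<(), String> {
    match method_type {
        Ty::Function(args, _ret) => {
            let expected = args.first().ok_or("operator method has no parameters")?;
            if expected == object_type {
                Ok(())
            } else {
                Err(format!("expected {expected:?}, found {object_type:?}"))
            }
        }
        other => Err(format!("expected a function type, found {other:?}")),
    }
}

fn main() {
    // e.g. `Field == Field` checked against `fn eq(self: Field, other: Field) -> bool`
    let eq = Ty::Function(vec![Ty::Field, Ty::Field], Box::new(Ty::Bool));
    assert!(check_operator_object_type(&eq, &Ty::Field).is_ok());
    assert!(check_operator_object_type(&eq, &Ty::Bool).is_err());
}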
+ fn typecheck_operator_method( + &mut self, + expr_id: ExprId, + trait_method_id: TraitMethodId, + object_type: &Type, + span: Span, + ) { + let the_trait = self.interner.get_trait(trait_method_id.trait_id); + + let method = &the_trait.methods[trait_method_id.method_index]; + let (method_type, mut bindings) = method.typ.instantiate(self.interner); + + match method_type { + Type::Function(args, _, _) => { + // We can cheat a bit and match against only the object type here since no operator + // overload uses other generic parameters or return types aside from the object type. + let expected_object_type = &args[0]; + self.unify(object_type, expected_object_type, || TypeCheckError::TypeMismatch { + expected_typ: expected_object_type.to_string(), + expr_typ: object_type.to_string(), + expr_span: span, + }); + } + other => { + unreachable!("Expected operator method to have a function type, but found {other}") + } + } + + // We must also remember to apply these substitutions to the object_type + // referenced by the selected trait impl, if one has yet to be selected. + let impl_kind = self.interner.get_selected_impl_for_expression(expr_id); + if let Some(TraitImplKind::Assumed { object_type }) = impl_kind { + let the_trait = self.interner.get_trait(trait_method_id.trait_id); + let object_type = object_type.substitute(&bindings); + bindings.insert( + the_trait.self_type_typevar_id, + (the_trait.self_type_typevar.clone(), object_type.clone()), + ); + self.interner + .select_impl_for_expression(expr_id, TraitImplKind::Assumed { object_type }); + } + + self.interner.store_instantiation_bindings(expr_id, bindings); + } } /// Taken from: https://stackoverflow.com/a/47127500 diff --git a/compiler/noirc_frontend/src/hir/type_check/mod.rs b/compiler/noirc_frontend/src/hir/type_check/mod.rs index 956992a54d0..c05c233fe34 100644 --- a/compiler/noirc_frontend/src/hir/type_check/mod.rs +++ b/compiler/noirc_frontend/src/hir/type_check/mod.rs @@ -50,25 +50,32 @@ pub fn type_check_func(interner: &mut NodeInterner, func_id: FuncId) -> Vec Vec Vec Vec TypeChecker<'interner> { expr_span: range_span, }); - let fresh_id = self.interner.next_type_variable_id(); - let type_variable = Shared::new(TypeBinding::Unbound(fresh_id)); - let expected_type = Type::TypeVariable(type_variable, TypeVariableKind::IntegerOrField); + let expected_type = Type::polymorphic_integer(self.interner); self.unify(&start_range_type, &expected_type, || { TypeCheckError::TypeCannotBeUsed { @@ -150,7 +147,7 @@ impl<'interner> TypeChecker<'interner> { // Must push new lvalue to the interner, we've resolved any field indices self.interner.update_statement(stmt_id, |stmt| match stmt { HirStatement::Assign(assign) => assign.lvalue = new_lvalue, - _ => unreachable!(), + _ => unreachable!("statement is known to be assignment"), }); let span = self.interner.expr_span(&assign_stmt.expression); @@ -350,7 +347,7 @@ impl<'interner> TypeChecker<'interner> { let expr = self.interner.expression(rhs_expr); let span = self.interner.expr_span(rhs_expr); match expr { - HirExpression::Literal(HirLiteral::Integer(value)) => { + HirExpression::Literal(HirLiteral::Integer(value, false)) => { let v = value.to_u128(); if let Type::Integer(_, bit_count) = annotated_type { let max = 1 << bit_count; diff --git a/compiler/noirc_frontend/src/hir_def/expr.rs b/compiler/noirc_frontend/src/hir_def/expr.rs index 755817ba1e6..7c04398ca88 100644 --- a/compiler/noirc_frontend/src/hir_def/expr.rs +++ b/compiler/noirc_frontend/src/hir_def/expr.rs @@ -78,7 +78,7 @@ impl HirBinaryOp 
{ pub enum HirLiteral { Array(HirArrayLiteral), Bool(bool), - Integer(FieldElement), + Integer(FieldElement, bool), //true for negative integer and false for positive Str(String), FmtStr(String, Vec), Unit, @@ -101,6 +101,12 @@ pub struct HirInfixExpression { pub lhs: ExprId, pub operator: HirBinaryOp, pub rhs: ExprId, + + /// The trait method id for the operator trait method that corresponds to this operator. + /// For derived operators like `!=`, this will lead to the method `Eq::eq`. For these + /// cases, it is up to the monomorphization pass to insert the appropriate `not` operation + /// after the call to `Eq::eq` to get the result of the `!=` operator. + pub trait_method_id: TraitMethodId, } /// This is always a struct field access `my_struct.field` diff --git a/compiler/noirc_frontend/src/hir_def/function.rs b/compiler/noirc_frontend/src/hir_def/function.rs index 085bda107e3..9fff301f5f7 100644 --- a/compiler/noirc_frontend/src/hir_def/function.rs +++ b/compiler/noirc_frontend/src/hir_def/function.rs @@ -131,22 +131,12 @@ impl FuncMeta { } } - pub fn into_function_signature(self) -> FunctionSignature { - // Doesn't use `self.return_type()` so we aren't working with references and don't need a `clone()` - let return_type = match self.typ { - Type::Function(_, ret, _env) => *ret, - Type::Forall(_, typ) => match *typ { - Type::Function(_, ret, _env) => *ret, - _ => unreachable!(), - }, - _ => unreachable!(), - }; - let return_type = match return_type { + pub fn function_signature(&self) -> FunctionSignature { + let return_type = match self.return_type() { Type::Unit => None, - typ => Some(typ), + typ => Some(typ.clone()), }; - - (self.parameters.0, return_type) + (self.parameters.0.clone(), return_type) } /// Gives the (uninstantiated) return type of this function. diff --git a/compiler/noirc_frontend/src/hir_def/traits.rs b/compiler/noirc_frontend/src/hir_def/traits.rs index 062f505c179..1d0449b6568 100644 --- a/compiler/noirc_frontend/src/hir_def/traits.rs +++ b/compiler/noirc_frontend/src/hir_def/traits.rs @@ -1,4 +1,4 @@ -use std::rc::Rc; +use std::collections::HashMap; use crate::{ graph::CrateId, @@ -6,17 +6,14 @@ use crate::{ Generics, Ident, NoirFunction, Type, TypeVariable, TypeVariableId, }; use fm::FileId; -use noirc_errors::Span; +use noirc_errors::{Location, Span}; #[derive(Clone, Debug, PartialEq, Eq)] pub struct TraitFunction { pub name: Ident, - pub generics: Vec<(Rc, TypeVariable, Span)>, - pub arguments: Vec, - pub return_type: Type, - pub span: Span, + pub typ: Type, + pub location: Location, pub default_impl: Option>, - pub default_impl_file_id: fm::FileId, pub default_impl_module_id: crate::hir::def_map::LocalModuleId, } @@ -37,7 +34,7 @@ pub struct TraitType { /// Represents a trait in the type system. Each instance of this struct /// will be shared across all Type::Trait variants that represent /// the same trait. -#[derive(Debug, Eq, Clone)] +#[derive(Debug, Eq)] pub struct Trait { /// A unique id representing this trait type. Used to check if two /// struct traits are equal. @@ -46,12 +43,19 @@ pub struct Trait { pub crate_id: CrateId, pub methods: Vec, + + /// Maps method_name -> method id. + /// This map is separate from methods since TraitFunction ids + /// are created during collection where we don't yet have all + /// the information needed to create the full TraitFunction. 
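Editor's note (illustrative, not part of this patch): the new `trait_method_id` field documented earlier in this hunk records that `a != b` resolves to the trait method `Eq::eq`, with a later pass inserting the negation. A minimal sketch of that desugaring is shown below; the `Expr` AST and `lower_not_equal` are invented for illustration and are not the compiler's HIR or monomorphized AST.

#[derive(Debug, PartialEq)]
enum Expr {
    Var(&'static str),
    Call { method: &'static str, args: Vec<Expr> },
    Not(Box<Expr>),
}

// Lower `lhs != rhs`: call `Eq::eq`, then negate the result.
fn lower_not_equal(lhs: Expr, rhs: Expr) -> Expr {
    Expr::Not(Box::new(Expr::Call { method: "Eq::eq", args: vec![lhs, rhs] }))
}

fn main() {
    let lowered = lower_not_equal(Expr::Var("a"), Expr::Var("b"));
    assert_eq!(
        lowered,
        Expr::Not(Box::new(Expr::Call {
            method: "Eq::eq",
            args: vec![Expr::Var("a"), Expr::Var("b")],
        }))
    );
}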
+ pub method_ids: HashMap, + pub constants: Vec, pub types: Vec, pub name: Ident, pub generics: Generics, - pub span: Span, + pub location: Location, /// When resolving the types of Trait elements, all references to `Self` resolve /// to this TypeVariable. Then when we check if the types of trait impl elements @@ -61,6 +65,7 @@ pub struct Trait { pub self_type_typevar: TypeVariable, } +#[derive(Debug)] pub struct TraitImpl { pub ident: Ident, pub typ: Type, @@ -101,36 +106,13 @@ impl PartialEq for Trait { } impl Trait { - pub fn new( - id: TraitId, - name: Ident, - crate_id: CrateId, - span: Span, - generics: Generics, - self_type_typevar_id: TypeVariableId, - self_type_typevar: TypeVariable, - ) -> Trait { - Trait { - id, - name, - crate_id, - span, - methods: Vec::new(), - constants: Vec::new(), - types: Vec::new(), - generics, - self_type_typevar_id, - self_type_typevar, - } - } - pub fn set_methods(&mut self, methods: Vec) { self.methods = methods; } - pub fn find_method(&self, name: Ident) -> Option { + pub fn find_method(&self, name: &str) -> Option { for (idx, method) in self.methods.iter().enumerate() { - if method.name == name { + if &method.name == name { return Some(TraitMethodId { trait_id: self.id, method_index: idx }); } } @@ -145,12 +127,33 @@ impl std::fmt::Display for Trait { } impl TraitFunction { - pub fn get_type(&self) -> Type { - Type::Function( - self.arguments.clone(), - Box::new(self.return_type.clone()), - Box::new(Type::Unit), - ) - .generalize() + pub fn arguments(&self) -> &[Type] { + match &self.typ { + Type::Function(args, _, _) => args, + Type::Forall(_, typ) => match typ.as_ref() { + Type::Function(args, _, _) => args, + _ => unreachable!("Trait function does not have a function type"), + }, + _ => unreachable!("Trait function does not have a function type"), + } + } + + pub fn generics(&self) -> &[(TypeVariableId, TypeVariable)] { + match &self.typ { + Type::Function(..) => &[], + Type::Forall(generics, _) => generics, + _ => unreachable!("Trait function does not have a function type"), + } + } + + pub fn return_type(&self) -> &Type { + match &self.typ { + Type::Function(_, return_type, _) => return_type, + Type::Forall(_, typ) => match typ.as_ref() { + Type::Function(_, return_type, _) => return_type, + _ => unreachable!("Trait function does not have a function type"), + }, + _ => unreachable!("Trait function does not have a function type"), + } } } diff --git a/compiler/noirc_frontend/src/hir_def/types.rs b/compiler/noirc_frontend/src/hir_def/types.rs index 8ad38b526de..981d7e41b6e 100644 --- a/compiler/noirc_frontend/src/hir_def/types.rs +++ b/compiler/noirc_frontend/src/hir_def/types.rs @@ -1,4 +1,5 @@ use std::{ + borrow::Cow, cell::RefCell, collections::{BTreeSet, HashMap}, rc::Rc, @@ -6,18 +7,15 @@ use std::{ use crate::{ hir::type_check::TypeCheckError, - node_interner::{ExprId, NodeInterner, TypeAliasId}, + node_interner::{ExprId, NodeInterner, TraitId, TypeAliasId}, }; use iter_extended::vecmap; -use noirc_errors::Span; +use noirc_errors::{Location, Span}; use noirc_printable_type::PrintableType; use crate::{node_interner::StructId, Ident, Signedness}; -use super::{ - expr::{HirCallExpression, HirExpression, HirIdent}, - traits::Trait, -}; +use super::expr::{HirCallExpression, HirExpression, HirIdent}; #[derive(Debug, PartialEq, Eq, Clone, Hash)] pub enum Type { @@ -65,7 +63,10 @@ pub enum Type { /// different argument types each time. 
TypeVariable(TypeVariable, TypeVariableKind), - TraitAsType(Trait), + /// `impl Trait` when used in a type position. + /// These are only matched based on the TraitId. The trait name paramer is only + /// used for displaying error messages using the name of the trait. + TraitAsType(TraitId, /*name:*/ Rc), /// NamedGenerics are the 'T' or 'U' in a user-defined generic function /// like `fn foo(...) {}`. Unlike TypeVariables, they cannot be bound over. @@ -75,7 +76,7 @@ pub enum Type { /// the environment should be `Unit` by default, /// for closures it should contain a `Tuple` type with the captured /// variable types. - Function(Vec, Box, Box), + Function(Vec, /*return_type:*/ Box, /*environment:*/ Box), /// &mut T MutableReference(Box), @@ -103,6 +104,47 @@ pub enum Type { Error, } +impl Type { + /// Returns the number of field elements required to represent the type once encoded. + pub fn field_count(&self) -> u32 { + match self { + Type::FieldElement | Type::Integer { .. } | Type::Bool => 1, + Type::Array(size, typ) => { + let length = size + .evaluate_to_u64() + .expect("Cannot have variable sized arrays as a parameter to main"); + let typ = typ.as_ref(); + (length as u32) * typ.field_count() + } + Type::Struct(ref def, args) => { + let struct_type = def.borrow(); + let fields = struct_type.get_fields(args); + fields.iter().fold(0, |acc, (_, field_type)| acc + field_type.field_count()) + } + Type::Tuple(fields) => { + fields.iter().fold(0, |acc, field_typ| acc + field_typ.field_count()) + } + Type::String(size) => { + let size = size + .evaluate_to_u64() + .expect("Cannot have variable sized strings as a parameter to main"); + size as u32 + } + Type::FmtString(_, _) + | Type::Unit + | Type::TypeVariable(_, _) + | Type::TraitAsType(..) + | Type::NamedGeneric(_, _) + | Type::Function(_, _, _) + | Type::MutableReference(_) + | Type::Forall(_, _) + | Type::Constant(_) + | Type::NotConstant + | Type::Error => unreachable!("This type cannot exist as a parameter to main"), + } + } +} + /// A list of TypeVariableIds to bind to a type. Storing the /// TypeVariable in addition to the matching TypeVariableId allows /// the binding to later be undone if needed. @@ -125,7 +167,7 @@ pub struct StructType { fields: Vec<(Ident, Type)>, pub generics: Generics, - pub span: Span, + pub location: Location, } /// Corresponds to generic lists such as `` in the source @@ -150,11 +192,12 @@ impl StructType { pub fn new( id: StructId, name: Ident, - span: Span, + + location: Location, fields: Vec<(Ident, Type)>, generics: Generics, ) -> StructType { - StructType { id, fields, name, span, generics } + StructType { id, fields, name, location, generics } } /// To account for cyclic references between structs, a struct's @@ -372,7 +415,66 @@ pub enum TypeVariableKind { /// A TypeVariable is a mutable reference that is either /// bound to some type, or unbound with a given TypeVariableId. -pub type TypeVariable = Shared; +#[derive(Debug, PartialEq, Eq, Clone, Hash)] +pub struct TypeVariable(TypeVariableId, Shared); + +impl TypeVariable { + pub fn unbound(id: TypeVariableId) -> Self { + TypeVariable(id, Shared::new(TypeBinding::Unbound(id))) + } + + /// Bind this type variable to a value. + /// + /// Panics if this TypeVariable is already Bound. + /// Also Panics if the ID of this TypeVariable occurs within the given + /// binding, as that would cause an infinitely recursive type. 
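Editor's note (illustrative, not part of this patch): `bind` and `try_bind`, whose doc comment ends above, both rely on an occurs check so that a variable is never bound to a type containing that same variable. The self-contained sketch below shows that check with invented, simplified types (`VarId`, `Ty`); the real code works on shared `TypeVariable` bindings instead.

type VarId = usize;

#[derive(Debug, Clone)]
enum Ty {
    Var(VarId),
    Array(Box<Ty>),
    Field,
}

// Returns true if variable `id` occurs anywhere inside `ty`.
fn occurs(ty: &Ty, id: VarId) -> bool {
    match ty {
        Ty::Var(other) => *other == id,
        Ty::Array(elem) => occurs(elem, id),
        Ty::Field => false,
    }
}

// Bind only when the occurs check passes, mirroring the intent of `TypeVariable::try_bind`.
fn try_bind(id: VarId, ty: Ty) -> Result<Ty, String> {
    if occurs(&ty, id) {
        Err(format!("cannot bind variable {id}: would create a recursive type"))
    } else {
        Ok(ty)
    }
}

fn main() {
    // Binding `T := [Field]` is fine.
    assert!(try_bind(0, Ty::Array(Box::new(Ty::Field))).is_ok());
    // Binding `T := [T]` is rejected.
    assert!(try_bind(0, Ty::Array(Box::new(Ty::Var(0)))).is_err());
}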
+ pub fn bind(&self, typ: Type) { + let id = match &*self.1.borrow() { + TypeBinding::Bound(binding) => { + unreachable!("TypeVariable::bind, cannot bind bound var {} to {}", binding, typ) + } + TypeBinding::Unbound(id) => *id, + }; + + assert!(!typ.occurs(id)); + *self.1.borrow_mut() = TypeBinding::Bound(typ); + } + + pub fn try_bind(&self, binding: Type, span: Span) -> Result<(), TypeCheckError> { + let id = match &*self.1.borrow() { + TypeBinding::Bound(binding) => { + unreachable!("Expected unbound, found bound to {binding}") + } + TypeBinding::Unbound(id) => *id, + }; + + if binding.occurs(id) { + Err(TypeCheckError::TypeAnnotationsNeeded { span }) + } else { + *self.1.borrow_mut() = TypeBinding::Bound(binding); + Ok(()) + } + } + + /// Borrows this TypeVariable to (e.g.) manually match on the inner TypeBinding. + pub fn borrow(&self) -> std::cell::Ref { + self.1.borrow() + } + + /// Unbind this type variable, setting it to Unbound(id). + /// + /// This is generally a logic error to use outside of monomorphization. + pub fn unbind(&self, id: TypeVariableId) { + *self.1.borrow_mut() = TypeBinding::Unbound(id); + } + + /// Forcibly bind a type variable to a new type - even if the type + /// variable is already bound to a different type. This generally + /// a logic error to use outside of monomorphization. + pub fn force_bind(&self, typ: Type) { + *self.1.borrow_mut() = TypeBinding::Bound(typ); + } +} /// TypeBindings are the mutable insides of a TypeVariable. /// They are either bound to some type, or are unbound. @@ -386,24 +488,6 @@ impl TypeBinding { pub fn is_unbound(&self) -> bool { matches!(self, TypeBinding::Unbound(_)) } - - pub fn bind_to(&mut self, binding: Type, span: Span) -> Result<(), TypeCheckError> { - match self { - TypeBinding::Bound(_) => panic!("Tried to bind an already bound type variable!"), - TypeBinding::Unbound(id) => { - if binding.occurs(*id) { - Err(TypeCheckError::TypeAnnotationsNeeded { span }) - } else { - *self = TypeBinding::Bound(binding); - Ok(()) - } - } - } - } - - pub fn unbind(&mut self, id: TypeVariableId) { - *self = TypeBinding::Unbound(id); - } } /// A unique ID used to differentiate different type variables @@ -420,7 +504,8 @@ impl Type { } pub fn type_variable(id: TypeVariableId) -> Type { - Type::TypeVariable(Shared::new(TypeBinding::Unbound(id)), TypeVariableKind::Normal) + let var = TypeVariable::unbound(id); + Type::TypeVariable(var, TypeVariableKind::Normal) } /// Returns a TypeVariable(_, TypeVariableKind::Constant(length)) to bind to @@ -428,13 +513,15 @@ impl Type { pub fn constant_variable(length: u64, interner: &mut NodeInterner) -> Type { let id = interner.next_type_variable_id(); let kind = TypeVariableKind::Constant(length); - Type::TypeVariable(Shared::new(TypeBinding::Unbound(id)), kind) + let var = TypeVariable::unbound(id); + Type::TypeVariable(var, kind) } pub fn polymorphic_integer(interner: &mut NodeInterner) -> Type { let id = interner.next_type_variable_id(); let kind = TypeVariableKind::IntegerOrField; - Type::TypeVariable(Shared::new(TypeBinding::Unbound(id)), kind) + let var = TypeVariable::unbound(id); + Type::TypeVariable(var, kind) } /// A bit of an awkward name for this function - this function returns @@ -489,7 +576,7 @@ impl Type { | Type::NamedGeneric(_, _) | Type::NotConstant | Type::Forall(_, _) - | Type::TraitAsType(_) => false, + | Type::TraitAsType(..) 
=> false, Type::Array(length, elem) => { elem.contains_numeric_typevar(target_id) || named_generic_id_matches_target(length) @@ -576,15 +663,6 @@ impl Type { } } - /// Takes a monomorphic type and generalizes it over each of the given type variables. - pub(crate) fn generalize_from_variables( - self, - type_vars: HashMap, - ) -> Type { - let polymorphic_type_vars = vecmap(type_vars, |type_var| type_var); - Type::Forall(polymorphic_type_vars, Box::new(self)) - } - /// Takes a monomorphic type and generalizes it over each of the type variables in the /// given type bindings, ignoring what each type variable is bound to in the TypeBindings. pub(crate) fn generalize_from_substitutions(self, type_bindings: TypeBindings) -> Type { @@ -592,14 +670,23 @@ impl Type { Type::Forall(polymorphic_type_vars, Box::new(self)) } - /// Takes a monomorphic type and generalizes it over each type variable found within. - /// - /// Note that Noir's type system assumes any Type::Forall are only present at top-level, - /// and thus all type variable's within a type are free. - pub(crate) fn generalize(self) -> Type { - let mut type_variables = HashMap::new(); - self.find_all_unbound_type_variables(&mut type_variables); - self.generalize_from_variables(type_variables) + /// Return this type as a monomorphic type - without a `Type::Forall` if there is one. + /// This is only a shallow check since Noir's type system prohibits `Type::Forall` anywhere + /// inside other types. + pub fn as_monotype(&self) -> &Type { + match self { + Type::Forall(_, typ) => typ.as_ref(), + other => other, + } + } + + /// Return the generics and type within this `Type::Forall`. + /// Panics if `self` is not `Type::Forall` + pub fn unwrap_forall(&self) -> (Cow, &Type) { + match self { + Type::Forall(generics, typ) => (Cow::Borrowed(generics), typ.as_ref()), + other => (Cow::Owned(Generics::new()), other), + } } } @@ -620,7 +707,7 @@ impl std::fmt::Display for Type { Signedness::Signed => write!(f, "i{num_bits}"), Signedness::Unsigned => write!(f, "u{num_bits}"), }, - Type::TypeVariable(id, TypeVariableKind::Normal) => write!(f, "{}", id.borrow()), + Type::TypeVariable(var, TypeVariableKind::Normal) => write!(f, "{}", var.borrow()), Type::TypeVariable(binding, TypeVariableKind::IntegerOrField) => { if let TypeBinding::Unbound(_) = &*binding.borrow() { // Show a Field by default if this TypeVariableKind::IntegerOrField is unbound, since that is @@ -647,8 +734,8 @@ impl std::fmt::Display for Type { write!(f, "{}<{}>", s.borrow(), args.join(", ")) } } - Type::TraitAsType(tr) => { - write!(f, "impl {}", tr.name) + Type::TraitAsType(_id, name) => { + write!(f, "impl {}", name) } Type::Tuple(elements) => { let elements = vecmap(elements, ToString::to_string); @@ -720,58 +807,63 @@ pub struct UnificationError; impl Type { /// Try to bind a MaybeConstant variable to self, succeeding if self is a Constant, - /// MaybeConstant, or type variable. - pub fn try_bind_to_maybe_constant( + /// MaybeConstant, or type variable. If successful, the binding is placed in the + /// given TypeBindings map rather than linked immediately. 
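Editor's note (illustrative, not part of this patch): the key change documented above is that unification no longer mutates type variables immediately; candidate bindings are collected in a `TypeBindings` map and only committed (via `Type::apply_type_bindings`) once the whole unification succeeds. The sketch below shows that collect-then-commit pattern with invented, simplified types; it is a model of the pattern, not the compiler's implementation.

use std::cell::RefCell;
use std::collections::HashMap;
use std::rc::Rc;

type VarId = usize;
type Binding = Rc<RefCell<Option<&'static str>>>;

// Try to "unify" a list of variables with concrete names. On failure, nothing
// has been written into the shared bindings; on success, the caller commits.
fn try_unify_all(
    pairs: &[(VarId, Binding, Option<&'static str>)],
) -> Result<HashMap<VarId, (Binding, &'static str)>, ()> {
    let mut pending = HashMap::new();
    for (id, var, target) in pairs {
        match target {
            Some(t) => {
                pending.insert(*id, (var.clone(), *t));
            }
            None => return Err(()), // fail without mutating any shared state
        }
    }
    Ok(pending)
}

fn main() {
    let var = Rc::new(RefCell::new(None));
    let pairs = vec![(0, var.clone(), Some("Field"))];

    if let Ok(bindings) = try_unify_all(&pairs) {
        // Commit only now, mirroring `Type::apply_type_bindings`.
        for (_, (var, ty)) in bindings {
            *var.borrow_mut() = Some(ty);
        }
    }
    assert_eq!(*var.borrow(), Some("Field"));
}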
+ fn try_bind_to_maybe_constant( &self, var: &TypeVariable, target_length: u64, + bindings: &mut TypeBindings, ) -> Result<(), UnificationError> { let target_id = match &*var.borrow() { TypeBinding::Bound(_) => unreachable!(), TypeBinding::Unbound(id) => *id, }; - match self { + let this = self.substitute(bindings); + + match &this { Type::Constant(length) if *length == target_length => { - *var.borrow_mut() = TypeBinding::Bound(self.clone()); + bindings.insert(target_id, (var.clone(), this)); Ok(()) } Type::NotConstant => { - *var.borrow_mut() = TypeBinding::Bound(Type::NotConstant); + bindings.insert(target_id, (var.clone(), Type::NotConstant)); Ok(()) } - Type::TypeVariable(binding, kind) => { - let borrow = binding.borrow(); + // A TypeVariable is less specific than a MaybeConstant, so we bind + // to the other type variable instead. + Type::TypeVariable(new_var, kind) => { + let borrow = new_var.borrow(); match &*borrow { - TypeBinding::Bound(typ) => typ.try_bind_to_maybe_constant(var, target_length), + TypeBinding::Bound(typ) => { + typ.try_bind_to_maybe_constant(var, target_length, bindings) + } // Avoid infinitely recursive bindings TypeBinding::Unbound(id) if *id == target_id => Ok(()), - TypeBinding::Unbound(_) => match kind { + TypeBinding::Unbound(new_target_id) => match kind { TypeVariableKind::Normal => { - drop(borrow); let clone = Type::TypeVariable( var.clone(), TypeVariableKind::Constant(target_length), ); - *binding.borrow_mut() = TypeBinding::Bound(clone); + bindings.insert(*new_target_id, (new_var.clone(), clone)); Ok(()) } TypeVariableKind::Constant(length) if *length == target_length => { - drop(borrow); let clone = Type::TypeVariable( var.clone(), TypeVariableKind::Constant(target_length), ); - *binding.borrow_mut() = TypeBinding::Bound(clone); + bindings.insert(*new_target_id, (new_var.clone(), clone)); Ok(()) } // The lengths don't match, but neither are set in stone so we can // just set them both to NotConstant. See issue 2370 TypeVariableKind::Constant(_) => { // *length != target_length - drop(borrow); - *var.borrow_mut() = TypeBinding::Bound(Type::NotConstant); - *binding.borrow_mut() = TypeBinding::Bound(Type::NotConstant); + bindings.insert(target_id, (var.clone(), Type::NotConstant)); + bindings.insert(*new_target_id, (new_var.clone(), Type::NotConstant)); Ok(()) } TypeVariableKind::IntegerOrField => Err(UnificationError), @@ -783,27 +875,33 @@ impl Type { } /// Try to bind a PolymorphicInt variable to self, succeeding if self is an integer, field, - /// other PolymorphicInt type, or type variable. - pub fn try_bind_to_polymorphic_int(&self, var: &TypeVariable) -> Result<(), UnificationError> { + /// other PolymorphicInt type, or type variable. If successful, the binding is placed in the + /// given TypeBindings map rather than linked immediately. + pub fn try_bind_to_polymorphic_int( + &self, + var: &TypeVariable, + bindings: &mut TypeBindings, + ) -> Result<(), UnificationError> { let target_id = match &*var.borrow() { TypeBinding::Bound(_) => unreachable!(), TypeBinding::Unbound(id) => *id, }; - match self { + let this = self.substitute(bindings); + + match &this { Type::FieldElement | Type::Integer(..) 
=> { - *var.borrow_mut() = TypeBinding::Bound(self.clone()); + bindings.insert(target_id, (var.clone(), this)); Ok(()) } Type::TypeVariable(self_var, TypeVariableKind::IntegerOrField) => { let borrow = self_var.borrow(); match &*borrow { - TypeBinding::Bound(typ) => typ.try_bind_to_polymorphic_int(var), + TypeBinding::Bound(typ) => typ.try_bind_to_polymorphic_int(var, bindings), // Avoid infinitely recursive bindings TypeBinding::Unbound(id) if *id == target_id => Ok(()), TypeBinding::Unbound(_) => { - drop(borrow); - *var.borrow_mut() = TypeBinding::Bound(self.clone()); + bindings.insert(target_id, (var.clone(), this.clone())); Ok(()) } } @@ -811,16 +909,15 @@ impl Type { Type::TypeVariable(binding, TypeVariableKind::Normal) => { let borrow = binding.borrow(); match &*borrow { - TypeBinding::Bound(typ) => typ.try_bind_to_polymorphic_int(var), + TypeBinding::Bound(typ) => typ.try_bind_to_polymorphic_int(var, bindings), // Avoid infinitely recursive bindings TypeBinding::Unbound(id) if *id == target_id => Ok(()), - TypeBinding::Unbound(_) => { - drop(borrow); - // PolymorphicInt is more specific than TypeVariable so we bind the type - // variable to PolymorphicInt instead. + TypeBinding::Unbound(new_target_id) => { + // IntegerOrField is more specific than TypeVariable so we bind the type + // variable to IntegerOrField instead. let clone = Type::TypeVariable(var.clone(), TypeVariableKind::IntegerOrField); - *binding.borrow_mut() = TypeBinding::Bound(clone); + bindings.insert(*new_target_id, (binding.clone(), clone)); Ok(()) } } @@ -829,102 +926,113 @@ impl Type { } } - pub fn try_bind_to(&self, var: &TypeVariable) -> Result<(), UnificationError> { + /// Try to bind the given type variable to self. Although the given type variable + /// is expected to be of TypeVariableKind::Normal, this binding can still fail + /// if the given type variable occurs within `self` as that would create a recursive type. + /// + /// If successful, the binding is placed in the + /// given TypeBindings map rather than linked immediately. + fn try_bind_to( + &self, + var: &TypeVariable, + bindings: &mut TypeBindings, + ) -> Result<(), UnificationError> { let target_id = match &*var.borrow() { TypeBinding::Bound(_) => unreachable!(), TypeBinding::Unbound(id) => *id, }; - if let Some(binding) = self.get_inner_type_variable() { + let this = self.substitute(bindings); + + if let Some(binding) = this.get_inner_type_variable() { match &*binding.borrow() { - TypeBinding::Bound(typ) => return typ.try_bind_to(var), + TypeBinding::Bound(typ) => return typ.try_bind_to(var, bindings), // Don't recursively bind the same id to itself TypeBinding::Unbound(id) if *id == target_id => return Ok(()), _ => (), } } - // Check if the target id occurs within self before binding. Otherwise this could + // Check if the target id occurs within `this` before binding. Otherwise this could // cause infinitely recursive types - if self.occurs(target_id) { + if this.occurs(target_id) { Err(UnificationError) } else { - *var.borrow_mut() = TypeBinding::Bound(self.clone()); + bindings.insert(target_id, (var.clone(), this.clone())); Ok(()) } } fn get_inner_type_variable(&self) -> Option> { match self { - Type::TypeVariable(var, _) | Type::NamedGeneric(var, _) => Some(var.clone()), + Type::TypeVariable(var, _) | Type::NamedGeneric(var, _) => Some(var.1.clone()), _ => None, } } /// Try to unify this type with another, setting any type variables found - /// equal to the other type in the process. 
Unification is more strict - /// than sub-typing but less strict than Eq. Returns true if the unification - /// succeeded. Note that any bindings performed in a failed unification are - /// not undone. This may cause further type errors later on. + /// equal to the other type in the process. When comparing types, unification + /// (including try_unify) are almost always preferred over Type::eq as unification + /// will correctly handle generic types. pub fn unify( &self, expected: &Type, errors: &mut Vec, make_error: impl FnOnce() -> TypeCheckError, ) { - if let Err(UnificationError) = self.try_unify(expected) { - errors.push(make_error()); + let mut bindings = TypeBindings::new(); + + match self.try_unify(expected, &mut bindings) { + Ok(()) => { + // Commit any type bindings on success + Self::apply_type_bindings(bindings); + } + Err(UnificationError) => errors.push(make_error()), } } /// `try_unify` is a bit of a misnomer since although errors are not committed, /// any unified bindings are on success. - pub fn try_unify(&self, other: &Type) -> Result<(), UnificationError> { + pub fn try_unify( + &self, + other: &Type, + bindings: &mut TypeBindings, + ) -> Result<(), UnificationError> { use Type::*; use TypeVariableKind as Kind; match (self, other) { (Error, _) | (_, Error) => Ok(()), - (TypeVariable(binding, Kind::IntegerOrField), other) - | (other, TypeVariable(binding, Kind::IntegerOrField)) => { - // If it is already bound, unify against what it is bound to - if let TypeBinding::Bound(link) = &*binding.borrow() { - return link.try_unify(other); - } - - // Otherwise, check it is unified against an integer and bind it - other.try_bind_to_polymorphic_int(binding) + (TypeVariable(var, Kind::IntegerOrField), other) + | (other, TypeVariable(var, Kind::IntegerOrField)) => { + other.try_unify_to_type_variable(var, bindings, |bindings| { + other.try_bind_to_polymorphic_int(var, bindings) + }) } - (TypeVariable(binding, Kind::Normal), other) - | (other, TypeVariable(binding, Kind::Normal)) => { - if let TypeBinding::Bound(link) = &*binding.borrow() { - return link.try_unify(other); - } - - other.try_bind_to(binding) + (TypeVariable(var, Kind::Normal), other) | (other, TypeVariable(var, Kind::Normal)) => { + other.try_unify_to_type_variable(var, bindings, |bindings| { + other.try_bind_to(var, bindings) + }) } - (TypeVariable(binding, Kind::Constant(length)), other) - | (other, TypeVariable(binding, Kind::Constant(length))) => { - if let TypeBinding::Bound(link) = &*binding.borrow() { - return link.try_unify(other); - } - - other.try_bind_to_maybe_constant(binding, *length) - } + (TypeVariable(var, Kind::Constant(length)), other) + | (other, TypeVariable(var, Kind::Constant(length))) => other + .try_unify_to_type_variable(var, bindings, |bindings| { + other.try_bind_to_maybe_constant(var, *length, bindings) + }), (Array(len_a, elem_a), Array(len_b, elem_b)) => { - len_a.try_unify(len_b)?; - elem_a.try_unify(elem_b) + len_a.try_unify(len_b, bindings)?; + elem_a.try_unify(elem_b, bindings) } - (String(len_a), String(len_b)) => len_a.try_unify(len_b), + (String(len_a), String(len_b)) => len_a.try_unify(len_b, bindings), (FmtString(len_a, elements_a), FmtString(len_b, elements_b)) => { - len_a.try_unify(len_b)?; - elements_a.try_unify(elements_b) + len_a.try_unify(len_b, bindings)?; + elements_a.try_unify(elements_b, bindings) } (Tuple(elements_a), Tuple(elements_b)) => { @@ -932,7 +1040,7 @@ impl Type { Err(UnificationError) } else { for (a, b) in elements_a.iter().zip(elements_b) { - 
a.try_unify(b)?; + a.try_unify(b, bindings)?; } Ok(()) } @@ -944,7 +1052,7 @@ impl Type { (Struct(id_a, args_a), Struct(id_b, args_b)) => { if id_a == id_b && args_a.len() == args_b.len() { for (a, b) in args_a.iter().zip(args_b) { - a.try_unify(b)?; + a.try_unify(b, bindings)?; } Ok(()) } else { @@ -952,24 +1060,18 @@ impl Type { } } - (NamedGeneric(binding, _), other) if !binding.borrow().is_unbound() => { - if let TypeBinding::Bound(link) = &*binding.borrow() { - link.try_unify(other) - } else { - unreachable!("If guard ensures binding is bound") - } - } - - (other, NamedGeneric(binding, _)) if !binding.borrow().is_unbound() => { + (NamedGeneric(binding, _), other) | (other, NamedGeneric(binding, _)) + if !binding.borrow().is_unbound() => + { if let TypeBinding::Bound(link) = &*binding.borrow() { - other.try_unify(link) + link.try_unify(other, bindings) } else { unreachable!("If guard ensures binding is bound") } } (NamedGeneric(binding_a, name_a), NamedGeneric(binding_b, name_b)) => { - // Unbound NamedGenerics are caught by the checks above + // Bound NamedGenerics are caught by the check above assert!(binding_a.borrow().is_unbound()); assert!(binding_b.borrow().is_unbound()); @@ -983,17 +1085,19 @@ impl Type { (Function(params_a, ret_a, env_a), Function(params_b, ret_b, env_b)) => { if params_a.len() == params_b.len() { for (a, b) in params_a.iter().zip(params_b.iter()) { - a.try_unify(b)?; + a.try_unify(b, bindings)?; } - env_a.try_unify(env_b)?; - ret_b.try_unify(ret_a) + env_a.try_unify(env_b, bindings)?; + ret_b.try_unify(ret_a, bindings) } else { Err(UnificationError) } } - (MutableReference(elem_a), MutableReference(elem_b)) => elem_a.try_unify(elem_b), + (MutableReference(elem_a), MutableReference(elem_b)) => { + elem_a.try_unify(elem_b, bindings) + } (other_a, other_b) => { if other_a == other_b { @@ -1005,6 +1109,34 @@ impl Type { } } + /// Try to unify a type variable to `self`. + /// This is a helper function factored out from try_unify. + fn try_unify_to_type_variable( + &self, + type_variable: &TypeVariable, + bindings: &mut TypeBindings, + + // Bind the type variable to a type. This is factored out since depending on the + // TypeVariableKind, there are different methods to check whether the variable can + // bind to the given type or not. + bind_variable: impl FnOnce(&mut TypeBindings) -> Result<(), UnificationError>, + ) -> Result<(), UnificationError> { + match &*type_variable.borrow() { + // If it is already bound, unify against what it is bound to + TypeBinding::Bound(link) => link.try_unify(self, bindings), + TypeBinding::Unbound(id) => { + // We may have already "bound" this type variable in this call to + // try_unify, so check those bindings as well. + match bindings.get(id) { + Some((_, binding)) => binding.clone().try_unify(self, bindings), + + // Otherwise, bind it + None => bind_variable(bindings), + } + } + } + } + /// Similar to `unify` but if the check fails this will attempt to coerce the /// argument to the target type. When this happens, the given expression is wrapped in /// a new expression to convert its type. E.g. 
`array` -> `array.as_slice()` @@ -1018,10 +1150,14 @@ impl Type { errors: &mut Vec, make_error: impl FnOnce() -> TypeCheckError, ) { - if let Err(UnificationError) = self.try_unify(expected) { + let mut bindings = TypeBindings::new(); + + if let Err(UnificationError) = self.try_unify(expected, &mut bindings) { if !self.try_array_to_slice_coercion(expected, expression, interner) { errors.push(make_error()); } + } else { + Type::apply_type_bindings(bindings); } } @@ -1044,8 +1180,10 @@ impl Type { if matches!(size1, Type::Constant(_)) && matches!(size2, Type::NotConstant) { // Still have to ensure the element types match. // Don't need to issue an error here if not, it will be done in unify_with_coercions - if element1.try_unify(element2).is_ok() { + let mut bindings = TypeBindings::new(); + if element1.try_unify(element2, &mut bindings).is_ok() { convert_array_expression_to_slice(expression, this, target, interner); + Self::apply_type_bindings(bindings); return true; } } @@ -1053,6 +1191,14 @@ impl Type { false } + /// Apply the given type bindings, making them permanently visible for each + /// clone of each type variable bound. + pub fn apply_type_bindings(bindings: TypeBindings) { + for (type_variable, binding) in bindings.values() { + type_variable.bind(binding.clone()); + } + } + /// If this type is a Type::Constant (used in array lengths), or is bound /// to a Type::Constant, return the constant as a u64. pub fn evaluate_to_u64(&self) -> Option { @@ -1119,7 +1265,7 @@ impl Type { }) .collect(); - let instantiated = typ.substitute(&replacements); + let instantiated = typ.force_substitute(&replacements); (instantiated, replacements) } other => (other.clone(), HashMap::new()), @@ -1153,7 +1299,7 @@ impl Type { | Type::Integer(_, _) | Type::Bool | Type::Unit - | Type::TraitAsType(_) + | Type::TraitAsType(..) | Type::Constant(_) | Type::NotConstant | Type::Error => (), @@ -1206,70 +1352,112 @@ impl Type { /// given bindings if found. If a type variable is not found within /// the given TypeBindings, it is unchanged. pub fn substitute(&self, type_bindings: &TypeBindings) -> Type { + self.substitute_helper(type_bindings, false) + } + + /// Forcibly substitute any type variables found within this type with the + /// given bindings if found. If a type variable is not found within + /// the given TypeBindings, it is unchanged. + /// + /// Compared to `substitute`, this function will also substitute any type variables + /// from type_bindings, even if they are bound in `self`. Since this can undo previous + /// bindings, this function should be avoided unless necessary. Currently, it is only + /// needed when handling bindings between trait methods and their corresponding impl + /// method during monomorphization. + pub fn force_substitute(&self, type_bindings: &TypeBindings) -> Type { + self.substitute_helper(type_bindings, true) + } + + /// This helper function only differs in the additional parameter which, if set, + /// allows substitutions on already-bound type variables. This should be `false` + /// for most uses, but is currently needed during monomorphization when instantiating + /// trait functions to shed any previous bindings from recursive parent calls to the + /// same trait. 
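Editor's note (illustrative, not part of this patch): the helper documented above distinguishes plain substitution from forced substitution, where only the latter may overwrite a type variable that is already bound (needed when monomorphizing trait impl methods). The sketch below models that difference with an invented `Ty` enum and a `force` flag; the real code consults each `TypeVariable`'s shared binding rather than storing the binding inline.

use std::collections::HashMap;

#[derive(Debug, Clone, PartialEq)]
enum Ty {
    // (variable id, what this variable is currently bound to, if anything)
    Var(usize, Option<Box<Ty>>),
    Field,
    Bool,
}

fn substitute_sketch(ty: &Ty, bindings: &HashMap<usize, Ty>, force: bool) -> Ty {
    match ty {
        Ty::Var(id, bound) => match (bindings.get(id), bound) {
            // Forced substitution ignores an existing binding.
            (Some(new), _) if force => new.clone(),
            // Otherwise an existing binding wins ...
            (_, Some(existing)) => substitute_sketch(existing, bindings, force),
            // ... and unbound variables take the binding from the map, if any.
            (Some(new), None) => new.clone(),
            (None, None) => ty.clone(),
        },
        other => other.clone(),
    }
}

fn main() {
    let bindings = HashMap::from([(0, Ty::Bool)]);
    let already_bound = Ty::Var(0, Some(Box::new(Ty::Field)));

    // Plain substitution keeps the existing binding; forced substitution overrides it.
    assert_eq!(substitute_sketch(&already_bound, &bindings, false), Ty::Field);
    assert_eq!(substitute_sketch(&already_bound, &bindings, true), Ty::Bool);
}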
+ fn substitute_helper( + &self, + type_bindings: &TypeBindings, + substitute_bound_typevars: bool, + ) -> Type { if type_bindings.is_empty() { return self.clone(); } - let substitute_binding = |binding: &TypeVariable| match &*binding.borrow() { - TypeBinding::Bound(binding) => binding.substitute(type_bindings), - TypeBinding::Unbound(id) => match type_bindings.get(id) { - Some((_, binding)) => binding.clone(), - None => self.clone(), - }, + let substitute_binding = |binding: &TypeVariable| { + // Check the id first to allow substituting to + // type variables that have already been bound over. + // This is needed for monomorphizing trait impl methods. + match type_bindings.get(&binding.0) { + Some((_, binding)) if substitute_bound_typevars => binding.clone(), + _ => match &*binding.borrow() { + TypeBinding::Bound(binding) => { + binding.substitute_helper(type_bindings, substitute_bound_typevars) + } + TypeBinding::Unbound(id) => match type_bindings.get(id) { + Some((_, binding)) => binding.clone(), + None => self.clone(), + }, + }, + } }; match self { Type::Array(size, element) => { - let size = Box::new(size.substitute(type_bindings)); - let element = Box::new(element.substitute(type_bindings)); - Type::Array(size, element) + let size = size.substitute_helper(type_bindings, substitute_bound_typevars); + let element = element.substitute_helper(type_bindings, substitute_bound_typevars); + Type::Array(Box::new(size), Box::new(element)) } Type::String(size) => { - let size = Box::new(size.substitute(type_bindings)); - Type::String(size) + let size = size.substitute_helper(type_bindings, substitute_bound_typevars); + Type::String(Box::new(size)) } Type::FmtString(size, fields) => { - let size = Box::new(size.substitute(type_bindings)); - let fields = Box::new(fields.substitute(type_bindings)); - Type::FmtString(size, fields) + let size = size.substitute_helper(type_bindings, substitute_bound_typevars); + let fields = fields.substitute_helper(type_bindings, substitute_bound_typevars); + Type::FmtString(Box::new(size), Box::new(fields)) } Type::NamedGeneric(binding, _) | Type::TypeVariable(binding, _) => { substitute_binding(binding) } - // Do not substitute fields, it can lead to infinite recursion + // Do not substitute fields here, it can lead to infinite recursion // and we should not match fields when type checking anyway. Type::Struct(fields, args) => { - let args = vecmap(args, |arg| arg.substitute(type_bindings)); + let args = vecmap(args, |arg| { + arg.substitute_helper(type_bindings, substitute_bound_typevars) + }); Type::Struct(fields.clone(), args) } Type::Tuple(fields) => { - let fields = vecmap(fields, |field| field.substitute(type_bindings)); + let fields = vecmap(fields, |field| { + field.substitute_helper(type_bindings, substitute_bound_typevars) + }); Type::Tuple(fields) } - Type::TraitAsType(_) => todo!(), Type::Forall(typevars, typ) => { - // Trying to substitute a variable defined within a nested Forall + // Trying to substitute a type variable defined within a nested Forall // is usually impossible and indicative of an error in the type checker somewhere.
for (var, _) in typevars { assert!(!type_bindings.contains_key(var)); } - let typ = Box::new(typ.substitute(type_bindings)); + let typ = Box::new(typ.substitute_helper(type_bindings, substitute_bound_typevars)); Type::Forall(typevars.clone(), typ) } Type::Function(args, ret, env) => { - let args = vecmap(args, |arg| arg.substitute(type_bindings)); - let ret = Box::new(ret.substitute(type_bindings)); - let env = Box::new(env.substitute(type_bindings)); + let args = vecmap(args, |arg| { + arg.substitute_helper(type_bindings, substitute_bound_typevars) + }); + let ret = Box::new(ret.substitute_helper(type_bindings, substitute_bound_typevars)); + let env = Box::new(env.substitute_helper(type_bindings, substitute_bound_typevars)); Type::Function(args, ret, env) } - Type::MutableReference(element) => { - Type::MutableReference(Box::new(element.substitute(type_bindings))) - } + Type::MutableReference(element) => Type::MutableReference(Box::new( + element.substitute_helper(type_bindings, substitute_bound_typevars), + )), Type::FieldElement | Type::Integer(_, _) | Type::Bool | Type::Constant(_) + | Type::TraitAsType(..) | Type::Error | Type::NotConstant | Type::Unit => self.clone(), @@ -1286,7 +1474,6 @@ impl Type { let field_occurs = fields.occurs(target_id); len_occurs || field_occurs } - Type::TraitAsType(_) => todo!(), Type::Struct(_, generic_args) => generic_args.iter().any(|arg| arg.occurs(target_id)), Type::Tuple(fields) => fields.iter().any(|field| field.occurs(target_id)), Type::NamedGeneric(binding, _) | Type::TypeVariable(binding, _) => { @@ -1309,6 +1496,7 @@ impl Type { | Type::Integer(_, _) | Type::Bool | Type::Constant(_) + | Type::TraitAsType(..) | Type::Error | Type::NotConstant | Type::Unit => false, @@ -1356,7 +1544,7 @@ impl Type { // Expect that this function should only be called on instantiated types Forall(..) => unreachable!(), - TraitAsType(_) + TraitAsType(..) | FieldElement | Integer(_, _) | Bool @@ -1464,7 +1652,7 @@ impl From<&Type> for PrintableType { let fields = vecmap(fields, |(name, typ)| (name, typ.into())); PrintableType::Struct { fields, name: struct_type.name.to_string() } } - Type::TraitAsType(_) => unreachable!(), + Type::TraitAsType(..) => unreachable!(), Type::Tuple(_) => todo!("printing tuple types is not yet implemented"), Type::TypeVariable(_, _) => unreachable!(), Type::NamedGeneric(..) => unreachable!(), diff --git a/compiler/noirc_frontend/src/lexer/lexer.rs b/compiler/noirc_frontend/src/lexer/lexer.rs index be24c1249c6..fd8168e36c6 100644 --- a/compiler/noirc_frontend/src/lexer/lexer.rs +++ b/compiler/noirc_frontend/src/lexer/lexer.rs @@ -126,6 +126,7 @@ impl<'a> Lexer<'a> { Some(']') => self.single_char_token(Token::RightBracket), Some('"') => self.eat_string_literal(), Some('f') => self.eat_format_string_or_alpha_numeric(), + Some('r') => self.eat_raw_string_or_alpha_numeric(), Some('#') => self.eat_attribute(), Some(ch) if ch.is_ascii_alphanumeric() || ch == '_' => self.eat_alpha_numeric(ch), Some(ch) => { @@ -322,11 +323,29 @@ impl<'a> Lexer<'a> { let start = self.position; let integer_str = self.eat_while(Some(initial_char), |ch| { - ch.is_ascii_digit() | ch.is_ascii_hexdigit() | (ch == 'x') + ch.is_ascii_digit() | ch.is_ascii_hexdigit() | (ch == 'x') | (ch == '_') }); let end = self.position; + // We want to enforce some simple rules about usage of underscores: + // 1. Underscores cannot appear at the end of a integer literal. e.g. 0x123_. + // 2. There cannot be more than one underscore consecutively, e.g. 0x5__5, 5__5. 
+ // + // We're not concerned with an underscore at the beginning of a decimal literal + // such as `_5` as this would be lexed into an ident rather than an integer literal. + let invalid_underscore_location = integer_str.ends_with('_'); + let consecutive_underscores = integer_str.contains("__"); + if invalid_underscore_location || consecutive_underscores { + return Err(LexerErrorKind::InvalidIntegerLiteral { + span: Span::inclusive(start, end), + found: integer_str, + }); + } + + // Underscores needs to be stripped out before the literal can be converted to a `FieldElement. + let integer_str = integer_str.replace('_', ""); + let integer = match FieldElement::try_from_str(&integer_str) { None => { return Err(LexerErrorKind::InvalidIntegerLiteral { @@ -400,6 +419,78 @@ impl<'a> Lexer<'a> { } } + fn eat_raw_string(&mut self) -> SpannedTokenResult { + let start = self.position; + + let beginning_hashes = self.eat_while(None, |ch| ch == '#'); + let beginning_hashes_count = beginning_hashes.chars().count(); + if beginning_hashes_count > 255 { + // too many hashes (unlikely in practice) + // also, Rust disallows 256+ hashes as well + return Err(LexerErrorKind::UnexpectedCharacter { + span: Span::single_char(start + 255), + found: Some('#'), + expected: "\"".to_owned(), + }); + } + + if !self.peek_char_is('"') { + return Err(LexerErrorKind::UnexpectedCharacter { + span: Span::single_char(self.position), + found: self.next_char(), + expected: "\"".to_owned(), + }); + } + self.next_char(); + + let mut str_literal = String::new(); + loop { + let chars = self.eat_while(None, |ch| ch != '"'); + str_literal.push_str(&chars[..]); + if !self.peek_char_is('"') { + return Err(LexerErrorKind::UnexpectedCharacter { + span: Span::single_char(self.position), + found: self.next_char(), + expected: "\"".to_owned(), + }); + } + self.next_char(); + let mut ending_hashes_count = 0; + while let Some('#') = self.peek_char() { + if ending_hashes_count == beginning_hashes_count { + break; + } + self.next_char(); + ending_hashes_count += 1; + } + if ending_hashes_count == beginning_hashes_count { + break; + } else { + str_literal.push('"'); + for _ in 0..ending_hashes_count { + str_literal.push('#'); + } + } + } + + let str_literal_token = Token::RawStr(str_literal, beginning_hashes_count as u8); + + let end = self.position; + Ok(str_literal_token.into_span(start, end)) + } + + fn eat_raw_string_or_alpha_numeric(&mut self) -> SpannedTokenResult { + // Problem: we commit to eating raw strings once we see one or two characters. + // This is unclean, but likely ok in all practical cases, and works with existing + // `Lexer` methods. + let peek1 = self.peek_char().unwrap_or('X'); + let peek2 = self.peek2_char().unwrap_or('X'); + match (peek1, peek2) { + ('#', '#') | ('#', '"') | ('"', _) => self.eat_raw_string(), + _ => self.eat_alpha_numeric('r'), + } + } + fn parse_comment(&mut self, start: u32) -> SpannedTokenResult { let doc_style = match self.peek_char() { Some('!') => { @@ -560,15 +651,18 @@ mod tests { } #[test] - fn test_attribute_with_apostrophe() { - let input = r#"#[test(should_fail_with = "the eagle's feathers")]"#; + fn test_attribute_with_common_punctuation() { + let input = + r#"#[test(should_fail_with = "stmt. q? exclaim! 
& symbols, 1% shouldn't fail")]"#; let mut lexer = Lexer::new(input); let token = lexer.next_token().unwrap().token().clone(); assert_eq!( token, Token::Attribute(Attribute::Function(FunctionAttribute::Test( - TestScope::ShouldFailWith { reason: "the eagle's feathers".to_owned().into() } + TestScope::ShouldFailWith { + reason: "stmt. q? exclaim! & symbols, 1% shouldn't fail".to_owned().into() + } ))) ); } @@ -854,15 +948,33 @@ mod tests { } #[test] - fn test_eat_hex_int() { - let input = "0x05"; - - let expected = vec![Token::Int(5_i128.into())]; - let mut lexer = Lexer::new(input); + fn test_eat_integer_literals() { + let test_cases: Vec<(&str, Token)> = vec![ + ("0x05", Token::Int(5_i128.into())), + ("5", Token::Int(5_i128.into())), + ("0x1234_5678", Token::Int(0x1234_5678_u128.into())), + ("0x_01", Token::Int(0x1_u128.into())), + ("1_000_000", Token::Int(1_000_000_u128.into())), + ]; - for token in expected.into_iter() { + for (input, expected_token) in test_cases { + let mut lexer = Lexer::new(input); let got = lexer.next_token().unwrap(); - assert_eq!(got, token); + assert_eq!(got.token(), &expected_token); + } + } + + #[test] + fn test_reject_invalid_underscores_in_integer_literal() { + let test_cases: Vec<&str> = vec!["0x05_", "5_", "5__5", "0x5__5"]; + + for input in test_cases { + let mut lexer = Lexer::new(input); + let token = lexer.next_token(); + assert!( + matches!(token, Err(LexerErrorKind::InvalidIntegerLiteral { .. })), + "expected {input} to throw error" + ); } } diff --git a/compiler/noirc_frontend/src/lexer/token.rs b/compiler/noirc_frontend/src/lexer/token.rs index 72be71865cc..ab131ccd880 100644 --- a/compiler/noirc_frontend/src/lexer/token.rs +++ b/compiler/noirc_frontend/src/lexer/token.rs @@ -15,6 +15,7 @@ pub enum Token { Int(FieldElement), Bool(bool), Str(String), + RawStr(String, u8), FmtStr(String), Keyword(Keyword), IntType(IntType), @@ -157,6 +158,10 @@ impl fmt::Display for Token { Token::Bool(b) => write!(f, "{b}"), Token::Str(ref b) => write!(f, "{b}"), Token::FmtStr(ref b) => write!(f, "f{b}"), + Token::RawStr(ref b, hashes) => { + let h: String = std::iter::once('#').cycle().take(hashes as usize).collect(); + write!(f, "r{h}\"{b}\"{h}") + } Token::Keyword(k) => write!(f, "{k}"), Token::Attribute(ref a) => write!(f, "{a}"), Token::LineComment(ref s, _style) => write!(f, "//{s}"), @@ -227,7 +232,11 @@ impl Token { pub fn kind(&self) -> TokenKind { match *self { Token::Ident(_) => TokenKind::Ident, - Token::Int(_) | Token::Bool(_) | Token::Str(_) | Token::FmtStr(_) => TokenKind::Literal, + Token::Int(_) + | Token::Bool(_) + | Token::Str(_) + | Token::RawStr(..) + | Token::FmtStr(_) => TokenKind::Literal, Token::Keyword(_) => TokenKind::Keyword, Token::Attribute(_) => TokenKind::Attribute, ref tok => TokenKind::Token(tok.clone()), @@ -459,13 +468,8 @@ impl Attribute { .all(|ch| { ch.is_ascii_alphabetic() || ch.is_numeric() - || ch == '_' - || ch == '(' - || ch == ')' - || ch == '=' - || ch == '"' + || ch.is_ascii_punctuation() || ch == ' ' - || ch == '\'' }) .then_some(()); @@ -506,6 +510,7 @@ impl Attribute { Attribute::Secondary(SecondaryAttribute::ContractLibraryMethod) } ["event"] => Attribute::Secondary(SecondaryAttribute::Event), + ["export"] => Attribute::Secondary(SecondaryAttribute::Export), ["deprecated", name] => { if !name.starts_with('"') && !name.ends_with('"') { return Err(LexerErrorKind::MalformedFuncAttribute { @@ -584,6 +589,7 @@ pub enum SecondaryAttribute { // the entry point. 
ContractLibraryMethod, Event, + Export, Field(String), Custom(String), } @@ -598,6 +604,7 @@ impl fmt::Display for SecondaryAttribute { SecondaryAttribute::Custom(ref k) => write!(f, "#[{k}]"), SecondaryAttribute::ContractLibraryMethod => write!(f, "#[contract_library_method]"), SecondaryAttribute::Event => write!(f, "#[event]"), + SecondaryAttribute::Export => write!(f, "#[export]"), SecondaryAttribute::Field(ref k) => write!(f, "#[field({k})]"), } } @@ -621,7 +628,7 @@ impl AsRef for SecondaryAttribute { SecondaryAttribute::Deprecated(None) => "", SecondaryAttribute::Custom(string) | SecondaryAttribute::Field(string) => string, SecondaryAttribute::ContractLibraryMethod => "", - SecondaryAttribute::Event => "", + SecondaryAttribute::Event | SecondaryAttribute::Export => "", } } } @@ -635,6 +642,7 @@ pub enum Keyword { Assert, AssertEq, Bool, + CallData, Char, CompTime, Constrain, @@ -658,6 +666,7 @@ pub enum Keyword { Open, Pub, Return, + ReturnData, String, Struct, Trait, @@ -676,6 +685,7 @@ impl fmt::Display for Keyword { Keyword::AssertEq => write!(f, "assert_eq"), Keyword::Bool => write!(f, "bool"), Keyword::Char => write!(f, "char"), + Keyword::CallData => write!(f, "call_data"), Keyword::CompTime => write!(f, "comptime"), Keyword::Constrain => write!(f, "constrain"), Keyword::Contract => write!(f, "contract"), @@ -698,6 +708,7 @@ impl fmt::Display for Keyword { Keyword::Open => write!(f, "open"), Keyword::Pub => write!(f, "pub"), Keyword::Return => write!(f, "return"), + Keyword::ReturnData => write!(f, "return_data"), Keyword::String => write!(f, "str"), Keyword::Struct => write!(f, "struct"), Keyword::Trait => write!(f, "trait"), @@ -718,6 +729,7 @@ impl Keyword { "assert" => Keyword::Assert, "assert_eq" => Keyword::AssertEq, "bool" => Keyword::Bool, + "call_data" => Keyword::CallData, "char" => Keyword::Char, "comptime" => Keyword::CompTime, "constrain" => Keyword::Constrain, @@ -741,6 +753,7 @@ impl Keyword { "open" => Keyword::Open, "pub" => Keyword::Pub, "return" => Keyword::Return, + "return_data" => Keyword::ReturnData, "str" => Keyword::String, "struct" => Keyword::Struct, "trait" => Keyword::Trait, diff --git a/compiler/noirc_frontend/src/lib.rs b/compiler/noirc_frontend/src/lib.rs index 74057240de1..9582b80dcba 100644 --- a/compiler/noirc_frontend/src/lib.rs +++ b/compiler/noirc_frontend/src/lib.rs @@ -16,6 +16,7 @@ pub mod lexer; pub mod monomorphization; pub mod node_interner; pub mod parser; +pub mod resolve_locations; pub mod hir; pub mod hir_def; @@ -34,3 +35,46 @@ pub use hir_def::types::*; // Unit tests that involve all modules pub mod tests; + +// API for experimental macros feature +pub mod macros_api { + + pub use acvm::FieldElement; + pub use fm::FileId; + pub use noirc_errors::Span; + + pub use crate::graph::CrateId; + pub use crate::hir::def_collector::errors::MacroError; + pub use crate::hir_def::expr::{HirExpression, HirLiteral}; + pub use crate::hir_def::stmt::HirStatement; + pub use crate::node_interner::{NodeInterner, StructId}; + pub use crate::parser::SortedModule; + pub use crate::token::SecondaryAttribute; + + pub use crate::hir::def_map::ModuleDefId; + pub use crate::{ + hir::Context as HirContext, BlockExpression, CallExpression, CastExpression, Distinctness, + Expression, ExpressionKind, FunctionReturnType, Ident, IndexExpression, LetStatement, + Literal, MemberAccessExpression, MethodCallExpression, NoirFunction, Path, PathKind, + Pattern, Statement, UnresolvedType, UnresolvedTypeData, Visibility, + }; + pub use crate::{ + ForLoopStatement, 
ForRange, FunctionDefinition, FunctionVisibility, ImportStatement, + NoirStruct, Param, PrefixExpression, Signedness, StatementKind, StructType, Type, TypeImpl, + UnaryOp, + }; + + /// Methods to process the AST before and after type checking + pub trait MacroProcessor { + /// Function to manipulate the AST before type checking has been completed. + fn process_untyped_ast( + &self, + ast: SortedModule, + crate_id: &CrateId, + context: &HirContext, + ) -> Result; + /// Function to manipulate the AST after type checking has been completed. + /// The AST after type checking has been done is called the HIR. + fn process_typed_ast(&self, crate_id: &CrateId, context: &mut HirContext); + } +} diff --git a/compiler/noirc_frontend/src/monomorphization/ast.rs b/compiler/noirc_frontend/src/monomorphization/ast.rs index 0a005d766fe..42a618e7d77 100644 --- a/compiler/noirc_frontend/src/monomorphization/ast.rs +++ b/compiler/noirc_frontend/src/monomorphization/ast.rs @@ -2,7 +2,9 @@ use acvm::FieldElement; use iter_extended::vecmap; use noirc_errors::Location; -use crate::{hir_def::function::FunctionSignature, BinaryOpKind, Distinctness, Signedness}; +use crate::{ + hir_def::function::FunctionSignature, BinaryOpKind, Distinctness, Signedness, Visibility, +}; /// The monomorphized AST is expression-based, all statements are also /// folded into this expression enum. Compared to the HIR, the monomorphized @@ -243,6 +245,7 @@ pub struct Program { /// forwarding to the next phase. pub return_distinctness: Distinctness, pub return_location: Option, + pub return_visibility: Visibility, } impl Program { @@ -251,8 +254,15 @@ impl Program { main_function_signature: FunctionSignature, return_distinctness: Distinctness, return_location: Option, + return_visibility: Visibility, ) -> Program { - Program { functions, main_function_signature, return_distinctness, return_location } + Program { + functions, + main_function_signature, + return_distinctness, + return_location, + return_visibility, + } } pub fn main(&self) -> &Function { @@ -342,7 +352,7 @@ impl std::fmt::Display for Type { }; write!(f, "fn({}) -> {}{}", args.join(", "), ret, closure_env_text) } - Type::Slice(element) => write!(f, "[{element}"), + Type::Slice(element) => write!(f, "[{element}]"), Type::MutableReference(element) => write!(f, "&mut {element}"), } } diff --git a/compiler/noirc_frontend/src/monomorphization/mod.rs b/compiler/noirc_frontend/src/monomorphization/mod.rs index 57e4e6cdeb0..3510475e881 100644 --- a/compiler/noirc_frontend/src/monomorphization/mod.rs +++ b/compiler/noirc_frontend/src/monomorphization/mod.rs @@ -20,14 +20,14 @@ use std::{ use crate::{ hir_def::{ expr::*, - function::{FuncMeta, FunctionSignature, Parameters}, + function::{FunctionSignature, Parameters}, stmt::{HirAssignStatement, HirLValue, HirLetStatement, HirPattern, HirStatement}, types, }, node_interner::{self, DefinitionKind, NodeInterner, StmtId, TraitImplKind, TraitMethodId}, token::FunctionAttribute, - ContractFunctionType, FunctionKind, Type, TypeBinding, TypeBindings, TypeVariableKind, - Visibility, + ContractFunctionType, FunctionKind, Type, TypeBinding, TypeBindings, TypeVariable, + TypeVariableId, TypeVariableKind, UnaryOp, Visibility, }; use self::ast::{Definition, FuncId, Function, LocalId, Program}; @@ -58,8 +58,9 @@ struct Monomorphizer<'interner> { /// confuse users. 
locals: HashMap, - /// Queue of functions to monomorphize next - queue: VecDeque<(node_interner::FuncId, FuncId, TypeBindings)>, + /// Queue of functions to monomorphize next each item in the queue is a tuple of: + /// (old_id, new_monomorphized_id, any type bindings to apply, the trait method if old_id is from a trait impl) + queue: VecDeque<(node_interner::FuncId, FuncId, TypeBindings, Option)>, /// When a function finishes being monomorphized, the monomorphized ast::Function is /// stored here along with its FuncId. @@ -91,22 +92,32 @@ type HirType = crate::Type; /// Note that there is no requirement on the `main` function that can be passed into /// this function. Typically, this is the function named "main" in the source project, /// but it can also be, for example, an arbitrary test function for running `nargo test`. +#[tracing::instrument(level = "trace", skip(main, interner))] pub fn monomorphize(main: node_interner::FuncId, interner: &NodeInterner) -> Program { let mut monomorphizer = Monomorphizer::new(interner); let function_sig = monomorphizer.compile_main(main); while !monomorphizer.queue.is_empty() { - let (next_fn_id, new_id, bindings) = monomorphizer.queue.pop_front().unwrap(); + let (next_fn_id, new_id, bindings, trait_method) = monomorphizer.queue.pop_front().unwrap(); monomorphizer.locals.clear(); perform_instantiation_bindings(&bindings); + let impl_bindings = monomorphizer.perform_impl_bindings(trait_method, next_fn_id); monomorphizer.function(next_fn_id, new_id); + undo_instantiation_bindings(impl_bindings); undo_instantiation_bindings(bindings); } let functions = vecmap(monomorphizer.finished_functions, |(_, f)| f); - let FuncMeta { return_distinctness, .. } = interner.function_meta(&main); - Program::new(functions, function_sig, return_distinctness, monomorphizer.return_location) + let meta = interner.function_meta(&main); + + Program::new( + functions, + function_sig, + meta.return_distinctness, + monomorphizer.return_location, + meta.return_visibility, + ) } impl<'interner> Monomorphizer<'interner> { @@ -146,6 +157,7 @@ impl<'interner> Monomorphizer<'interner> { id: node_interner::FuncId, expr_id: node_interner::ExprId, typ: &HirType, + trait_method: Option, ) -> Definition { let typ = typ.follow_bindings(); match self.globals.get(&id).and_then(|inner_map| inner_map.get(&typ)) { @@ -169,7 +181,7 @@ impl<'interner> Monomorphizer<'interner> { Definition::Builtin(opcode) } FunctionKind::Normal => { - let id = self.queue_function(id, expr_id, typ); + let id = self.queue_function(id, expr_id, typ, trait_method); Definition::Function(id) } FunctionKind::Oracle => { @@ -209,13 +221,13 @@ impl<'interner> Monomorphizer<'interner> { }, ); let main_meta = self.interner.function_meta(&main_id); - main_meta.into_function_signature() + main_meta.function_signature() } fn function(&mut self, f: node_interner::FuncId, id: FuncId) { if let Some((self_type, trait_id)) = self.interner.get_function_trait(&f) { let the_trait = self.interner.get_trait(trait_id); - *the_trait.self_type_typevar.borrow_mut() = TypeBinding::Bound(self_type); + the_trait.self_type_typevar.force_bind(self_type); } let meta = self.interner.function_meta(&f); @@ -225,11 +237,12 @@ impl<'interner> Monomorphizer<'interner> { let body_expr_id = *self.interner.function(&f).as_expr(); let body_return_type = self.interner.id_type(body_expr_id); let return_type = self.convert_type(match meta.return_type() { - Type::TraitAsType(_) => &body_return_type, + Type::TraitAsType(..) 
=> &body_return_type, _ => meta.return_type(), }); - let parameters = self.parameters(meta.parameters); + let parameters = self.parameters(&meta.parameters); + let body = self.expr(body_expr_id); let unconstrained = modifiers.is_unconstrained || matches!(modifiers.contract_function_type, Some(ContractFunctionType::Open)); @@ -245,17 +258,17 @@ impl<'interner> Monomorphizer<'interner> { /// Monomorphize each parameter, expanding tuple/struct patterns into multiple parameters /// and binding any generic types found. - fn parameters(&mut self, params: Parameters) -> Vec<(ast::LocalId, bool, String, ast::Type)> { + fn parameters(&mut self, params: &Parameters) -> Vec<(ast::LocalId, bool, String, ast::Type)> { let mut new_params = Vec::with_capacity(params.len()); - for parameter in params { - self.parameter(parameter.0, ¶meter.1, &mut new_params); + for (parameter, typ, _) in ¶ms.0 { + self.parameter(parameter, typ, &mut new_params); } new_params } fn parameter( &mut self, - param: HirPattern, + param: &HirPattern, typ: &HirType, new_params: &mut Vec<(ast::LocalId, bool, String, ast::Type)>, ) { @@ -267,11 +280,11 @@ impl<'interner> Monomorphizer<'interner> { new_params.push((new_id, definition.mutable, name, self.convert_type(typ))); self.define_local(ident.id, new_id); } - HirPattern::Mutable(pattern, _) => self.parameter(*pattern, typ, new_params), + HirPattern::Mutable(pattern, _) => self.parameter(pattern, typ, new_params), HirPattern::Tuple(fields, _) => { let tuple_field_types = unwrap_tuple_type(typ); - for (field, typ) in fields.into_iter().zip(tuple_field_types) { + for (field, typ) in fields.iter().zip(tuple_field_types) { self.parameter(field, &typ, new_params); } } @@ -279,7 +292,8 @@ impl<'interner> Monomorphizer<'interner> { let struct_field_types = unwrap_struct_type(typ); assert_eq!(struct_field_types.len(), fields.len()); - let mut fields = btree_map(fields, |(name, field)| (name.0.contents, field)); + let mut fields = + btree_map(fields, |(name, field)| (name.0.contents.clone(), field)); // Iterate over `struct_field_types` since `unwrap_struct_type` will always // return the fields in the order defined by the struct type. 
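The borrowed `parameters`/`parameter` rewrite above preserves the existing expansion behaviour: a tuple or struct pattern in a parameter position still turns into one monomorphized parameter per leaf binding. A minimal, standalone sketch of that expansion, using toy `Pattern`/`Ty` types rather than the compiler's `HirPattern`/`Type`:

// Toy types, not the compiler's: illustrates flattening a tuple pattern into
// one (name, type) parameter per leaf binding.
#[derive(Clone, Debug)]
enum Pattern {
    Ident(String),
    Tuple(Vec<Pattern>),
}

#[derive(Clone, Debug)]
enum Ty {
    Field,
    U8,
    Bool,
    Tuple(Vec<Ty>),
}

fn flatten(pattern: &Pattern, typ: &Ty, out: &mut Vec<(String, Ty)>) {
    match (pattern, typ) {
        (Pattern::Ident(name), t) => out.push((name.clone(), t.clone())),
        (Pattern::Tuple(fields), Ty::Tuple(field_types)) => {
            for (field, field_type) in fields.iter().zip(field_types) {
                flatten(field, field_type, out);
            }
        }
        _ => unreachable!("pattern/type shape mismatch should be caught by type checking"),
    }
}

fn main() {
    // Roughly: fn f((a, (b, c)): (Field, (u8, bool))) { ... }
    let pattern = Pattern::Tuple(vec![
        Pattern::Ident("a".into()),
        Pattern::Tuple(vec![Pattern::Ident("b".into()), Pattern::Ident("c".into())]),
    ]);
    let typ = Ty::Tuple(vec![Ty::Field, Ty::Tuple(vec![Ty::U8, Ty::Bool])]);

    let mut params = Vec::new();
    flatten(&pattern, &typ, &mut params);
    // Three flat parameters: a: Field, b: u8, c: bool
    assert_eq!(params.len(), 3);
    println!("{params:?}");
}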
@@ -310,10 +324,23 @@ impl<'interner> Monomorphizer<'interner> { )) } HirExpression::Literal(HirLiteral::Bool(value)) => Literal(Bool(value)), - HirExpression::Literal(HirLiteral::Integer(value)) => { - let typ = self.convert_type(&self.interner.id_type(expr)); - let location = self.interner.id_location(expr); - Literal(Integer(value, typ, location)) + HirExpression::Literal(HirLiteral::Integer(value, sign)) => { + if sign { + let typ = self.convert_type(&self.interner.id_type(expr)); + let location = self.interner.id_location(expr); + match typ { + ast::Type::Field => Literal(Integer(-value, typ, location)), + ast::Type::Integer(_, bit_size) => { + let base = 1_u128 << bit_size; + Literal(Integer(FieldElement::from(base) - value, typ, location)) + } + _ => unreachable!("Integer literal must be numeric"), + } + } else { + let typ = self.convert_type(&self.interner.id_type(expr)); + let location = self.interner.id_location(expr); + Literal(Integer(value, typ, location)) + } } HirExpression::Literal(HirLiteral::Array(array)) => match array { HirArrayLiteral::Standard(array) => self.standard_array(expr, array), @@ -335,11 +362,37 @@ impl<'interner> Monomorphizer<'interner> { } HirExpression::Infix(infix) => { - let lhs = Box::new(self.expr(infix.lhs)); - let rhs = Box::new(self.expr(infix.rhs)); + let lhs = self.expr(infix.lhs); + let rhs = self.expr(infix.rhs); let operator = infix.operator.kind; let location = self.interner.expr_location(&expr); - ast::Expression::Binary(ast::Binary { lhs, rhs, operator, location }) + + if self.interner.get_selected_impl_for_expression(expr).is_some() { + // If an impl was selected for this infix operator, replace it + // with a method call to the appropriate trait impl method. + let lhs_type = self.interner.id_type(infix.lhs); + let args = vec![lhs_type.clone(), lhs_type]; + + // If this is a comparison operator, the result is a boolean but + // the actual method call returns an Ordering + use crate::BinaryOpKind::*; + let ret = if matches!(operator, Less | LessEqual | Greater | GreaterEqual) { + self.interner.ordering_type() + } else { + self.interner.id_type(expr) + }; + + let env = Box::new(Type::Unit); + let function_type = Type::Function(args, Box::new(ret.clone()), env); + + let method = infix.trait_method_id; + let func = self.resolve_trait_method_reference(expr, function_type, method); + self.create_operator_impl_call(func, lhs, infix.operator, rhs, ret, location) + } else { + let lhs = Box::new(lhs); + let rhs = Box::new(rhs); + ast::Expression::Binary(ast::Binary { lhs, rhs, operator, location }) + } } HirExpression::Index(index) => self.index(expr, index), @@ -379,13 +432,8 @@ impl<'interner> Monomorphizer<'interner> { HirExpression::Lambda(lambda) => self.lambda(lambda, expr), HirExpression::TraitMethodReference(method) => { - if let Type::Function(_, _, _) = self.interner.id_type(expr) { - self.resolve_trait_method_reference(expr, method) - } else { - unreachable!( - "Calling a non-function, this should've been caught in typechecking" - ); - } + let function_type = self.interner.id_type(expr); + self.resolve_trait_method_reference(expr, function_type, method) } HirExpression::MethodCall(hir_method_call) => { @@ -641,7 +689,7 @@ impl<'interner> Monomorphizer<'interner> { let location = Some(ident.location); let name = definition.name.clone(); let typ = self.interner.id_type(expr_id); - let definition = self.lookup_function(*func_id, expr_id, &typ); + let definition = self.lookup_function(*func_id, expr_id, &typ, None); let typ = 
self.convert_type(&typ); let ident = ast::Ident { location, mutable, definition, name, typ: typ.clone() }; let ident_expression = ast::Expression::Ident(ident); @@ -701,7 +749,7 @@ impl<'interner> Monomorphizer<'interner> { ast::Type::Slice(element) } } - HirType::TraitAsType(_) => { + HirType::TraitAsType(..) => { unreachable!("All TraitAsType should be replaced before calling convert_type"); } HirType::NamedGeneric(binding, _) => { @@ -712,11 +760,7 @@ impl<'interner> Monomorphizer<'interner> { // Default any remaining unbound type variables. // This should only happen if the variable in question is unused // and within a larger generic type. - // NOTE: Make sure to review this if there is ever type-directed dispatch, - // like automatic solving of traits. It should be fine since it is strictly - // after type checking, but care should be taken that it doesn't change which - // impls are chosen. - *binding.borrow_mut() = TypeBinding::Bound(HirType::default_int_type()); + binding.bind(HirType::default_int_type()); ast::Type::Field } @@ -728,10 +772,6 @@ impl<'interner> Monomorphizer<'interner> { // Default any remaining unbound type variables. // This should only happen if the variable in question is unused // and within a larger generic type. - // NOTE: Make sure to review this if there is ever type-directed dispatch, - // like automatic solving of traits. It should be fine since it is strictly - // after type checking, but care should be taken that it doesn't change which - // impls are chosen. let default = if self.is_range_loop && matches!(kind, TypeVariableKind::IntegerOrField) { Type::default_range_loop_type() @@ -740,7 +780,7 @@ impl<'interner> Monomorphizer<'interner> { }; let monomorphized_default = self.convert_type(&default); - *binding.borrow_mut() = TypeBinding::Bound(default); + binding.bind(default); monomorphized_default } @@ -813,16 +853,15 @@ impl<'interner> Monomorphizer<'interner> { fn resolve_trait_method_reference( &mut self, expr_id: node_interner::ExprId, + function_type: HirType, method: TraitMethodId, ) -> ast::Expression { - let function_type = self.interner.id_type(expr_id); - let trait_impl = self .interner - .get_selected_impl_for_ident(expr_id) + .get_selected_impl_for_expression(expr_id) .expect("ICE: missing trait impl - should be caught during type checking"); - let hir_func_id = match trait_impl { + let func_id = match trait_impl { node_interner::TraitImplKind::Normal(impl_id) => { self.interner.get_trait_implementation(impl_id).borrow().methods [method.method_index] @@ -850,7 +889,7 @@ impl<'interner> Monomorphizer<'interner> { } }; - let func_def = self.lookup_function(hir_func_id, expr_id, &function_type); + let func_def = self.lookup_function(func_id, expr_id, &function_type, Some(method)); let func_id = match func_def { Definition::Function(func_id) => func_id, _ => unreachable!(), @@ -875,7 +914,6 @@ impl<'interner> Monomorphizer<'interner> { let original_func = Box::new(self.expr(call.func)); let mut arguments = vecmap(&call.arguments, |id| self.expr(*id)); let hir_arguments = vecmap(&call.arguments, |id| self.interner.expression(id)); - let func: Box; let return_type = self.interner.id_type(id); let return_type = self.convert_type(&return_type); @@ -883,10 +921,11 @@ impl<'interner> Monomorphizer<'interner> { if let ast::Expression::Ident(ident) = original_func.as_ref() { if let Definition::Oracle(name) = &ident.definition { - if name.as_str() == "println" { + if name.as_str() == "print" { // Oracle calls are required to be wrapped in an 
unconstrained function - // Thus, the only argument to the `println` oracle is expected to always be an ident - self.append_printable_type_info(&hir_arguments[0], &mut arguments); + // The first argument to the `print` oracle is a bool, indicating a newline to be inserted at the end of the input + // The second argument is expected to always be an ident + self.append_printable_type_info(&hir_arguments[1], &mut arguments); } } } @@ -895,7 +934,8 @@ impl<'interner> Monomorphizer<'interner> { let func_type = self.interner.id_type(call.func); let func_type = self.convert_type(&func_type); let is_closure = self.is_function_closure(func_type); - if is_closure { + + let func = if is_closure { let local_id = self.next_local_id(); // store the function in a temporary variable before calling it @@ -917,14 +957,13 @@ impl<'interner> Monomorphizer<'interner> { typ: self.convert_type(&self.interner.id_type(call.func)), }); - func = Box::new(ast::Expression::ExtractTupleField( - Box::new(extracted_func.clone()), - 1usize, - )); - let env_argument = ast::Expression::ExtractTupleField(Box::new(extracted_func), 0usize); + let env_argument = + ast::Expression::ExtractTupleField(Box::new(extracted_func.clone()), 0usize); arguments.insert(0, env_argument); + + Box::new(ast::Expression::ExtractTupleField(Box::new(extracted_func), 1usize)) } else { - func = original_func.clone(); + original_func.clone() }; let call = self @@ -940,7 +979,7 @@ impl<'interner> Monomorphizer<'interner> { } /// Adds a function argument that contains type metadata that is required to tell - /// `println` how to convert values passed to an foreign call back to a human-readable string. + /// `println` how to convert values passed to an foreign call back to a human-readable string. /// The values passed to an foreign call will be a simple list of field elements, /// thus requiring extra metadata to correctly decode this list of elements. /// @@ -1072,14 +1111,14 @@ impl<'interner> Monomorphizer<'interner> { id: node_interner::FuncId, expr_id: node_interner::ExprId, function_type: HirType, + trait_method: Option, ) -> FuncId { let new_id = self.next_function_id(); - self.define_global(id, function_type, new_id); + self.define_global(id, function_type.clone(), new_id); let bindings = self.interner.get_instantiation_bindings(expr_id); let bindings = self.follow_bindings(bindings); - - self.queue.push_back((id, new_id, bindings)); + self.queue.push_back((id, new_id, bindings, trait_method)); new_id } @@ -1149,7 +1188,7 @@ impl<'interner> Monomorphizer<'interner> { let parameters = vecmap(lambda.parameters, |(pattern, typ)| (pattern, typ, Visibility::Private)).into(); - let parameters = self.parameters(parameters); + let parameters = self.parameters(¶meters); let body = self.expr(lambda.body); let id = self.next_function_id(); @@ -1200,7 +1239,7 @@ impl<'interner> Monomorphizer<'interner> { let parameters = vecmap(lambda.parameters, |(pattern, typ)| (pattern, typ, Visibility::Private)).into(); - let mut converted_parameters = self.parameters(parameters); + let mut converted_parameters = self.parameters(¶meters); let id = self.next_function_id(); let name = lambda_name.to_owned(); @@ -1404,6 +1443,110 @@ impl<'interner> Monomorphizer<'interner> { ), }) } + + /// Call an operator overloading method for the given operator. + /// This function handles the special cases some operators have which don't map + /// 1 to 1 onto their operator function. 
For example: != requires a negation on + /// the result of its `eq` method, and the comparison operators each require a + /// conversion from the `Ordering` result to a boolean. + fn create_operator_impl_call( + &self, + func: ast::Expression, + lhs: ast::Expression, + operator: HirBinaryOp, + rhs: ast::Expression, + ret: Type, + location: Location, + ) -> ast::Expression { + let arguments = vec![lhs, rhs]; + let func = Box::new(func); + let return_type = self.convert_type(&ret); + + let mut result = + ast::Expression::Call(ast::Call { func, arguments, return_type, location }); + + use crate::BinaryOpKind::*; + match operator.kind { + // Negate the result of the == operation + NotEqual => { + result = ast::Expression::Unary(ast::Unary { + operator: UnaryOp::Not, + rhs: Box::new(result), + result_type: ast::Type::Bool, + location, + }); + } + // All the comparison operators require special handling since their `cmp` method + // returns an `Ordering` rather than a boolean value. + // + // (a < b) => a.cmp(b) == Ordering::Less + // (a <= b) => a.cmp(b) != Ordering::Greater + // (a > b) => a.cmp(b) == Ordering::Greater + // (a >= b) => a.cmp(b) != Ordering::Less + Less | LessEqual | Greater | GreaterEqual => { + // Comparing an Ordering directly to a field value in this way takes advantage + // of the fact the Ordering struct contains a single Field type, and our SSA + // pass will automatically unpack tuple values. + let ordering_value = if matches!(operator.kind, Less | GreaterEqual) { + FieldElement::zero() // Ordering::Less + } else { + 2u128.into() // Ordering::Greater + }; + + let operator = + if matches!(operator.kind, Less | Greater) { Equal } else { NotEqual }; + + let int_value = ast::Literal::Integer(ordering_value, ast::Type::Field, location); + let rhs = Box::new(ast::Expression::Literal(int_value)); + let lhs = Box::new(ast::Expression::ExtractTupleField(Box::new(result), 0)); + + result = ast::Expression::Binary(ast::Binary { lhs, operator, rhs, location }); + } + _ => (), + } + + result + } + + /// Call sites are instantiated against the trait method, but when an impl is later selected, + /// the corresponding method in the impl will have a different set of generics. `perform_impl_bindings` + /// is needed to apply the generics from the trait method to the impl method. Without this, + /// static method references to generic impls (e.g. `Eq::eq` for `[T; N]`) will fail to re-apply + /// the correct type bindings during monomorphization. + fn perform_impl_bindings( + &self, + trait_method: Option, + impl_method: node_interner::FuncId, + ) -> TypeBindings { + let mut bindings = TypeBindings::new(); + + if let Some(trait_method) = trait_method { + let the_trait = self.interner.get_trait(trait_method.trait_id); + + let trait_method_type = the_trait.methods[trait_method.method_index].typ.as_monotype(); + + // Make each NamedGeneric in this type bindable by replacing it with a TypeVariable + // with the same internal id and binding. 
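The comparison rewrite documented in `create_operator_impl_call` can be sanity-checked against Rust's own `std::cmp::Ordering`. The sketch below mirrors the field encoding used above, `Less` as 0 and `Greater` as 2 (the value chosen for `Equal` here is only illustrative), and confirms each comparison operator is equivalent to the stated `cmp`-based check:

use std::cmp::Ordering;

// Field encoding used by the lowering above: Ordering::Less -> 0, Ordering::Greater -> 2.
// Equal's exact value does not matter for these checks, it only has to differ from 0 and 2.
fn as_field(ord: Ordering) -> u128 {
    match ord {
        Ordering::Less => 0,
        Ordering::Equal => 1,
        Ordering::Greater => 2,
    }
}

// a < b   =>  a.cmp(b) == Less      =>  field == 0
fn lt(a: i32, b: i32) -> bool { as_field(a.cmp(&b)) == 0 }
// a <= b  =>  a.cmp(b) != Greater   =>  field != 2
fn le(a: i32, b: i32) -> bool { as_field(a.cmp(&b)) != 2 }
// a > b   =>  a.cmp(b) == Greater   =>  field == 2
fn gt(a: i32, b: i32) -> bool { as_field(a.cmp(&b)) == 2 }
// a >= b  =>  a.cmp(b) != Less      =>  field != 0
fn ge(a: i32, b: i32) -> bool { as_field(a.cmp(&b)) != 0 }

fn main() {
    for a in -2..=2 {
        for b in -2..=2 {
            assert_eq!(lt(a, b), a < b);
            assert_eq!(le(a, b), a <= b);
            assert_eq!(gt(a, b), a > b);
            assert_eq!(ge(a, b), a >= b);
        }
    }
    println!("comparison rewrites agree with the built-in operators");
}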
+ let (generics, impl_method_type) = + self.interner.function_meta(&impl_method).typ.unwrap_forall(); + + let replace_type_variable = |(id, var): &(TypeVariableId, TypeVariable)| { + (*id, (var.clone(), Type::TypeVariable(var.clone(), TypeVariableKind::Normal))) + }; + + // Replace each NamedGeneric with a TypeVariable containing the same internal type variable + let type_bindings = generics.iter().map(replace_type_variable).collect(); + let impl_method_type = impl_method_type.force_substitute(&type_bindings); + + trait_method_type.try_unify(&impl_method_type, &mut bindings).unwrap_or_else(|_| { + unreachable!("Impl method type {} does not unify with trait method type {} during monomorphization", impl_method_type, trait_method_type) + }); + + perform_instantiation_bindings(&bindings); + } + + bindings + } } fn unwrap_tuple_type(typ: &HirType) -> Vec { @@ -1430,12 +1573,12 @@ fn unwrap_struct_type(typ: &HirType) -> Vec<(String, HirType)> { fn perform_instantiation_bindings(bindings: &TypeBindings) { for (var, binding) in bindings.values() { - *var.borrow_mut() = TypeBinding::Bound(binding.clone()); + var.force_bind(binding.clone()); } } fn undo_instantiation_bindings(bindings: TypeBindings) { for (id, (var, _)) in bindings { - *var.borrow_mut() = TypeBinding::Unbound(id); + var.unbind(id); } } diff --git a/compiler/noirc_frontend/src/node_interner.rs b/compiler/noirc_frontend/src/node_interner.rs index e4532e2dceb..e30f2019f35 100644 --- a/compiler/noirc_frontend/src/node_interner.rs +++ b/compiler/noirc_frontend/src/node_interner.rs @@ -9,7 +9,7 @@ use crate::ast::Ident; use crate::graph::CrateId; use crate::hir::def_collector::dc_crate::{UnresolvedStruct, UnresolvedTrait, UnresolvedTypeAlias}; use crate::hir::def_map::{LocalModuleId, ModuleId}; -use crate::hir::StorageSlot; + use crate::hir_def::stmt::HirLetStatement; use crate::hir_def::traits::TraitImpl; use crate::hir_def::traits::{Trait, TraitConstraint}; @@ -21,8 +21,8 @@ use crate::hir_def::{ }; use crate::token::{Attributes, SecondaryAttribute}; use crate::{ - ContractFunctionType, FunctionDefinition, FunctionVisibility, Generics, Shared, TypeAliasType, - TypeBinding, TypeBindings, TypeVariable, TypeVariableId, TypeVariableKind, + BinaryOpKind, ContractFunctionType, FunctionDefinition, FunctionVisibility, Generics, Shared, + TypeAliasType, TypeBindings, TypeVariable, TypeVariableId, TypeVariableKind, }; /// An arbitrary number to limit the recursion depth when searching for trait impls. @@ -37,9 +37,10 @@ type StructAttributes = Vec; /// each definition or struct, etc. Because it is used on the Hir, the NodeInterner is /// useful in passes where the Hir is used - name resolution, type checking, and /// monomorphization - and it is not useful afterward. +#[derive(Debug)] pub struct NodeInterner { - nodes: Arena, - func_meta: HashMap, + pub(crate) nodes: Arena, + pub(crate) func_meta: HashMap, function_definition_ids: HashMap, // For a given function ID, this gives the function's modifiers which includes @@ -51,7 +52,7 @@ pub struct NodeInterner { function_modules: HashMap, // Map each `Index` to it's own location - id_to_location: HashMap, + pub(crate) id_to_location: HashMap, // Maps each DefinitionId to a DefinitionInfo. definitions: Vec, @@ -84,14 +85,14 @@ pub struct NodeInterner { // Each trait definition is possibly shared across multiple type nodes. // It is also mutated through the RefCell during name resolution to append // methods from impls to the type. 
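Looking back at the `HirLiteral::Integer` lowering earlier in this monomorphization hunk: a negated literal of an N-bit integer type is encoded as 2^N - value, i.e. its two's-complement representation, while a negated `Field` literal is simply negated. A small arithmetic sketch of that encoding, assuming a bit size below 128:

// Standalone sketch of the wrap-around encoding used for negated integer
// literals: a literal `-value` of an N-bit integer type is stored as 2^N - value.
fn encode_negative(value: u128, bit_size: u32) -> u128 {
    let base = 1_u128 << bit_size; // assumes bit_size < 128
    base - value
}

fn main() {
    // -1 as an 8-bit integer is stored as 255, -3 as 253, matching two's complement.
    assert_eq!(encode_negative(1, 8), 255);
    assert_eq!(encode_negative(3, 8), (-3_i8 as u8) as u128);
    // 16-bit example: -300 is stored as 65236.
    assert_eq!(encode_negative(300, 16), (-300_i16 as u16) as u128);
    println!("negated literals round-trip through the 2^N - value encoding");
}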
- traits: HashMap, + pub(crate) traits: HashMap, // Trait implementation map // For each type that implements a given Trait ( corresponding TraitId), there should be an entry here // The purpose for this hashmap is to detect duplication of trait implementations ( if any ) // // Indexed by TraitImplIds - trait_implementations: Vec>, + pub(crate) trait_implementations: Vec>, /// Trait implementations on each type. This is expected to always have the same length as /// `self.trait_implementations`. @@ -109,6 +110,12 @@ pub struct NodeInterner { /// the context to get the concrete type of the object and select the correct impl itself. selected_trait_implementations: HashMap, + /// Holds the trait ids of the traits used for operator overloading + operator_traits: HashMap, + + /// The `Ordering` type is a semi-builtin type that is the result of the comparison traits. + ordering_type: Option, + /// Map from ExprId (referring to a Function/Method call) to its corresponding TypeBindings, /// filled out during type checking from instantiated variables. Used during monomorphization /// to map call site types back onto function parameter types, and undo this binding as needed. @@ -157,7 +164,7 @@ pub enum TraitImplKind { /// /// Additionally, types can define specialized impls with methods of the same name /// as long as these specialized impls do not overlap. E.g. `impl Struct` and `impl Struct` -#[derive(Default)] +#[derive(Default, Debug)] pub struct Methods { direct: Vec, trait_impl_methods: Vec, @@ -166,6 +173,7 @@ pub struct Methods { /// All the information from a function that is filled out during definition collection rather than /// name resolution. As a result, if information about a function is needed during name resolution, /// this is the only place where it is safe to retrieve it (where all fields are guaranteed to be initialized). +#[derive(Debug, Clone)] pub struct FunctionModifiers { pub name: String, @@ -342,7 +350,7 @@ partialeq!(StmtId); /// This data structure is never accessed directly, so API wise there is no difference between using /// Multiple arenas and a single Arena #[derive(Debug, Clone)] -enum Node { +pub(crate) enum Node { Function(HirFunction), Statement(HirStatement), Expression(HirExpression), @@ -353,6 +361,7 @@ pub struct DefinitionInfo { pub name: String, pub mutable: bool, pub kind: DefinitionKind, + pub location: Location, } impl DefinitionInfo { @@ -399,10 +408,6 @@ impl DefinitionKind { pub struct GlobalInfo { pub ident: Ident, pub local_id: LocalModuleId, - - /// Global definitions have an associated storage slot if they are defined within - /// a contract. If they're defined elsewhere, this value is None. 
- pub storage_slot: Option, } impl Default for NodeInterner { @@ -424,6 +429,8 @@ impl Default for NodeInterner { trait_implementations: Vec::new(), trait_implementation_map: HashMap::new(), selected_trait_implementations: HashMap::new(), + operator_traits: HashMap::new(), + ordering_type: None, instantiation_bindings: HashMap::new(), field_indices: HashMap::new(), next_type_variable_id: std::cell::Cell::new(0), @@ -466,29 +473,31 @@ impl NodeInterner { self.id_to_type.insert(expr_id.into(), typ); } - pub fn push_empty_trait(&mut self, type_id: TraitId, typ: &UnresolvedTrait) { + pub fn push_empty_trait(&mut self, type_id: TraitId, unresolved_trait: &UnresolvedTrait) { let self_type_typevar_id = self.next_type_variable_id(); - let self_type_typevar = Shared::new(TypeBinding::Unbound(self_type_typevar_id)); - self.traits.insert( - type_id, - Trait::new( - type_id, - typ.trait_def.name.clone(), - typ.crate_id, - typ.trait_def.span, - vecmap(&typ.trait_def.generics, |_| { - // Temporary type variable ids before the trait is resolved to its actual ids. - // This lets us record how many arguments the type expects so that other types - // can refer to it with generic arguments before the generic parameters themselves - // are resolved. - let id = TypeVariableId(0); - (id, Shared::new(TypeBinding::Unbound(id))) - }), - self_type_typevar_id, - self_type_typevar, - ), - ); + let new_trait = Trait { + id: type_id, + name: unresolved_trait.trait_def.name.clone(), + crate_id: unresolved_trait.crate_id, + location: Location::new(unresolved_trait.trait_def.span, unresolved_trait.file_id), + generics: vecmap(&unresolved_trait.trait_def.generics, |_| { + // Temporary type variable ids before the trait is resolved to its actual ids. + // This lets us record how many arguments the type expects so that other types + // can refer to it with generic arguments before the generic parameters themselves + // are resolved. + let id = TypeVariableId(0); + (id, TypeVariable::unbound(id)) + }), + self_type_typevar_id, + self_type_typevar: TypeVariable::unbound(self_type_typevar_id), + methods: Vec::new(), + method_ids: unresolved_trait.method_ids.clone(), + constants: Vec::new(), + types: Vec::new(), + }; + + self.traits.insert(type_id, new_trait); } pub fn new_struct( @@ -496,6 +505,7 @@ impl NodeInterner { typ: &UnresolvedStruct, krate: CrateId, local_id: LocalModuleId, + file_id: FileId, ) -> StructId { let struct_id = StructId(ModuleId { krate, local_id }); let name = typ.struct_def.name.clone(); @@ -508,10 +518,11 @@ impl NodeInterner { // can refer to it with generic arguments before the generic parameters themselves // are resolved. 
let id = TypeVariableId(0); - (id, Shared::new(TypeBinding::Unbound(id))) + (id, TypeVariable::unbound(id)) }); - let new_struct = StructType::new(struct_id, name, typ.struct_def.span, no_fields, generics); + let location = Location::new(typ.struct_def.span, file_id); + let new_struct = StructType::new(struct_id, name, location, no_fields, generics); self.structs.insert(struct_id, Shared::new(new_struct)); self.struct_attributes.insert(struct_id, typ.struct_def.attributes.clone()); struct_id @@ -527,7 +538,7 @@ impl NodeInterner { Type::Error, vecmap(&typ.type_alias_def.generics, |_| { let id = TypeVariableId(0); - (id, Shared::new(TypeBinding::Unbound(id))) + (id, TypeVariable::unbound(id)) }), )); @@ -578,14 +589,8 @@ impl NodeInterner { self.id_to_type.insert(definition_id.into(), typ); } - pub fn push_global( - &mut self, - stmt_id: StmtId, - ident: Ident, - local_id: LocalModuleId, - storage_slot: Option, - ) { - self.globals.insert(stmt_id, GlobalInfo { ident, local_id, storage_slot }); + pub fn push_global(&mut self, stmt_id: StmtId, ident: Ident, local_id: LocalModuleId) { + self.globals.insert(stmt_id, GlobalInfo { ident, local_id }); } /// Intern an empty global stmt. Used for collecting globals @@ -646,13 +651,14 @@ impl NodeInterner { name: String, mutable: bool, definition: DefinitionKind, + location: Location, ) -> DefinitionId { let id = DefinitionId(self.definitions.len()); if let DefinitionKind::Function(func_id) = definition { self.function_definition_ids.insert(func_id, id); } - self.definitions.push(DefinitionInfo { name, mutable, kind: definition }); + self.definitions.push(DefinitionInfo { name, mutable, kind: definition, location }); id } @@ -663,7 +669,8 @@ impl NodeInterner { let mut modifiers = FunctionModifiers::new(); modifiers.name = name; let module = ModuleId::dummy_id(); - self.push_function_definition(id, modifiers, module); + let location = Location::dummy(); + self.push_function_definition(id, modifiers, module, location); id } @@ -672,6 +679,7 @@ impl NodeInterner { id: FuncId, function: &FunctionDefinition, module: ModuleId, + location: Location, ) -> DefinitionId { use ContractFunctionType::*; @@ -685,7 +693,7 @@ impl NodeInterner { contract_function_type: Some(if function.is_open { Open } else { Secret }), is_internal: Some(function.is_internal), }; - self.push_function_definition(id, modifiers, module) + self.push_function_definition(id, modifiers, module, location) } pub fn push_function_definition( @@ -693,11 +701,12 @@ impl NodeInterner { func: FuncId, modifiers: FunctionModifiers, module: ModuleId, + location: Location, ) -> DefinitionId { let name = modifiers.name.clone(); self.function_modifiers.insert(func, modifiers); self.function_modules.insert(func, module); - self.push_definition(name, false, DefinitionKind::Function(func)) + self.push_definition(name, false, DefinitionKind::Function(func), location) } pub fn set_function_trait(&mut self, func: FuncId, self_type: Type, trait_id: TraitId) { @@ -734,12 +743,12 @@ impl NodeInterner { } /// Returns the interned meta data corresponding to `func_id` - pub fn function_meta(&self, func_id: &FuncId) -> FuncMeta { - self.func_meta.get(func_id).cloned().expect("ice: all function ids should have metadata") + pub fn function_meta(&self, func_id: &FuncId) -> &FuncMeta { + self.func_meta.get(func_id).expect("ice: all function ids should have metadata") } - pub fn try_function_meta(&self, func_id: &FuncId) -> Option { - self.func_meta.get(func_id).cloned() + pub fn try_function_meta(&self, func_id: 
&FuncId) -> Option<&FuncMeta> { + self.func_meta.get(func_id) } pub fn function_ident(&self, func_id: &FuncId) -> crate::Ident { @@ -841,8 +850,16 @@ impl NodeInterner { self.structs[&id].clone() } - pub fn get_trait(&self, id: TraitId) -> Trait { - self.traits[&id].clone() + pub fn get_trait(&self, id: TraitId) -> &Trait { + &self.traits[&id] + } + + pub fn get_trait_mut(&mut self, id: TraitId) -> &mut Trait { + self.traits.get_mut(&id).expect("get_trait_mut given invalid TraitId") + } + + pub fn try_get_trait(&self, id: TraitId) -> Option<&Trait> { + self.traits.get(&id) } pub fn get_type_alias(&self, id: TypeAliasId) -> &TypeAliasType { @@ -866,7 +883,7 @@ impl NodeInterner { let typ = self.id_type(def_id); if let Type::Function(args, ret, env) = &typ { let def = self.definition(def_id); - if let Type::TraitAsType(_trait) = ret.as_ref() { + if let Type::TraitAsType(..) = ret.as_ref() { if let DefinitionKind::Function(func_id) = def.kind { let f = self.function(&func_id); let func_body = f.as_expr(); @@ -980,13 +997,32 @@ impl NodeInterner { object_type: &Type, trait_id: TraitId, ) -> Result> { - self.lookup_trait_implementation_helper(object_type, trait_id, IMPL_SEARCH_RECURSION_LIMIT) + let (impl_kind, bindings) = self.try_lookup_trait_implementation(object_type, trait_id)?; + Type::apply_type_bindings(bindings); + Ok(impl_kind) + } + + /// Similar to `lookup_trait_implementation` but does not apply any type bindings on success. + pub fn try_lookup_trait_implementation( + &self, + object_type: &Type, + trait_id: TraitId, + ) -> Result<(TraitImplKind, TypeBindings), Vec> { + let mut bindings = TypeBindings::new(); + let impl_kind = self.lookup_trait_implementation_helper( + object_type, + trait_id, + &mut bindings, + IMPL_SEARCH_RECURSION_LIMIT, + )?; + Ok((impl_kind, bindings)) } fn lookup_trait_implementation_helper( &self, object_type: &Type, trait_id: TraitId, + type_bindings: &mut TypeBindings, recursion_limit: u32, ) -> Result> { let make_constraint = || TraitConstraint::new(object_type.clone(), trait_id); @@ -996,20 +1032,29 @@ impl NodeInterner { return Err(vec![make_constraint()]); } + let object_type = object_type.substitute(type_bindings); + let impls = self.trait_implementation_map.get(&trait_id).ok_or_else(|| vec![make_constraint()])?; for (existing_object_type, impl_kind) in impls { - let (existing_object_type, type_bindings) = existing_object_type.instantiate(self); + let (existing_object_type, instantiation_bindings) = + existing_object_type.instantiate(self); + + let mut fresh_bindings = TypeBindings::new(); + + if object_type.try_unify(&existing_object_type, &mut fresh_bindings).is_ok() { + // The unification was successful so we can append fresh_bindings to our bindings list + type_bindings.extend(fresh_bindings); - if object_type.try_unify(&existing_object_type).is_ok() { if let TraitImplKind::Normal(impl_id) = impl_kind { let trait_impl = self.get_trait_implementation(*impl_id); let trait_impl = trait_impl.borrow(); if let Err(mut errors) = self.validate_where_clause( &trait_impl.where_clause, - &type_bindings, + type_bindings, + &instantiation_bindings, recursion_limit, ) { errors.push(make_constraint()); @@ -1029,14 +1074,23 @@ impl NodeInterner { fn validate_where_clause( &self, where_clause: &[TraitConstraint], - type_bindings: &TypeBindings, + type_bindings: &mut TypeBindings, + instantiation_bindings: &TypeBindings, recursion_limit: u32, ) -> Result<(), Vec> { for constraint in where_clause { - let constraint_type = constraint.typ.substitute(type_bindings); + 
// Instantiation bindings are generally safe to force substitute into the same type. + // This is needed here to undo any bindings done to trait methods by monomorphization. + // Otherwise, an impl for (A, B) could get narrowed to only an impl for e.g. (u8, u16). + let constraint_type = constraint.typ.force_substitute(instantiation_bindings); + let constraint_type = constraint_type.substitute(type_bindings); + self.lookup_trait_implementation_helper( &constraint_type, constraint.trait_id, + // Use a fresh set of type bindings here since the constraint_type originates from + // our impl list, which we don't want to bind to. + &mut TypeBindings::new(), recursion_limit - 1, )?; } @@ -1057,7 +1111,7 @@ impl NodeInterner { trait_id: TraitId, ) -> bool { // Make sure there are no overlapping impls - if self.lookup_trait_implementation(&object_type, trait_id).is_ok() { + if self.try_lookup_trait_implementation(&object_type, trait_id).is_ok() { return false; } @@ -1085,8 +1139,8 @@ impl NodeInterner { let (instantiated_object_type, substitutions) = object_type.instantiate_type_variables(self); - if let Ok(TraitImplKind::Normal(existing)) = - self.lookup_trait_implementation(&instantiated_object_type, trait_id) + if let Ok((TraitImplKind::Normal(existing), _)) = + self.try_lookup_trait_implementation(&instantiated_object_type, trait_id) { let existing_impl = self.get_trait_implementation(existing); let existing_impl = existing_impl.borrow(); @@ -1187,16 +1241,99 @@ impl NodeInterner { /// Tags the given identifier with the selected trait_impl so that monomorphization /// can later recover which impl was selected, or alternatively see if it needs to /// decide which impl to select (because the impl was Assumed). - pub fn select_impl_for_ident(&mut self, ident_id: ExprId, trait_impl: TraitImplKind) { + pub fn select_impl_for_expression(&mut self, ident_id: ExprId, trait_impl: TraitImplKind) { self.selected_trait_implementations.insert(ident_id, trait_impl); } - /// Tags the given identifier with the selected trait_impl so that monomorphization - /// can later recover which impl was selected, or alternatively see if it needs to - /// decide which (because the impl was Assumed). - pub fn get_selected_impl_for_ident(&self, ident_id: ExprId) -> Option { + /// Retrieves the impl selected for a given ExprId during name resolution. + pub fn get_selected_impl_for_expression(&self, ident_id: ExprId) -> Option { self.selected_trait_implementations.get(&ident_id).cloned() } + + /// Retrieves the trait id for a given binary operator. + /// All binary operators correspond to a trait - although multiple may correspond + /// to the same trait (such as `==` and `!=`). + /// `self.operator_traits` is expected to be filled before name resolution, + /// during definition collection. + pub fn get_operator_trait_method(&self, operator: BinaryOpKind) -> TraitMethodId { + let trait_id = self.operator_traits[&operator]; + + // Assume that the operator's method to be overloaded is the first method of the trait. + TraitMethodId { trait_id, method_index: 0 } + } + + /// Add the given trait as an operator trait if its name matches one of the + /// operator trait names (Add, Sub, ...). 
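The `fresh_bindings` handling in `lookup_trait_implementation_helper` follows a common pattern: unify into a scratch map and merge it into the committed bindings only when unification succeeds, so a failed impl candidate cannot leak bindings (the concern behind the removed "try_unify may commit type bindings even on failure" TODO). A toy, self-contained illustration of the pattern; the types here are illustrative stand-ins, not the compiler's `Type`/`TypeBindings`:

use std::collections::HashMap;

#[derive(Clone, Debug, PartialEq)]
enum Ty {
    Var(usize),
    Int,
    Bool,
    Pair(Box<Ty>, Box<Ty>),
}

type Bindings = HashMap<usize, Ty>;

// Look up a variable's binding in the scratch map, if any.
fn resolve(ty: &Ty, bindings: &Bindings) -> Ty {
    match ty {
        Ty::Var(id) => bindings.get(id).cloned().unwrap_or_else(|| ty.clone()),
        other => other.clone(),
    }
}

// Attempt unification, recording variable bindings only in `scratch`.
// On failure the caller simply drops `scratch`, leaving shared state untouched.
fn try_unify(a: &Ty, b: &Ty, scratch: &mut Bindings) -> Result<(), ()> {
    let (a, b) = (resolve(a, scratch), resolve(b, scratch));
    match (a, b) {
        (Ty::Var(id), other) | (other, Ty::Var(id)) => {
            scratch.insert(id, other);
            Ok(())
        }
        (Ty::Int, Ty::Int) | (Ty::Bool, Ty::Bool) => Ok(()),
        (Ty::Pair(a1, a2), Ty::Pair(b1, b2)) => {
            try_unify(&a1, &b1, scratch)?;
            try_unify(&a2, &b2, scratch)
        }
        _ => Err(()),
    }
}

fn main() {
    let mut committed = Bindings::new();
    let lhs = Ty::Pair(Box::new(Ty::Var(0)), Box::new(Ty::Int));

    // A failing candidate: Var(0) gets bound to Bool inside the scratch map,
    // but nothing leaks into `committed` because the overall unification fails.
    let mut scratch = Bindings::new();
    let rhs = Ty::Pair(Box::new(Ty::Bool), Box::new(Ty::Bool));
    if try_unify(&lhs, &rhs, &mut scratch).is_ok() {
        committed.extend(scratch);
    }
    assert!(committed.is_empty());

    // A succeeding candidate commits its bindings.
    let mut scratch = Bindings::new();
    let rhs = Ty::Pair(Box::new(Ty::Bool), Box::new(Ty::Int));
    if try_unify(&lhs, &rhs, &mut scratch).is_ok() {
        committed.extend(scratch);
    }
    assert_eq!(committed.get(&0), Some(&Ty::Bool));
    println!("only successful unifications contribute bindings");
}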
+ pub fn try_add_operator_trait(&mut self, trait_id: TraitId) { + let the_trait = self.get_trait(trait_id); + + let operator = match the_trait.name.0.contents.as_str() { + "Add" => BinaryOpKind::Add, + "Sub" => BinaryOpKind::Subtract, + "Mul" => BinaryOpKind::Multiply, + "Div" => BinaryOpKind::Divide, + "Rem" => BinaryOpKind::Modulo, + "Eq" => BinaryOpKind::Equal, + "Ord" => BinaryOpKind::Less, + "BitAnd" => BinaryOpKind::And, + "BitOr" => BinaryOpKind::Or, + "BitXor" => BinaryOpKind::Xor, + "Shl" => BinaryOpKind::ShiftLeft, + "Shr" => BinaryOpKind::ShiftRight, + _ => return, + }; + + self.operator_traits.insert(operator, trait_id); + + // Some operators also require we insert a matching entry for related operators + match operator { + BinaryOpKind::Equal => { + self.operator_traits.insert(BinaryOpKind::NotEqual, trait_id); + } + BinaryOpKind::Less => { + self.operator_traits.insert(BinaryOpKind::LessEqual, trait_id); + self.operator_traits.insert(BinaryOpKind::Greater, trait_id); + self.operator_traits.insert(BinaryOpKind::GreaterEqual, trait_id); + + let the_trait = self.get_trait(trait_id); + self.ordering_type = match &the_trait.methods[0].typ { + Type::Forall(_, typ) => match typ.as_ref() { + Type::Function(_, return_type, _) => Some(return_type.as_ref().clone()), + other => unreachable!("Expected function type for `cmp`, found {}", other), + }, + other => unreachable!("Expected Forall type for `cmp`, found {}", other), + }; + } + _ => (), + } + } + + /// This function is needed when creating a NodeInterner for testing so that calls + /// to `get_operator_trait` do not panic when the stdlib isn't present. + #[cfg(test)] + pub fn populate_dummy_operator_traits(&mut self) { + let dummy_trait = TraitId(ModuleId::dummy_id()); + self.operator_traits.insert(BinaryOpKind::Add, dummy_trait); + self.operator_traits.insert(BinaryOpKind::Subtract, dummy_trait); + self.operator_traits.insert(BinaryOpKind::Multiply, dummy_trait); + self.operator_traits.insert(BinaryOpKind::Divide, dummy_trait); + self.operator_traits.insert(BinaryOpKind::Modulo, dummy_trait); + self.operator_traits.insert(BinaryOpKind::Equal, dummy_trait); + self.operator_traits.insert(BinaryOpKind::NotEqual, dummy_trait); + self.operator_traits.insert(BinaryOpKind::Less, dummy_trait); + self.operator_traits.insert(BinaryOpKind::LessEqual, dummy_trait); + self.operator_traits.insert(BinaryOpKind::Greater, dummy_trait); + self.operator_traits.insert(BinaryOpKind::GreaterEqual, dummy_trait); + self.operator_traits.insert(BinaryOpKind::And, dummy_trait); + self.operator_traits.insert(BinaryOpKind::Or, dummy_trait); + self.operator_traits.insert(BinaryOpKind::Xor, dummy_trait); + self.operator_traits.insert(BinaryOpKind::ShiftLeft, dummy_trait); + self.operator_traits.insert(BinaryOpKind::ShiftRight, dummy_trait); + } + + pub(crate) fn ordering_type(&self) -> Type { + self.ordering_type.clone().expect("Expected ordering_type to be set in the NodeInterner") + } } impl Methods { @@ -1234,8 +1371,10 @@ impl Methods { match interner.function_meta(&method).typ.instantiate(interner).0 { Type::Function(args, _, _) => { if let Some(object) = args.get(0) { - // TODO #3089: This is dangerous! 
try_unify may commit type bindings even on failure - if object.try_unify(typ).is_ok() { + let mut bindings = TypeBindings::new(); + + if object.try_unify(typ, &mut bindings).is_ok() { + Type::apply_type_bindings(bindings); return Some(method); } } @@ -1249,7 +1388,7 @@ impl Methods { } /// These are the primitive type variants that we support adding methods to -#[derive(Copy, Clone, Hash, PartialEq, Eq)] +#[derive(Copy, Clone, Hash, PartialEq, Eq, Debug)] enum TypeMethodKey { /// Fields and integers share methods for ease of use. These methods may still /// accept only fields or integers, it is just that their names may not clash. @@ -1288,6 +1427,6 @@ fn get_type_method_key(typ: &Type) -> Option { | Type::Error | Type::NotConstant | Type::Struct(_, _) - | Type::TraitAsType(_) => None, + | Type::TraitAsType(..) => None, } } diff --git a/compiler/noirc_frontend/src/parser/errors.rs b/compiler/noirc_frontend/src/parser/errors.rs index 09dc6dfff8d..5c869ff4719 100644 --- a/compiler/noirc_frontend/src/parser/errors.rs +++ b/compiler/noirc_frontend/src/parser/errors.rs @@ -26,6 +26,8 @@ pub enum ParserErrorReason { EarlyReturn, #[error("Patterns aren't allowed in a trait's function declarations")] PatternInTraitFunctionParameter, + #[error("Modifiers are ignored on a trait impl method")] + TraitImplFunctionModifiers, #[error("comptime keyword is deprecated")] ComptimeDeprecated, #[error("{0} are experimental and aren't fully supported yet")] @@ -148,6 +150,11 @@ impl From for Diagnostic { "".into(), error.span, ), + ParserErrorReason::TraitImplFunctionModifiers => Diagnostic::simple_warning( + reason.to_string(), + "".into(), + error.span, + ), ParserErrorReason::ExpectedPatternButFoundType(ty) => { Diagnostic::simple_error("Expected a ; separating these two statements".into(), format!("{ty} is a type and cannot be used as a variable name"), error.span) } diff --git a/compiler/noirc_frontend/src/parser/parser.rs b/compiler/noirc_frontend/src/parser/parser.rs index 6b8589cc6e5..954b531abff 100644 --- a/compiler/noirc_frontend/src/parser/parser.rs +++ b/compiler/noirc_frontend/src/parser/parser.rs @@ -312,7 +312,7 @@ fn function_return_type() -> impl NoirParser<((Distinctness, Visibility), Functi fn attribute() -> impl NoirParser { token_kind(TokenKind::Attribute).map(|token| match token { Token::Attribute(attribute) => attribute, - _ => unreachable!(), + _ => unreachable!("Parser should have already errored due to token not being an attribute"), }) } @@ -369,7 +369,7 @@ fn function_parameters<'a>(allow_self: bool) -> impl NoirParser> + 'a /// This parser always parses no input and fails fn nothing() -> impl NoirParser { - one_of([]).map(|_| unreachable!()) + one_of([]).map(|_| unreachable!("parser should always error")) } fn self_parameter() -> impl NoirParser { @@ -414,7 +414,12 @@ fn trait_definition() -> impl NoirParser { .then(trait_body()) .then_ignore(just(Token::RightBrace)) .validate(|(((name, generics), where_clause), items), span, emit| { - emit(ParserError::with_reason(ParserErrorReason::ExperimentalFeature("Traits"), span)); + if !generics.is_empty() { + emit(ParserError::with_reason( + ParserErrorReason::ExperimentalFeature("Generic traits"), + span, + )); + } TopLevelStatement::Trait(NoirTrait { name, generics, where_clause, span, items }) }) } @@ -437,7 +442,13 @@ fn trait_constant_declaration() -> impl NoirParser { .then(parse_type()) .then(optional_default_value()) .then_ignore(just(Token::Semicolon)) - .map(|((name, typ), default_value)| TraitItem::Constant { name, typ, 
default_value }) + .validate(|((name, typ), default_value), span, emit| { + emit(ParserError::with_reason( + ParserErrorReason::ExperimentalFeature("Associated constants"), + span, + )); + TraitItem::Constant { name, typ, default_value } + }) } /// trait_function_declaration: 'fn' ident generics '(' declaration_parameters ')' function_return_type @@ -544,10 +555,15 @@ fn function_declaration_parameters() -> impl NoirParser impl NoirParser { - keyword(Keyword::Type) - .ignore_then(ident()) - .then_ignore(just(Token::Semicolon)) - .map(|name| TraitItem::Type { name }) + keyword(Keyword::Type).ignore_then(ident()).then_ignore(just(Token::Semicolon)).validate( + |name, span, emit| { + emit(ParserError::with_reason( + ParserErrorReason::ExperimentalFeature("Associated types"), + span, + )); + TraitItem::Type { name } + }, + ) } /// Parses a non-trait implementation, adding a set of methods to a type. @@ -581,11 +597,10 @@ fn trait_implementation() -> impl NoirParser { .then_ignore(just(Token::LeftBrace)) .then(trait_implementation_body()) .then_ignore(just(Token::RightBrace)) - .validate(|args, span, emit| { + .map(|args| { let ((other_args, where_clause), items) = args; let (((impl_generics, trait_name), trait_generics), object_type) = other_args; - emit(ParserError::with_reason(ParserErrorReason::ExperimentalFeature("Traits"), span)); TopLevelStatement::TraitImpl(NoirTraitImpl { impl_generics, trait_name, @@ -598,7 +613,18 @@ fn trait_implementation() -> impl NoirParser { } fn trait_implementation_body() -> impl NoirParser> { - let function = function_definition(true).map(TraitImplItem::Function); + let function = function_definition(true).validate(|mut f, span, emit| { + if f.def().is_internal + || f.def().is_unconstrained + || f.def().is_open + || f.def().visibility != FunctionVisibility::Private + { + emit(ParserError::with_reason(ParserErrorReason::TraitImplFunctionModifiers, span)); + } + // Trait impl functions are always public + f.def_mut().visibility = FunctionVisibility::Public; + TraitImplItem::Function(f) + }); let alias = keyword(Keyword::Type) .ignore_then(ident()) @@ -616,12 +642,10 @@ fn where_clause() -> impl NoirParser> { trait_bounds: Vec, } - let constraints = parse_type().then_ignore(just(Token::Colon)).then(trait_bounds()).validate( - |(typ, trait_bounds), span, emit| { - emit(ParserError::with_reason(ParserErrorReason::ExperimentalFeature("Traits"), span)); - MultiTraitConstraint { typ, trait_bounds } - }, - ); + let constraints = parse_type() + .then_ignore(just(Token::Colon)) + .then(trait_bounds()) + .map(|(typ, trait_bounds)| MultiTraitConstraint { typ, trait_bounds }); keyword(Keyword::Where) .ignore_then(constraints.separated_by(just(Token::Comma))) @@ -726,21 +750,21 @@ fn token_kind(token_kind: TokenKind) -> impl NoirParser { fn path() -> impl NoirParser { let idents = || ident().separated_by(just(Token::DoubleColon)).at_least(1); - let make_path = |kind| move |segments| Path { segments, kind }; + let make_path = |kind| move |segments, span| Path { segments, kind, span }; let prefix = |key| keyword(key).ignore_then(just(Token::DoubleColon)); - let path_kind = |key, kind| prefix(key).ignore_then(idents()).map(make_path(kind)); + let path_kind = |key, kind| prefix(key).ignore_then(idents()).map_with_span(make_path(kind)); choice(( path_kind(Keyword::Crate, PathKind::Crate), path_kind(Keyword::Dep, PathKind::Dep), - idents().map(make_path(PathKind::Plain)), + idents().map_with_span(make_path(PathKind::Plain)), )) } fn empty_path() -> impl NoirParser { - let 
make_path = |kind| move |_| Path { segments: Vec::new(), kind }; - let path_kind = |key, kind| keyword(key).map(make_path(kind)); + let make_path = |kind| move |_, span| Path { segments: Vec::new(), kind, span }; + let path_kind = |key, kind| keyword(key).map_with_span(make_path(kind)); choice((path_kind(Keyword::Crate, PathKind::Crate), path_kind(Keyword::Dep, PathKind::Dep))) } @@ -1015,18 +1039,37 @@ fn parse_type_inner( named_type(recursive_type_parser.clone()), named_trait(recursive_type_parser.clone()), array_type(recursive_type_parser.clone()), - recursive_type_parser.clone().delimited_by(just(Token::LeftParen), just(Token::RightParen)), + parenthesized_type(recursive_type_parser.clone()), tuple_type(recursive_type_parser.clone()), function_type(recursive_type_parser.clone()), mutable_reference_type(recursive_type_parser), )) } +fn parenthesized_type( + recursive_type_parser: impl NoirParser, +) -> impl NoirParser { + recursive_type_parser + .delimited_by(just(Token::LeftParen), just(Token::RightParen)) + .map_with_span(|typ, span| UnresolvedType { + typ: UnresolvedTypeData::Parenthesized(Box::new(typ)), + span: span.into(), + }) +} + fn optional_visibility() -> impl NoirParser { - keyword(Keyword::Pub).or_not().map(|opt| match opt { - Some(_) => Visibility::Public, - None => Visibility::Private, - }) + keyword(Keyword::Pub) + .or(keyword(Keyword::CallData)) + .or(keyword(Keyword::ReturnData)) + .or_not() + .map(|opt| match opt { + Some(Token::Keyword(Keyword::Pub)) => Visibility::Public, + Some(Token::Keyword(Keyword::CallData)) | Some(Token::Keyword(Keyword::ReturnData)) => { + Visibility::DataBus + } + None => Visibility::Private, + _ => unreachable!("unexpected token found"), + }) } fn optional_distinctness() -> impl NoirParser { @@ -1087,14 +1130,7 @@ fn int_type() -> impl NoirParser { Err(ParserError::expected_label(ParsingRuleLabel::IntegerType, unexpected, span)) } })) - .validate(|(_, token), span, emit| { - let typ = UnresolvedTypeData::from_int_token(token).with_span(span); - if let UnresolvedTypeData::Integer(crate::Signedness::Signed, _) = &typ.typ { - let reason = ParserErrorReason::ExperimentalFeature("Signed integer types"); - emit(ParserError::with_reason(reason, span)); - } - typ - }) + .map_with_span(|(_, token), span| UnresolvedTypeData::from_int_token(token).with_span(span)) } fn named_type(type_parser: impl NoirParser) -> impl NoirParser { @@ -1177,7 +1213,9 @@ where .ignore_then(type_parser.clone()) .then_ignore(just(Token::RightBracket)) .or_not() - .map_with_span(|t, span| t.unwrap_or_else(|| UnresolvedTypeData::Unit.with_span(span))); + .map_with_span(|t, span| { + t.unwrap_or_else(|| UnresolvedTypeData::Unit.with_span(Span::empty(span.end()))) + }); keyword(Keyword::Fn) .ignore_then(env) @@ -1657,15 +1695,28 @@ fn literal() -> impl NoirParser { Token::Int(x) => ExpressionKind::integer(x), Token::Bool(b) => ExpressionKind::boolean(b), Token::Str(s) => ExpressionKind::string(s), + Token::RawStr(s, hashes) => ExpressionKind::raw_string(s, hashes), Token::FmtStr(s) => ExpressionKind::format_string(s), unexpected => unreachable!("Non-literal {} parsed as a literal", unexpected), }) } +fn literal_with_sign() -> impl NoirParser { + choice(( + literal(), + just(Token::Minus).then(literal()).map(|(_, exp)| match exp { + ExpressionKind::Literal(Literal::Integer(value, sign)) => { + ExpressionKind::Literal(Literal::Integer(value, !sign)) + } + _ => unreachable!(), + }), + )) +} + fn literal_or_collection<'a>( expr_parser: impl ExprParser + 'a, ) -> impl NoirParser + 
'a { - choice((literal(), constructor(expr_parser.clone()), array_expr(expr_parser))) + choice((literal_with_sign(), constructor(expr_parser.clone()), array_expr(expr_parser))) } #[cfg(test)] @@ -2242,7 +2293,7 @@ mod test { let hex = parse_with(literal(), "0x05").unwrap(); match (expr_to_lit(int), expr_to_lit(hex)) { - (Literal::Integer(int), Literal::Integer(hex)) => assert_eq!(int, hex), + (Literal::Integer(int, false), Literal::Integer(hex, false)) => assert_eq!(int, hex), _ => unreachable!(), } } @@ -2549,4 +2600,79 @@ mod test { check_cases_with_errors(&cases[..], block(fresh_statement())); } + + #[test] + fn parse_raw_string_expr() { + let cases = vec![ + Case { source: r##" r"foo" "##, expect: r##"r"foo""##, errors: 0 }, + Case { source: r##" r#"foo"# "##, expect: r##"r#"foo"#"##, errors: 0 }, + // backslash + Case { source: r##" r"\\" "##, expect: r##"r"\\""##, errors: 0 }, + Case { source: r##" r#"\"# "##, expect: r##"r#"\"#"##, errors: 0 }, + Case { source: r##" r#"\\"# "##, expect: r##"r#"\\"#"##, errors: 0 }, + Case { source: r##" r#"\\\"# "##, expect: r##"r#"\\\"#"##, errors: 0 }, + // escape sequence + Case { + source: r##" r#"\t\n\\t\\n\\\t\\\n\\\\"# "##, + expect: r##"r#"\t\n\\t\\n\\\t\\\n\\\\"#"##, + errors: 0, + }, + Case { source: r##" r#"\\\\\\\\"# "##, expect: r##"r#"\\\\\\\\"#"##, errors: 0 }, + // mismatch - errors: + Case { source: r###" r#"foo"## "###, expect: r###"r#"foo"#"###, errors: 1 }, + Case { source: r###" r##"foo"# "###, expect: "(none)", errors: 2 }, + // mismatch: short: + Case { source: r###" r"foo"# "###, expect: r###"r"foo""###, errors: 1 }, + Case { source: r###" r#"foo" "###, expect: "(none)", errors: 2 }, + // empty string + Case { source: r####"r"""####, expect: r####"r"""####, errors: 0 }, + Case { source: r####"r###""###"####, expect: r####"r###""###"####, errors: 0 }, + // miscellaneous + Case { source: r###" r#\"foo\"# "###, expect: "plain::r", errors: 2 }, + Case { source: r###" r\"foo\" "###, expect: "plain::r", errors: 1 }, + Case { source: r###" r##"foo"# "###, expect: "(none)", errors: 2 }, + // missing 'r' letter + Case { source: r###" ##"foo"# "###, expect: r#""foo""#, errors: 2 }, + Case { source: r###" #"foo" "###, expect: "plain::foo", errors: 2 }, + // whitespace + Case { source: r###" r #"foo"# "###, expect: "plain::r", errors: 2 }, + Case { source: r###" r# "foo"# "###, expect: "plain::r", errors: 3 }, + Case { source: r###" r#"foo" # "###, expect: "(none)", errors: 2 }, + // after identifier + Case { source: r###" bar#"foo"# "###, expect: "plain::bar", errors: 2 }, + // nested + Case { + source: r###"r##"foo r#"bar"# r"baz" ### bye"##"###, + expect: r###"r##"foo r#"bar"# r"baz" ### bye"##"###, + errors: 0, + }, + ]; + + check_cases_with_errors(&cases[..], expression()); + } + + #[test] + fn parse_raw_string_lit() { + let lit_cases = vec![ + Case { source: r##" r"foo" "##, expect: r##"r"foo""##, errors: 0 }, + Case { source: r##" r#"foo"# "##, expect: r##"r#"foo"#"##, errors: 0 }, + // backslash + Case { source: r##" r"\\" "##, expect: r##"r"\\""##, errors: 0 }, + Case { source: r##" r#"\"# "##, expect: r##"r#"\"#"##, errors: 0 }, + Case { source: r##" r#"\\"# "##, expect: r##"r#"\\"#"##, errors: 0 }, + Case { source: r##" r#"\\\"# "##, expect: r##"r#"\\\"#"##, errors: 0 }, + // escape sequence + Case { + source: r##" r#"\t\n\\t\\n\\\t\\\n\\\\"# "##, + expect: r##"r#"\t\n\\t\\n\\\t\\\n\\\\"#"##, + errors: 0, + }, + Case { source: r##" r#"\\\\\\\\"# "##, expect: r##"r#"\\\\\\\\"#"##, errors: 0 }, + // mismatch - errors: + Case { 
source: r###" r#"foo"## "###, expect: r###"r#"foo"#"###, errors: 1 }, + Case { source: r###" r##"foo"# "###, expect: "(none)", errors: 2 }, + ]; + + check_cases_with_errors(&lit_cases[..], literal()); + } } diff --git a/compiler/noirc_frontend/src/resolve_locations.rs b/compiler/noirc_frontend/src/resolve_locations.rs new file mode 100644 index 00000000000..bfacee0ef96 --- /dev/null +++ b/compiler/noirc_frontend/src/resolve_locations.rs @@ -0,0 +1,169 @@ +use arena::Index; +use noirc_errors::Location; + +use crate::hir_def::expr::HirExpression; +use crate::hir_def::types::Type; + +use crate::node_interner::{DefinitionKind, Node, NodeInterner}; + +impl NodeInterner { + /// Scans the interner for the item which is located at that [Location] + /// + /// The [Location] may not necessarily point to the beginning of the item + /// so we check if the location's span is contained within the start or end + /// of each items [Span] + pub fn find_location_index(&self, location: Location) -> Option> { + let mut location_candidate: Option<(&Index, &Location)> = None; + + // Note: we can modify this in the future to not do a linear + // scan by storing a separate map of the spans or by sorting the locations. + for (index, interned_location) in self.id_to_location.iter() { + if interned_location.contains(&location) { + if let Some(current_location) = location_candidate { + if interned_location.span.is_smaller(¤t_location.1.span) { + location_candidate = Some((index, interned_location)); + } + } else { + location_candidate = Some((index, interned_location)); + } + } + } + location_candidate.map(|(index, _location)| *index) + } + + /// Returns the [Location] of the definition of the given Ident found at [Span] of the given [FileId]. + /// Returns [None] when definition is not found. + pub fn get_definition_location_from(&self, location: Location) -> Option { + self.find_location_index(location) + .and_then(|index| self.resolve_location(index)) + .or_else(|| self.try_resolve_trait_impl_location(location)) + .or_else(|| self.try_resolve_trait_method_declaration(location)) + } + + pub fn get_declaration_location_from(&self, location: Location) -> Option { + self.try_resolve_trait_method_declaration(location).or_else(|| { + self.find_location_index(location) + .and_then(|index| self.resolve_location(index)) + .and_then(|found_impl_location| { + self.try_resolve_trait_method_declaration(found_impl_location) + }) + }) + } + + /// For a given [Index] we return [Location] to which we resolved to + /// We currently return None for features not yet implemented + /// TODO(#3659): LSP goto def should error when Ident at Location could not resolve + fn resolve_location(&self, index: impl Into) -> Option { + let node = self.nodes.get(index.into())?; + + match node { + Node::Function(func) => self.resolve_location(func.as_expr()), + Node::Expression(expression) => self.resolve_expression_location(expression), + _ => None, + } + } + + /// Resolves the [Location] of the definition for a given [HirExpression] + /// + /// Note: current the code returns None because some expressions are not yet implemented. 
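As context for the linear scan above: the rule is simply "of every recorded span that contains the cursor, keep the smallest". Below is a minimal, self-contained sketch of that selection, using stand-in `Span` and index types rather than the real `noirc_errors` and `arena` types.

```rust
// Minimal sketch of the "smallest containing span wins" rule used by
// `find_location_index`. `Span` and the index type are illustrative stand-ins.
#[derive(Clone, Copy, Debug, PartialEq)]
struct Span {
    start: u32,
    end: u32,
}

impl Span {
    fn contains(&self, pos: u32) -> bool {
        self.start <= pos && pos <= self.end
    }
    fn is_smaller(&self, other: &Span) -> bool {
        (self.end - self.start) < (other.end - other.start)
    }
}

/// Linear scan: among all spans containing `pos`, return the index of the smallest one.
fn find_index_at(items: &[(usize, Span)], pos: u32) -> Option<usize> {
    let mut best: Option<(usize, Span)> = None;
    for &(index, span) in items {
        if span.contains(pos) {
            match best {
                Some((_, current)) if !span.is_smaller(&current) => {}
                _ => best = Some((index, span)),
            }
        }
    }
    best.map(|(index, _)| index)
}

fn main() {
    // A function spanning 0..=100 and an identifier inside it spanning 40..=45.
    let items = [(0, Span { start: 0, end: 100 }), (1, Span { start: 40, end: 45 })];
    // A cursor at position 42 resolves to the tighter span, index 1.
    assert_eq!(find_index_at(&items, 42), Some(1));
}
```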
+ fn resolve_expression_location(&self, expression: &HirExpression) -> Option { + match expression { + HirExpression::Ident(ident) => { + let definition_info = self.definition(ident.id); + match definition_info.kind { + DefinitionKind::Function(func_id) => { + Some(self.function_meta(&func_id).location) + } + DefinitionKind::Local(_local_id) => Some(definition_info.location), + _ => None, + } + } + HirExpression::Constructor(expr) => { + let struct_type = &expr.r#type.borrow(); + Some(struct_type.location) + } + HirExpression::MemberAccess(expr_member_access) => { + self.resolve_struct_member_access(expr_member_access) + } + HirExpression::Call(expr_call) => { + let func = expr_call.func; + self.resolve_location(func) + } + + _ => None, + } + } + + /// Resolves the [Location] of the definition for a given [crate::hir_def::expr::HirMemberAccess] + /// This is used to resolve the location of a struct member access. + /// For example, in the expression `foo.bar` we want to resolve the location of `bar` + /// to the location of the definition of `bar` in the struct `foo`. + fn resolve_struct_member_access( + &self, + expr_member_access: &crate::hir_def::expr::HirMemberAccess, + ) -> Option { + let expr_lhs = &expr_member_access.lhs; + let expr_rhs = &expr_member_access.rhs; + + let lhs_self_struct = match self.id_type(expr_lhs) { + Type::Struct(struct_type, _) => struct_type, + _ => return None, + }; + + let struct_type = lhs_self_struct.borrow(); + let field_names = struct_type.field_names(); + + field_names.iter().find(|field_name| field_name.0 == expr_rhs.0).map(|found_field_name| { + Location::new(found_field_name.span(), struct_type.location.file) + }) + } + + /// Attempts to resolve [Location] of [Trait] based on [Location] of [TraitImpl] + /// This is used by LSP to resolve the location of a trait based on the location of a trait impl. + /// + /// Example: + /// impl Foo for Bar { ... } -> trait Foo { ... } + fn try_resolve_trait_impl_location(&self, location: Location) -> Option { + self.trait_implementations + .iter() + .find(|shared_trait_impl| { + let trait_impl = shared_trait_impl.borrow(); + trait_impl.file == location.file && trait_impl.ident.span().contains(&location.span) + }) + .and_then(|shared_trait_impl| { + let trait_impl = shared_trait_impl.borrow(); + self.traits.get(&trait_impl.trait_id).map(|trait_| trait_.location) + }) + } + + /// Attempts to resolve [Location] of [Trait]'s [TraitFunction] declaration based on [Location] of [TraitFunction] call. + /// + /// This is used by LSP to resolve the location. 
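The `foo.bar` case described above boils down to a name lookup over the struct's field list, returning the span attached to the matching field name. A minimal sketch, with illustrative stand-in types in place of the HIR and `Location` types:

```rust
// Sketch of the field-name lookup behind `resolve_struct_member_access`:
// given the struct's field names (each carrying its own span) and the
// identifier on the right-hand side of `foo.bar`, return the matching span.
#[derive(Clone, Copy, Debug, PartialEq)]
struct Span {
    start: u32,
    end: u32,
}

fn field_definition_span(fields: &[(&str, Span)], rhs: &str) -> Option<Span> {
    fields.iter().find(|(name, _)| *name == rhs).map(|(_, span)| *span)
}

fn main() {
    // `struct Foo { bar: Field, baz: Field }` with made-up spans for each field name.
    let fields = [("bar", Span { start: 13, end: 16 }), ("baz", Span { start: 25, end: 28 })];
    // Resolving `foo.baz` jumps to the span where `baz` is declared.
    assert_eq!(field_definition_span(&fields, "baz"), Some(Span { start: 25, end: 28 }));
}
```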
+ /// + /// ### Example: + /// ```nr + /// trait Fieldable { + /// fn to_field(self) -> Field; + /// ^------------------------------\ + /// } | + /// | + /// fn main_func(x: u32) { | + /// assert(x.to_field() == 15); | + /// \......................./ + /// } + /// ``` + /// + fn try_resolve_trait_method_declaration(&self, location: Location) -> Option { + self.func_meta + .iter() + .find(|(_, func_meta)| func_meta.location.contains(&location)) + .and_then(|(func_id, _func_meta)| { + let (_, trait_id) = self.get_function_trait(func_id)?; + + let mut methods = self.traits.get(&trait_id)?.methods.iter(); + let method = + methods.find(|method| method.name.0.contents == self.function_name(func_id)); + method.map(|method| method.location) + }) + } +} diff --git a/compiler/noirc_frontend/src/tests.rs b/compiler/noirc_frontend/src/tests.rs index 6a1cf80accd..063e0215a30 100644 --- a/compiler/noirc_frontend/src/tests.rs +++ b/compiler/noirc_frontend/src/tests.rs @@ -9,6 +9,7 @@ mod test { use fm::FileId; + use iter_extended::vecmap; use noirc_errors::Location; use crate::hir::def_collector::dc_crate::CompilationError; @@ -20,7 +21,6 @@ mod test { use crate::hir::Context; use crate::node_interner::{NodeInterner, StmtId}; - use crate::graph::CrateGraph; use crate::hir::def_collector::dc_crate::DefCollector; use crate::hir_def::expr::HirExpression; use crate::hir_def::stmt::HirStatement; @@ -38,71 +38,55 @@ mod test { errors.iter().any(|(e, _f)| matches!(e, CompilationError::ParseError(_))) } - pub(crate) fn remove_experimental_feature_warnings( - errors: Vec<(CompilationError, FileId)>, - ) -> Vec<(CompilationError, FileId)> { - errors - .iter() - .filter(|(e, _f)| match e.clone() { - CompilationError::ParseError(parser_error) => !matches!( - parser_error.reason(), - Some(ParserErrorReason::ExperimentalFeature(_)) - ), - _ => true, - }) - .cloned() - .collect() + pub(crate) fn remove_experimental_warnings(errors: &mut Vec<(CompilationError, FileId)>) { + errors.retain(|(error, _)| match error { + CompilationError::ParseError(error) => { + !matches!(error.reason(), Some(ParserErrorReason::ExperimentalFeature(..))) + } + _ => true, + }); } pub(crate) fn get_program( src: &str, ) -> (ParsedModule, Context, Vec<(CompilationError, FileId)>) { let root = std::path::Path::new("/"); - let fm = FileManager::new(root, Box::new(|path| std::fs::read_to_string(path))); - //let fm = FileManager::new(root, Box::new(get_non_stdlib_asset)); - let graph = CrateGraph::default(); - let mut context = Context::new(fm, graph); + let fm = FileManager::new(root); + let mut context = Context::new(fm); + context.def_interner.populate_dummy_operator_traits(); let root_file_id = FileId::dummy(); let root_crate_id = context.crate_graph.add_crate_root(root_file_id); let (program, parser_errors) = parse_program(src); - let mut errors = remove_experimental_feature_warnings( - parser_errors.iter().cloned().map(|e| (e.into(), root_file_id)).collect(), - ); + let mut errors = vecmap(parser_errors, |e| (e.into(), root_file_id)); + remove_experimental_warnings(&mut errors); + if !has_parser_error(&errors) { // Allocate a default Module for the root, giving it a ModuleId let mut modules: Arena = Arena::default(); let location = Location::new(Default::default(), root_file_id); let root = modules.insert(ModuleData::new(None, location, false)); + let def_map = CrateDefMap { root: LocalModuleId(root), modules, krate: root_crate_id, extern_prelude: BTreeMap::new(), }; + // Now we want to populate the CrateDefMap using the DefCollector 
errors.extend(DefCollector::collect( def_map, &mut context, program.clone().into_sorted(), root_file_id, + Vec::new(), // No macro processors )); } (program, context, errors) } pub(crate) fn get_program_errors(src: &str) -> Vec<(CompilationError, FileId)> { - let (_program, _context, errors) = get_program(src); - errors - .iter() - .filter(|(e, _f)| match e.clone() { - CompilationError::ParseError(parser_error) => !matches!( - parser_error.reason(), - Some(ParserErrorReason::ExperimentalFeature(_)) - ), - _ => true, - }) - .cloned() - .collect() + get_program(src).2 } #[test] @@ -457,7 +441,7 @@ mod test { }"; let errors = get_program_errors(src); assert!(!has_parser_error(&errors)); - assert!(errors.len() == 1, "Expected 1 error, got: {:?}", errors); + assert!(errors.len() == 2, "Expected 2 errors, got: {:?}", errors); for (err, _file_id) in errors { match &err { diff --git a/compiler/noirc_printable_type/Cargo.toml b/compiler/noirc_printable_type/Cargo.toml index 5f2eea92257..fbbe778e561 100644 --- a/compiler/noirc_printable_type/Cargo.toml +++ b/compiler/noirc_printable_type/Cargo.toml @@ -14,5 +14,6 @@ regex = "1.9.1" serde.workspace = true serde_json.workspace = true thiserror.workspace = true +jsonrpc.workspace = true [dev-dependencies] diff --git a/compiler/noirc_printable_type/src/lib.rs b/compiler/noirc_printable_type/src/lib.rs index 348f5ef3274..273e2d512ea 100644 --- a/compiler/noirc_printable_type/src/lib.rs +++ b/compiler/noirc_printable_type/src/lib.rs @@ -73,6 +73,9 @@ pub enum ForeignCallError { #[error("Could not parse PrintableType argument. {0}")] ParsingError(#[from] serde_json::Error), + + #[error("Failed calling external resolver. {0}")] + ExternalResolverError(#[from] jsonrpc::Error), } impl TryFrom<&[ForeignCallParam]> for PrintableValueDisplay { @@ -168,15 +171,24 @@ fn fetch_printable_type( fn to_string(value: &PrintableValue, typ: &PrintableType) -> Option { let mut output = String::new(); match (value, typ) { - ( - PrintableValue::Field(f), - PrintableType::Field - // TODO(#2401): We should print the sign for these and probably print normal integers instead of field strings - | PrintableType::SignedInteger { .. } - | PrintableType::UnsignedInteger { .. }, - ) => { + (PrintableValue::Field(f), PrintableType::Field) => { output.push_str(&format_field_string(*f)); } + (PrintableValue::Field(f), PrintableType::UnsignedInteger { width }) => { + let uint_cast = f.to_u128() & ((1 << width) - 1); // Retain the lower 'width' bits + output.push_str(&uint_cast.to_string()); + } + (PrintableValue::Field(f), PrintableType::SignedInteger { width }) => { + let mut uint = f.to_u128(); // Interpret as uint + + // Extract sign relative to width of input + if (uint >> (width - 1)) == 1 { + output.push('-'); + uint = (uint ^ ((1 << width) - 1)) + 1; // Two's complement relative to width of input + } + + output.push_str(&uint.to_string()); + } (PrintableValue::Field(f), PrintableType::Boolean) => { if f.is_one() { output.push_str("true"); @@ -187,8 +199,11 @@ fn to_string(value: &PrintableValue, typ: &PrintableType) -> Option { (PrintableValue::Vec(vector), PrintableType::Array { typ, .. 
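The new integer branches above reduce to bit manipulation on the field's `u128` representation: keep the low `width` bits for unsigned values, and read those bits as two's complement for signed ones. A standalone sketch of that arithmetic, assuming `width` is between 1 and 64 (here `width` plays the role of the `PrintableType` width):

```rust
// Sketch of the unsigned/signed formatting introduced above.
fn format_unsigned(raw: u128, width: u32) -> String {
    let masked = raw & ((1u128 << width) - 1); // retain the lower `width` bits
    masked.to_string()
}

fn format_signed(raw: u128, width: u32) -> String {
    let mut uint = raw & ((1u128 << width) - 1);
    if (uint >> (width - 1)) == 1 {
        // Negative in two's complement: flip the low `width` bits and add one.
        uint = (uint ^ ((1u128 << width) - 1)) + 1;
        format!("-{uint}")
    } else {
        uint.to_string()
    }
}

fn main() {
    assert_eq!(format_unsigned(300, 8), "44"); // 300 mod 256
    assert_eq!(format_signed(0xFF, 8), "-1");  // all-ones is -1 in 8-bit two's complement
    assert_eq!(format_signed(0x7F, 8), "127");
}
```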
}) => { output.push('['); let mut values = vector.iter().peekable(); - while let Some(value) = values.next() { - output.push_str(&format!("{}", PrintableValueDisplay::Plain(value.clone(), *typ.clone()))); + while let Some(value) = values.next() { + output.push_str(&format!( + "{}", + PrintableValueDisplay::Plain(value.clone(), *typ.clone()) + )); if values.peek().is_some() { output.push_str(", "); } @@ -197,16 +212,19 @@ fn to_string(value: &PrintableValue, typ: &PrintableType) -> Option { } (PrintableValue::String(s), PrintableType::String { .. }) => { - output.push_str(&format!(r#""{s}""#)); + output.push_str(s); } (PrintableValue::Struct(map), PrintableType::Struct { name, fields, .. }) => { output.push_str(&format!("{name} {{ ")); let mut fields = fields.iter().peekable(); - while let Some((key, field_type)) = fields.next() { + while let Some((key, field_type)) = fields.next() { let value = &map[key]; - output.push_str(&format!("{key}: {}", PrintableValueDisplay::Plain(value.clone(), field_type.clone()))); + output.push_str(&format!( + "{key}: {}", + PrintableValueDisplay::Plain(value.clone(), field_type.clone()) + )); if fields.peek().is_some() { output.push_str(", "); } @@ -215,7 +233,7 @@ fn to_string(value: &PrintableValue, typ: &PrintableType) -> Option { output.push_str(" }"); } - _ => return None + _ => return None, }; Some(output) diff --git a/compiler/source-resolver/.eslintignore b/compiler/source-resolver/.eslintignore deleted file mode 100644 index 3c3629e647f..00000000000 --- a/compiler/source-resolver/.eslintignore +++ /dev/null @@ -1 +0,0 @@ -node_modules diff --git a/compiler/source-resolver/.eslintrc.js b/compiler/source-resolver/.eslintrc.js deleted file mode 100644 index 33335c2a877..00000000000 --- a/compiler/source-resolver/.eslintrc.js +++ /dev/null @@ -1,3 +0,0 @@ -module.exports = { - extends: ["../../.eslintrc.js"], -}; diff --git a/compiler/source-resolver/package.json b/compiler/source-resolver/package.json deleted file mode 100644 index fb214a822eb..00000000000 --- a/compiler/source-resolver/package.json +++ /dev/null @@ -1,43 +0,0 @@ -{ - "name": "@noir-lang/source-resolver", - "version": "0.19.2", - "license": "MIT", - "main": "./lib-node/index_node.js", - "types": "./types/index_node.d.ts", - "module": "./lib/index.js", - "browser": "./lib/index.js", - "exports": { - ".": { - "require": "./lib-node/index_node.js", - "import": "./lib/index.js" - } - }, - "repository": { - "type": "git", - "url": "https://github.com/noir-lang/noir.git" - }, - "scripts": { - "clean-modules": "rm -rf lib", - "build:node": "tsc -p tsconfig.cjs.json", - "build:web": "tsc -p tsconfig.esm.json", - "build": "npm run clean-modules && npm run build:node && npm run build:web && npm run generate-types", - "test": "ava", - "generate-types": "tsc src/*.ts --declaration --emitDeclarationOnly --outDir types", - "clean": "rm -rf ./lib ./lib-node", - "nightly:version": "jq --arg new_version \"-$(git rev-parse --short HEAD)$1\" '.version = .version + $new_version' package.json > package-tmp.json && mv package-tmp.json package.json", - "publish": "echo 📡 publishing `$npm_package_name` && yarn npm publish", - "lint": "NODE_NO_WARNINGS=1 eslint . 
--ext .ts --ignore-path ./.eslintignore --max-warnings 0" - }, - "devDependencies": { - "@types/node": "^20.5.7", - "ava": "^5.2.0", - "typescript": "4.9.4" - }, - "files": [ - "lib", - "lib-node", - "src", - "types", - "package.json" - ] -} diff --git a/compiler/source-resolver/src/index.ts b/compiler/source-resolver/src/index.ts deleted file mode 100644 index 3498c195fbd..00000000000 --- a/compiler/source-resolver/src/index.ts +++ /dev/null @@ -1,31 +0,0 @@ -let resolveFunction: ((source_id: string) => string) | null = null; - -export const read_file = function (source_id: string): string { - if (resolveFunction) { - const result = resolveFunction(source_id); - - if (typeof result === 'string') { - return result; - } else { - throw new Error( - 'Noir source resolver function MUST return String synchronously. Are you trying to return anything else, eg. `Promise`?', - ); - } - } else { - throw new Error('Not yet initialized. Use initializeResolver(() => string)'); - } -}; - -function initialize(noir_resolver: (source_id: string) => string): (source_id: string) => string { - if (typeof noir_resolver === 'function') { - return noir_resolver; - } else { - throw new Error( - 'Provided Noir Resolver is not a function, hint: use function(module_id) => NoirSource as second parameter', - ); - } -} - -export function initializeResolver(resolver: (source_id: string) => string): void { - resolveFunction = initialize(resolver); -} diff --git a/compiler/source-resolver/src/index_node.ts b/compiler/source-resolver/src/index_node.ts deleted file mode 100644 index 14b7efd30c5..00000000000 --- a/compiler/source-resolver/src/index_node.ts +++ /dev/null @@ -1,17 +0,0 @@ -/// - -import { initializeResolver, read_file } from './index.js'; - -initializeResolver((source_id: string) => { - let fileContent = ''; - try { - // eslint-disable-next-line @typescript-eslint/no-var-requires - const fs = require('fs'); - fileContent = fs.readFileSync(source_id, { encoding: 'utf8' }) as string; - } catch (e) { - console.log(e); - } - return fileContent; -}); - -export { initializeResolver, read_file }; diff --git a/compiler/source-resolver/test/cjs_initialization.test.cjs b/compiler/source-resolver/test/cjs_initialization.test.cjs deleted file mode 100644 index 1dc7784fa40..00000000000 --- a/compiler/source-resolver/test/cjs_initialization.test.cjs +++ /dev/null @@ -1,51 +0,0 @@ -const test = require('ava'); - -const { initializeResolver, read_file } = require("../lib-node/index_node.js"); - -test('It reads file from file system within read_file using default implementation.', t => { - - const readResult = read_file("./package.json"); - - t.assert(readResult, "return from `read_file` should by truthy"); - -}); - -test('It calls function from initializer within read_file function.', t => { - - const RESULT_RESPONSE = "TEST"; - - initializeResolver((source) => { - return source; - }); - - const readResult = read_file(RESULT_RESPONSE); - - t.is(readResult, RESULT_RESPONSE); - -}); - -test('It communicates error when resolver returns non-String to read_file function.', t => { - - const RESULT_RESPONSE = "TEST"; - - initializeResolver((source) => { - return Promise.resolve(source); - }); - - const error = t.throws(() => { - read_file(RESULT_RESPONSE); - }, { instanceOf: Error }); - - t.is(error.message, 'Noir source resolver function MUST return String synchronously. Are you trying to return anything else, eg. 
`Promise`?'); - -}); - -test('It communicates error when resolver is initialized to anything but a function.', t => { - - const error = t.throws(() => { - initializeResolver(null); - }, { instanceOf: Error }); - - t.is(error.message, 'Provided Noir Resolver is not a function, hint: use function(module_id) => NoirSource as second parameter'); - -}); diff --git a/compiler/source-resolver/test/esm_initialization.test.mjs b/compiler/source-resolver/test/esm_initialization.test.mjs deleted file mode 100644 index 197b48e48d0..00000000000 --- a/compiler/source-resolver/test/esm_initialization.test.mjs +++ /dev/null @@ -1,58 +0,0 @@ -/** - * Below tests are commented because they require - * "type": "module", in package.json - * Seems that both CJS and MJS modes are not going to work. -*/ -import test from 'ava'; - -import { initializeResolver, read_file } from "../lib-node/index.js"; - -test('It communicates error when read_file was called before initializeResolver.', t => { - - const error = t.throws(() => { - const readResult = read_file("./package.json"); - }, { instanceOf: Error }); - - t.is(error.message, 'Not yet initialized. Use initializeResolver(() => string)'); - -}); - -test('It calls function from initializer within read_file function.', t => { - - const RESULT_RESPONSE = "TEST"; - - initializeResolver((source) => { - return source; - }); - - const readResult = read_file(RESULT_RESPONSE); - - t.is(readResult, RESULT_RESPONSE); - -}); - -test('It communicates error when resolver returns non-String to read_file function.', t => { - - const RESULT_RESPONSE = "TEST"; - - initializeResolver((source) => { - return Promise.resolve(source); - }); - - const error = t.throws(() => { - read_file(RESULT_RESPONSE); - }, { instanceOf: Error }); - - t.is(error.message, 'Noir source resolver function MUST return String synchronously. Are you trying to return anything else, eg. 
`Promise`?'); - -}); - -test('It communicates error when resolver is initialized to anything but a function.', t => { - - const error = t.throws(() => { - initializeResolver(null); - }, { instanceOf: Error }); - - t.is(error.message, 'Provided Noir Resolver is not a function, hint: use function(module_id) => NoirSource as second parameter'); - -}); diff --git a/compiler/source-resolver/tsconfig.cjs.json b/compiler/source-resolver/tsconfig.cjs.json deleted file mode 100644 index 51863a34cec..00000000000 --- a/compiler/source-resolver/tsconfig.cjs.json +++ /dev/null @@ -1,16 +0,0 @@ -{ - "compilerOptions": { - "target": "es2018", - "moduleResolution": "node", - "outDir": "lib-node", - "module": "commonjs", - "strict": true, - "esModuleInterop": true, - "noImplicitAny": true, - "removeComments": false, - "preserveConstEnums": true, - "sourceMap": true, - "importHelpers": true - }, - "include": ["src"] -} diff --git a/compiler/source-resolver/tsconfig.esm.json b/compiler/source-resolver/tsconfig.esm.json deleted file mode 100644 index dcf4441c4ad..00000000000 --- a/compiler/source-resolver/tsconfig.esm.json +++ /dev/null @@ -1,16 +0,0 @@ -{ - "compilerOptions": { - "target": "es2018", - "moduleResolution": "node", - "outDir": "lib", - "module": "ES6", - "strict": true, - "esModuleInterop": true, - "noImplicitAny": true, - "removeComments": false, - "preserveConstEnums": true, - "sourceMap": true, - "importHelpers": true - }, - "include": ["src/index.ts"] -} diff --git a/compiler/source-resolver/types/index.d.ts b/compiler/source-resolver/types/index.d.ts deleted file mode 100644 index a17f7bc36bb..00000000000 --- a/compiler/source-resolver/types/index.d.ts +++ /dev/null @@ -1,2 +0,0 @@ -export declare const read_file: (source_id: string) => string; -export declare function initializeResolver(resolver: (source_id: string) => string): void; diff --git a/compiler/source-resolver/types/index_node.d.ts b/compiler/source-resolver/types/index_node.d.ts deleted file mode 100644 index 3a109e47e73..00000000000 --- a/compiler/source-resolver/types/index_node.d.ts +++ /dev/null @@ -1,2 +0,0 @@ -import { initializeResolver, read_file } from './index.js'; -export { initializeResolver, read_file }; diff --git a/compiler/utils/iter-extended/src/lib.rs b/compiler/utils/iter-extended/src/lib.rs index aef89b58b30..d2ef271aebf 100644 --- a/compiler/utils/iter-extended/src/lib.rs +++ b/compiler/utils/iter-extended/src/lib.rs @@ -44,7 +44,7 @@ where } /// Given an iterator over a Result, filter out the Ok values from the Err values -/// and return both in separate Vecs. Unlike other collect-like functions over Results, +/// and return both in separate `Vec`s. Unlike other collect-like functions over Results, /// this function will always consume the entire iterator. 
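For readers unfamiliar with the helper being documented here: unlike collecting an iterator of `Result`s, it never short-circuits on the first `Err`. A standalone re-implementation for illustration only (the hunk shows only part of the real generic signature):

```rust
// Map each element, split Ok/Err results into two Vecs, and always consume
// the whole iterator, as the doc comment above describes.
fn partition_results<It, T, E, U, F>(iterable: It, mut f: F) -> (Vec<U>, Vec<E>)
where
    It: IntoIterator<Item = T>,
    F: FnMut(T) -> Result<U, E>,
{
    let mut oks = Vec::new();
    let mut errs = Vec::new();
    for item in iterable {
        match f(item) {
            Ok(value) => oks.push(value),
            Err(error) => errs.push(error),
        }
    }
    (oks, errs)
}

fn main() {
    let (evens, odds) = partition_results(1..=5, |n| if n % 2 == 0 { Ok(n) } else { Err(n) });
    // Nothing short-circuits: every element ends up in one of the two Vecs.
    assert_eq!(evens, vec![2, 4]);
    assert_eq!(odds, vec![1, 3, 5]);
}
```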
pub fn partition_results(iterable: It, mut f: F) -> (Vec, Vec) where diff --git a/compiler/wasm/.eslintrc.js b/compiler/wasm/.eslintrc.js index 33335c2a877..5a2cc7f1ec0 100644 --- a/compiler/wasm/.eslintrc.js +++ b/compiler/wasm/.eslintrc.js @@ -1,3 +1,3 @@ module.exports = { - extends: ["../../.eslintrc.js"], + extends: ['../../.eslintrc.js'], }; diff --git a/compiler/wasm/.gitignore b/compiler/wasm/.gitignore index f968dafbcc3..1b823a2c19a 100644 --- a/compiler/wasm/.gitignore +++ b/compiler/wasm/.gitignore @@ -1 +1,2 @@ -noir-script/target +dist +build diff --git a/compiler/wasm/.mocharc.json b/compiler/wasm/.mocharc.json index 5e3ee32d901..8009c66f9a8 100644 --- a/compiler/wasm/.mocharc.json +++ b/compiler/wasm/.mocharc.json @@ -1,7 +1,13 @@ { - "extension": [ - "ts" - ], - "spec": "test/node/**/*.test.ts", - "require": "ts-node/register" -} \ No newline at end of file + "require": "ts-node/register", + "extensions": [ + "ts" + ], + "spec": [ + "./test/**/!(browser)/*.test.ts" + ], + "node-option": [ + "loader=ts-node" + ] + } + \ No newline at end of file diff --git a/compiler/wasm/Cargo.toml b/compiler/wasm/Cargo.toml index 9ece26c6df4..7af26269106 100644 --- a/compiler/wasm/Cargo.toml +++ b/compiler/wasm/Cargo.toml @@ -21,16 +21,20 @@ noirc_errors.workspace = true wasm-bindgen.workspace = true serde.workspace = true js-sys.workspace = true -cfg-if.workspace = true console_error_panic_hook.workspace = true gloo-utils.workspace = true - -log = "0.4.17" -wasm-logger = "0.2.0" +tracing-subscriber.workspace = true +tracing-web.workspace = true # This is an unused dependency, we are adding it # so that we can enable the js feature in getrandom. getrandom = { workspace = true, features = ["js"] } +# This is an unused dependency, we are adding it +# so that we can enable the debug-embed feature in rust-embed. +# This is needed for rust-embed to include the stdlib sources in dev mode +# while simultaneously allowing us to deactivate wasm-opt for speed. +rust-embed = { workspace = true, features = ["debug-embed"] } + [build-dependencies] -build-data.workspace = true +build-data.workspace = true \ No newline at end of file diff --git a/compiler/wasm/build-fixtures.sh b/compiler/wasm/build-fixtures.sh new file mode 100755 index 00000000000..3a2330d4726 --- /dev/null +++ b/compiler/wasm/build-fixtures.sh @@ -0,0 +1,5 @@ +#!/usr/bin/env bash + +nargo compile --program-dir ./test/fixtures/simple +nargo compile --program-dir ./test/fixtures/with-deps +nargo compile --program-dir ./test/fixtures/noir-contract \ No newline at end of file diff --git a/compiler/wasm/build.sh b/compiler/wasm/build.sh deleted file mode 100755 index 37f2fd0a5a9..00000000000 --- a/compiler/wasm/build.sh +++ /dev/null @@ -1,47 +0,0 @@ -#!/usr/bin/env bash - -function require_command { - if ! command -v "$1" >/dev/null 2>&1; then - echo "Error: $1 is required but not installed." >&2 - exit 1 - fi -} -function check_installed { - if ! command -v "$1" >/dev/null 2>&1; then - echo "$1 is not installed. Please install it." >&2 - return 1 - fi - return 0 -} -function run_or_fail { - "$@" - local status=$? 
- if [ $status -ne 0 ]; then - echo "Command '$*' failed with exit code $status" >&2 - exit $status - fi -} - -require_command jq -require_command cargo -require_command wasm-bindgen -check_installed wasm-opt - -self_path=$(dirname "$(readlink -f "$0")") -export pname=$(cargo read-manifest | jq -r '.name') -export CARGO_TARGET_DIR=$self_path/target - -rm -rf $self_path/outputs >/dev/null 2>&1 -rm -rf $self_path/result >/dev/null 2>&1 - -if [ -v out ]; then - echo "Will install package to $out (defined outside installPhase.sh script)" -else - export out="$self_path/outputs/out" - echo "Will install package to $out" -fi - -run_or_fail $self_path/buildPhaseCargoCommand.sh -run_or_fail $self_path/installPhase.sh - -ln -s $out $self_path/result diff --git a/compiler/wasm/buildPhaseCargoCommand.sh b/compiler/wasm/buildPhaseCargoCommand.sh deleted file mode 100755 index 2ab0f1eb3cb..00000000000 --- a/compiler/wasm/buildPhaseCargoCommand.sh +++ /dev/null @@ -1,39 +0,0 @@ -#!/usr/bin/env bash - -function run_or_fail { - "$@" - local status=$? - if [ $status -ne 0 ]; then - echo "Command '$*' failed with exit code $status" >&2 - exit $status - fi -} -function run_if_available { - if command -v "$1" >/dev/null 2>&1; then - "$@" - else - echo "$1 is not installed. Please install it to use this feature." >&2 - fi -} - -export self_path=$(dirname "$(readlink -f "$0")") - -# Clear out the existing build artifacts as these aren't automatically removed by wasm-pack. -if [ -d ./pkg/ ]; then - rm -rf $self_path/pkg/ -fi - -TARGET=wasm32-unknown-unknown -WASM_BINARY=$CARGO_TARGET_DIR/$TARGET/release/${pname}.wasm - -NODE_DIR=$self_path/nodejs/ -BROWSER_DIR=$self_path/web/ -NODE_WASM=${NODE_DIR}/${pname}_bg.wasm -BROWSER_WASM=${BROWSER_DIR}/${pname}_bg.wasm - -# Build the new wasm package -run_or_fail cargo build --lib --release --target $TARGET --package ${pname} ${cargoExtraArgs} -run_or_fail wasm-bindgen $WASM_BINARY --out-dir $NODE_DIR --typescript --target nodejs -run_or_fail wasm-bindgen $WASM_BINARY --out-dir $BROWSER_DIR --typescript --target web -run_if_available wasm-opt $NODE_WASM -o $NODE_WASM -O -run_if_available wasm-opt $BROWSER_WASM -o $BROWSER_WASM -O \ No newline at end of file diff --git a/compiler/wasm/fixtures/deps/noir-script/Nargo.toml b/compiler/wasm/fixtures/deps/noir-script/Nargo.toml deleted file mode 100644 index 7c8182a02ae..00000000000 --- a/compiler/wasm/fixtures/deps/noir-script/Nargo.toml +++ /dev/null @@ -1,7 +0,0 @@ -[package] -name="noir_wasm_testing" -type="bin" -authors = [""] - -[dependencies] -lib_a = { path="../lib-a" } diff --git a/compiler/wasm/installPhase.sh b/compiler/wasm/installPhase.sh deleted file mode 100755 index e5be98a3339..00000000000 --- a/compiler/wasm/installPhase.sh +++ /dev/null @@ -1,10 +0,0 @@ -#!/usr/bin/env bash -export self_path=$(dirname "$(readlink -f "$0")") - -export out_path=$out/noir_wasm - -mkdir -p $out_path -cp $self_path/README.md $out_path/ -cp $self_path/package.json $out_path/ -cp -r $self_path/nodejs $out_path/ -cp -r $self_path/web $out_path/ diff --git a/compiler/wasm/package.json b/compiler/wasm/package.json index 75b876e1db9..4a71d8aa77d 100644 --- a/compiler/wasm/package.json +++ b/compiler/wasm/package.json @@ -1,43 +1,79 @@ { "name": "@noir-lang/noir_wasm", - "collaborators": [ + "contributors": [ "The Noir Team " ], - "version": "0.19.2", + "version": "0.22.0", "license": "(MIT OR Apache-2.0)", - "main": "./nodejs/noir_wasm.js", - "types": "./web/noir_wasm.d.ts", - "module": "./web/noir_wasm.js", + "main": "dist/main.js", + 
"types": "./dist/types/src/index.d.cts", + "exports": { + "node": "./dist/node/main.js", + "import": "./dist/web/main.mjs", + "require": "./dist/node/main.js", + "types": "./dist/types/src/index.d.cts", + "default": "./dist/web/main.mjs" + }, "files": [ - "nodejs", - "web", + "dist", "package.json" ], "sideEffects": false, + "homepage": "https://noir-lang.org/", "repository": { - "type": "git", - "url": "https://github.com/noir-lang/noir.git" + "url": "https://github.com/noir-lang/noir.git", + "directory": "compiler/wasm", + "type": "git" + }, + "bugs": { + "url": "https://github.com/noir-lang/noir/issues" }, "scripts": { - "build": "bash ./build.sh", - "test": "yarn test:node && yarn test:browser", - "test:node": "env TS_NODE_COMPILER_OPTIONS='{\"module\": \"commonjs\"}' mocha", + "build": "WASM_OPT=$(bash -c ./wasm-opt-check.sh) webpack", + "test": "yarn test:build_fixtures && yarn test:node && yarn test:browser", + "test:build_fixtures": "./build-fixtures.sh", "test:browser": "web-test-runner", - "clean": "chmod u+w web nodejs || true && rm -rf ./nodejs ./web ./target ./result", + "test:node": "NODE_NO_WARNINGS=1 mocha --config ./.mocharc.json", + "clean": "rm -rf ./build ./target ./dist public/fixtures/simple/target public/fixtures/with-deps/target", "nightly:version": "jq --arg new_version \"-$(git rev-parse --short HEAD)$1\" '.version = .version + $new_version' package.json > package-tmp.json && mv package-tmp.json package.json", "publish": "echo 📡 publishing `$npm_package_name` && yarn npm publish", - "lint": "NODE_NO_WARNINGS=1 eslint . --ext .ts --ignore-path ./.eslintignore --max-warnings 0", - "build:nix": "nix build -L .#noir_wasm", - "install:from:nix": "yarn clean && yarn build:nix && cp -rL ./result/noir_wasm/nodejs ./ && cp -rL ./result/noir_wasm/web ./" - }, - "peerDependencies": { - "@noir-lang/source-resolver": "workspace:*" + "lint": "NODE_NO_WARNINGS=1 eslint . 
--ext .ts --ignore-path ./.eslintignore --max-warnings 0" }, "devDependencies": { "@esm-bundle/chai": "^4.3.4-fix.0", + "@ltd/j-toml": "^1.38.0", + "@noir-lang/noirc_abi": "workspace:*", + "@types/adm-zip": "^0.5.0", + "@types/chai": "^4", + "@types/mocha": "^10.0.6", + "@types/mocha-each": "^2", + "@types/node": "^20.10.5", + "@types/path-browserify": "^1", + "@types/readable-stream": "^4", + "@types/sinon": "^17", + "@wasm-tool/wasm-pack-plugin": "^1.7.0", "@web/dev-server-esbuild": "^0.3.6", - "@web/test-runner": "^0.15.3", - "@web/test-runner-playwright": "^0.10.0", - "mocha": "^10.2.0" + "@web/test-runner": "^0.18.0", + "@web/test-runner-playwright": "^0.11.0", + "adm-zip": "^0.5.0", + "assert": "^2.1.0", + "browserify-fs": "^1.0.0", + "chai": "^4.3.10", + "copy-webpack-plugin": "^11.0.0", + "html-webpack-plugin": "^5.5.4", + "memfs": "^4.6.0", + "mocha": "^10.2.0", + "mocha-each": "^2.0.1", + "path-browserify": "^1.0.1", + "process": "^0.11.10", + "readable-stream": "^4.4.2", + "sinon": "^17.0.1", + "ts-loader": "^9.5.1", + "ts-node": "^10.9.1", + "typescript": "~5.2.2", + "unzipit": "^1.4.3", + "url": "^0.11.3", + "webpack": "^5.49.0", + "webpack-cli": "^4.7.2" } } diff --git a/compiler/wasm/src/circuit.rs b/compiler/wasm/src/circuit.rs deleted file mode 100644 index fdd9a7d9a20..00000000000 --- a/compiler/wasm/src/circuit.rs +++ /dev/null @@ -1,18 +0,0 @@ -use acvm::acir::circuit::Circuit; -use gloo_utils::format::JsValueSerdeExt; -use wasm_bindgen::prelude::*; - -// Deserializes bytes into ACIR structure -#[wasm_bindgen] -pub fn acir_read_bytes(bytes: Vec) -> JsValue { - console_error_panic_hook::set_once(); - let circuit = Circuit::deserialize_circuit(&bytes).unwrap(); - ::from_serde(&circuit).unwrap() -} - -#[wasm_bindgen] -pub fn acir_write_bytes(acir: JsValue) -> Vec { - console_error_panic_hook::set_once(); - let circuit: Circuit = JsValueSerdeExt::into_serde(&acir).unwrap(); - Circuit::serialize_circuit(&circuit) -} diff --git a/compiler/wasm/src/compile.rs b/compiler/wasm/src/compile.rs index e7fd3dd5212..54fdccf1369 100644 --- a/compiler/wasm/src/compile.rs +++ b/compiler/wasm/src/compile.rs @@ -2,16 +2,17 @@ use fm::FileManager; use gloo_utils::format::JsValueSerdeExt; use js_sys::{JsString, Object}; use nargo::artifacts::{ - contract::{PreprocessedContract, PreprocessedContractFunction}, + contract::{ContractArtifact, ContractFunctionArtifact}, debug::DebugArtifact, - program::PreprocessedProgram, + program::ProgramArtifact, }; use noirc_driver::{ - add_dep, compile_contract, compile_main, prepare_crate, prepare_dependency, CompileOptions, - CompiledContract, CompiledProgram, NOIR_ARTIFACT_VERSION_STRING, + add_dep, compile_contract, compile_main, file_manager_with_stdlib, prepare_crate, + prepare_dependency, CompileOptions, CompiledContract, CompiledProgram, + NOIR_ARTIFACT_VERSION_STRING, }; use noirc_frontend::{ - graph::{CrateGraph, CrateId, CrateName}, + graph::{CrateId, CrateName}, hir::Context, }; use serde::Deserialize; @@ -20,8 +21,6 @@ use wasm_bindgen::prelude::*; use crate::errors::{CompileError, JsCompileError}; -const BACKEND_IDENTIFIER: &str = "acvm-backend-barretenberg"; - #[wasm_bindgen(typescript_custom_section)] const DEPENDENCY_GRAPH: &'static str = r#" export type DependencyGraph = { @@ -32,14 +31,12 @@ export type DependencyGraph = { export type CompiledContract = { noir_version: string; name: string; - backend: string; functions: Array; events: Array; }; export type CompiledProgram = { noir_version: string; - backend: string; abi: any; bytecode: 
string; } @@ -120,15 +117,39 @@ impl JsCompileResult { } } -#[derive(Deserialize)] -struct DependencyGraph { - root_dependencies: Vec, - library_dependencies: HashMap>, +#[derive(Deserialize, Default)] +pub(crate) struct DependencyGraph { + pub(crate) root_dependencies: Vec, + pub(crate) library_dependencies: HashMap>, +} +#[wasm_bindgen] +// This is a map containing the paths of all of the files in the entry-point crate and +// the transitive dependencies of the entry-point crate. +// +// This is for all intents and purposes the file system that the compiler will use to resolve/compile +// files in the crate being compiled and its dependencies. +#[derive(Deserialize, Default)] +pub struct PathToFileSourceMap(pub(crate) HashMap); + +#[wasm_bindgen] +impl PathToFileSourceMap { + #[wasm_bindgen(constructor)] + pub fn new() -> PathToFileSourceMap { + PathToFileSourceMap::default() + } + // Inserts a path and its source code into the map. + // + // Returns true, if there was already source code in the map for the given path + pub fn add_source_code(&mut self, path: String, source_code: String) -> bool { + let path_buf = Path::new(&path).to_path_buf(); + let old_value = self.0.insert(path_buf, source_code); + old_value.is_some() + } } pub enum CompileResult { - Contract { contract: PreprocessedContract, debug: DebugArtifact }, - Program { program: PreprocessedProgram, debug: DebugArtifact }, + Contract { contract: ContractArtifact, debug: DebugArtifact }, + Program { program: ProgramArtifact, debug: DebugArtifact }, } #[wasm_bindgen] @@ -136,6 +157,7 @@ pub fn compile( entry_point: String, contracts: Option, dependency_graph: Option, + file_source_map: PathToFileSourceMap, ) -> Result { console_error_panic_hook::set_once(); @@ -146,10 +168,9 @@ pub fn compile( DependencyGraph { root_dependencies: vec![], library_dependencies: HashMap::new() } }; - let root = Path::new("/"); - let fm = FileManager::new(root, Box::new(get_non_stdlib_asset)); - let graph = CrateGraph::default(); - let mut context = Context::new(fm, graph); + let fm = file_manager_with_source_map(file_source_map); + + let mut context = Context::new(fm); let path = Path::new(&entry_point); let crate_id = prepare_crate(&mut context, path); @@ -158,10 +179,8 @@ pub fn compile( let compile_options = CompileOptions::default(); - // For now we default to plonk width = 3, though we can add it as a parameter - let np_language = acvm::Language::PLONKCSat { width: 3 }; - #[allow(deprecated)] - let is_opcode_supported = acvm::pwg::default_is_opcode_supported(np_language); + // For now we default to a bounded width of 3, though we can add it as a parameter + let expression_width = acvm::ExpressionWidth::Bounded { width: 3 }; if contracts.unwrap_or_default() { let compiled_contract = compile_contract(&mut context, crate_id, &compile_options) @@ -174,11 +193,9 @@ pub fn compile( })? .0; - let optimized_contract = - nargo::ops::optimize_contract(compiled_contract, np_language, &is_opcode_supported) - .expect("Contract optimization failed"); + let optimized_contract = nargo::ops::optimize_contract(compiled_contract, expression_width); - let compile_output = preprocess_contract(optimized_contract); + let compile_output = generate_contract_artifact(optimized_contract); Ok(JsCompileResult::new(compile_output)) } else { let compiled_program = compile_main(&mut context, crate_id, &compile_options, None, true) @@ -191,15 +208,38 @@ pub fn compile( })? 
.0; - let optimized_program = - nargo::ops::optimize_program(compiled_program, np_language, &is_opcode_supported) - .expect("Program optimization failed"); + let optimized_program = nargo::ops::optimize_program(compiled_program, expression_width); - let compile_output = preprocess_program(optimized_program); + let compile_output = generate_program_artifact(optimized_program); Ok(JsCompileResult::new(compile_output)) } } +// Create a new FileManager with the given source map +// +// Note: Use this method whenever initializing a new FileManager +// to ensure that the file manager contains all of the files +// that one intends the compiler to use. +// +// For all intents and purposes, the file manager being returned +// should be considered as immutable. +pub(crate) fn file_manager_with_source_map(source_map: PathToFileSourceMap) -> FileManager { + let root = Path::new(""); + let mut fm = file_manager_with_stdlib(root); + + for (path, source) in source_map.0 { + fm.add_file_with_source(path.as_path(), source); + } + + fm +} + +// Root dependencies are dependencies which the entry-point package relies upon. +// These will be in the Nargo.toml of the package being compiled. +// +// Library dependencies are transitive dependencies; for example, if the entry-point relies +// upon some library `lib1`. Then the packages that `lib1` depend upon will be placed in the +// `library_dependencies` list and the `lib1` will be placed in the `root_dependencies` list. fn process_dependency_graph(context: &mut Context, dependency_graph: DependencyGraph) { let mut crate_names: HashMap<&CrateName, CrateId> = HashMap::new(); @@ -232,98 +272,50 @@ fn add_noir_lib(context: &mut Context, library_name: &CrateName) -> CrateId { prepare_dependency(context, &path_to_lib) } -fn preprocess_program(program: CompiledProgram) -> CompileResult { +pub(crate) fn generate_program_artifact(program: CompiledProgram) -> CompileResult { let debug_artifact = DebugArtifact { - debug_symbols: vec![program.debug], - file_map: program.file_map, - warnings: program.warnings, - }; - - let preprocessed_program = PreprocessedProgram { - hash: program.hash, - backend: String::from(BACKEND_IDENTIFIER), - abi: program.abi, - noir_version: NOIR_ARTIFACT_VERSION_STRING.to_string(), - bytecode: program.circuit, + debug_symbols: vec![program.debug.clone()], + file_map: program.file_map.clone(), + warnings: program.warnings.clone(), }; - CompileResult::Program { program: preprocessed_program, debug: debug_artifact } + CompileResult::Program { program: program.into(), debug: debug_artifact } } -fn preprocess_contract(contract: CompiledContract) -> CompileResult { +pub(crate) fn generate_contract_artifact(contract: CompiledContract) -> CompileResult { let debug_artifact = DebugArtifact { debug_symbols: contract.functions.iter().map(|function| function.debug.clone()).collect(), file_map: contract.file_map, warnings: contract.warnings, }; - let preprocessed_functions = contract - .functions - .into_iter() - .map(|func| PreprocessedContractFunction { - name: func.name, - function_type: func.function_type, - is_internal: func.is_internal, - abi: func.abi, - bytecode: func.bytecode, - }) - .collect(); - - let preprocessed_contract = PreprocessedContract { + let functions = contract.functions.into_iter().map(ContractFunctionArtifact::from).collect(); + + let contract_artifact = ContractArtifact { noir_version: String::from(NOIR_ARTIFACT_VERSION_STRING), name: contract.name, - backend: String::from(BACKEND_IDENTIFIER), - functions: preprocessed_functions, + 
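To make the root-vs-library split and the in-memory file map concrete, here is a sketch using plain `std::collections` stand-ins for `PathToFileSourceMap` and `DependencyGraph` (the real types are wasm-bindgen values constructed from JavaScript); the `lib1`/`lib2` names and sources are placeholders mirroring the fixtures in the tests below.

```rust
use std::collections::HashMap;
use std::path::PathBuf;

// Stand-in for PathToFileSourceMap: every file the compiler may read.
#[derive(Default)]
struct SourceMap(HashMap<PathBuf, String>);

impl SourceMap {
    // Mirrors `add_source_code`: returns true if the path was already present
    // and its source has been replaced.
    fn add_source_code(&mut self, path: &str, source: &str) -> bool {
        self.0.insert(PathBuf::from(path), source.to_string()).is_some()
    }
}

// Stand-in for DependencyGraph: direct (root) vs transitive (library) deps.
#[derive(Default)]
struct DependencyGraph {
    root_dependencies: Vec<String>,
    library_dependencies: HashMap<String, Vec<String>>,
}

fn main() {
    let mut sources = SourceMap::default();
    assert!(!sources.add_source_code("main.nr", "fn main() {}"));
    assert!(!sources.add_source_code("lib1/lib.nr", "fn foo() {}"));
    assert!(!sources.add_source_code("lib2/lib.nr", "fn bar() {}"));
    // Re-adding a path reports that an earlier entry was overwritten.
    assert!(sources.add_source_code("main.nr", "fn main() { }"));

    // The entry point depends on lib1 directly; lib1 in turn depends on lib2.
    let graph = DependencyGraph {
        root_dependencies: vec!["lib1".into()],
        library_dependencies: HashMap::from([("lib1".into(), vec!["lib2".into()])]),
    };
    assert_eq!(graph.root_dependencies.len(), 1);
    assert_eq!(graph.library_dependencies.get("lib1"), Some(&vec!["lib2".to_string()]));
}
```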
functions, events: contract.events, }; - CompileResult::Contract { contract: preprocessed_contract, debug: debug_artifact } -} - -cfg_if::cfg_if! { - if #[cfg(target_os = "wasi")] { - fn get_non_stdlib_asset(path_to_file: &Path) -> std::io::Result { - std::fs::read_to_string(path_to_file) - } - } else { - use std::io::{Error, ErrorKind}; - - #[wasm_bindgen(module = "@noir-lang/source-resolver")] - extern "C" { - #[wasm_bindgen(catch)] - fn read_file(path: &str) -> Result; - } - - fn get_non_stdlib_asset(path_to_file: &Path) -> std::io::Result { - let path_str = path_to_file.to_str().unwrap(); - match read_file(path_str) { - Ok(buffer) => Ok(buffer), - Err(_) => Err(Error::new(ErrorKind::Other, "could not read file using wasm")), - } - } - } + CompileResult::Contract { contract: contract_artifact, debug: debug_artifact } } #[cfg(test)] mod test { - use fm::FileManager; use noirc_driver::prepare_crate; - use noirc_frontend::{ - graph::{CrateGraph, CrateName}, - hir::Context, - }; + use noirc_frontend::{graph::CrateName, hir::Context}; - use super::{process_dependency_graph, DependencyGraph}; - use std::{collections::HashMap, path::Path}; + use crate::compile::PathToFileSourceMap; - fn mock_get_non_stdlib_asset(_path_to_file: &Path) -> std::io::Result { - Ok("".to_string()) - } + use super::{file_manager_with_source_map, process_dependency_graph, DependencyGraph}; + use std::{collections::HashMap, path::Path}; - fn setup_test_context() -> Context { - let fm = FileManager::new(Path::new("/"), Box::new(mock_get_non_stdlib_asset)); - let graph = CrateGraph::default(); - let mut context = Context::new(fm, graph); + fn setup_test_context(source_map: PathToFileSourceMap) -> Context<'static> { + let mut fm = file_manager_with_source_map(source_map); + // Add this due to us calling prepare_crate on "/main.nr" below + fm.add_file_with_source(Path::new("/main.nr"), "fn foo() {}".to_string()); + let mut context = Context::new(fm); prepare_crate(&mut context, Path::new("/main.nr")); context @@ -335,10 +327,12 @@ mod test { #[test] fn test_works_with_empty_dependency_graph() { - let mut context = setup_test_context(); let dependency_graph = DependencyGraph { root_dependencies: vec![], library_dependencies: HashMap::new() }; + let source_map = PathToFileSourceMap::default(); + let mut context = setup_test_context(source_map); + process_dependency_graph(&mut context, dependency_graph); // one stdlib + one root crate @@ -347,12 +341,19 @@ mod test { #[test] fn test_works_with_root_dependencies() { - let mut context = setup_test_context(); let dependency_graph = DependencyGraph { root_dependencies: vec![crate_name("lib1")], library_dependencies: HashMap::new(), }; + let source_map = PathToFileSourceMap( + vec![(Path::new("lib1/lib.nr").to_path_buf(), "fn foo() {}".to_string())] + .into_iter() + .collect(), + ); + + let mut context = setup_test_context(source_map); + process_dependency_graph(&mut context, dependency_graph); assert_eq!(context.crate_graph.number_of_crates(), 3); @@ -360,12 +361,18 @@ mod test { #[test] fn test_works_with_duplicate_root_dependencies() { - let mut context = setup_test_context(); let dependency_graph = DependencyGraph { root_dependencies: vec![crate_name("lib1"), crate_name("lib1")], library_dependencies: HashMap::new(), }; + let source_map = PathToFileSourceMap( + vec![(Path::new("lib1/lib.nr").to_path_buf(), "fn foo() {}".to_string())] + .into_iter() + .collect(), + ); + let mut context = setup_test_context(source_map); + process_dependency_graph(&mut context, dependency_graph); 
assert_eq!(context.crate_graph.number_of_crates(), 3); @@ -373,7 +380,6 @@ mod test { #[test] fn test_works_with_transitive_dependencies() { - let mut context = setup_test_context(); let dependency_graph = DependencyGraph { root_dependencies: vec![crate_name("lib1")], library_dependencies: HashMap::from([ @@ -382,6 +388,17 @@ mod test { ]), }; + let source_map = PathToFileSourceMap( + vec![ + (Path::new("lib1/lib.nr").to_path_buf(), "fn foo() {}".to_string()), + (Path::new("lib2/lib.nr").to_path_buf(), "fn foo() {}".to_string()), + (Path::new("lib3/lib.nr").to_path_buf(), "fn foo() {}".to_string()), + ] + .into_iter() + .collect(), + ); + + let mut context = setup_test_context(source_map); process_dependency_graph(&mut context, dependency_graph); assert_eq!(context.crate_graph.number_of_crates(), 5); @@ -389,12 +406,22 @@ mod test { #[test] fn test_works_with_missing_dependencies() { - let mut context = setup_test_context(); let dependency_graph = DependencyGraph { root_dependencies: vec![crate_name("lib1")], library_dependencies: HashMap::from([(crate_name("lib2"), vec![crate_name("lib3")])]), }; + let source_map = PathToFileSourceMap( + vec![ + (Path::new("lib1/lib.nr").to_path_buf(), "fn foo() {}".to_string()), + (Path::new("lib2/lib.nr").to_path_buf(), "fn foo() {}".to_string()), + (Path::new("lib3/lib.nr").to_path_buf(), "fn foo() {}".to_string()), + ] + .into_iter() + .collect(), + ); + + let mut context = setup_test_context(source_map); process_dependency_graph(&mut context, dependency_graph); assert_eq!(context.crate_graph.number_of_crates(), 5); diff --git a/compiler/wasm/src/compile_new.rs b/compiler/wasm/src/compile_new.rs new file mode 100644 index 00000000000..9ac080ffcae --- /dev/null +++ b/compiler/wasm/src/compile_new.rs @@ -0,0 +1,341 @@ +use crate::compile::{ + file_manager_with_source_map, generate_contract_artifact, generate_program_artifact, + JsCompileResult, PathToFileSourceMap, +}; +use crate::errors::{CompileError, JsCompileError}; +use noirc_driver::{ + add_dep, compile_contract, compile_main, prepare_crate, prepare_dependency, CompileOptions, +}; +use noirc_frontend::{ + graph::{CrateId, CrateName}, + hir::Context, +}; +use std::path::Path; +use wasm_bindgen::prelude::wasm_bindgen; + +/// This is a wrapper class that is wasm-bindgen compatible +/// We do not use js_name and rename it like CrateId because +/// then the impl block is not picked up in javascript. +#[wasm_bindgen] +pub struct CompilerContext { + // `wasm_bindgen` currently doesn't allow lifetime parameters on structs so we must use a `'static` lifetime. + // `Context` must then own the `FileManager` to satisfy this lifetime. + context: Context<'static>, +} + +#[wasm_bindgen(js_name = "CrateId")] +#[derive(Debug, Copy, Clone)] +pub struct CrateIDWrapper(CrateId); + +#[wasm_bindgen] +impl CompilerContext { + #[wasm_bindgen(constructor)] + pub fn new(source_map: PathToFileSourceMap) -> CompilerContext { + console_error_panic_hook::set_once(); + + let fm = file_manager_with_source_map(source_map); + CompilerContext { context: Context::new(fm) } + } + + #[cfg(test)] + pub(crate) fn crate_graph(&self) -> &noirc_frontend::graph::CrateGraph { + &self.context.crate_graph + } + #[cfg(test)] + pub(crate) fn root_crate_id(&self) -> CrateIDWrapper { + CrateIDWrapper(*self.context.root_crate_id()) + } + + // Processes the root crate by adding it to the package graph and automatically + // importing the stdlib as a dependency for it. 
+ // + // Its ID in the package graph is returned + pub fn process_root_crate(&mut self, path_to_crate: String) -> CrateIDWrapper { + let path_to_crate = Path::new(&path_to_crate); + + // Adds the root crate to the crate graph and returns its crate id + CrateIDWrapper(prepare_crate(&mut self.context, path_to_crate)) + } + + pub fn process_dependency_crate(&mut self, path_to_crate: String) -> CrateIDWrapper { + let path_to_crate = Path::new(&path_to_crate); + + // Adds the root crate to the crate graph and returns its crate id + CrateIDWrapper(prepare_dependency(&mut self.context, path_to_crate)) + } + + // Adds a named edge from one crate to the other. + // + // For example, lets say we have two crates CrateId1 and CrateId2 + // This function will add an edge from CrateId1 to CrateId2 and the edge will be named `crate_name` + // + // This essentially says that CrateId1 depends on CrateId2 and the dependency is named `crate_name` + // + // We pass references to &CrateIdWrapper even though it is a copy because Rust's move semantics are + // not respected once we use javascript. ie it will actually allocated a new object in javascript + // then deallocate that object if we do not pass as a reference. + pub fn add_dependency_edge( + &mut self, + crate_name: String, + from: &CrateIDWrapper, + to: &CrateIDWrapper, + ) -> Result<(), JsCompileError> { + let parsed_crate_name: CrateName = + crate_name.parse().map_err(|err_string| JsCompileError::new(err_string, Vec::new()))?; + + add_dep(&mut self.context, from.0, to.0, parsed_crate_name); + Ok(()) + } + + pub fn compile_program( + mut self, + program_width: usize, + ) -> Result { + let compile_options = CompileOptions::default(); + let np_language = acvm::ExpressionWidth::Bounded { width: program_width }; + + let root_crate_id = *self.context.root_crate_id(); + + let compiled_program = + compile_main(&mut self.context, root_crate_id, &compile_options, None, true) + .map_err(|errs| { + CompileError::with_file_diagnostics( + "Failed to compile program", + errs, + &self.context.file_manager, + ) + })? + .0; + + let optimized_program = nargo::ops::optimize_program(compiled_program, np_language); + + let compile_output = generate_program_artifact(optimized_program); + Ok(JsCompileResult::new(compile_output)) + } + + pub fn compile_contract( + mut self, + program_width: usize, + ) -> Result { + let compile_options = CompileOptions::default(); + let np_language = acvm::ExpressionWidth::Bounded { width: program_width }; + let root_crate_id = *self.context.root_crate_id(); + + let compiled_contract = + compile_contract(&mut self.context, root_crate_id, &compile_options) + .map_err(|errs| { + CompileError::with_file_diagnostics( + "Failed to compile contract", + errs, + &self.context.file_manager, + ) + })? 
+ .0; + + let optimized_contract = nargo::ops::optimize_contract(compiled_contract, np_language); + + let compile_output = generate_contract_artifact(optimized_contract); + Ok(JsCompileResult::new(compile_output)) + } +} + +/// This is a method that exposes the same API as `compile` +/// But uses the Context based APi internally +#[wasm_bindgen] +pub fn compile_( + entry_point: String, + contracts: Option, + dependency_graph: Option, + file_source_map: PathToFileSourceMap, +) -> Result { + use std::collections::HashMap; + + console_error_panic_hook::set_once(); + + let dependency_graph: crate::compile::DependencyGraph = + if let Some(dependency_graph) = dependency_graph { + ::into_serde( + &wasm_bindgen::JsValue::from(dependency_graph), + ) + .map_err(|err| err.to_string())? + } else { + crate::compile::DependencyGraph::default() + }; + + let mut compiler_context = CompilerContext::new(file_source_map); + + // Set the root crate + let root_id = compiler_context.process_root_crate(entry_point.clone()); + + let add_noir_lib = |context: &mut CompilerContext, lib_name: &CrateName| -> CrateIDWrapper { + let lib_name_string = lib_name.to_string(); + let path_to_lib = Path::new(&lib_name_string) + .join("lib.nr") + .to_str() + .expect("paths are expected to be valid utf-8") + .to_string(); + context.process_dependency_crate(path_to_lib) + }; + + // Add the dependency graph + let mut crate_names: HashMap = HashMap::new(); + // + // Process the direct dependencies of the root + for lib_name in dependency_graph.root_dependencies { + let lib_name_string = lib_name.to_string(); + + let crate_id = add_noir_lib(&mut compiler_context, &lib_name); + + crate_names.insert(lib_name.clone(), crate_id); + + // Add the dependency edges + compiler_context.add_dependency_edge(lib_name_string, &root_id, &crate_id)?; + } + + // Process the transitive dependencies of the root + for (lib_name, dependencies) in &dependency_graph.library_dependencies { + // first create the library crate if needed + // this crate might not have been registered yet because of the order of the HashMap + // e.g. 
{root: [lib1], libs: { lib2 -> [lib3], lib1 -> [lib2] }} + let crate_id = *crate_names + .entry(lib_name.clone()) + .or_insert_with(|| add_noir_lib(&mut compiler_context, lib_name)); + + for dependency_name in dependencies { + let dependency_name_string = dependency_name.to_string(); + + let dep_crate_id = crate_names + .entry(dependency_name.clone()) + .or_insert_with(|| add_noir_lib(&mut compiler_context, dependency_name)); + + compiler_context.add_dependency_edge( + dependency_name_string, + &crate_id, + dep_crate_id, + )?; + } + } + + let is_contract = contracts.unwrap_or(false); + let program_width = 3; + + if is_contract { + compiler_context.compile_contract(program_width) + } else { + compiler_context.compile_program(program_width) + } +} + +#[cfg(test)] +mod test { + use noirc_driver::prepare_crate; + use noirc_frontend::hir::Context; + + use crate::compile::{file_manager_with_source_map, PathToFileSourceMap}; + + use std::path::Path; + + use super::CompilerContext; + + fn setup_test_context(source_map: PathToFileSourceMap) -> CompilerContext { + let mut fm = file_manager_with_source_map(source_map); + // Add this due to us calling prepare_crate on "/main.nr" below + fm.add_file_with_source(Path::new("/main.nr"), "fn foo() {}".to_string()); + + let mut context = Context::new(fm); + prepare_crate(&mut context, Path::new("/main.nr")); + + CompilerContext { context } + } + + #[test] + fn test_works_with_empty_dependency_graph() { + let source_map = PathToFileSourceMap::default(); + let context = setup_test_context(source_map); + + // one stdlib + one root crate + assert_eq!(context.crate_graph().number_of_crates(), 2); + } + + #[test] + fn test_works_with_root_dependencies() { + let source_map = PathToFileSourceMap( + vec![(Path::new("lib1/lib.nr").to_path_buf(), "fn foo() {}".to_string())] + .into_iter() + .collect(), + ); + + let mut context = setup_test_context(source_map); + context.process_dependency_crate("lib1/lib.nr".to_string()); + + assert_eq!(context.crate_graph().number_of_crates(), 3); + } + + #[test] + fn test_works_with_duplicate_root_dependencies() { + let source_map = PathToFileSourceMap( + vec![(Path::new("lib1/lib.nr").to_path_buf(), "fn foo() {}".to_string())] + .into_iter() + .collect(), + ); + let mut context = setup_test_context(source_map); + + let lib1_crate_id = context.process_dependency_crate("lib1/lib.nr".to_string()); + let root_crate_id = context.root_crate_id(); + + context.add_dependency_edge("lib1".to_string(), &root_crate_id, &lib1_crate_id).unwrap(); + context.add_dependency_edge("lib1".to_string(), &root_crate_id, &lib1_crate_id).unwrap(); + + assert_eq!(context.crate_graph().number_of_crates(), 3); + } + + #[test] + fn test_works_with_transitive_dependencies() { + let source_map = PathToFileSourceMap( + vec![ + (Path::new("lib1/lib.nr").to_path_buf(), "fn foo() {}".to_string()), + (Path::new("lib2/lib.nr").to_path_buf(), "fn foo() {}".to_string()), + (Path::new("lib3/lib.nr").to_path_buf(), "fn foo() {}".to_string()), + ] + .into_iter() + .collect(), + ); + + let mut context = setup_test_context(source_map); + + let lib1_crate_id = context.process_dependency_crate("lib1/lib.nr".to_string()); + let lib2_crate_id = context.process_dependency_crate("lib2/lib.nr".to_string()); + let lib3_crate_id = context.process_dependency_crate("lib3/lib.nr".to_string()); + let root_crate_id = context.root_crate_id(); + + context.add_dependency_edge("lib1".to_string(), &root_crate_id, &lib1_crate_id).unwrap(); + context.add_dependency_edge("lib2".to_string(), 
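The `entry().or_insert_with()` bookkeeping above guarantees each library is registered exactly once, no matter how often it appears across the root and transitive dependency lists. A self-contained sketch of that deduplication, with a plain counter standing in for `CrateId`:

```rust
use std::collections::HashMap;

fn main() {
    let mut next_id = 0u32;
    let mut crate_ids: HashMap<&str, u32> = HashMap::new();

    // lib1 shows up both as a root dependency and as a key in the
    // library-dependency map; it must still map to a single crate id.
    let mentions = ["lib1", "lib2", "lib1", "lib3", "lib2"];
    for name in mentions {
        // Only allocate a new id the first time a library is mentioned.
        crate_ids.entry(name).or_insert_with(|| {
            let id = next_id;
            next_id += 1;
            id
        });
    }

    assert_eq!(crate_ids.len(), 3);
    assert_eq!(crate_ids["lib1"], 0);
}
```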
&lib1_crate_id, &lib2_crate_id).unwrap(); + context.add_dependency_edge("lib3".to_string(), &lib2_crate_id, &lib3_crate_id).unwrap(); + + assert_eq!(context.crate_graph().number_of_crates(), 5); + } + + #[test] + fn test_works_with_missing_dependencies() { + let source_map = PathToFileSourceMap( + vec![ + (Path::new("lib1/lib.nr").to_path_buf(), "fn foo() {}".to_string()), + (Path::new("lib2/lib.nr").to_path_buf(), "fn foo() {}".to_string()), + (Path::new("lib3/lib.nr").to_path_buf(), "fn foo() {}".to_string()), + ] + .into_iter() + .collect(), + ); + let mut context = setup_test_context(source_map); + + let lib1_crate_id = context.process_dependency_crate("lib1/lib.nr".to_string()); + let lib2_crate_id = context.process_dependency_crate("lib2/lib.nr".to_string()); + let lib3_crate_id = context.process_dependency_crate("lib3/lib.nr".to_string()); + let root_crate_id = context.root_crate_id(); + + context.add_dependency_edge("lib1".to_string(), &root_crate_id, &lib1_crate_id).unwrap(); + context.add_dependency_edge("lib3".to_string(), &lib2_crate_id, &lib3_crate_id).unwrap(); + + assert_eq!(context.crate_graph().number_of_crates(), 5); + } +} diff --git a/compiler/wasm/src/index.cts b/compiler/wasm/src/index.cts new file mode 100644 index 00000000000..31d169e8268 --- /dev/null +++ b/compiler/wasm/src/index.cts @@ -0,0 +1,49 @@ +import { FileManager } from './noir/file-manager/file-manager'; +import { createNodejsFileManager } from './noir/file-manager/nodejs-file-manager'; +import { NoirWasmCompiler } from './noir/noir-wasm-compiler'; +import { LogData, LogFn } from './utils'; +import { CompilationResult } from './types/noir_artifact'; + +async function compile( + fileManager: FileManager, + projectPath?: string, + logFn?: LogFn, + debugLogFn?: LogFn, +): Promise { + if (logFn && !debugLogFn) { + debugLogFn = logFn; + } + + const cjs = await require('../build/cjs'); + const compiler = await NoirWasmCompiler.new( + fileManager, + projectPath ?? fileManager.getDataDir(), + cjs, + new cjs.PathToFileSourceMap(), + { + log: + logFn ?? + function (msg: string, data?: LogData) { + if (data) { + console.log(msg, data); + } else { + console.log(msg); + } + }, + debugLog: + debugLogFn ?? + function (msg: string, data?: LogData) { + if (data) { + console.debug(msg, data); + } else { + console.debug(msg); + } + }, + }, + ); + return await compiler.compile(); +} + +const createFileManager = createNodejsFileManager; + +export { compile, createFileManager, CompilationResult }; diff --git a/compiler/wasm/src/index.mts b/compiler/wasm/src/index.mts new file mode 100644 index 00000000000..6340e2dc37a --- /dev/null +++ b/compiler/wasm/src/index.mts @@ -0,0 +1,51 @@ +import { FileManager } from './noir/file-manager/file-manager'; +import { createNodejsFileManager } from './noir/file-manager/nodejs-file-manager'; +import { NoirWasmCompiler } from './noir/noir-wasm-compiler'; +import { LogData, LogFn } from './utils'; +import { CompilationResult } from './types/noir_artifact'; + +async function compile( + fileManager: FileManager, + projectPath?: string, + logFn?: LogFn, + debugLogFn?: LogFn, +): Promise { + if (logFn && !debugLogFn) { + debugLogFn = logFn; + } + + const esm = await import(/* webpackMode: "eager" */ '../build/esm'); + await esm.default(); + + const compiler = await NoirWasmCompiler.new( + fileManager, + projectPath ?? fileManager.getDataDir(), + esm, + new esm.PathToFileSourceMap(), + { + log: + logFn ?? 
+ function (msg: string, data?: LogData) { + if (data) { + console.log(msg, data); + } else { + console.log(msg); + } + }, + debugLog: + debugLogFn ?? + function (msg: string, data?: LogData) { + if (data) { + console.debug(msg, data); + } else { + console.debug(msg); + } + }, + }, + ); + return await compiler.compile(); +} + +const createFileManager = createNodejsFileManager; + +export { compile, createFileManager, CompilationResult }; diff --git a/compiler/wasm/src/lib.rs b/compiler/wasm/src/lib.rs index 9f2f558f85c..6d737a0ea6d 100644 --- a/compiler/wasm/src/lib.rs +++ b/compiler/wasm/src/lib.rs @@ -4,21 +4,26 @@ // See Cargo.toml for explanation. use getrandom as _; +use rust_embed as _; use gloo_utils::format::JsValueSerdeExt; -use log::Level; + use noirc_driver::{GIT_COMMIT, GIT_DIRTY, NOIRC_VERSION}; use serde::{Deserialize, Serialize}; -use std::str::FromStr; -use wasm_bindgen::prelude::*; +use tracing_subscriber::prelude::*; +use tracing_subscriber::EnvFilter; +use tracing_web::MakeWebConsoleWriter; -mod circuit; mod compile; +mod compile_new; mod errors; -pub use circuit::{acir_read_bytes, acir_write_bytes}; pub use compile::compile; +// Expose the new Context-Centric API +pub use compile_new::{compile_, CompilerContext, CrateIDWrapper}; +use wasm_bindgen::{prelude::wasm_bindgen, JsValue}; + #[derive(Serialize, Deserialize)] pub struct BuildInfo { git_hash: &'static str, @@ -27,14 +32,21 @@ pub struct BuildInfo { } #[wasm_bindgen] -pub fn init_log_level(level: String) { +pub fn init_log_level(filter: String) { // Set the static variable from Rust use std::sync::Once; - let log_level = Level::from_str(&level).unwrap_or(Level::Error); + let filter: EnvFilter = + filter.parse().expect("Could not parse log filter while initializing logger"); + static SET_HOOK: Once = Once::new(); SET_HOOK.call_once(|| { - wasm_logger::init(wasm_logger::Config::new(log_level)); + let fmt_layer = tracing_subscriber::fmt::layer() + .with_ansi(false) + .without_time() + .with_writer(MakeWebConsoleWriter::new()); + + tracing_subscriber::registry().with(fmt_layer.with_filter(filter)).init(); }); } diff --git a/compiler/wasm/src/noir/dependencies/dependency-manager.ts b/compiler/wasm/src/noir/dependencies/dependency-manager.ts new file mode 100644 index 00000000000..944ec0070c5 --- /dev/null +++ b/compiler/wasm/src/noir/dependencies/dependency-manager.ts @@ -0,0 +1,150 @@ +import { join } from 'path'; + +import { Package } from '../package'; +import { Dependency, DependencyResolver } from './dependency-resolver'; +import { DependencyConfig } from '../../types/noir_package_config'; +import { LogData, LogFn } from '../../utils'; + +/** + * Noir Dependency Resolver + */ +export class DependencyManager { + #entryPoint: Package; + #libraries = new Map(); + #dependencies = new Map(); + #log: LogFn; + #resolvers: readonly DependencyResolver[]; + + /** + * Creates a new dependency resolver + * @param resolvers - A list of dependency resolvers to use + * @param entryPoint - The entry point of the project + */ + constructor(resolvers: readonly DependencyResolver[] = [], entryPoint: Package) { + this.#resolvers = resolvers; + this.#entryPoint = entryPoint; + this.#log = (msg: string, _data?: LogData) => { + console.log(msg); + }; + } + + /** + * Gets dependencies for the entry point + */ + public getEntrypointDependencies() { + return this.#dependencies.get('') ?? 
[]; + } + + /** + * Get transitive libraries used by the package + */ + public getLibraries() { + return Array.from(this.#libraries.entries()); + } + + /** + * A map of library dependencies + */ + public getLibraryDependencies() { + const entries = Array.from(this.#dependencies.entries()); + return Object.fromEntries(entries.filter(([name]) => name !== '')); + } + + /** + * Resolves dependencies for a package. + */ + public async resolveDependencies(): Promise { + await this.#breadthFirstResolveDependencies(); + } + + /** + * Gets the version of a dependency in the dependency tree + * @param name - Dependency name + * @returns The dependency's version + */ + public getVersionOf(name: string): string | undefined { + const dep = this.#libraries.get(name); + return dep?.version; + } + + async #breadthFirstResolveDependencies(): Promise { + /** Represents a package to resolve dependencies for */ + type Job = { + /** Package name */ + packageName: string; + /** The package location */ + noirPackage: Package; + }; + + const queue: Job[] = [ + { + packageName: '', + noirPackage: this.#entryPoint, + }, + ]; + + while (queue.length > 0) { + const { packageName, noirPackage } = queue.shift()!; + for (const [name, config] of Object.entries(noirPackage.getDependencies())) { + // TODO what happens if more than one package has the same name but different versions? + if (this.#libraries.has(name)) { + this.#log(`skipping already resolved dependency ${name}`); + this.#dependencies.set(packageName, [...(this.#dependencies.get(packageName) ?? []), name]); + + continue; + } + const dependency = await this.#resolveDependency(noirPackage, config); + if (dependency.package.getType() !== 'lib') { + this.#log(`Non-library package ${name}`, config); + throw new Error(`Dependency ${name} is not a library`); + } + + this.#libraries.set(name, dependency); + this.#dependencies.set(packageName, [...(this.#dependencies.get(packageName) ?? 
[]), name]); + + queue.push({ + noirPackage: dependency.package, + packageName: name, + }); + } + } + } + + async #resolveDependency(pkg: Package, config: DependencyConfig): Promise { + let dependency: Dependency | null = null; + for (const resolver of this.#resolvers) { + dependency = await resolver.resolveDependency(pkg, config); + if (dependency) { + break; + } + } + + if (!dependency) { + throw new Error('Dependency not resolved'); + } + + return dependency; + } + + /** + * Gets the names of the crates in this dependency list + */ + public getPackageNames() { + return [...this.#libraries.keys()]; + } + + /** + * Looks up a dependency + * @param sourceId - The source being resolved + * @returns The path to the resolved file + */ + public findFile(sourceId: string): string | null { + const [lib, ...path] = sourceId.split('/').filter((x) => x); + const dep = this.#libraries.get(lib); + if (dep) { + return join(dep.package.getSrcPath(), ...path); + } else { + return null; + } + } +} diff --git a/compiler/wasm/src/noir/dependencies/dependency-resolver.ts b/compiler/wasm/src/noir/dependencies/dependency-resolver.ts new file mode 100644 index 00000000000..266d2075e1e --- /dev/null +++ b/compiler/wasm/src/noir/dependencies/dependency-resolver.ts @@ -0,0 +1,24 @@ +import { DependencyConfig } from '../../types/noir_package_config'; +import { Package } from '../package'; + +/** + * A Noir dependency + */ +export type Dependency = { + /** version string as determined by the resolver */ + version?: string; + /** the actual package source code */ + package: Package; +}; + +/** + * Resolves a dependency for a package. + */ +export interface DependencyResolver { + /** + * Resolve a dependency for a package. + * @param pkg - The package to resolve dependencies for + * @param dep - The dependency config to resolve + */ + resolveDependency(pkg: Package, dep: DependencyConfig): Promise; +} diff --git a/compiler/wasm/src/noir/dependencies/github-dependency-resolver.ts b/compiler/wasm/src/noir/dependencies/github-dependency-resolver.ts new file mode 100644 index 00000000000..8b08b6f0dd8 --- /dev/null +++ b/compiler/wasm/src/noir/dependencies/github-dependency-resolver.ts @@ -0,0 +1,145 @@ +import { delimiter, join, sep } from 'path'; +import { unzip } from 'unzipit'; + +import { FileManager } from '../file-manager/file-manager'; +import { Package } from '../package'; +import { Dependency, DependencyResolver } from './dependency-resolver'; +import { DependencyConfig, GitDependencyConfig } from '../../types/noir_package_config'; +import { LogData } from '../../utils'; + +/** + * Downloads dependencies from github + */ +export class GithubDependencyResolver implements DependencyResolver { + #fm: FileManager; + #log; + + constructor(fm: FileManager) { + this.#fm = fm; + this.#log = (msg: string, _data?: LogData) => { + console.log(msg); + }; + } + + /** + * Resolves a dependency from github. Returns null if URL is for a different website. + * @param _pkg - The package to resolve the dependency for + * @param dependency - The dependency configuration + * @returns asd + */ + async resolveDependency(_pkg: Package, dependency: DependencyConfig): Promise { + // TODO accept ssh urls? + // TODO github authentication? 
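+    // Illustrative only: a Nargo.toml entry handled here would look roughly like
+    //   lib_a = { git = "https://github.com/example/lib_a", tag = "v1.0.0" }
+    // It arrives as a GitDependencyConfig; anything without a `git` field, or hosted
+    // outside github.com, returns null so another resolver can try it.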
+ if (!('git' in dependency) || !dependency.git.startsWith('https://github.com')) { + return null; + } + + const archivePath = await this.#fetchZipFromGithub(dependency); + const libPath = await this.#extractZip(dependency, archivePath); + return { + version: dependency.tag, + package: await Package.open(libPath, this.#fm), + }; + } + + async #fetchZipFromGithub(dependency: Pick): Promise { + if (!dependency.git.startsWith('https://github.com')) { + throw new Error('Only github dependencies are supported'); + } + + const url = resolveGithubCodeArchive(dependency, 'zip'); + const localArchivePath = join('archives', safeFilename(url.pathname)); + + // TODO should check signature before accepting any file + if (this.#fm.hasFileSync(localArchivePath)) { + this.#log('using cached archive', { url: url.href, path: localArchivePath }); + return localArchivePath; + } + + const response = await fetch(url, { + method: 'GET', + }); + + if (!response.ok || !response.body) { + throw new Error(`Failed to fetch ${url}: ${response.statusText}`); + } + + const tmpFile = localArchivePath + '.tmp'; + await this.#fm.writeFile(tmpFile, response.body); + await this.#fm.moveFile(tmpFile, localArchivePath); + + return localArchivePath; + } + + async #extractZip(dependency: GitDependencyConfig, archivePath: string): Promise { + const gitUrl = new URL(dependency.git); + // extract the archive to this location + const extractLocation = join('libs', safeFilename(gitUrl.pathname + '@' + (dependency.tag ?? 'HEAD'))); + + // where we expect to find this package after extraction + // it might already exist if the archive got unzipped previously + const packagePath = join(extractLocation, dependency.directory ?? ''); + + if (this.#fm.hasFileSync(packagePath)) { + return packagePath; + } + + const { entries } = await unzip(await this.#fm.readFile(archivePath)); + + // extract to a temporary directory, then move it to the final location + // TODO empty the temp directory first + const tmpExtractLocation = extractLocation + '.tmp'; + for (const entry of Object.values(entries)) { + if (entry.isDirectory) { + continue; + } + + // remove the first path segment, because it'll be the archive name + const name = stripSegments(entry.name, 1); + const path = join(tmpExtractLocation, name); + await this.#fm.writeFile(path, (await entry.blob()).stream()); + } + + await this.#fm.moveFile(tmpExtractLocation, extractLocation); + + return packagePath; + } +} + +/** + * Strips the first n segments from a path + */ +function stripSegments(path: string, count: number): string { + const segments = path.split(sep).filter(Boolean); + return segments.slice(count).join(sep); +} + +/** + * Returns a safe filename for a value + * @param val - The value to convert + */ +export function safeFilename(val: string): string { + if (!val) { + throw new Error('invalid value'); + } + + return val.replaceAll(sep, '_').replaceAll(delimiter, '_').replace(/^_+/, ''); +} + +/** + * Resolves a dependency's archive URL. + * @param dependency - The dependency configuration + * @returns The URL to the library archive + */ +export function resolveGithubCodeArchive(dependency: GitDependencyConfig, format: 'zip' | 'tar'): URL { + const gitUrl = new URL(dependency.git); + const [owner, repo] = gitUrl.pathname.slice(1).split('/'); + const ref = dependency.tag ?? 'HEAD'; + const extension = format === 'zip' ? 
'zip' : 'tar.gz'; + + if (!owner || !repo || gitUrl.hostname !== 'github.com') { + throw new Error('Invalid Github repository URL'); + } + + return new URL(`https://github.com/${owner}/${repo}/archive/${ref}.${extension}`); +} diff --git a/compiler/wasm/src/noir/dependencies/local-dependency-resolver.ts b/compiler/wasm/src/noir/dependencies/local-dependency-resolver.ts new file mode 100644 index 00000000000..50338421143 --- /dev/null +++ b/compiler/wasm/src/noir/dependencies/local-dependency-resolver.ts @@ -0,0 +1,31 @@ +import { isAbsolute, join } from 'path'; + +import { FileManager } from '../file-manager/file-manager'; +import { Package } from '../package'; +import { Dependency, DependencyResolver } from './dependency-resolver'; +import { DependencyConfig } from '../../types/noir_package_config'; + +/** + * Resolves dependencies on-disk, relative to current package + */ +export class LocalDependencyResolver implements DependencyResolver { + #fm: FileManager; + + constructor(fm: FileManager) { + this.#fm = fm; + } + + async resolveDependency(parent: Package, config: DependencyConfig): Promise { + if ('path' in config) { + const parentPath = parent.getPackagePath(); + const dependencyPath = isAbsolute(config.path) ? config.path : join(parentPath, config.path); + return { + // unknown version, Nargo.toml doesn't have a version field + version: undefined, + package: await Package.open(dependencyPath, this.#fm), + }; + } else { + return null; + } + } +} diff --git a/compiler/wasm/src/noir/file-manager/file-manager.ts b/compiler/wasm/src/noir/file-manager/file-manager.ts new file mode 100644 index 00000000000..297e9f541e6 --- /dev/null +++ b/compiler/wasm/src/noir/file-manager/file-manager.ts @@ -0,0 +1,163 @@ +import { dirname, isAbsolute, join } from 'path'; + +/** + * A file system interface that matches the node fs module. + */ +export interface FileSystem { + /** Checks if the file exists */ + existsSync: (path: string) => boolean; + /** Creates a directory structure */ + mkdir: ( + dir: string, + opts?: { + /** Create parent directories as needed */ + recursive: boolean; + }, + ) => Promise; + /** Writes a file */ + writeFile: (path: string, data: Uint8Array) => Promise; + /** Reads a file */ + readFile: (path: string, encoding?: 'utf-8') => Promise; + /** Renames a file */ + rename: (oldPath: string, newPath: string) => Promise; + /** Reads a directory */ + readdir: ( + path: string, + options?: { + /** Traverse child directories recursively */ + recursive: boolean; + }, + ) => Promise; +} + +/** + * A file manager that writes file to a specific directory but reads globally. + */ +export class FileManager { + #fs: FileSystem; + #dataDir: string; + + constructor(fs: FileSystem, dataDir: string) { + this.#fs = fs; + this.#dataDir = dataDir; + } + + /** + * Returns the data directory + */ + getDataDir() { + return this.#dataDir; + } + + /** + * Saves a file to the data directory. 
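+   * The stream is drained into memory and written out as a single buffer; parent directories are created as needed.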
+ * @param name - File to save + * @param stream - File contents + */ + public async writeFile(name: string, stream: ReadableStream): Promise { + if (isAbsolute(name)) { + throw new Error("can't write absolute path"); + } + + const path = this.#getPath(name); + const chunks: Uint8Array[] = []; + const reader = stream.getReader(); + + // eslint-disable-next-line no-constant-condition + while (true) { + const { done, value } = await reader.read(); + if (done) { + break; + } + + chunks.push(value); + } + + const file = new Uint8Array(chunks.reduce((acc, chunk) => acc + chunk.length, 0)); + let offset = 0; + for (const chunk of chunks) { + file.set(chunk, offset); + offset += chunk.length; + } + + await this.#fs.mkdir(dirname(path), { recursive: true }); + await this.#fs.writeFile(this.#getPath(path), file); + } + + /** + * Reads a file from the filesystem and returns a buffer + * Saves a file to the data directory. + * @param oldName - File to save + * @param newName - File contents + */ + async moveFile(oldName: string, newName: string) { + if (isAbsolute(oldName) || isAbsolute(newName)) { + throw new Error("can't move absolute path"); + } + + const oldPath = this.#getPath(oldName); + const newPath = this.#getPath(newName); + + await this.#fs.mkdir(dirname(newPath), { recursive: true }); + await this.#fs.rename(oldPath, newPath); + } + + /** + * Reads a file from the disk and returns a buffer + * @param name - File to read + */ + public async readFile(name: string): Promise; + /** + * Reads a file from the filesystem as a string + * @param name - File to read + * @param encoding - Encoding to use + */ + public async readFile(name: string, encoding: 'utf-8'): Promise; + /** + * Reads a file from the filesystem + * @param name - File to read + * @param encoding - Encoding to use + */ + public async readFile(name: string, encoding?: 'utf-8'): Promise { + const path = this.#getPath(name); + const data = await this.#fs.readFile(path, encoding); + + if (!encoding) { + return typeof data === 'string' + ? new TextEncoder().encode(data) // this branch shouldn't be hit, but just in case + : new Uint8Array(data.buffer, data.byteOffset, data.byteLength / Uint8Array.BYTES_PER_ELEMENT); + } + + return data; + } + + /** + * Checks if a file exists and is accessible + * @param name - File to check + */ + public hasFileSync(name: string): boolean { + return this.#fs.existsSync(this.#getPath(name)); + } + + #getPath(name: string) { + return isAbsolute(name) ? 
name : join(this.#dataDir, name); + } + + /** + * Reads a file from the filesystem + * @param dir - File to read + * @param options - Readdir options + */ + public async readdir( + dir: string, + options?: { + /** + * Traverse child directories recursively + */ + recursive: boolean; + }, + ) { + const dirPath = this.#getPath(dir); + return await this.#fs.readdir(dirPath, options); + } +} diff --git a/compiler/wasm/src/noir/file-manager/memfs-file-manager.ts b/compiler/wasm/src/noir/file-manager/memfs-file-manager.ts new file mode 100644 index 00000000000..b9ee3226be2 --- /dev/null +++ b/compiler/wasm/src/noir/file-manager/memfs-file-manager.ts @@ -0,0 +1,58 @@ +import { IFs, fs } from 'memfs'; +import { IDirent } from 'memfs/lib/node/types/misc'; + +import { FileManager } from './file-manager'; + +/** + * Creates a new FileManager instance based on a MemFS instance + * @param memFS - the memfs backing instance + * @param dataDir - where to store files + */ +export function createMemFSFileManager(memFS: IFs = fs, dataDir = '/'): FileManager { + const readdirRecursive = async (dir: string): Promise => { + const contents = await memFS.promises.readdir(dir); + let files: string[] = []; + for (const handle in contents) { + if ((handle as unknown as IDirent).isFile()) { + files.push(handle.toString()); + } else { + files = files.concat(await readdirRecursive(handle.toString())); + } + } + return files; + }; + return new FileManager( + { + existsSync: memFS.existsSync.bind(memFS), + mkdir: async ( + dir: string, + options?: { + /** + * Traverse child directories + */ + recursive: boolean; + }, + ) => { + await memFS.promises.mkdir(dir, options); + }, + writeFile: memFS.promises.writeFile.bind(memFS), + rename: memFS.promises.rename.bind(memFS), + readFile: memFS.promises.readFile.bind(memFS), + readdir: async ( + dir: string, + options?: { + /** + * Traverse child directories + */ + recursive: boolean; + }, + ) => { + if (options?.recursive) { + return readdirRecursive(dir); + } + return (await memFS.promises.readdir(dir)).map((handles) => handles.toString()); + }, + }, + dataDir, + ); +} diff --git a/compiler/wasm/src/noir/file-manager/nodejs-file-manager.ts b/compiler/wasm/src/noir/file-manager/nodejs-file-manager.ts new file mode 100644 index 00000000000..1a8250f49cc --- /dev/null +++ b/compiler/wasm/src/noir/file-manager/nodejs-file-manager.ts @@ -0,0 +1,62 @@ +import { existsSync } from 'fs'; +import { promises as fs } from 'fs'; + +import { FileManager } from './file-manager'; + +// This is needed because memfs doesn't support the recursive flag yet +export async function readdirRecursive(dir: string): Promise { + const contents = await fs.readdir(dir); + let files: string[] = []; + for (const handle of contents) { + if ((await fs.stat(`${dir}/${handle}`)).isFile()) { + files.push(`${dir}/${handle.toString()}`); + } else { + files = files.concat(await readdirRecursive(`${dir}/${handle.toString()}`)); + } + } + return files; +} + +/** + * Creates a new FileManager instance based on nodejs fs + * @param dataDir - where to store files + */ +export function createNodejsFileManager(dataDir: string): FileManager { + return new FileManager( + { + ...fs, + ...{ + // ExistsSync is not available in the fs/promises module + existsSync, + // This is added here because the node types are not compatible with the FileSystem type for mkdir + // Typescripts tries to use a different variant of the function that is not the one that has the optional options. 
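+        // Wrapping mkdir below pins the overload that accepts `{ recursive }` and discards its return value.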
+ mkdir: async ( + dir: string, + opts?: { + /** + * Traverse child directories + */ + recursive: boolean; + }, + ) => { + await fs.mkdir(dir, opts); + }, + readdir: async ( + dir: string, + options?: { + /** + * Traverse child directories + */ + recursive: boolean; + }, + ) => { + if (options?.recursive) { + return readdirRecursive(dir); + } + return (await fs.readdir(dir)).map((handles) => handles.toString()); + }, + }, + }, + dataDir, + ); +} diff --git a/compiler/wasm/src/noir/noir-wasm-compiler.ts b/compiler/wasm/src/noir/noir-wasm-compiler.ts new file mode 100644 index 00000000000..2a0af5d8fee --- /dev/null +++ b/compiler/wasm/src/noir/noir-wasm-compiler.ts @@ -0,0 +1,173 @@ +import { isAbsolute } from 'path'; + +import { DependencyManager } from './dependencies/dependency-manager'; +import { GithubDependencyResolver as GithubCodeArchiveDependencyResolver } from './dependencies/github-dependency-resolver'; +import { LocalDependencyResolver } from './dependencies/local-dependency-resolver'; +import { FileManager } from './file-manager/file-manager'; +import { Package } from './package'; +import { LogFn } from '../utils'; +import { CompilationResult } from '../types/noir_artifact'; + +/** Compilation options */ +export type NoirWasmCompileOptions = { + /** Logging function */ + log: LogFn; + /** Log debugging information through this function */ + debugLog: LogFn; +}; + +/** + * Noir Package Compiler + */ +export class NoirWasmCompiler { + #log: LogFn; + #debugLog: LogFn; + #package: Package; + /* eslint-disable @typescript-eslint/no-explicit-any */ + #wasmCompiler: any; + #sourceMap: any; + /* eslint-disable @typescript-eslint/no-explicit-any */ + #fm: FileManager; + #dependencyManager: DependencyManager; + + private constructor( + entrypoint: Package, + dependencyManager: DependencyManager, + fileManager: FileManager, + wasmCompiler: unknown, + sourceMap: unknown, + opts: NoirWasmCompileOptions, + ) { + this.#log = opts.log; + this.#debugLog = opts.debugLog; + this.#package = entrypoint; + this.#fm = fileManager; + this.#wasmCompiler = wasmCompiler; + this.#sourceMap = sourceMap; + this.#dependencyManager = dependencyManager; + } + + /** + * Creates a new compiler instance. 
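+   * The constructor is private; this factory opens the package at `projectPath` and wires up local and Github dependency resolvers before creating the compiler.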
+ * @param fileManager - The file manager to use + * @param projectPath - The path to the project + * @param opts - Compilation options + */ + public static async new( + fileManager: FileManager, + projectPath: string, + /* eslint-disable @typescript-eslint/no-explicit-any */ + wasmCompiler: any, + sourceMap: any, + /* eslint-enable @typescript-eslint/no-explicit-any */ + opts: NoirWasmCompileOptions, + ) { + // Assume the filemanager is initialized at the project root + if (!isAbsolute(projectPath)) { + throw new Error('projectPath must be an absolute path'); + } + + const noirPackage = await Package.open(projectPath, fileManager); + + const dependencyManager = new DependencyManager( + [ + new LocalDependencyResolver(fileManager), + new GithubCodeArchiveDependencyResolver(fileManager), + // TODO support actual Git repositories + ], + noirPackage, + ); + + return new NoirWasmCompiler(noirPackage, dependencyManager, fileManager, wasmCompiler, sourceMap, opts); + } + + /** + * Compile EntryPoint + */ + /** + * Compile EntryPoint + */ + public async compile(): Promise { + console.log(`Compiling at ${this.#package.getEntryPointPath()}`); + + if (!(this.#package.getType() === 'contract' || this.#package.getType() === 'bin')) { + throw new Error(`Only supports compiling "contract" and "bin" package types (${this.#package.getType()})`); + } + await this.#dependencyManager.resolveDependencies(); + this.#debugLog(`Dependencies: ${this.#dependencyManager.getPackageNames().join(', ')}`); + + try { + const isContract: boolean = this.#package.getType() === 'contract'; + + const entrypoint = this.#package.getEntryPointPath(); + const deps = { + /* eslint-disable camelcase */ + root_dependencies: this.#dependencyManager.getEntrypointDependencies(), + library_dependencies: this.#dependencyManager.getLibraryDependencies(), + /* eslint-enable camelcase */ + }; + const packageSources = await this.#package.getSources(this.#fm); + const librarySources = ( + await Promise.all( + this.#dependencyManager + .getLibraries() + .map(async ([alias, library]) => await library.package.getSources(this.#fm, alias)), + ) + ).flat(); + [...packageSources, ...librarySources].forEach((sourceFile) => { + this.#debugLog(`Adding source ${sourceFile.path}`); + this.#sourceMap.add_source_code(sourceFile.path, sourceFile.source); + }); + const result = this.#wasmCompiler.compile(entrypoint, isContract, deps, this.#sourceMap); + + if ((isContract && !('contract' in result)) || (!isContract && !('program' in result))) { + throw new Error('Invalid compilation result'); + } + + return result; + } catch (err) { + if (err instanceof Error && err.name === 'CompileError') { + const logs = await this.#processCompileError(err); + for (const log of logs) { + this.#log(log); + } + throw new Error(logs.join('\n')); + } + + throw err; + } + } + + async #resolveFile(path: string) { + try { + const libFile = this.#dependencyManager.findFile(path); + return await this.#fm.readFile(libFile ?? 
path, 'utf-8'); + } catch (err) { + return ''; + } + } + + // eslint-disable-next-line @typescript-eslint/no-explicit-any + async #processCompileError(err: any): Promise { + const logs = []; + for (const diag of err.diagnostics) { + logs.push(` ${diag.message}`); + const contents = await this.#resolveFile(diag.file); + const lines = contents.split('\n'); + const lineOffsets = lines.reduce((accum, _, idx) => { + if (idx === 0) { + accum.push(0); + } else { + accum.push(accum[idx - 1] + lines[idx - 1].length + 1); + } + return accum; + }, []); + + for (const secondary of diag.secondaries) { + const errorLine = lineOffsets.findIndex((offset) => offset > secondary.start); + logs.push(` ${diag.file}:${errorLine}: ${contents.slice(secondary.start, secondary.end)}`); + } + } + return logs; + } +} diff --git a/compiler/wasm/src/noir/package.ts b/compiler/wasm/src/noir/package.ts new file mode 100644 index 00000000000..a2496a03b3a --- /dev/null +++ b/compiler/wasm/src/noir/package.ts @@ -0,0 +1,129 @@ +import { parse } from '@ltd/j-toml'; +import { join } from 'path'; + +import { FileManager } from './file-manager/file-manager'; +import { DependencyConfig, PackageConfig, parseNoirPackageConfig } from '../types/noir_package_config'; + +const CONFIG_FILE_NAME = 'Nargo.toml'; +const SOURCE_EXTENSIONS = ['.nr']; + +/** + * An array of sources for a package + */ +type SourceList = Array<{ + /** + * The source path, taking into account modules and aliases. Eg: mylib/mod/mysource.nr + */ + path: string; + /** + * Resolved source plaintext + */ + source: string; +}>; + +/** + * A Noir package. + */ +export class Package { + #packagePath: string; + #srcPath: string; + #config: PackageConfig; + #version: string | null = null; + + public constructor(path: string, srcDir: string, config: PackageConfig) { + this.#packagePath = path; + this.#srcPath = srcDir; + this.#config = config; + } + + /** + * Gets this package's path. + */ + public getPackagePath() { + return this.#packagePath; + } + + /** + * Gets this package's Nargo.toml (NoirPackage)Config. + */ + public getPackageConfig() { + return this.#config; + } + + /** + * The path to the source directory. + */ + public getSrcPath() { + return this.#srcPath; + } + + /** + * Gets the entrypoint path for this package. + */ + public getEntryPointPath(): string { + let entrypoint: string; + + switch (this.getType()) { + case 'lib': + // we shouldn't need to compile `lib` type, since the .nr source is read directly + // when the lib is used as a dependency elsewhere. + entrypoint = 'lib.nr'; + break; + case 'contract': + case 'bin': + entrypoint = 'main.nr'; + break; + default: + throw new Error(`Unknown package type: ${this.getType()}`); + } + // TODO check that `src` exists + return join(this.#srcPath, entrypoint); + } + + /** + * Gets the project type + */ + public getType() { + return this.#config.package.type; + } + + /** + * Gets this package's dependencies. + */ + public getDependencies(): Record { + return this.#config.dependencies; + } + + /** + * Gets this package's sources. 
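+   * For library packages, returned paths are prefixed with the alias (or the package name), e.g. `mylib/mod/mysource.nr`; other package types keep their original file paths.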
+ * @param fm - A file manager to use + * @param alias - An alias for the sources, if this package is a dependency + */ + public async getSources(fm: FileManager, alias?: string): Promise { + const handles = await fm.readdir(this.#srcPath, { recursive: true }); + return Promise.all( + handles + .filter((handle) => SOURCE_EXTENSIONS.find((ext) => handle.endsWith(ext))) + .map(async (file) => { + const suffix = file.replace(this.#srcPath, ''); + return { + path: this.getType() === 'lib' ? `${alias ? alias : this.#config.package.name}${suffix}` : file, + source: (await fm.readFile(file, 'utf-8')).toString(), + }; + }), + ); + } + + /** + * Opens a path on the filesystem. + * @param path - Path to the package. + * @param fm - A file manager to use. + * @returns The Noir package at the given location + */ + public static async open(path: string, fm: FileManager): Promise { + const fileContents = await fm.readFile(join(path, CONFIG_FILE_NAME), 'utf-8'); + const config = parseNoirPackageConfig(parse(fileContents)); + + return new Package(path, join(path, 'src'), config); + } +} diff --git a/compiler/wasm/src/types/noir_artifact.ts b/compiler/wasm/src/types/noir_artifact.ts new file mode 100644 index 00000000000..f2147cfbeda --- /dev/null +++ b/compiler/wasm/src/types/noir_artifact.ts @@ -0,0 +1,210 @@ +import { Abi, AbiType } from '@noir-lang/noirc_abi'; + +/** + * A named type. + */ +export interface ABIVariable { + /** + * The name of the variable. + */ + name: string; + /** + * The type of the variable. + */ + type: AbiType; +} + +/** + * A contract event. + */ +export interface EventAbi { + /** + * The event name. + */ + name: string; + /** + * Fully qualified name of the event. + */ + path: string; + /** + * The fields of the event. + */ + fields: ABIVariable[]; +} + +/** The Noir function types. */ +export type NoirFunctionType = 'Open' | 'Secret' | 'Unconstrained'; + +/** + * The compilation result of an Noir function. + */ +export interface NoirFunctionEntry { + /** The name of the function. */ + name: string; + /** The type of the function. */ + function_type: NoirFunctionType; + /** Whether the function is internal. */ + is_internal: boolean; + /** The ABI of the function. */ + abi: Abi; + /** The bytecode of the function in base64. */ + bytecode: string; +} + +/** + * The compilation result of an Noir contract. + */ +export interface CompiledContract { + /** The name of the contract. */ + name: string; + /** Compilation backend. */ + backend: string; + /** The functions of the contract. */ + functions: NoirFunctionEntry[]; + /** The events of the contract */ + events: EventAbi[]; +} + +/** + * The compilation result of an Noir contract. + */ +export interface CompiledCircuit { + /** The hash of the circuit. */ + hash?: number; + /** Compilation backend. */ + backend: string; + /** + * The ABI of the function. + */ + abi: Abi; + /** The bytecode of the circuit in base64. */ + bytecode: string; +} + +/** + * A file ID. It's assigned during compilation. + */ +export type FileId = number; + +/** + * A pointer to a specific section of the source code. + */ +export interface SourceCodeLocation { + /** + * The section of the source code. + */ + span: { + /** + * The byte where the section starts. + */ + start: number; + /** + * The byte where the section ends. + */ + end: number; + }; + /** + * The source code file pointed to. + */ + file: FileId; +} + +/** + * The location of an opcode in the bytecode. + * It's a string of the form `{acirIndex}` or `{acirIndex}:{brilligIndex}`. 
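+ * For example (illustrative values), `"5"` points at ACIR opcode 5, while `"5:12"` points at Brillig opcode 12 within ACIR opcode 5.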
+ */ +export type OpcodeLocation = string; + +/** + * The debug information for a given function. + */ +export interface DebugInfo { + /** + * A map of the opcode location to the source code location. + */ + locations: Record; +} + +/** + * Maps a file ID to its metadata for debugging purposes. + */ +export type DebugFileMap = Record< + FileId, + { + /** + * The source code of the file. + */ + source: string; + /** + * The path of the file. + */ + path: string; + } +>; + +/** + * The debug metadata of an Noir contract. + */ +export interface DebugMetadata { + /** + * The debug information for each function. + */ + debug_symbols: DebugInfo[]; + /** + * The map of file ID to the source code and path of the file. + */ + file_map: DebugFileMap; +} + +/** + * The compilation artifacts of a given contract. + */ +export interface ContractCompilationArtifacts { + /** + * The compiled contract. + */ + contract: CompiledContract; + + /** + * The artifact that contains the debug metadata about the contract. + */ + debug?: DebugMetadata; +} + +/** + * The compilation artifacts of a given program. + */ +export interface ProgramCompilationArtifacts { + /** + * not part of the compilation output, injected later + */ + name: string; + /** + * The compiled contract. + */ + program: CompiledCircuit; + + /** + * The artifact that contains the debug metadata about the contract. + */ + debug?: DebugMetadata; +} + +/** + * output of Noir Wasm compilation, can be for a contract or lib/binary + */ +export type CompilationResult = ContractCompilationArtifacts | ProgramCompilationArtifacts; + +/** + * Check if it has Contract unique property + */ +export function isContractCompilationArtifacts(artifact: CompilationResult): artifact is ContractCompilationArtifacts { + return (artifact as ContractCompilationArtifacts).contract !== undefined; +} + +/** + * Check if it has Contract unique property + */ +export function isProgramCompilationArtifacts(artifact: CompilationResult): artifact is ProgramCompilationArtifacts { + return (artifact as ProgramCompilationArtifacts).program !== undefined; +} diff --git a/compiler/wasm/src/types/noir_package_config.ts b/compiler/wasm/src/types/noir_package_config.ts new file mode 100644 index 00000000000..5f07c380cf3 --- /dev/null +++ b/compiler/wasm/src/types/noir_package_config.ts @@ -0,0 +1,53 @@ +type NoirGitDependencySchema = { + git: string; + tag: string; + directory?: string; +}; + +type NoirLocalDependencySchema = { + path: string; +}; + +type NoirPackageType = 'lib' | 'contract' | 'bin'; +type NoirPackageConfigSchema = { + package: { + name: string; + type: NoirPackageType; + entry?: string; + description?: string; + authors?: string[]; + compiler_version?: string; + backend?: string; + license?: string; + }; + dependencies: Record; +}; + +/** + * Noir package configuration. + */ +export type PackageConfig = NoirPackageConfigSchema; + +/** + * A remote package dependency. + */ +export type GitDependencyConfig = NoirGitDependencySchema; + +/** + * A local package dependency. + */ +export type LocalDependencyConfig = NoirLocalDependencySchema; + +/** + * A package dependency. + */ +export type DependencyConfig = GitDependencyConfig | LocalDependencyConfig; + +/** + * Checks that an object is a package configuration. 
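+ * Note: currently a plain cast with no runtime validation; callers receive whatever was parsed out of Nargo.toml.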
+ * @param config - Config to check + */ +// eslint-disable-next-line @typescript-eslint/no-explicit-any +export function parseNoirPackageConfig(config: any): PackageConfig { + return config; +} diff --git a/compiler/wasm/src/utils.ts b/compiler/wasm/src/utils.ts new file mode 100644 index 00000000000..513f7c51617 --- /dev/null +++ b/compiler/wasm/src/utils.ts @@ -0,0 +1,53 @@ +import { sep } from 'path'; + +/** Structured log data to include with the message. */ +export type LogData = Record; + +/** A callable logger instance. */ +export type LogFn = (msg: string, data?: LogData) => void; + +export function fileURLToPath(uri: string): string { + if (typeof uri !== 'string' || uri.length <= 7 || uri.substring(0, 7) !== 'file://') { + throw new TypeError('must pass in a file:// URI to convert to a file path'); + } + + const rest = decodeURI(uri.substring(7)); + const firstSlash = rest.indexOf('/'); + let host = rest.substring(0, firstSlash); + let path = rest.substring(firstSlash + 1); + + // 2. Scheme Definition + // As a special case, can be the string "localhost" or the empty + // string; this is interpreted as "the machine from which the URL is + // being interpreted". + if (host === 'localhost') { + host = ''; + } + + if (host) { + host = sep + sep + host; + } + + // 3.2 Drives, drive letters, mount points, file system root + // Drive letters are mapped into the top of a file URI in various ways, + // depending on the implementation; some applications substitute + // vertical bar ("|") for the colon after the drive letter, yielding + // "file:///c|/tmp/test.txt". In some cases, the colon is left + // unchanged, as in "file:///c:/tmp/test.txt". In other cases, the + // colon is simply omitted, as in "file:///c/tmp/test.txt". + path = path.replace(/^(.+)\|/, '$1:'); + + // for Windows, we need to invert the path separators from what a URI uses + if (sep === '\\') { + path = path.replace(/\//g, '\\'); + } + + if (/^.+:/.test(path)) { + // has Windows drive at beginning of path + } else { + // unix path… + path = sep + path; + } + + return host + path; +} diff --git a/compiler/wasm/test/browser/index.test.ts b/compiler/wasm/test/browser/index.test.ts deleted file mode 100644 index 8a3f82ffff9..00000000000 --- a/compiler/wasm/test/browser/index.test.ts +++ /dev/null @@ -1,106 +0,0 @@ -import { expect } from '@esm-bundle/chai'; -import initNoirWasm, { compile } from '@noir-lang/noir_wasm'; -import { initializeResolver } from '@noir-lang/source-resolver'; -import { - depsScriptExpectedArtifact, - depsScriptSourcePath, - libASourcePath, - libBSourcePath, - simpleScriptExpectedArtifact, - simpleScriptSourcePath, -} from '../shared'; - -beforeEach(async () => { - await initNoirWasm(); -}); - -async function getFileContent(path: string): Promise { - const url = new URL(path, import.meta.url); - const response = await fetch(url); - return await response.text(); -} - -// eslint-disable-next-line @typescript-eslint/no-explicit-any -async function getPrecompiledSource(path: string): Promise { - const compiledData = await getFileContent(path); - return JSON.parse(compiledData); -} - -describe('noir wasm', () => { - describe('can compile script without dependencies', () => { - beforeEach(async () => { - const source = await getFileContent(simpleScriptSourcePath); - initializeResolver((id: string) => { - console.log(`Resolving source ${id}`); - - if (typeof source === 'undefined') { - throw Error(`Could not resolve source for '${id}'`); - } else if (id !== '/main.nr') { - throw Error(`Unexpected id: 
'${id}'`); - } else { - return source; - } - }); - }); - - it('matching nargos compilation', async () => { - const wasmCircuit = await compile('/main.nr'); - const cliCircuit = await getPrecompiledSource(simpleScriptExpectedArtifact); - - if (!('program' in wasmCircuit)) { - throw Error('Expected program to be present'); - } - - // We don't expect the hashes to match due to how `noir_wasm` handles dependencies - expect(wasmCircuit.program.bytecode).to.eq(cliCircuit.bytecode); - expect(wasmCircuit.program.abi).to.deep.eq(cliCircuit.abi); - expect(wasmCircuit.program.backend).to.eq(cliCircuit.backend); - }).timeout(20e3); // 20 seconds - }); - - describe('can compile script with dependencies', () => { - beforeEach(async () => { - const [scriptSource, libASource, libBSource] = await Promise.all([ - getFileContent(depsScriptSourcePath), - getFileContent(libASourcePath), - getFileContent(libBSourcePath), - ]); - - initializeResolver((file: string) => { - switch (file) { - case '/script/main.nr': - return scriptSource; - - case '/lib_a/lib.nr': - return libASource; - - case '/lib_b/lib.nr': - return libBSource; - - default: - return ''; - } - }); - }); - - it('matching nargos compilation', async () => { - const wasmCircuit = await compile('/script/main.nr', false, { - root_dependencies: ['lib_a'], - library_dependencies: { - lib_a: ['lib_b'], - }, - }); - - if (!('program' in wasmCircuit)) { - throw Error('Expected program to be present'); - } - - const cliCircuit = await getPrecompiledSource(depsScriptExpectedArtifact); - - // We don't expect the hashes to match due to how `noir_wasm` handles dependencies - expect(wasmCircuit.program.bytecode).to.eq(cliCircuit.bytecode); - expect(wasmCircuit.program.abi).to.deep.eq(cliCircuit.abi); - expect(wasmCircuit.program.backend).to.eq(cliCircuit.backend); - }).timeout(20e3); // 20 seconds - }); -}); diff --git a/compiler/wasm/test/compiler/browser/compile_with_deps.test.ts b/compiler/wasm/test/compiler/browser/compile_with_deps.test.ts new file mode 100644 index 00000000000..3580779ff1d --- /dev/null +++ b/compiler/wasm/test/compiler/browser/compile_with_deps.test.ts @@ -0,0 +1,44 @@ +/* eslint-disable @typescript-eslint/ban-ts-comment */ +import { getPaths } from '../../shared'; +import { expect } from '@esm-bundle/chai'; +import { compile, createFileManager } from '@noir-lang/noir_wasm'; +import { CompiledContract } from '../../../src/types/noir_artifact'; + +const paths = getPaths('.'); + +async function getFile(path: string) { + // @ts-ignore + const basePath = new URL('./../../', import.meta.url).toString().replace(/\/$/g, ''); + const url = `${basePath}${path.replace('.', '')}`; + const response = await fetch(url); + return response; +} + +// eslint-disable-next-line @typescript-eslint/no-explicit-any +async function getPrecompiledSource(path: string): Promise { + const response = await getFile(path); + const compiledData = await response.text(); + return JSON.parse(compiledData); +} + +describe('noir-compiler', () => { + it('both nargo and noir_wasm should compile identically', async () => { + const { contractExpectedArtifact } = paths; + const fm = createFileManager('/'); + const files = Object.values(paths).filter((fileOrDir) => /^\.?\/.*\..*$/.test(fileOrDir)); + for (const path of files) { + console.log(path); + await fm.writeFile(path, (await getFile(path)).body as ReadableStream); + } + const nargoArtifact = (await getPrecompiledSource(contractExpectedArtifact)) as CompiledContract; + nargoArtifact.functions.sort((a, b) => 
a.name.localeCompare(b.name)); + const noirWasmArtifact = await compile(fm, '/fixtures/noir-contract'); + if (!('contract' in noirWasmArtifact)) { + throw new Error('Compilation failed'); + } + const noirWasmContract = noirWasmArtifact.contract; + expect(noirWasmContract).not.to.be.undefined; + noirWasmContract.functions.sort((a, b) => a.name.localeCompare(b.name)); + expect(nargoArtifact).to.deep.eq(noirWasmContract); + }).timeout(60 * 20e3); +}); diff --git a/compiler/wasm/test/compiler/node/compile_with_deps.test.ts b/compiler/wasm/test/compiler/node/compile_with_deps.test.ts new file mode 100644 index 00000000000..546ec03c183 --- /dev/null +++ b/compiler/wasm/test/compiler/node/compile_with_deps.test.ts @@ -0,0 +1,26 @@ +import { join, resolve } from 'path'; +import { getPaths } from '../../shared'; + +import { expect } from 'chai'; +import { readFile } from 'fs/promises'; +import { compile, createFileManager } from '@noir-lang/noir_wasm'; +import { CompiledContract } from '../../../src/types/noir_artifact'; + +const basePath = resolve(join(__dirname, '../../')); +const { contractProjectPath, contractExpectedArtifact } = getPaths(basePath); + +describe('noir-compiler', () => { + it('both nargo and noir_wasm should compile identically', async () => { + const fm = createFileManager(contractProjectPath); + const nargoArtifact = JSON.parse((await readFile(contractExpectedArtifact)).toString()) as CompiledContract; + nargoArtifact.functions.sort((a, b) => a.name.localeCompare(b.name)); + const noirWasmArtifact = await compile(fm); + if (!('contract' in noirWasmArtifact)) { + throw new Error('Compilation failed'); + } + const noirWasmContract = noirWasmArtifact.contract; + expect(noirWasmContract).not.to.be.undefined; + noirWasmContract.functions.sort((a, b) => a.name.localeCompare(b.name)); + expect(nargoArtifact).to.deep.eq(noirWasmContract); + }); +}); diff --git a/compiler/wasm/test/dependencies/dependency-manager.test.ts b/compiler/wasm/test/dependencies/dependency-manager.test.ts new file mode 100644 index 00000000000..4e2fdbae515 --- /dev/null +++ b/compiler/wasm/test/dependencies/dependency-manager.test.ts @@ -0,0 +1,108 @@ +import { DependencyConfig } from '../../src/types/noir_package_config'; +import { Package } from '../../src/noir/package'; +import { DependencyManager } from '../../src/noir/dependencies/dependency-manager'; +import { Dependency, DependencyResolver } from '../../src/noir/dependencies/dependency-resolver'; + +import { expect } from 'chai'; + +describe('DependencyManager', () => { + let manager: DependencyManager; + + beforeEach(() => { + manager = new DependencyManager( + [new TestDependencyResolver()], + new Package('/test_contract', '/test_contract/src', { + dependencies: { + lib1: { + path: '/lib1', + }, + lib2: { + path: '/lib2', + }, + lib3: { + path: '/lib3', + }, + }, + package: { + name: 'test_contract', + type: 'contract', + }, + }), + ); + }); + + it('successfully resolves dependencies', async () => { + await expect(manager.resolveDependencies()).to.eventually.be.undefined; + }); + + it('resolves all libraries', async () => { + await manager.resolveDependencies(); + expect(manager.getPackageNames()).to.eql(['lib1', 'lib2', 'lib3']); + }); + + it('resolves root dependencies', async () => { + await manager.resolveDependencies(); + expect(manager.getEntrypointDependencies()).to.eql(['lib1', 'lib2', 'lib3']); + }); + + it('resolves library dependencies', async () => { + await manager.resolveDependencies(); + expect(manager.getLibraryDependencies()).to.eql({ 
+ lib2: ['lib3'], + }); + }); +}); + +class TestDependencyResolver implements DependencyResolver { + // eslint-disable-next-line require-await + public async resolveDependency(pkg: Package, dep: DependencyConfig): Promise { + if (!('path' in dep)) { + return null; + } + + switch (dep.path) { + case '/lib1': + return { + version: '', + package: new Package('/lib1', '/lib1/src', { + dependencies: {}, + package: { + name: 'lib1', + type: 'lib', + }, + }), + }; + + case '/lib2': + return { + version: '', + package: new Package('/lib2', '/lib2/src', { + dependencies: { + lib3: { + path: '../lib3', + }, + }, + package: { + name: 'lib2', + type: 'lib', + }, + }), + }; + + case '/lib3': + return { + version: '', + package: new Package('/lib3', '/lib3/src', { + dependencies: {}, + package: { + name: 'lib3', + type: 'lib', + }, + }), + }; + + default: + throw new Error(); + } + } +} diff --git a/compiler/wasm/test/dependencies/github-dependency-resolver.test.ts b/compiler/wasm/test/dependencies/github-dependency-resolver.test.ts new file mode 100644 index 00000000000..e7fae8afe8e --- /dev/null +++ b/compiler/wasm/test/dependencies/github-dependency-resolver.test.ts @@ -0,0 +1,149 @@ +import { Volume, createFsFromVolume } from 'memfs'; +import { dirname, join, resolve } from 'path'; + +import { FileManager } from '../../src/noir/file-manager/file-manager'; +import { createMemFSFileManager } from '../../src/noir/file-manager/memfs-file-manager'; +import { readdirRecursive } from '../../src/noir/file-manager/nodejs-file-manager'; + +import { Package } from '../../src/noir/package'; +import { DependencyResolver } from '../../src/noir/dependencies/dependency-resolver'; +import { + GithubDependencyResolver, + resolveGithubCodeArchive, + safeFilename, +} from '../../src/noir/dependencies/github-dependency-resolver'; +import { GitDependencyConfig } from '../../src/types/noir_package_config'; +import Sinon, { SinonStub } from 'sinon'; +import chai, { expect } from 'chai'; +import forEach from 'mocha-each'; +import chaiAsPromised from 'chai-as-promised'; +import AdmZip from 'adm-zip'; + +chai.use(chaiAsPromised); + +const fixtures = resolve(join(__dirname, '../fixtures')); + +describe('GithubDependencyResolver', () => { + let resolver: DependencyResolver; + let fm: FileManager; + let pkg: Package; + let libDependency: GitDependencyConfig; + let fetchStub: SinonStub | undefined; + + beforeEach(() => { + fetchStub = Sinon.stub(globalThis, 'fetch'); + fm = createMemFSFileManager(createFsFromVolume(new Volume()), '/'); + + libDependency = { + git: 'https://github.com/example/repo', + tag: 'v1.0.0', + }; + + pkg = new Package('/test_contract', '/test_contract/src', { + dependencies: { + // eslint-disable-next-line camelcase + lib_c: libDependency, + }, + package: { + name: 'test_contract', + type: 'contract', + }, + }); + + resolver = new GithubDependencyResolver(fm); + + // cut off outside access + fetchStub.onCall(0).throws(new Error()); + }); + + afterEach(() => { + fetchStub?.restore(); + }); + + it("returns null if it can't resolve a dependency", async () => { + const dep = await resolver.resolveDependency(pkg, { + path: '/lib-c', + }); + + expect(dep).to.be.null; + }); + + it('resolves Github dependency', async () => { + const zip = new AdmZip(); + const testLibPath = join(fixtures, 'deps', 'lib-c'); + for (const filePath of await readdirRecursive(testLibPath)) { + zip.addLocalFile(filePath, dirname(filePath.replace(testLibPath, 'lib-c'))); + } + + fetchStub?.onCall(0).returns(new Response(zip.toBuffer(), { 
status: 200 })); + + const lib = await resolver.resolveDependency(pkg, libDependency); + expect(lib).not.to.be.undefined; + expect(lib!.version).to.eq(libDependency.tag); + expect(fm.hasFileSync(lib!.package.getEntryPointPath())).to.eq(true); + }); + + forEach([ + [ + 'https://github.com/example/lib.nr/archive/v1.0.0.zip', + 'zip', + { + git: 'https://github.com/example/lib.nr', + tag: 'v1.0.0', + }, + ], + [ + 'https://github.com/example/lib.nr/archive/v1.0.0.tar.gz', + 'tar', + { + git: 'https://github.com/example/lib.nr', + tag: 'v1.0.0', + }, + ], + [ + 'https://github.com/example/lib.nr/archive/HEAD.zip', + 'zip', + { + git: 'https://github.com/example/lib.nr', + tag: 'HEAD', + }, + ], + [ + 'https://github.com/example/lib.nr/archive/HEAD.tar.gz', + 'tar', + { + git: 'https://github.com/example/lib.nr', + tag: 'HEAD', + }, + ], + ]).it( + 'resolves to the correct code archive URL %s', + async (href: string, format: 'zip' | 'tar', dep: GitDependencyConfig) => { + const archiveUrl = resolveGithubCodeArchive(dep, format); + expect(archiveUrl.href).to.eq(href); + }, + ); + + forEach([ + { git: 'https://github.com/', tag: 'v1' }, + { git: 'https://github.com/foo', tag: 'v1' }, + { git: 'https://example.com', tag: 'v1' }, + ]).it('throws if the Github URL is invalid %j', (dep) => { + expect(() => resolveGithubCodeArchive(dep, 'zip')).to.throw(); + }); + + forEach([ + ['main', 'main'], + ['v1.0.0', 'v1.0.0'], + ['../../../etc/passwd', '.._.._.._etc_passwd'], + ['/etc/passwd', 'etc_passwd'], + ['/SomeOrg/some-repo@v1.0.0', 'SomeOrg_some-repo@v1.0.0'], + ['SomeOrg/some-repo@v1.0.0', 'SomeOrg_some-repo@v1.0.0'], + ]).it('generates safe file names from %s', (value, expected) => { + expect(safeFilename(value)).to.eq(expected); + }); + + forEach(['']).it('rejects invalid values', (value) => { + expect(() => safeFilename(value)).to.throw(); + }); +}); diff --git a/compiler/wasm/test/dependencies/local-dependency-resolver.test.ts b/compiler/wasm/test/dependencies/local-dependency-resolver.test.ts new file mode 100644 index 00000000000..f44f618a7cb --- /dev/null +++ b/compiler/wasm/test/dependencies/local-dependency-resolver.test.ts @@ -0,0 +1,52 @@ +import { createFsFromVolume, Volume } from 'memfs'; +import { readFile } from 'fs/promises'; + +import { FileManager } from '../../src/noir/file-manager/file-manager'; +import { createMemFSFileManager } from '../../src/noir/file-manager/memfs-file-manager'; +import { Package } from '../../src/noir/package'; +import { DependencyResolver } from '../../src/noir/dependencies/dependency-resolver'; +import { LocalDependencyResolver } from '../../src/noir/dependencies/local-dependency-resolver'; +import { expect } from 'chai'; +import forEach from 'mocha-each'; +import { join } from 'path'; + +describe('DependencyResolver', () => { + let resolver: DependencyResolver; + let fm: FileManager; + let pkg: Package; + + beforeEach(async () => { + const fixtures = join(__dirname, '../fixtures'); + const memFS = createFsFromVolume(new Volume()); + memFS.mkdirSync('/noir-contract/src', { recursive: true }); + memFS.mkdirSync('/lib-c/src', { recursive: true }); + memFS.writeFileSync('/noir-contract/Nargo.toml', await readFile(join(fixtures, 'noir-contract/Nargo.toml'))); + memFS.writeFileSync('/noir-contract/src/main.nr', await readFile(join(fixtures, 'noir-contract/src/main.nr'))); + memFS.writeFileSync('/lib-c/Nargo.toml', await readFile(join(fixtures, 'deps/lib-c/Nargo.toml'))); + memFS.writeFileSync('/lib-c/src/lib.nr', await readFile(join(fixtures, 
'deps/lib-c/src/lib.nr'))); + + fm = createMemFSFileManager(memFS, '/'); + + pkg = await Package.open('/noir-contract', fm); + resolver = new LocalDependencyResolver(fm); + }); + + it("returns null if it can't resolve a dependency", async () => { + const dep = await resolver.resolveDependency(pkg, { + git: 'git@some-git-host', + directory: '/', + tag: 'v1.0.0', + }); + + expect(dep).to.be.null; + }); + + forEach(['../noir-contract', '/noir-contract']).it('resolves a known dependency %s', async (path) => { + const lib = await resolver.resolveDependency(pkg, { + path, + }); + expect(lib).not.to.be.undefined; + expect(lib!.version).to.be.undefined; + expect(fm.hasFileSync(lib!.package.getEntryPointPath())).to.eq(true); + }); +}); diff --git a/compiler/wasm/test/file-manager/file-manager.test.ts b/compiler/wasm/test/file-manager/file-manager.test.ts new file mode 100644 index 00000000000..8a80854109a --- /dev/null +++ b/compiler/wasm/test/file-manager/file-manager.test.ts @@ -0,0 +1,96 @@ +import { Volume, createFsFromVolume } from 'memfs'; +import { existsSync, mkdtempSync, rmSync } from 'fs'; +import * as fs from 'fs/promises'; +import { tmpdir } from 'os'; +import { join } from 'path'; + +import { FileManager, FileSystem } from '../../src/noir/file-manager/file-manager'; +import { createMemFSFileManager } from '../../src/noir/file-manager/memfs-file-manager'; + +import { expect } from 'chai'; +import forEach from 'mocha-each'; + +const memFS = (): { fm: FileManager; teardown: () => void } => { + const fm = createMemFSFileManager(createFsFromVolume(new Volume()), '/'); + return { + fm, + // no-op, it's all in memory + teardown: () => {}, + }; +}; + +const nodeFS = (): { fm: FileManager; teardown: () => void } => { + const fileSystem: FileSystem = { + existsSync: existsSync, + mkdir: async (dir: string, opts?: { recursive: boolean }) => { + await fs.mkdir(dir, opts); + }, + writeFile: fs.writeFile, + readFile: fs.readFile, + rename: fs.rename, + readdir: fs.readdir, + }; + + const dir = mkdtempSync(join(tmpdir(), 'noir-compiler-test')); + const fm = new FileManager(fileSystem, dir); + + return { + fm, + teardown: () => { + rmSync(dir, { + recursive: true, + }); + }, + }; +}; + +/** + * Declare the default test suite for a file manager + * @param setup - Function to setup a file manager + * @param teardown - Optional function to call at the end of the test + */ +forEach([ + ['memFs', memFS], + ['nodeFS', nodeFS], +]).describe('FileManager: %s', (name, fs) => { + let fm: FileManager; + let testFileContent: string; + let testFileBytes: Uint8Array; + let teardown: () => void; + + beforeEach(() => { + ({ fm, teardown } = fs()); + testFileContent = 'foo'; + testFileBytes = new TextEncoder().encode(testFileContent); + }); + + afterEach(() => { + return teardown?.(); + }); + + it(`saves files and correctly reads bytes back using ${name}`, async () => { + await fm.writeFile('test.txt', new Blob([testFileBytes]).stream()); + expect(fm.readFile('test.txt')).to.eventually.eq(testFileBytes); + }); + + it(`saves files and correctly reads UTF-8 string back using ${name}`, async () => { + await fm.writeFile('test.txt', new Blob([testFileBytes]).stream()); + expect(fm.readFile('test.txt', 'utf-8')).to.eventually.eq(testFileContent); + }); + + it(`correctly checks if file exists or not using ${name}`, async () => { + expect(fm.hasFileSync('test.txt')).to.eq(false); + await fm.writeFile('test.txt', new Blob([testFileBytes]).stream()); + expect(fm.hasFileSync('test.txt')).to.eq(true); + }); + + it(`moves files 
using ${name}`, async () => { + await fm.writeFile('test.txt.tmp', new Blob([testFileBytes]).stream()); + expect(fm.hasFileSync('test.txt.tmp')).to.eq(true); + + await fm.moveFile('test.txt.tmp', 'test.txt'); + + expect(fm.hasFileSync('test.txt.tmp')).to.eq(false); + expect(fm.hasFileSync('test.txt')).to.eq(true); + }); +}); diff --git a/compiler/wasm/fixtures/deps/lib-a/Nargo.toml b/compiler/wasm/test/fixtures/deps/lib-a/Nargo.toml similarity index 100% rename from compiler/wasm/fixtures/deps/lib-a/Nargo.toml rename to compiler/wasm/test/fixtures/deps/lib-a/Nargo.toml diff --git a/compiler/wasm/fixtures/deps/lib-a/src/lib.nr b/compiler/wasm/test/fixtures/deps/lib-a/src/lib.nr similarity index 100% rename from compiler/wasm/fixtures/deps/lib-a/src/lib.nr rename to compiler/wasm/test/fixtures/deps/lib-a/src/lib.nr diff --git a/compiler/wasm/fixtures/deps/lib-b/Nargo.toml b/compiler/wasm/test/fixtures/deps/lib-b/Nargo.toml similarity index 100% rename from compiler/wasm/fixtures/deps/lib-b/Nargo.toml rename to compiler/wasm/test/fixtures/deps/lib-b/Nargo.toml diff --git a/compiler/wasm/fixtures/deps/lib-b/src/lib.nr b/compiler/wasm/test/fixtures/deps/lib-b/src/lib.nr similarity index 100% rename from compiler/wasm/fixtures/deps/lib-b/src/lib.nr rename to compiler/wasm/test/fixtures/deps/lib-b/src/lib.nr diff --git a/compiler/wasm/test/fixtures/deps/lib-c/Nargo.toml b/compiler/wasm/test/fixtures/deps/lib-c/Nargo.toml new file mode 100644 index 00000000000..dafdb62e045 --- /dev/null +++ b/compiler/wasm/test/fixtures/deps/lib-c/Nargo.toml @@ -0,0 +1,7 @@ +[package] +name = "lib_c" +authors = [""] +compiler_version = ">=0.18.0" +type = "lib" + +[dependencies] \ No newline at end of file diff --git a/compiler/wasm/test/fixtures/deps/lib-c/src/lib.nr b/compiler/wasm/test/fixtures/deps/lib-c/src/lib.nr new file mode 100644 index 00000000000..5c0b5a621e0 --- /dev/null +++ b/compiler/wasm/test/fixtures/deps/lib-c/src/lib.nr @@ -0,0 +1 @@ +mod module; \ No newline at end of file diff --git a/compiler/wasm/test/fixtures/deps/lib-c/src/module.nr b/compiler/wasm/test/fixtures/deps/lib-c/src/module.nr new file mode 100644 index 00000000000..2746c97edf0 --- /dev/null +++ b/compiler/wasm/test/fixtures/deps/lib-c/src/module.nr @@ -0,0 +1 @@ +mod foo; \ No newline at end of file diff --git a/compiler/wasm/test/fixtures/deps/lib-c/src/module/foo.nr b/compiler/wasm/test/fixtures/deps/lib-c/src/module/foo.nr new file mode 100644 index 00000000000..e0c82fb1960 --- /dev/null +++ b/compiler/wasm/test/fixtures/deps/lib-c/src/module/foo.nr @@ -0,0 +1,3 @@ +pub fn bar(param: Field) -> Field { + dep::std::hash::pedersen_hash([param]) +} diff --git a/compiler/wasm/test/fixtures/noir-contract/Nargo.toml b/compiler/wasm/test/fixtures/noir-contract/Nargo.toml new file mode 100644 index 00000000000..2e64f3ebc9c --- /dev/null +++ b/compiler/wasm/test/fixtures/noir-contract/Nargo.toml @@ -0,0 +1,8 @@ +[package] +name = "test" +authors = [""] +compiler_version = ">=0.18.0" +type = "contract" + +[dependencies] +test = { path = "../deps/lib-c" } diff --git a/compiler/wasm/test/fixtures/noir-contract/src/main.nr b/compiler/wasm/test/fixtures/noir-contract/src/main.nr new file mode 100644 index 00000000000..b980af369cf --- /dev/null +++ b/compiler/wasm/test/fixtures/noir-contract/src/main.nr @@ -0,0 +1,12 @@ +contract TestContract { + use dep::test::module::foo; + + fn constructor(param: Field, pub_param: pub Field) -> pub [Field; 2] { + [foo::bar(param), param + pub_param] + } + + open fn openFunction() -> pub Field { + 42 + } + 
+} diff --git a/compiler/wasm/fixtures/simple/noir-script/Nargo.toml b/compiler/wasm/test/fixtures/simple/Nargo.toml similarity index 100% rename from compiler/wasm/fixtures/simple/noir-script/Nargo.toml rename to compiler/wasm/test/fixtures/simple/Nargo.toml diff --git a/compiler/wasm/fixtures/simple/noir-script/src/main.nr b/compiler/wasm/test/fixtures/simple/src/main.nr similarity index 100% rename from compiler/wasm/fixtures/simple/noir-script/src/main.nr rename to compiler/wasm/test/fixtures/simple/src/main.nr diff --git a/compiler/wasm/test/fixtures/with-deps/Nargo.toml b/compiler/wasm/test/fixtures/with-deps/Nargo.toml new file mode 100644 index 00000000000..b7543525059 --- /dev/null +++ b/compiler/wasm/test/fixtures/with-deps/Nargo.toml @@ -0,0 +1,7 @@ +[package] +name="noir_wasm_testing" +type="bin" +authors = [""] + +[dependencies] +lib_a = { path="../deps/lib-a" } diff --git a/compiler/wasm/fixtures/deps/noir-script/src/main.nr b/compiler/wasm/test/fixtures/with-deps/src/main.nr similarity index 100% rename from compiler/wasm/fixtures/deps/noir-script/src/main.nr rename to compiler/wasm/test/fixtures/with-deps/src/main.nr diff --git a/compiler/wasm/test/node/index.test.ts b/compiler/wasm/test/node/index.test.ts deleted file mode 100644 index c0d5f88e407..00000000000 --- a/compiler/wasm/test/node/index.test.ts +++ /dev/null @@ -1,78 +0,0 @@ -import { expect } from 'chai'; -import { - depsScriptSourcePath, - depsScriptExpectedArtifact, - libASourcePath, - libBSourcePath, - simpleScriptSourcePath, - simpleScriptExpectedArtifact, -} from '../shared'; -import { readFileSync } from 'node:fs'; -import { join, resolve } from 'node:path'; -import { compile } from '@noir-lang/noir_wasm'; -import { initializeResolver } from '@noir-lang/source-resolver'; - -// eslint-disable-next-line @typescript-eslint/no-explicit-any -async function getPrecompiledSource(path: string): Promise { - const compiledData = readFileSync(resolve(__dirname, path)).toString(); - return JSON.parse(compiledData); -} - -describe('noir wasm compilation', () => { - describe('can compile simple scripts', () => { - it('matching nargos compilation', async () => { - const wasmCircuit = await compile(join(__dirname, simpleScriptSourcePath)); - const cliCircuit = await getPrecompiledSource(simpleScriptExpectedArtifact); - - if (!('program' in wasmCircuit)) { - throw Error('Expected program to be present'); - } - - // We don't expect the hashes to match due to how `noir_wasm` handles dependencies - expect(wasmCircuit.program.bytecode).to.eq(cliCircuit.bytecode); - expect(wasmCircuit.program.abi).to.deep.eq(cliCircuit.abi); - expect(wasmCircuit.program.backend).to.eq(cliCircuit.backend); - }).timeout(10e3); - }); - - describe('can compile scripts with dependencies', () => { - beforeEach(() => { - // this test requires a custom resolver in order to correctly resolve dependencies - initializeResolver((file) => { - switch (file) { - case '/script/main.nr': - return readFileSync(join(__dirname, depsScriptSourcePath), 'utf-8'); - - case '/lib_a/lib.nr': - return readFileSync(join(__dirname, libASourcePath), 'utf-8'); - - case '/lib_b/lib.nr': - return readFileSync(join(__dirname, libBSourcePath), 'utf-8'); - - default: - return ''; - } - }); - }); - - it('matching nargos compilation', async () => { - const wasmCircuit = await compile('/script/main.nr', false, { - root_dependencies: ['lib_a'], - library_dependencies: { - lib_a: ['lib_b'], - }, - }); - - const cliCircuit = await getPrecompiledSource(depsScriptExpectedArtifact); - - if 
(!('program' in wasmCircuit)) { - throw Error('Expected program to be present'); - } - - // We don't expect the hashes to match due to how `noir_wasm` handles dependencies - expect(wasmCircuit.program.bytecode).to.eq(cliCircuit.bytecode); - expect(wasmCircuit.program.abi).to.deep.eq(cliCircuit.abi); - expect(wasmCircuit.program.backend).to.eq(cliCircuit.backend); - }).timeout(10e3); - }); -}); diff --git a/compiler/wasm/test/shared.ts b/compiler/wasm/test/shared.ts index 6fc370f7ac8..9181919ff39 100644 --- a/compiler/wasm/test/shared.ts +++ b/compiler/wasm/test/shared.ts @@ -1,8 +1,41 @@ -export const simpleScriptSourcePath = '../../fixtures/simple/noir-script/src/main.nr'; -export const simpleScriptExpectedArtifact = '../../fixtures/simple/noir-script/target/noir_wasm_testing.json'; +export function getPaths(basePath: string) { + const fixtures = `${basePath}/fixtures`; -export const depsScriptSourcePath = '../../fixtures/deps/noir-script/src/main.nr'; -export const depsScriptExpectedArtifact = '../../fixtures/deps/noir-script/target/noir_wasm_testing.json'; + const simpleScriptSourcePath = `${fixtures}/simple/src/main.nr`; + const simpleScriptExpectedArtifact = `${fixtures}/simple/target/noir_wasm_testing.json`; -export const libASourcePath = '../../fixtures/deps/lib-a/src/lib.nr'; -export const libBSourcePath = '../../fixtures/deps/lib-b/src/lib.nr'; + const depsScriptSourcePath = `${fixtures}/with-deps/src/main.nr`; + const depsScriptExpectedArtifact = `${fixtures}/with-deps/target/noir_wasm_testing.json`; + + const libASourcePath = `${fixtures}/deps/lib-a/src/lib.nr`; + const libBSourcePath = `${fixtures}/deps/lib-b/src/lib.nr`; + + const contractProjectPath = `${fixtures}/noir-contract`; + const contractSourcePath = `${contractProjectPath}/src/main.nr`; + const contractTOMLPath = `${contractProjectPath}/Nargo.toml`; + const contractExpectedArtifact = `${contractProjectPath}/target/test-TestContract.json`; + + const libCProjectPath = `${fixtures}/deps/lib-c`; + const libCSourcePath = `${libCProjectPath}/src/lib.nr`; + const libCModulePath = `${libCProjectPath}/src/module.nr`; + const libCModuleSourcePath = `${libCProjectPath}/src/module/foo.nr`; + const libCTOMLPath = `${libCProjectPath}/Nargo.toml`; + + return { + simpleScriptSourcePath, + simpleScriptExpectedArtifact, + depsScriptSourcePath, + depsScriptExpectedArtifact, + libASourcePath, + libBSourcePath, + contractProjectPath, + contractSourcePath, + contractTOMLPath, + contractExpectedArtifact, + libCProjectPath, + libCSourcePath, + libCModulePath, + libCModuleSourcePath, + libCTOMLPath, + }; +} diff --git a/compiler/wasm/test/wasm/browser/index.test.ts b/compiler/wasm/test/wasm/browser/index.test.ts new file mode 100644 index 00000000000..3122fa57945 --- /dev/null +++ b/compiler/wasm/test/wasm/browser/index.test.ts @@ -0,0 +1,159 @@ +/* eslint-disable @typescript-eslint/ban-ts-comment */ +import { getPaths } from '../../shared'; +import { expect } from '@esm-bundle/chai'; + +import init, { compile, PathToFileSourceMap, compile_, CompilerContext } from '../../../build/esm'; + +// @ts-ignore +await init(); + +const { + simpleScriptSourcePath, + simpleScriptExpectedArtifact, + depsScriptSourcePath, + depsScriptExpectedArtifact, + libASourcePath, + libBSourcePath, +} = getPaths('.'); + +async function getFileAsString(path: string) { + // @ts-ignore + const basePath = new URL('./../../', import.meta.url).toString().replace(/\/$/g, ''); + const url = `${basePath}${path.replace('.', '')}`; + const response = await fetch(url); + return 
response.text(); +} + +// eslint-disable-next-line @typescript-eslint/no-explicit-any +async function getPrecompiledSource(path: string): Promise { + const response = await getFileAsString(path); + return JSON.parse(response); +} + +describe('noir wasm compilation', () => { + describe('can compile simple scripts', () => { + it('matching nargos compilation', async () => { + const sourceMap = new PathToFileSourceMap(); + sourceMap.add_source_code('script/main.nr', await getFileAsString(simpleScriptSourcePath)); + const wasmCircuit = compile('script/main.nr', undefined, undefined, sourceMap); + const cliCircuit = await getPrecompiledSource(simpleScriptExpectedArtifact); + + if (!('program' in wasmCircuit)) { + throw Error('Expected program to be present'); + } + + // We don't expect the hashes to match due to how `noir_wasm` handles dependencies + expect(wasmCircuit.program.bytecode).to.eq(cliCircuit.bytecode); + expect(wasmCircuit.program.abi).to.deep.eq(cliCircuit.abi); + expect(wasmCircuit.program.noir_version).to.eq(cliCircuit.noir_version); + }).timeout(60 * 20e3); + }); + + describe('can compile scripts with dependencies', () => { + const sourceMap = new PathToFileSourceMap(); + beforeEach(async () => { + sourceMap.add_source_code('script/main.nr', await getFileAsString(depsScriptSourcePath)); + sourceMap.add_source_code('lib_a/lib.nr', await getFileAsString(libASourcePath)); + sourceMap.add_source_code('lib_b/lib.nr', await getFileAsString(libBSourcePath)); + }); + + it('matching nargos compilation', async () => { + const wasmCircuit = compile( + 'script/main.nr', + false, + { + root_dependencies: ['lib_a'], + library_dependencies: { + lib_a: ['lib_b'], + }, + }, + sourceMap, + ); + + const cliCircuit = await getPrecompiledSource(depsScriptExpectedArtifact); + + if (!('program' in wasmCircuit)) { + throw Error('Expected program to be present'); + } + + // We don't expect the hashes to match due to how `noir_wasm` handles dependencies + expect(wasmCircuit.program.bytecode).to.eq(cliCircuit.bytecode); + expect(wasmCircuit.program.abi).to.deep.eq(cliCircuit.abi); + expect(wasmCircuit.program.noir_version).to.eq(cliCircuit.noir_version); + }).timeout(60 * 20e3); + }); + + describe('can compile scripts with dependencies -- context-api', () => { + let sourceMap: PathToFileSourceMap; + beforeEach(async () => { + sourceMap = new PathToFileSourceMap(); + sourceMap.add_source_code('script/main.nr', await getFileAsString(depsScriptSourcePath)); + sourceMap.add_source_code('lib_a/lib.nr', await getFileAsString(libASourcePath)); + sourceMap.add_source_code('lib_b/lib.nr', await getFileAsString(libBSourcePath)); + }); + + it('matching nargos compilation - context-api', async () => { + const compilerContext = new CompilerContext(sourceMap); + + // Process root crate + const root_crate_id = compilerContext.process_root_crate('script/main.nr'); + // Process dependencies + // + // This can be direct dependencies or transitive dependencies + // I have named these crate_id_1 and crate_id_2 instead of `lib_a_crate_id` and `lib_b_crate_id` + // because the names of crates in a dependency graph are not determined by the actual package. + // + // It is true that each package is given a name, but if I include a `lib_a` as a dependency + // in my library, I do not need to refer to it as `lib_a` in my dependency graph. 
+ // See https://doc.rust-lang.org/cargo/reference/specifying-dependencies.html#renaming-dependencies-in-cargotoml + // + // If you have looked at graphs before, then you can think of the dependency graph as a directed acyclic graph (DAG) + const crate_id_1 = compilerContext.process_dependency_crate('lib_a/lib.nr'); + const crate_id_2 = compilerContext.process_dependency_crate('lib_b/lib.nr'); + + // Root crate depends on `crate_id_1` and this edge is called `lib_a` + compilerContext.add_dependency_edge('lib_a', root_crate_id, crate_id_1); + // `crate_id_1` depends on `crate_id_2` and this edge is called `lib_b` + compilerContext.add_dependency_edge('lib_b', crate_id_1, crate_id_2); + + const program_width = 3; + const wasmCircuit = await compilerContext.compile_program(program_width); + + const cliCircuit = await getPrecompiledSource(depsScriptExpectedArtifact); + + if (!('program' in wasmCircuit)) { + throw Error('Expected program to be present'); + } + + // We don't expect the hashes to match due to how `noir_wasm` handles dependencies + expect(wasmCircuit.program.noir_version).to.eq(cliCircuit.noir_version); + expect(wasmCircuit.program.abi).to.deep.eq(cliCircuit.abi); + expect(wasmCircuit.program.bytecode).to.eq(cliCircuit.bytecode); + }).timeout(60 * 20e3); + + it('matching nargos compilation - context-implementation-compile-api', async () => { + const wasmCircuit = await compile_( + 'script/main.nr', + false, + { + root_dependencies: ['lib_a'], + library_dependencies: { + lib_a: ['lib_b'], + }, + }, + sourceMap, + ); + + const cliCircuit = await getPrecompiledSource(depsScriptExpectedArtifact); + + if (!('program' in wasmCircuit)) { + throw Error('Expected program to be present'); + } + + // We don't expect the hashes to match due to how `noir_wasm` handles dependencies + expect(wasmCircuit.program.noir_version).to.eq(cliCircuit.noir_version); + expect(wasmCircuit.program.abi).to.deep.eq(cliCircuit.abi); + expect(wasmCircuit.program.bytecode).to.eq(cliCircuit.bytecode); + }).timeout(60 * 20e3); + }); +}); diff --git a/compiler/wasm/test/wasm/node/index.test.ts b/compiler/wasm/test/wasm/node/index.test.ts new file mode 100644 index 00000000000..c73ce7477e5 --- /dev/null +++ b/compiler/wasm/test/wasm/node/index.test.ts @@ -0,0 +1,150 @@ +import { getPaths } from '../../shared'; +import { readFileSync } from 'fs'; +import { join, resolve } from 'path'; +import { expect } from 'chai'; + +import { compile, PathToFileSourceMap, compile_, CompilerContext } from '../../../build/cjs'; + +const basePath = resolve(join(__dirname, '../../')); +const { + simpleScriptSourcePath, + simpleScriptExpectedArtifact, + depsScriptSourcePath, + depsScriptExpectedArtifact, + libASourcePath, + libBSourcePath, +} = getPaths(basePath); + +// eslint-disable-next-line @typescript-eslint/no-explicit-any +async function getPrecompiledSource(path: string): Promise { + const compiledData = readFileSync(resolve(__dirname, path)).toString(); + return JSON.parse(compiledData); +} + +describe('noir wasm compilation', () => { + describe('can compile simple scripts', () => { + it('matching nargos compilation', async () => { + const sourceMap = new PathToFileSourceMap(); + sourceMap.add_source_code(simpleScriptSourcePath, readFileSync(simpleScriptSourcePath, 'utf-8')); + const wasmCircuit = compile(simpleScriptSourcePath, undefined, undefined, sourceMap); + const cliCircuit = await getPrecompiledSource(simpleScriptExpectedArtifact); + + if (!('program' in wasmCircuit)) { + throw Error('Expected program to be present'); + 
} + + // We don't expect the hashes to match due to how `noir_wasm` handles dependencies + expect(wasmCircuit.program.bytecode).to.eq(cliCircuit.bytecode); + expect(wasmCircuit.program.abi).to.deep.eq(cliCircuit.abi); + expect(wasmCircuit.program.noir_version).to.eq(cliCircuit.noir_version); + }).timeout(60 * 20e3); + }); + + describe('can compile scripts with dependencies', () => { + const sourceMap = new PathToFileSourceMap(); + beforeEach(() => { + sourceMap.add_source_code('script/main.nr', readFileSync(depsScriptSourcePath, 'utf-8')); + sourceMap.add_source_code('lib_a/lib.nr', readFileSync(libASourcePath, 'utf-8')); + sourceMap.add_source_code('lib_b/lib.nr', readFileSync(libBSourcePath, 'utf-8')); + }); + + it('matching nargos compilation', async () => { + const wasmCircuit = compile( + 'script/main.nr', + false, + { + root_dependencies: ['lib_a'], + library_dependencies: { + lib_a: ['lib_b'], + }, + }, + sourceMap, + ); + + const cliCircuit = await getPrecompiledSource(depsScriptExpectedArtifact); + + if (!('program' in wasmCircuit)) { + throw Error('Expected program to be present'); + } + + // We don't expect the hashes to match due to how `noir_wasm` handles dependencies + expect(wasmCircuit.program.bytecode).to.eq(cliCircuit.bytecode); + expect(wasmCircuit.program.abi).to.deep.eq(cliCircuit.abi); + expect(wasmCircuit.program.noir_version).to.eq(cliCircuit.noir_version); + }).timeout(60 * 20e3); + }); + + describe('can compile scripts with dependencies -- context-api', () => { + let sourceMap: PathToFileSourceMap; + beforeEach(() => { + sourceMap = new PathToFileSourceMap(); + sourceMap.add_source_code('script/main.nr', readFileSync(depsScriptSourcePath, 'utf-8')); + sourceMap.add_source_code('lib_a/lib.nr', readFileSync(libASourcePath, 'utf-8')); + sourceMap.add_source_code('lib_b/lib.nr', readFileSync(libBSourcePath, 'utf-8')); + }); + + it('matching nargos compilation - context-api', async () => { + const compilerContext = new CompilerContext(sourceMap); + + // Process root crate + const root_crate_id = compilerContext.process_root_crate('script/main.nr'); + // Process dependencies + // + // This can be direct dependencies or transitive dependencies + // I have named these crate_id_1 and crate_id_2 instead of `lib_a_crate_id` and `lib_b_crate_id` + // because the names of crates in a dependency graph are not determined by the actual package. + // + // It is true that each package is given a name, but if I include a `lib_a` as a dependency + // in my library, I do not need to refer to it as `lib_a` in my dependency graph. 
+ // See https://doc.rust-lang.org/cargo/reference/specifying-dependencies.html#renaming-dependencies-in-cargotoml + // + // If you have looked at graphs before, then you can think of the dependency graph as a directed acyclic graph (DAG) + const crate_id_1 = compilerContext.process_dependency_crate('lib_a/lib.nr'); + const crate_id_2 = compilerContext.process_dependency_crate('lib_b/lib.nr'); + + // Root crate depends on `crate_id_1` and this edge is called `lib_a` + compilerContext.add_dependency_edge('lib_a', root_crate_id, crate_id_1); + // `crate_id_1` depends on `crate_id_2` and this edge is called `lib_b` + compilerContext.add_dependency_edge('lib_b', crate_id_1, crate_id_2); + + const program_width = 3; + const wasmCircuit = await compilerContext.compile_program(program_width); + + const cliCircuit = await getPrecompiledSource(depsScriptExpectedArtifact); + + if (!('program' in wasmCircuit)) { + throw Error('Expected program to be present'); + } + + // We don't expect the hashes to match due to how `noir_wasm` handles dependencies + expect(wasmCircuit.program.noir_version).to.eq(cliCircuit.noir_version); + expect(wasmCircuit.program.abi).to.deep.eq(cliCircuit.abi); + expect(wasmCircuit.program.bytecode).to.eq(cliCircuit.bytecode); + }).timeout(60 * 20e3); + + it('matching nargos compilation - context-implementation-compile-api', async () => { + const wasmCircuit = await compile_( + 'script/main.nr', + false, + { + root_dependencies: ['lib_a'], + library_dependencies: { + lib_a: ['lib_b'], + }, + }, + sourceMap, + ); + + const cliCircuit = await getPrecompiledSource(depsScriptExpectedArtifact); + + if (!('program' in wasmCircuit)) { + throw Error('Expected program to be present'); + } + + // We don't expect the hashes to match due to how `noir_wasm` handles dependencies + expect(wasmCircuit.program.noir_version).to.eq(cliCircuit.noir_version); + expect(wasmCircuit.program.abi).to.deep.eq(cliCircuit.abi); + expect(wasmCircuit.program.bytecode).to.eq(cliCircuit.bytecode); + }).timeout(60 * 20e3); + }); +}); diff --git a/compiler/wasm/tsconfig.esm.json b/compiler/wasm/tsconfig.esm.json new file mode 100644 index 00000000000..5826fee7c52 --- /dev/null +++ b/compiler/wasm/tsconfig.esm.json @@ -0,0 +1,6 @@ +{ + "extends": "./tsconfig.webpack.json", + "compilerOptions": { + "module": "ESNext", + }, +} \ No newline at end of file diff --git a/compiler/wasm/tsconfig.json b/compiler/wasm/tsconfig.json index eef2ad84833..6096b419d78 100644 --- a/compiler/wasm/tsconfig.json +++ b/compiler/wasm/tsconfig.json @@ -1,17 +1,20 @@ { "compilerOptions": { - "moduleResolution": "node", - "outDir": "lib", - "target": "ESNext", - "module": "ESNext", + "declaration": true, + "declarationDir": "dist/types", "strict": true, + "baseUrl": ".", "experimentalDecorators": true, - "esModuleInterop": true, "noImplicitAny": true, "removeComments": false, "preserveConstEnums": true, "sourceMap": true, - "resolveJsonModule": true, - "importHelpers": true + "importHelpers": true, + "allowSyntheticDefaultImports": true, + "esModuleInterop": true, + "target": "ESNext", + "module": "CommonJS", + "moduleResolution": "Node", + "allowJs": true, } } \ No newline at end of file diff --git a/compiler/wasm/tsconfig.webpack.json b/compiler/wasm/tsconfig.webpack.json new file mode 100644 index 00000000000..dd1c218a352 --- /dev/null +++ b/compiler/wasm/tsconfig.webpack.json @@ -0,0 +1,7 @@ +{ + "extends": "./tsconfig.json", + "exclude": [ + "./test/**/*", + "node_modules" + ] +} \ No newline at end of file diff --git 
a/compiler/wasm/wasm-opt-check.sh b/compiler/wasm/wasm-opt-check.sh new file mode 100755 index 00000000000..8612b48d3f1 --- /dev/null +++ b/compiler/wasm/wasm-opt-check.sh @@ -0,0 +1 @@ +command -v wasm-opt >/dev/null 2>&1 && echo "true" || { echo >&2 "wasm-opt is not installed, building in dev mode" && echo "false"; exit 1; } \ No newline at end of file diff --git a/compiler/wasm/web-test-runner.config.mjs b/compiler/wasm/web-test-runner.config.mjs index 3fd65d56618..4d05c95f7d6 100644 --- a/compiler/wasm/web-test-runner.config.mjs +++ b/compiler/wasm/web-test-runner.config.mjs @@ -1,14 +1,14 @@ -import { defaultReporter } from "@web/test-runner"; -import { summaryReporter } from "@web/test-runner"; -import { fileURLToPath } from "url"; -import { esbuildPlugin } from "@web/dev-server-esbuild"; -import { playwrightLauncher } from "@web/test-runner-playwright"; +import { defaultReporter } from '@web/test-runner'; +import { summaryReporter } from '@web/test-runner'; +import { fileURLToPath } from 'url'; +import { esbuildPlugin } from '@web/dev-server-esbuild'; +import { playwrightLauncher } from '@web/test-runner-playwright'; const reporter = process.env.CI ? summaryReporter() : defaultReporter(); export default { browsers: [ - playwrightLauncher({ product: "chromium" }), + playwrightLauncher({ product: 'chromium' }), // playwrightLauncher({ product: "webkit" }), // playwrightLauncher({ product: "firefox" }), ], @@ -17,15 +17,8 @@ export default { ts: true, }), ], - files: ["test/browser/**/*.test.ts"], + files: ['./test/**/browser/*.test.ts'], nodeResolve: true, - testFramework: { - config: { - ui: "bdd", - timeout: 40000, - }, - }, - rootDir: fileURLToPath(new URL('./../../', import.meta.url)), + rootDir: fileURLToPath(new URL('./../../', import.meta.url)), reporters: [reporter], - }; diff --git a/compiler/wasm/webpack.config.ts b/compiler/wasm/webpack.config.ts new file mode 100644 index 00000000000..d5d70df2b8a --- /dev/null +++ b/compiler/wasm/webpack.config.ts @@ -0,0 +1,134 @@ +import { resolve, join } from 'path'; +import webpack from 'webpack'; +import 'webpack-dev-server'; +import WasmPackPlugin from '@wasm-tool/wasm-pack-plugin'; +import HtmlWebpackPlugin from 'html-webpack-plugin'; +import CopyWebpackPlugin from 'copy-webpack-plugin'; + +const config: webpack.Configuration = { + output: { + path: resolve(__dirname, 'dist'), + }, + mode: 'development', + devtool: 'source-map', + optimization: { + minimize: false, + }, + resolve: { + extensions: ['.cts', '.mts', '.ts', '.js', '.json', '.wasm'], + fallback: { + path: require.resolve('path-browserify'), + stream: require.resolve('readable-stream'), + fs: require.resolve('browserify-fs'), + buffer: require.resolve('buffer'), + }, + }, +}; + +const webConfig: webpack.Configuration = { + name: 'web', + entry: './src/index.mts', + ...config, + experiments: { asyncWebAssembly: true, outputModule: true }, + output: { + filename: 'main.mjs', + ...config.output, + path: resolve(__dirname, 'dist/web'), + library: { + type: 'module', + }, + }, + plugins: [ + new WasmPackPlugin({ + crateDirectory: resolve(__dirname), + outDir: resolve(__dirname, './build/esm'), + extraArgs: '--target web', + forceMode: process.env.WASM_OPT === 'true' ? 
'production' : 'development', + }), + new HtmlWebpackPlugin({ + title: 'Noir Wasm ESM', + }), + new webpack.DefinePlugin({ + 'process.env.NODE_DEBUG': JSON.stringify(process.env.NODE_DEBUG), + }), + new webpack.ProvidePlugin({ + process: 'process/browser', + }), + new webpack.ProvidePlugin({ + Buffer: ['buffer', 'Buffer'], + }), + ], + module: { + rules: [ + { + test: /.m?ts$/, + loader: 'ts-loader', + options: { + configFile: 'tsconfig.esm.json', + }, + exclude: /node_modules/, + }, + { + test: /\.wasm$/, + type: 'asset/inline', + }, + ], + }, + devServer: { + static: join(__dirname, 'dist'), + }, + resolve: { + ...config.resolve, + alias: { + fs: 'memfs', + }, + }, +}; + +const nodeConfig: webpack.Configuration = { + name: 'node', + entry: './src/index.cts', + ...config, + output: { + ...config.output, + path: resolve(__dirname, 'dist/node'), + library: { + type: 'commonjs2', + }, + }, + target: 'node', + plugins: [ + new WasmPackPlugin({ + crateDirectory: resolve(__dirname), + outDir: resolve(__dirname, './build/cjs'), + extraArgs: '--target nodejs', + forceMode: process.env.WASM_OPT === 'true' ? 'production' : 'development', + }), + new CopyWebpackPlugin({ + patterns: [ + { + from: resolve(__dirname, './build/cjs/index_bg.wasm'), + to: resolve(__dirname, 'dist/node/index_bg.wasm'), + }, + ], + }), + ], + module: { + rules: [ + { + test: /.c?ts$/, + loader: 'ts-loader', + options: { + configFile: 'tsconfig.webpack.json', + }, + exclude: /node_modules/, + }, + { + test: /\.wasm$/, + type: 'webassembly/async', + }, + ], + }, +}; + +export default [webConfig, nodeConfig]; diff --git a/cspell.json b/cspell.json index 90d3963566d..6864e863740 100644 --- a/cspell.json +++ b/cspell.json @@ -1,34 +1,53 @@ { "version": "0.2", + "dictionaries": [ + "rust" + ], "words": [ "aarch", "acir", "acvm", "aeiou", + "appender", + "Arbitrum", "arithmetization", "arity", "arkworks", "arraysort", + "barebones", "barretenberg", "bincode", "bindgen", "bitand", "blackbox", + "bridgekeeper", "brillig", + "bytecount", "cachix", "callsite", "callsites", + "callstack", + "callstacks", "canonicalize", "castable", + "catmcgee", + "Celo", "chumsky", - "clippy", "codegen", + "codegenned", "codegens", + "Codespaces", "codespan", "coeff", "combinators", "comptime", + "cpus", "cranelift", + "critesjosh", + "csat", + "curvegroup", + "databus", + "deflater", "deflatten", "deflattened", "deflattening", @@ -39,26 +58,31 @@ "desugared", "direnv", "eddsa", + "Elligator", "endianness", "envrc", "Flamegraph", "flate", "fmtstr", "foldl", + "foos", "forall", "foralls", "formatcp", + "frontends", "fxhash", "getrandom", "gloo", "grumpkin", "Guillaume", + "gzipped", "hasher", "hexdigit", "higher-kinded", "Hindley-Milner", "idents", "impls", + "indexmap", "injective", "Inlines", "interner", @@ -67,46 +91,68 @@ "jmpif", "jmpifs", "jmps", + "jsdoc", + "Jubjub", "keccak", + "keccakf", "krate", "lvalue", + "Maddiaa", + "mathbb", "merkle", "metas", + "minreq", "monomorphization", "monomorphize", "monomorphized", "monomorphizer", "monomorphizes", + "monomorphizing", + "montcurve", "nand", "nargo", + "neovim", + "newtype", + "nightlies", "nixpkgs", "noirc", + "noirfmt", + "noirjs", "noirup", + "nomicfoundation", + "noncanonical", "pedersen", "peekable", "plonkc", + "PLONKish", "pprof", "preprocess", "prettytable", "printstd", "pseudocode", + "pubkey", "quantile", + "repr", + "reqwest", + "rfind", "rustc", "rustup", "schnorr", "sdiv", "secp256k1", "secp256r1", - "serde", "signedness", + "signorecello", "smol", "splitn", "srem", "stdlib", - 
"struct", + "structs", "subexpression", "subshell", "subtyping", + "swcurve", + "tecurve", "tempdir", "tempfile", "termcolor", @@ -121,7 +167,15 @@ "unnormalized", "unoptimized", "urem", + "USERPROFILE", "vecmap", - "wasi" + "wasi", + "wasmer", + "Weierstraß", + "zshell" + ], + "ignorePaths": [ + "./**/node_modules/**", + "./**/target/**" ] } diff --git a/deny.toml b/deny.toml index d9ffd4d37f0..5edce08fb70 100644 --- a/deny.toml +++ b/deny.toml @@ -67,6 +67,7 @@ exceptions = [ # so we prefer to not have dependencies using it # https://tldrlegal.com/license/creative-commons-cc0-1.0-universal { allow = ["CC0-1.0"], name = "more-asserts" }, + { allow = ["CC0-1.0"], name = "jsonrpc" }, { allow = ["MPL-2.0"], name = "sized-chunks" }, { allow = ["MPL-2.0"], name = "webpki-roots" }, diff --git a/docs/.eslintignore b/docs/.eslintignore new file mode 100644 index 00000000000..dd87e2d73f9 --- /dev/null +++ b/docs/.eslintignore @@ -0,0 +1,2 @@ +node_modules +build diff --git a/docs/.gitignore b/docs/.gitignore index e76d8caf6bc..4f6eee8284e 100644 --- a/docs/.gitignore +++ b/docs/.gitignore @@ -7,7 +7,7 @@ # Generated files .docusaurus .cache-loader -/docs/noir_js/reference/ +/docs/docs/reference/NoirJS # Misc .DS_Store @@ -21,4 +21,5 @@ npm-debug.log* yarn-debug.log* yarn-error.log* -package-lock.json \ No newline at end of file +package-lock.json +versions.json diff --git a/docs/.prettierrc b/docs/.prettierrc deleted file mode 100644 index 6031f40b267..00000000000 --- a/docs/.prettierrc +++ /dev/null @@ -1,6 +0,0 @@ -{ - "arrowParens": "avoid", - "singleQuote": true, - "trailingComma": "all", - "printWidth": 100 -} diff --git a/docs/babel.config.js b/docs/babel.config.js deleted file mode 100644 index e00595dae7d..00000000000 --- a/docs/babel.config.js +++ /dev/null @@ -1,3 +0,0 @@ -module.exports = { - presets: [require.resolve('@docusaurus/core/lib/babel/preset')], -}; diff --git a/docs/docs/explainers/explainer-recursion.md b/docs/docs/explainers/explainer-recursion.md new file mode 100644 index 00000000000..8f992ec29fd --- /dev/null +++ b/docs/docs/explainers/explainer-recursion.md @@ -0,0 +1,177 @@ +--- +title: Recursive proofs +description: Explore the concept of recursive proofs in Zero-Knowledge programming. Understand how recursion works in Noir, a language for writing smart contracts on the EVM blockchain. Learn through practical examples like Alice and Bob's guessing game, Charlie's recursive merkle tree, and Daniel's reusable components. Discover how to use recursive proofs to optimize computational resources and improve efficiency. + +keywords: + [ + "Recursive Proofs", + "Zero-Knowledge Programming", + "Noir", + "EVM Blockchain", + "Smart Contracts", + "Recursion in Noir", + "Alice and Bob Guessing Game", + "Recursive Merkle Tree", + "Reusable Components", + "Optimizing Computational Resources", + "Improving Efficiency", + "Verification Key", + "Aggregation Objects", + "Recursive zkSNARK schemes", + "PLONK", + "Proving and Verification Keys" + ] +sidebar_position: 1 +--- + +In programming, we tend to think of recursion as something calling itself. A classic example would be the calculation of the factorial of a number: + +```js +function factorial(n) { + if (n === 0 || n === 1) { + return 1; + } else { + return n * factorial(n - 1); + } +} +``` + +In this case, while `n` is not `1`, this function will keep calling itself until it hits the base case, bubbling up the result on the call stack: + +```md + Is `n` 1? 
<--------- + /\ / + / \ n = n -1 + / \ / + Yes No -------- +``` + +In Zero-Knowledge, recursion has some similarities. + +It is not a Noir function calling itself, but a proof being used as an input to another circuit. In short, you verify one proof *inside* another proof, returning the proof that both proofs are valid. + +This means that, given enough computational resources, you can prove the correctness of any arbitrary number of proofs in a single proof. This could be useful to design state channels (for which a common example would be [Bitcoin's Lightning Network](https://en.wikipedia.org/wiki/Lightning_Network)), to save on gas costs by settling one proof on-chain, or simply to make business logic less dependent on a consensus mechanism. + +## Examples + +Let us look at some of these examples + +### Alice and Bob - Guessing game + +Alice and Bob are friends, and they like guessing games. They want to play a guessing game online, but for that, they need a trusted third-party that knows both of their secrets and finishes the game once someone wins. + +So, they use zero-knowledge proofs. Alice tries to guess Bob's number, and Bob will generate a ZK proof stating whether she succeeded or failed. + +This ZK proof can go on a smart contract, revealing the winner and even giving prizes. However, this means every turn needs to be verified on-chain. This incurs some cost and waiting time that may simply make the game too expensive or time-consuming to be worth it. + +As a solution, Alice proposes the following: "what if Bob generates his proof, and instead of sending it on-chain, I verify it *within* my own proof before playing my own turn?". + +She can then generate a proof that she verified his proof, and so on. + +```md + Did you fail? <-------------------------- + / \ / + / \ n = n -1 + / \ / + Yes No / + | | / + | | / + | You win / + | / + | / +Generate proof of that / + + / + my own guess ---------------- +``` + +### Charlie - Recursive merkle tree + +Charlie is a concerned citizen, and wants to be sure his vote in an election is accounted for. He votes with a ZK proof, but he has no way of knowing that his ZK proof was included in the total vote count! + +If the vote collector puts all of the votes into a [Merkle tree](https://en.wikipedia.org/wiki/Merkle_tree), everyone can prove the verification of two proofs within one proof, as such: + +```md + abcd + __________|______________ + | | + ab cd + _____|_____ ______|______ + | | | | + alice bob charlie daniel +``` + +Doing this recursively allows us to arrive on a final proof `abcd` which if true, verifies the correctness of all the votes. + +### Daniel - Reusable components + +Daniel has a big circuit and a big headache. A part of his circuit is a setup phase that finishes with some assertions that need to be made. But that section alone takes most of the proving time, and is largely independent of the rest of the circuit. + +He might find it more efficient to generate a proof for that setup phase separately, and verify that proof recursively in the actual business logic section of his circuit. This will allow for parallelization of both proofs, which results in a considerable speedup. 
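+
+To make these patterns more concrete, below is a minimal sketch of what the recursive part of such a circuit could look like. It assumes the `std::verify_proof` interface described in the [recursion reference](noir/standard_library/recursion.md); the array lengths are only illustrative and depend on the proving backend, so treat this as a sketch rather than copy-paste code.
+
+```rust
+use dep::std;
+
+// Sketch only: verify one proof inside this circuit, then run our own logic.
+// The array lengths below are illustrative; the real values depend on the backend.
+fn main(
+    verification_key: [Field; 114],
+    proof: [Field; 94],
+    public_inputs: [Field; 1],
+    key_hash: Field,
+    input_aggregation_object: [Field; 16]
+) -> pub [Field; 16] {
+    // "Recursive verification" section: check the other party's proof.
+    let output_aggregation_object = std::verify_proof(
+        verification_key.as_slice(),
+        proof.as_slice(),
+        public_inputs.as_slice(),
+        key_hash,
+        input_aggregation_object
+    );
+
+    // The circuit's own logic (e.g. the "guessing" section) would go here.
+
+    // Hand the aggregation object to whoever verifies this proof next.
+    let mut output = [0; 16];
+    for i in 0..16 {
+        output[i] = output_aggregation_object[i];
+    }
+    output
+}
+```
+
+The important part is that the aggregation object is threaded through: it comes in as an input and goes out as a public output.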
+
+## What params do I need
+
+As you can see in the [recursion reference](noir/standard_library/recursion.md), a simple recursive proof requires:
+
+- The proof to verify
+- The Verification Key of the circuit that generated the proof
+- A hash of this verification key, as it's needed for some backends
+- The public inputs for the proof
+- The input aggregation object
+
+The verification call also returns the `output aggregation object`. These aggregation objects can be confusing at times, so let's dive in a little bit.
+
+### Aggregation objects
+
+Recursive zkSNARK schemes do not necessarily "verify a proof" in the sense that you expect a true or false to be spit out by the verifier. Rather, an aggregation object is built over the public inputs.
+
+In the case of PLONK, the recursive aggregation object is two G1 points (expressed as 16 witness values). The final verifier (in our case this is most often the smart contract verifier) has to be aware of this aggregation object to execute a pairing and check the validity of these points.
+
+So, taking the example of Alice and Bob and their guessing game:
+
+- Alice makes her guess. Her proof is *not* recursive: it doesn't verify any proof within it! It's just a standard `assert(x != y)` circuit.
+- Bob verifies Alice's proof and makes his own guess. In this circuit, he is verifying a proof, so it needs to output an `aggregation object`: he is generating a recursive proof!
+- Alice verifies Bob's *recursive proof*, and uses Bob's `output aggregation object` as the `input aggregation object` in her proof... Which in turn, generates another `output aggregation object`.
+
+Notice that when Bob generates his first proof, he is not verifying a recursive proof, so he has no `input aggregation object` to pass in. In this case, he may use zeros instead.
+
+We can imagine the `aggregation object` as the baton in a [relay race](https://en.wikipedia.org/wiki/Relay_race). The first runner doesn't have to receive the baton from anyone else, as he/she already starts with it. But when his/her turn is over, the next runner needs to receive it, run a bit more, and pass it along. Even though every runner could theoretically verify the baton mid-run (why not? 🏃🔍), only at the end of the race does the referee verify that the whole race is valid.
+
+## Some architecture
+
+As with everything in computer science, there's no one-size-fits-all. But there are some patterns that can help with understanding and implementing them. To give three examples:
+
+### Adding some logic to a proof verification
+
+This would be an approach for something like our guessing game, where proofs are sent back and forth and are verified by each opponent. This circuit would be divided into two sections:
+
+- A `recursive verification` section, which would be just the call to `std::verify_proof`, and that would be skipped on the first move (since there's no proof to verify)
+- A `guessing` section, which is basically the logic part where the actual guessing happens
+
+In such a situation, and assuming Alice is first, she would skip the first part and try to guess Bob's number. Bob would then verify her proof on the first section of his run, and try to guess Alice's number on the second part, and so on.
+
+### Aggregating proofs
+
+In some one-way interaction situations, recursion would allow for aggregation of simple proofs that don't need to be immediately verified on-chain or elsewhere.
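+
+Roughly speaking, the circuit doing the aggregation only needs to verify two child proofs and pass the combined aggregation object along. Under the same assumptions as the sketch above (illustrative array sizes, the `std::verify_proof` interface from the recursion reference, and both child proofs coming from the same circuit), it could look like this:
+
+```rust
+use dep::std;
+
+// Sketch of a circuit that folds two child proofs into a single proof.
+// For simplicity, both child proofs are assumed to come from the same circuit
+// and to share the same public inputs; sizes are illustrative.
+fn main(
+    verification_key: [Field; 114],
+    proof_a: [Field; 94],
+    proof_b: [Field; 94],
+    public_inputs: [Field; 1],
+    key_hash: Field,
+    input_aggregation_object: [Field; 16]
+) -> pub [Field; 16] {
+    // Verify the first child proof.
+    let intermediate_aggregation_object = std::verify_proof(
+        verification_key.as_slice(),
+        proof_a.as_slice(),
+        public_inputs.as_slice(),
+        key_hash,
+        input_aggregation_object
+    );
+
+    // Verify the second child proof, threading the aggregation object through.
+    let output_aggregation_object = std::verify_proof(
+        verification_key.as_slice(),
+        proof_b.as_slice(),
+        public_inputs.as_slice(),
+        key_hash,
+        intermediate_aggregation_object
+    );
+
+    let mut output = [0; 16];
+    for i in 0..16 {
+        output[i] = output_aggregation_object[i];
+    }
+    output
+}
+```
+
+Proofs produced by a circuit like this one can themselves be aggregated again, which is how a tree such as Charlie's is built up level by level.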
+ +To give a practical example, a barman wouldn't need to verify a "proof-of-age" on-chain every time he serves alcohol to a customer. Instead, the architecture would comprise two circuits: + +- A `main`, non-recursive circuit with some logic +- A `recursive` circuit meant to verify two proofs in one proof + +The customer's proofs would be intermediate, and made on their phones, and the barman could just verify them locally. He would then aggregate them into a final proof sent on-chain (or elsewhere) at the end of the day. + +### Recursively verifying different circuits + +Nothing prevents you from verifying different circuits in a recursive proof, for example: + +- A `circuit1` circuit +- A `circuit2` circuit +- A `recursive` circuit + +In this example, a regulator could verify that taxes were paid for a specific purchase by aggregating both a `payer` circuit (proving that a purchase was made and taxes were paid), and a `receipt` circuit (proving that the payment was received) + +## How fast is it + +At the time of writing, verifying recursive proofs is surprisingly fast. This is because most of the time is spent on generating the verification key that will be used to generate the next proof. So you are able to cache the verification key and reuse it later. + +Currently, Noir JS packages don't expose the functionality of loading proving and verification keys, but that feature exists in the underlying `bb.js` package. diff --git a/docs/docs/getting_started/00_nargo_installation.md b/docs/docs/getting_started/00_nargo_installation.md deleted file mode 100644 index 725c5f4d373..00000000000 --- a/docs/docs/getting_started/00_nargo_installation.md +++ /dev/null @@ -1,249 +0,0 @@ ---- -title: Nargo Installation -description: - nargo is a command line tool for interacting with Noir programs (e.g. compiling, proving, - verifying and more). Learn how to install and use Nargo for your projects with this comprehensive - guide. -keywords: [Nargo, command line tool, Noir programs, installation guide, how to use Nargo] ---- - -`nargo` is a command line tool for interacting with Noir programs (e.g. compiling, proving, -verifying and more). - -Alternatively, the interactions can also be performed in [NoirJS](../noir_js/noir_js.md). - -### UltraPlonk - -Nargo versions <0.5.0 of `aztec_backend` and `aztec_wasm_backend` are based on the TurboPlonk -version of Aztec Backend, which lacks efficient implementations of useful primitives (e.g. Keccak256 in 18k constraints, ECDSA verification in 36k constraints) that the UltraPlonk version offers. - -## Installation - -There are four approaches for installing Nargo: - -- [Option 1: Noirup](#option-1-noirup) -- [Option 2: Binaries](#option-2-binaries) -- [Option 3: Compile from Source](#option-3-compile-from-source) -- [Option 4: WSL for Windows](#option-4-wsl-for-windows) - -Optionally you can also install [Noir VS Code extension] for syntax highlighting. - -### Option 1: Noirup - -If you're on OSX or Linux, the easiest way to start using Noir and Nargo is via noirup. Just open a -terminal and run: - -```bash -curl -L https://raw.githubusercontent.com/noir-lang/noirup/main/install | bash -``` - -Close the terminal, open another one, and run - -```bash -noirup -``` - -Done, you should have the latest version working. You can check with `nargo --version`. - -You can also install nightlies, specific versions -or branches, check out the [noirup repository](https://github.com/noir-lang/noirup) for more -information. 
- -#### GitHub Actions - -You can use `noirup` with GitHub Actions for CI/CD and automated testing. It is as simple as -installing `noirup` and running tests in your GitHub Action `yml` file. - -See the -[config file](https://github.com/TomAFrench/noir-hashes/blob/master/.github/workflows/noir.yml) in -this repo containing hash functions in Noir for an example. - -#### Nightly versions - -To install the nightly version of Noir (updated daily) run: - -```bash -noirup -n -``` - -### Option 2: Binaries - -See [GitHub Releases](https://github.com/noir-lang/noir/releases) for the latest and previous -platform specific binaries. - -#### Step 1 - -Paste and run the following in the terminal to extract and install the binary: - -> **macOS / Linux:** If you are prompted with `Permission denied` when running commands, prepend -> `sudo` and re-run it. - -##### macOS (Apple Silicon) - -```bash -mkdir -p $HOME/.nargo/bin && \ -curl -o $HOME/.nargo/bin/nargo-aarch64-apple-darwin.tar.gz -L https://github.com/noir-lang/noir/releases/download/v0.6.0/nargo-aarch64-apple-darwin.tar.gz && \ -tar -xvf $HOME/.nargo/bin/nargo-aarch64-apple-darwin.tar.gz -C $HOME/.nargo/bin/ && \ -echo '\nexport PATH=$PATH:$HOME/.nargo/bin' >> ~/.zshrc && \ -source ~/.zshrc -``` - -##### macOS (Intel) - -```bash -mkdir -p $HOME/.nargo/bin && \ -curl -o $HOME/.nargo/bin/nargo-x86_64-apple-darwin.tar.gz -L https://github.com/noir-lang/noir/releases/download/v0.6.0/nargo-x86_64-apple-darwin.tar.gz && \ -tar -xvf $HOME/.nargo/bin/nargo-x86_64-apple-darwin.tar.gz -C $HOME/.nargo/bin/ && \ -echo '\nexport PATH=$PATH:$HOME/.nargo/bin' >> ~/.zshrc && \ -source ~/.zshrc -``` - -##### Linux (Bash) - -```bash -mkdir -p $HOME/.nargo/bin && \ -curl -o $HOME/.nargo/bin/nargo-x86_64-unknown-linux-gnu.tar.gz -L https://github.com/noir-lang/noir/releases/download/v0.6.0/nargo-x86_64-unknown-linux-gnu.tar.gz && \ -tar -xvf $HOME/.nargo/bin/nargo-x86_64-unknown-linux-gnu.tar.gz -C $HOME/.nargo/bin/ && \ -echo -e '\nexport PATH=$PATH:$HOME/.nargo/bin' >> ~/.bashrc && \ -source ~/.bashrc -``` - -#### Step 2 - -Check if the installation was successful by running `nargo --help`. - -> **macOS:** If you are prompted with an OS alert, right-click and open the _nargo_ executable from -> Finder. Close the new terminal popped up and `nargo` should now be accessible. - -For a successful installation, you should see something similar to the following after running the -command: - -```sh -$ nargo --help - -Noir's package manager - -Usage: nargo - -Commands: - check Checks the constraint system for errors - codegen-verifier Generates a Solidity verifier smart contract for the program - compile Compile the program and its secret execution trace into ACIR format - new Create a new binary project - execute Executes a circuit to calculate its return value - prove Create proof for this program. The proof is returned as a hex encoded string - verify Given a proof and a program, verify whether the proof is valid - test Run the tests for this program - gates Counts the occurrences of different gates in circuit - help Print this message or the help of the given subcommand(s) -``` - -### Option 3: Compile from Source - -Due to the large number of native dependencies, Noir projects uses [Nix](https://nixos.org/) and [direnv](https://direnv.net/) to streamline the development experience. 
It helps mitigating ssues commonly associated with dependency management, such as conflicts between required package versions for different projects (often referred to as "dependency hell"). - -Combined with direnv, which automatically sets or unsets environment variables based on the directory, it further simplifies the development process by seamlessly integrating with the developer's shell, facilitating an efficient and reliable workflow for managing and deploying Noir projects with multiple dependencies. - -#### Setting up your environment - -For the best experience, please follow these instructions to setup your environment: - -1. Install Nix following [their guide](https://nixos.org/download.html) for your operating system. -2. Create the file `~/.config/nix/nix.conf` with the contents: - -```ini -experimental-features = nix-command -extra-experimental-features = flakes -``` - -3. Install direnv into your Nix profile by running: - -```sh -nix profile install nixpkgs#direnv -``` - -4. Add direnv to your shell following [their guide](https://direnv.net/docs/hook.html). - 1. For bash or zshell, add `eval "$(direnv hook bash)"` or `eval "$(direnv hook zsh)"` to your ~/.bashrc or ~/.zshrc file, respectively. -5. Restart your shell. - -#### Shell & editor experience - -Now that your environment is set up, you can get to work on the project. - -1. Clone the repository, such as: - -```sh -git clone git@github.com:noir-lang/noir -``` - -> Replacing `noir` with whichever repository you want to work on. - -2. Navigate to the directory: - -```sh -cd noir -``` - -> Replacing `noir` with whichever repository you cloned. - -3. You should see a **direnv error** because projects aren't allowed by default. Make sure you've reviewed and trust our `.envrc` file, then you need to run: - -```sh -direnv allow -``` - -4. Now, wait awhile for all the native dependencies to be built. This will take some time and direnv will warn you that it is taking a long time, but we just need to let it run. - -5. Once you are presented with your prompt again, you can start your editor within the project directory (we recommend [VSCode](https://code.visualstudio.com/)): - -```sh -code . -``` - -6. (Recommended) When launching VSCode for the first time, you should be prompted to install our recommended plugins. We highly recommend installing these for the best development experience. - -#### Building and testing - -Assuming you are using `direnv` to populate your environment, building and testing the project can be done -with the typical `cargo build`, `cargo test`, and `cargo clippy` commands. You'll notice that the `cargo` version matches the version we specify in `rust-toolchain.toml`, which is 1.71.1 at the time of this writing. - -If you want to build the entire project in an isolated sandbox, you can use Nix commands: - -1. `nix build .` (or `nix build . -L` for verbose output) to build the project in a Nix sandbox. -2. `nix flake check` (or `nix flake check -L` for verbose output) to run clippy and tests in a Nix sandbox. - -#### Without `direnv` - -If you have hesitations with using direnv, you can launch a subshell with `nix develop` and then launch your editor from within the subshell. However, if VSCode was already launched in the project directory, the environment won't be updated. - -Advanced: If you aren't using direnv nor launching your editor within the subshell, you can try to install Barretenberg and other global dependencies the package needs. This is an advanced workflow and likely won't receive support! 
- -### Option 4: WSL (for Windows) - -The default backend for Noir (Barretenberg) doesn't provide Windows binaries at this time. For that reason, Noir cannot be installed nativerly. However, it is available by using Windows Subsystem for Linux (WSL). - -Step 1: Follow the instructions [here](https://learn.microsoft.com/en-us/windows/wsl/install) to install and run WSL. - -step 2: Follow the [Noirup instructions](#option-1-noirup). - -## Uninstalling Nargo - -### Noirup - -If you installed Noir with `noirup`, you can uninstall Noir by removing the files in `~/.nargo`, `~/nargo` and `~/noir_cache`. - -```bash -rm -r ~/.nargo -rm -r ~/nargo -rm -r ~/noir_cache -``` - -### Nix - -If you installed Noir with Nix or from source, you can remove the binary located at `~/.nix-profile/bin/nargo`. - -```bash -rm ~/.nix-profile/bin/nargo -``` - -[noir vs code extension]: https://marketplace.visualstudio.com/items?itemName=noir-lang.vscode-noir diff --git a/docs/docs/getting_started/02_breakdown.md b/docs/docs/getting_started/02_breakdown.md deleted file mode 100644 index 9a17f5d6360..00000000000 --- a/docs/docs/getting_started/02_breakdown.md +++ /dev/null @@ -1,198 +0,0 @@ ---- -title: Project Breakdown -description: - Learn about the anatomy of a Nargo project, including the purpose of the Prover and Verifier TOML - files, and how to prove and verify your program. -keywords: - [Nargo, Nargo project, Prover.toml, Verifier.toml, proof verification, private asset transfer] ---- - -This section breaks down our hello world program in section _1.2_. We elaborate on the project -structure and what the `prove` and `verify` commands did in the previous section. - -## Anatomy of a Nargo Project - -Upon creating a new project with `nargo new` and building the in/output files with `nargo check` -commands, you would get a minimal Nargo project of the following structure: - - - src - - Prover.toml - - Verifier.toml - - Nargo.toml - -The source directory _src_ holds the source code for your Noir program. By default only a _main.nr_ -file will be generated within it. - -### Prover.toml - -_Prover.toml_ is used for specifying the input values for executing and proving the program. You can specify `toml` files with different names by using the `--prover-name` or `-p` flags, see the [Prover](#provertoml) section below. Optionally you may specify expected output values for prove-time checking as well. - -### Verifier.toml - -_Verifier.toml_ contains public in/output values computed when executing the Noir program. - -### Nargo.toml - -_Nargo.toml_ contains the environmental options of your project. It contains a "package" section and a "dependencies" section. - -Example Nargo.toml: - -```toml -[package] -name = "noirstarter" -type = "bin" -authors = ["Alice"] -compiler_version = "0.9.0" -description = "Getting started with Noir" -entry = "circuit/main.nr" -license = "MIT" - -[dependencies] -ecrecover = {tag = "v0.9.0", git = "https://github.com/colinnielsen/ecrecover-noir.git"} -``` - -Nargo.toml for a [workspace](../modules_packages_crates/workspaces) will look a bit different. 
For example: - -```toml -[workspace] -members = ["crates/a", "crates/b"] -default-member = "crates/a" -``` - -#### Package section - -The package section requires a number of fields including: - -- `name` (**required**) - the name of the package -- `type` (**required**) - can be "bin", "lib", or "contract" to specify whether its a binary, library or Aztec contract -- `authors` (optional) - authors of the project -- `compiler_version` - specifies the version of the compiler to use. This is enforced by the compiler and follow's [Rust's versioning](https://doc.rust-lang.org/cargo/reference/manifest.html#the-version-field), so a `compiler_version = 0.18.0` will enforce Nargo version 0.18.0, `compiler_version = ^0.18.0` will enforce anything above 0.18.0 but below 0.19.0, etc. For more information, see how [Rust handles these operators](https://docs.rs/semver/latest/semver/enum.Op.html) -- `description` (optional) -- `entry` (optional) - a relative filepath to use as the entry point into your package (overrides the default of `src/lib.nr` or `src/main.nr`) -- `backend` (optional) -- `license` (optional) - -#### Dependencies section - -This is where you will specify any dependencies for your project. See the [Dependencies page](../modules_packages_crates/dependencies) for more info. - -`./proofs/` and `./contract/` directories will not be immediately visible until you create a proof or -verifier contract respectively. - -### main.nr - -The _main.nr_ file contains a `main` method, this method is the entry point into your Noir program. - -In our sample program, _main.nr_ looks like this: - -```rust -fn main(x : Field, y : Field) { - assert(x != y); -} -``` - -The parameters `x` and `y` can be seen as the API for the program and must be supplied by the -prover. Since neither `x` nor `y` is marked as public, the verifier does not supply any inputs, when -verifying the proof. - -The prover supplies the values for `x` and `y` in the _Prover.toml_ file. - -As for the program body, `assert` ensures the satisfaction of the condition (e.g. `x != y`) is -constrained by the proof of the execution of said program (i.e. if the condition was not met, the -verifier would reject the proof as an invalid proof). - -### Prover.toml - -The _Prover.toml_ file is a file which the prover uses to supply his witness values(both private and -public). - -In our hello world program the _Prover.toml_ file looks like this: - -```toml -x = "1" -y = "2" -``` - -When the command `nargo prove` is executed, two processes happen: - -1. Noir creates a proof that `x` which holds the value of `1` and `y` which holds the value of `2` - is not equal. This not equal constraint is due to the line `assert(x != y)`. - -2. Noir creates and stores the proof of this statement in the _proofs_ directory in a file called your-project.proof. So if your project is named "private_voting" (defined in the project Nargo.toml), the proof will be saved at `./proofs/private_voting.proof`. Opening this file will display the proof in hex format. - -#### Arrays of Structs - -The following code shows how to pass an array of structs to a Noir program to generate a proof. 
- -```rust -// main.nr -struct Foo { - bar: Field, - baz: Field, -} - -fn main(foos: [Foo; 3]) -> pub Field { - foos[2].bar + foos[2].baz -} -``` - -Prover.toml: - -```toml -[[foos]] # foos[0] -bar = 0 -baz = 0 - -[[foos]] # foos[1] -bar = 0 -baz = 0 - -[[foos]] # foos[2] -bar = 1 -baz = 2 -``` - -#### Custom toml files - -You can specify a `toml` file with a different name to use for proving by using the `--prover-name` or `-p` flags. - -This command looks for proof inputs in the default **Prover.toml** and generates the proof and saves it at `./proofs/.proof`: - -```bash -nargo prove -``` - -This command looks for proof inputs in the custom **OtherProver.toml** and generates proof and saves it at `./proofs/.proof`: - -```bash -nargo prove -p OtherProver -``` - -## Verifying a Proof - -When the command `nargo verify` is executed, two processes happen: - -1. Noir checks in the _proofs_ directory for a proof file with the project name (eg. test_project.proof) - -2. If that file is found, the proof's validity is checked - -> **Note:** The validity of the proof is linked to the current Noir program; if the program is -> changed and the verifier verifies the proof, it will fail because the proof is not valid for the -> _modified_ Noir program. - -In production, the prover and the verifier are usually two separate entities. A prover would -retrieve the necessary inputs, execute the Noir program, generate a proof and pass it to the -verifier. The verifier would then retrieve the public inputs from usually external sources and -verifies the validity of the proof against it. - -Take a private asset transfer as an example: - -A user on browser as the prover would retrieve private inputs (e.g. the user's private key) and -public inputs (e.g. the user's encrypted balance on-chain), compute the transfer, generate a proof -and submit it to the verifier smart contract. - -The verifier contract would then draw the user's encrypted balance directly from the blockchain and -verify the proof submitted against it. If the verification passes, additional functions in the -verifier contract could trigger (e.g. approve the asset transfer). - -Now that you understand the concepts, you'll probably want some editor feedback while you are writing more complex code. diff --git a/docs/docs/getting_started/_category_.json b/docs/docs/getting_started/_category_.json new file mode 100644 index 00000000000..5d694210bbf --- /dev/null +++ b/docs/docs/getting_started/_category_.json @@ -0,0 +1,5 @@ +{ + "position": 0, + "collapsible": true, + "collapsed": true +} diff --git a/docs/docs/getting_started/create_a_project.md b/docs/docs/getting_started/create_a_project.md new file mode 100644 index 00000000000..26ff265c389 --- /dev/null +++ b/docs/docs/getting_started/create_a_project.md @@ -0,0 +1,142 @@ +--- +title: Creating A Project +description: + Learn how to create and verify your first Noir program using Nargo, a programming language for + zero-knowledge proofs. +keywords: + [ + Nargo, + Noir, + zero-knowledge proofs, + programming language, + create Noir program, + verify Noir program, + step-by-step guide, + ] +sidebar_position: 1 + +--- + +Now that we have installed Nargo, it is time to make our first hello world program! + +## Create a Project Directory + +Noir code can live anywhere on your computer. Let us create a _projects_ folder in the home +directory to house our Noir programs. 
+ +For Linux, macOS, and Windows PowerShell, create the directory and change directory into it by +running: + +```sh +mkdir ~/projects +cd ~/projects +``` + +## Create Our First Nargo Project + +Now that we are in the projects directory, create a new Nargo project by running: + +```sh +nargo new hello_world +``` + +> **Note:** `hello_world` can be any arbitrary project name, we are simply using `hello_world` for +> demonstration. +> +> In production, the common practice is to name the project folder as `circuits` for better +> identifiability when sitting alongside other folders in the codebase (e.g. `contracts`, `scripts`, +> `test`). + +A `hello_world` folder would be created. Similar to Rust, the folder houses _src/main.nr_ and +_Nargo.toml_ that contains the source code and environmental options of your Noir program +respectively. + +### Intro to Noir Syntax + +Let us take a closer look at _main.nr_. The default _main.nr_ generated should look like this: + +```rust +fn main(x : Field, y : pub Field) { + assert(x != y); +} +``` + +The first line of the program specifies the program's inputs: + +```rust +x : Field, y : pub Field +``` + +Program inputs in Noir are private by default (e.g. `x`), but can be labeled public using the +keyword `pub` (e.g. `y`). To learn more about private and public values, check the +[Data Types](../noir/concepts/data_types/index.md) section. + +The next line of the program specifies its body: + +```rust +assert(x != y); +``` + +The Noir syntax `assert` can be interpreted as something similar to constraints in other zk-contract languages. + +For more Noir syntax, check the [Language Concepts](../noir/concepts/comments.md) chapter. + +## Build In/Output Files + +Change directory into _hello_world_ and build in/output files for your Noir program by running: + +```sh +cd hello_world +nargo check +``` + +Two additional files would be generated in your project directory: + +_Prover.toml_ houses input values, and _Verifier.toml_ houses public values. + +## Prove Our Noir Program + +Now that the project is set up, we can create a proof of correct execution on our Noir program. + +Fill in input values for execution in the _Prover.toml_ file. For example: + +```toml +x = "1" +y = "2" +``` + +Prove the valid execution of your Noir program: + +```sh +nargo prove +``` + +A new folder _proofs_ would then be generated in your project directory, containing the proof file +`.proof`, where the project name is defined in Nargo.toml. + +The _Verifier.toml_ file would also be updated with the public values computed from program +execution (in this case the value of `y`): + +```toml +y = "0x0000000000000000000000000000000000000000000000000000000000000002" +``` + +> **Note:** Values in _Verifier.toml_ are computed as 32-byte hex values. + +## Verify Our Noir Program + +Once a proof is generated, we can verify correct execution of our Noir program by verifying the +proof file. + +Verify your proof by running: + +```sh +nargo verify +``` + +The verification will complete in silence if it is successful. If it fails, it will log the +corresponding error instead. + +Congratulations, you have now created and verified a proof for your very first Noir program! + +In the [next section](./project_breakdown.md), we will go into more detail on each step performed. 
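+
+Before heading there, if you would like to experiment a little more, one small variation worth trying is returning a public value from `main`. The snippet below is purely illustrative (it is not part of the walkthrough above), but it shows how marking data as `pub` changes what ends up in _Verifier.toml_:
+
+```rust
+// A hypothetical variant of the hello world circuit: besides asserting that the
+// two inputs differ, it also returns their sum as a public output.
+fn main(x : Field, y : pub Field) -> pub Field {
+    assert(x != y);
+    x + y
+}
+```
+
+If you rerun `nargo check`, `nargo prove` and `nargo verify` with this variant, you should find the returned sum recorded alongside `y` as a public value in _Verifier.toml_, while `x` remains private in _Prover.toml_.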
diff --git a/docs/docs/getting_started/installation/_category_.json b/docs/docs/getting_started/installation/_category_.json new file mode 100644 index 00000000000..0c02fb5d4d7 --- /dev/null +++ b/docs/docs/getting_started/installation/_category_.json @@ -0,0 +1,6 @@ +{ + "position": 0, + "label": "Install Nargo", + "collapsible": true, + "collapsed": true +} diff --git a/docs/docs/getting_started/installation/index.md b/docs/docs/getting_started/installation/index.md new file mode 100644 index 00000000000..27eeeca88ed --- /dev/null +++ b/docs/docs/getting_started/installation/index.md @@ -0,0 +1,45 @@ +--- +title: Nargo Installation +description: + nargo is a command line tool for interacting with Noir programs. This page is a quick guide on how to install Nargo through the most common and easy method, noirup +keywords: [ + Nargo + Noir + Rust + Cargo + Noirup + Installation + Terminal Commands + Version Check + Nightlies + Specific Versions + Branches + Noirup Repository +] +--- + +`nargo` is the one-stop-shop for almost everything related with Noir. The name comes from our love for Rust and its package manager `cargo`. + +With `nargo`, you can start new projects, compile, execute, prove, verify, test, generate solidity contracts, and do pretty much all that is available in Noir. + +Similarly to `rustup`, we also maintain an easy installation method that covers most machines: `noirup`. + +## Installing Noirup + +Open a terminal on your machine, and write: + +```bash +curl -L https://raw.githubusercontent.com/noir-lang/noirup/main/install | bash +``` + +Close the terminal, open another one, and run + +```bash +noirup +``` + +Done. That's it. You should have the latest version working. You can check with `nargo --version`. + +You can also install nightlies, specific versions +or branches. Check out the [noirup repository](https://github.com/noir-lang/noirup) for more +information. diff --git a/docs/docs/getting_started/installation/other_install_methods.md b/docs/docs/getting_started/installation/other_install_methods.md new file mode 100644 index 00000000000..a532f83750e --- /dev/null +++ b/docs/docs/getting_started/installation/other_install_methods.md @@ -0,0 +1,190 @@ +--- +title: Alternative Install Methods +description: + There are different ways to install Nargo, the one-stop shop and command-line tool for developing Noir programs. This guide explains other methods that don't rely on noirup, such as compiling from source, installing from binaries, and using WSL for windows +keywords: [ + Installation + Nargo + Noirup + Binaries + Compiling from Source + WSL for Windows + macOS + Linux + Nix + Direnv + Shell & editor experience + Building and testing + Uninstalling Nargo + Noir vs code extension +] +sidebar_position: 1 +--- + + +## Installation + +The most common method of installing Nargo is through [Noirup](./index.md) + +However, there are other methods for installing Nargo: + +- [Binaries](#binaries) +- [Compiling from Source](#compile-from-source) +- [WSL for Windows](#wsl-for-windows) + +### Binaries + +See [GitHub Releases](https://github.com/noir-lang/noir/releases) for the latest and previous +platform specific binaries. + +#### Step 1 + +Paste and run the following in the terminal to extract and install the binary: + +> **macOS / Linux:** If you are prompted with `Permission denied` when running commands, prepend +> `sudo` and re-run it. 
+
+##### macOS (Apple Silicon)
+
+```bash
+mkdir -p $HOME/.nargo/bin && \
+curl -o $HOME/.nargo/bin/nargo-aarch64-apple-darwin.tar.gz -L https://github.com/noir-lang/noir/releases/download/v0.6.0/nargo-aarch64-apple-darwin.tar.gz && \
+tar -xvf $HOME/.nargo/bin/nargo-aarch64-apple-darwin.tar.gz -C $HOME/.nargo/bin/ && \
+echo '\nexport PATH=$PATH:$HOME/.nargo/bin' >> ~/.zshrc && \
+source ~/.zshrc
+```
+
+##### macOS (Intel)
+
+```bash
+mkdir -p $HOME/.nargo/bin && \
+curl -o $HOME/.nargo/bin/nargo-x86_64-apple-darwin.tar.gz -L https://github.com/noir-lang/noir/releases/download/v0.6.0/nargo-x86_64-apple-darwin.tar.gz && \
+tar -xvf $HOME/.nargo/bin/nargo-x86_64-apple-darwin.tar.gz -C $HOME/.nargo/bin/ && \
+echo '\nexport PATH=$PATH:$HOME/.nargo/bin' >> ~/.zshrc && \
+source ~/.zshrc
+```
+
+##### Linux (Bash)
+
+```bash
+mkdir -p $HOME/.nargo/bin && \
+curl -o $HOME/.nargo/bin/nargo-x86_64-unknown-linux-gnu.tar.gz -L https://github.com/noir-lang/noir/releases/download/v0.6.0/nargo-x86_64-unknown-linux-gnu.tar.gz && \
+tar -xvf $HOME/.nargo/bin/nargo-x86_64-unknown-linux-gnu.tar.gz -C $HOME/.nargo/bin/ && \
+echo -e '\nexport PATH=$PATH:$HOME/.nargo/bin' >> ~/.bashrc && \
+source ~/.bashrc
+```
+
+#### Step 2
+
+Check if the installation was successful by running `nargo --version`. You should get a version number.
+
+> **macOS:** If you are prompted with an OS alert, right-click and open the _nargo_ executable from
+> Finder. Close the new terminal that popped up, and `nargo` should now be accessible.
+
+### Option 3: Compile from Source
+
+Due to the large number of native dependencies, Noir projects use [Nix](https://nixos.org/) and [direnv](https://direnv.net/) to streamline the development experience. It helps mitigate issues commonly associated with dependency management, such as conflicts between required package versions for different projects (often referred to as "dependency hell").
+
+Combined with direnv, which automatically sets or clears environment variables based on the directory, it further simplifies the development process by seamlessly integrating with the developer's shell, facilitating an efficient and reliable workflow for managing and deploying Noir projects with multiple dependencies.
+
+#### Setting up your environment
+
+For the best experience, please follow these instructions to set up your environment:
+
+1. Install Nix following [their guide](https://nixos.org/download.html) for your operating system.
+2. Create the file `~/.config/nix/nix.conf` with the contents:
+
+```ini
+experimental-features = nix-command
+extra-experimental-features = flakes
+```
+
+3. Install direnv into your Nix profile by running:
+
+```sh
+nix profile install nixpkgs#direnv
+```
+
+4. Add direnv to your shell following [their guide](https://direnv.net/docs/hook.html).
+   1. For bash or zsh, add `eval "$(direnv hook bash)"` or `eval "$(direnv hook zsh)"` to your ~/.bashrc or ~/.zshrc file, respectively.
+5. Restart your shell.
+
+#### Shell & editor experience
+
+Now that your environment is set up, you can get to work on the project.
+
+1. Clone the repository, such as:
+
+```sh
+git clone git@github.com:noir-lang/noir
+```
+
+> Replacing `noir` with whichever repository you want to work on.
+
+2. Navigate to the directory:
+
+```sh
+cd noir
+```
+
+> Replacing `noir` with whichever repository you cloned.
+
+3. You should see a **direnv error** because projects aren't allowed by default.
Make sure you've reviewed and trust our `.envrc` file, then you need to run: + +```sh +direnv allow +``` + +4. Now, wait awhile for all the native dependencies to be built. This will take some time and direnv will warn you that it is taking a long time, but we just need to let it run. + +5. Once you are presented with your prompt again, you can start your editor within the project directory (we recommend [VSCode](https://code.visualstudio.com/)): + +```sh +code . +``` + +6. (Recommended) When launching VSCode for the first time, you should be prompted to install our recommended plugins. We highly recommend installing these for the best development experience. + +#### Building and testing + +Assuming you are using `direnv` to populate your environment, building and testing the project can be done +with the typical `cargo build`, `cargo test`, and `cargo clippy` commands. You'll notice that the `cargo` version matches the version we specify in `rust-toolchain.toml`, which is 1.71.1 at the time of this writing. + +If you want to build the entire project in an isolated sandbox, you can use Nix commands: + +1. `nix build .` (or `nix build . -L` for verbose output) to build the project in a Nix sandbox. +2. `nix flake check` (or `nix flake check -L` for verbose output) to run clippy and tests in a Nix sandbox. + +#### Without `direnv` + +If you have hesitations with using direnv, you can launch a subshell with `nix develop` and then launch your editor from within the subshell. However, if VSCode was already launched in the project directory, the environment won't be updated. + +Advanced: If you aren't using direnv nor launching your editor within the subshell, you can try to install Barretenberg and other global dependencies the package needs. This is an advanced workflow and likely won't receive support! + +### Option 4: WSL (for Windows) + +The default backend for Noir (Barretenberg) doesn't provide Windows binaries at this time. For that reason, Noir cannot be installed natively. However, it is available by using Windows Subsystem for Linux (WSL). + +Step 1: Follow the instructions [here](https://learn.microsoft.com/en-us/windows/wsl/install) to install and run WSL. + +step 2: Follow the [Noirup instructions](./index.md). + +## Uninstalling Nargo + +### Noirup + +If you installed Noir with `noirup`, you can uninstall Noir by removing the files in `~/.nargo`, `~/nargo` and `~/noir_cache`. + +```bash +rm -r ~/.nargo +rm -r ~/nargo +rm -r ~/noir_cache +``` + +### Nix + +If you installed Noir with Nix or from source, you can remove the binary located at `~/.nix-profile/bin/nargo`. + +```bash +rm ~/.nix-profile/bin/nargo +``` diff --git a/docs/docs/getting_started/project_breakdown.md b/docs/docs/getting_started/project_breakdown.md new file mode 100644 index 00000000000..c4e2a9ae003 --- /dev/null +++ b/docs/docs/getting_started/project_breakdown.md @@ -0,0 +1,199 @@ +--- +title: Project Breakdown +description: + Learn about the anatomy of a Nargo project, including the purpose of the Prover and Verifier TOML + files, and how to prove and verify your program. +keywords: + [Nargo, Nargo project, Prover.toml, Verifier.toml, proof verification, private asset transfer] +sidebar_position: 2 +--- + +This section breaks down our hello world program in section _1.2_. We elaborate on the project +structure and what the `prove` and `verify` commands did in the previous section. 
+
+## Anatomy of a Nargo Project
+
+Upon creating a new project with `nargo new` and building the in/output files with `nargo check`
+commands, you would get a minimal Nargo project of the following structure:
+
+    - src
+    - Prover.toml
+    - Verifier.toml
+    - Nargo.toml
+
+The source directory _src_ holds the source code for your Noir program. By default only a _main.nr_
+file will be generated within it.
+
+### Prover.toml
+
+_Prover.toml_ is used for specifying the input values for executing and proving the program. You can specify `toml` files with different names by using the `--prover-name` or `-p` flags; see the [Prover](#provertoml) section below. Optionally you may specify expected output values for prove-time checking as well.
+
+### Verifier.toml
+
+_Verifier.toml_ contains public in/output values computed when executing the Noir program.
+
+### Nargo.toml
+
+_Nargo.toml_ contains the environmental options of your project. It contains a "package" section and a "dependencies" section.
+
+Example Nargo.toml:
+
+```toml
+[package]
+name = "noir_starter"
+type = "bin"
+authors = ["Alice"]
+compiler_version = "0.9.0"
+description = "Getting started with Noir"
+entry = "circuit/main.nr"
+license = "MIT"
+
+[dependencies]
+ecrecover = {tag = "v0.9.0", git = "https://github.com/colinnielsen/ecrecover-noir.git"}
+```
+
+Nargo.toml for a [workspace](../noir/modules_packages_crates/workspaces.md) will look a bit different. For example:
+
+```toml
+[workspace]
+members = ["crates/a", "crates/b"]
+default-member = "crates/a"
+```
+
+#### Package section
+
+The package section requires a number of fields including:
+
+- `name` (**required**) - the name of the package
+- `type` (**required**) - can be "bin", "lib", or "contract" to specify whether it's a binary, library, or Aztec contract
+- `authors` (optional) - authors of the project
+- `compiler_version` - specifies the version of the compiler to use. This is enforced by the compiler and follows [Rust's versioning](https://doc.rust-lang.org/cargo/reference/manifest.html#the-version-field), so a `compiler_version = 0.18.0` will enforce Nargo version 0.18.0, `compiler_version = ^0.18.0` will enforce anything above 0.18.0 but below 0.19.0, etc. For more information, see how [Rust handles these operators](https://docs.rs/semver/latest/semver/enum.Op.html)
+- `description` (optional)
+- `entry` (optional) - a relative filepath to use as the entry point into your package (overrides the default of `src/lib.nr` or `src/main.nr`)
+- `backend` (optional)
+- `license` (optional)
+
+#### Dependencies section
+
+This is where you will specify any dependencies for your project. See the [Dependencies page](../noir/modules_packages_crates/dependencies.md) for more info.
+
+`./proofs/` and `./contract/` directories will not be immediately visible until you create a proof or
+verifier contract respectively.
+
+### main.nr
+
+The _main.nr_ file contains a `main` method; this method is the entry point into your Noir program.
+
+In our sample program, _main.nr_ looks like this:
+
+```rust
+fn main(x : Field, y : Field) {
+    assert(x != y);
+}
+```
+
+The parameters `x` and `y` can be seen as the API for the program and must be supplied by the
+prover. Since neither `x` nor `y` is marked as public, the verifier does not supply any inputs when
+verifying the proof.
+
+The prover supplies the values for `x` and `y` in the _Prover.toml_ file.
+
+As for the program body, `assert` ensures the satisfaction of the condition (e.g.
`x != y`) is
+constrained by the proof of the execution of said program (i.e. if the condition was not met, the
+verifier would reject the proof as an invalid proof).
+
+### Prover.toml
+
+The _Prover.toml_ file is where the prover supplies their witness values (both private and
+public).
+
+In our hello world program the _Prover.toml_ file looks like this:
+
+```toml
+x = "1"
+y = "2"
+```
+
+When the command `nargo prove` is executed, two processes happen:
+
+1. Noir creates a proof that `x` (which holds the value `1`) and `y` (which holds the value `2`)
+   are not equal. This inequality constraint is due to the line `assert(x != y)`.
+
+2. Noir creates and stores the proof of this statement in the _proofs_ directory in a file called your-project.proof. So if your project is named "private_voting" (defined in the project Nargo.toml), the proof will be saved at `./proofs/private_voting.proof`. Opening this file will display the proof in hex format.
+
+#### Arrays of Structs
+
+The following code shows how to pass an array of structs to a Noir program to generate a proof.
+
+```rust
+// main.nr
+struct Foo {
+    bar: Field,
+    baz: Field,
+}
+
+fn main(foos: [Foo; 3]) -> pub Field {
+    foos[2].bar + foos[2].baz
+}
+```
+
+Prover.toml:
+
+```toml
+[[foos]] # foos[0]
+bar = 0
+baz = 0
+
+[[foos]] # foos[1]
+bar = 0
+baz = 0
+
+[[foos]] # foos[2]
+bar = 1
+baz = 2
+```
+
+#### Custom toml files
+
+You can specify a `toml` file with a different name to use for proving by using the `--prover-name` or `-p` flags.
+
+This command looks for proof inputs in the default **Prover.toml** and generates the proof and saves it at `./proofs/<project_name>.proof`:
+
+```bash
+nargo prove
+```
+
+This command looks for proof inputs in the custom **OtherProver.toml** and generates the proof and saves it at `./proofs/<project_name>.proof`:
+
+```bash
+nargo prove -p OtherProver
+```
+
+## Verifying a Proof
+
+When the command `nargo verify` is executed, two processes happen:
+
+1. Noir checks in the _proofs_ directory for a proof file with the project name (e.g. test_project.proof)
+
+2. If that file is found, the proof's validity is checked
+
+> **Note:** The validity of the proof is linked to the current Noir program; if the program is
+> changed and the verifier verifies the proof, it will fail because the proof is not valid for the
+> _modified_ Noir program.
+
+In production, the prover and the verifier are usually two separate entities. A prover would
+retrieve the necessary inputs, execute the Noir program, generate a proof and pass it to the
+verifier. The verifier would then retrieve the public inputs, usually from external sources, and
+verify the validity of the proof against them.
+
+Take a private asset transfer as an example:
+
+A user in the browser, acting as the prover, would retrieve private inputs (e.g. the user's private key) and
+public inputs (e.g. the user's encrypted balance on-chain), compute the transfer, generate a proof
+and submit it to the verifier smart contract.
+
+The verifier contract would then draw the user's encrypted balance directly from the blockchain and
+verify the proof submitted against it. If the verification passes, additional functions in the
+verifier contract could trigger (e.g. approve the asset transfer).
+
+Now that you understand the concepts, you'll probably want some editor feedback while you are writing more complex code.
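+
+Before moving on, to make the private asset transfer example a little more concrete, here is a deliberately simplified, hypothetical sketch of what such a circuit could look like in Noir. The names, the single-balance model, and the choice of Pedersen as the commitment hash are all assumptions made for illustration; a real transfer circuit would be considerably more involved.
+
+```rust
+use dep::std;
+
+// Illustrative sketch only: the prover knows their private balance, while the
+// verifier only sees a commitment to it and the public transfer amount.
+fn main(balance: u64, transfer_amount: pub u64, balance_commitment: pub Field) {
+    // Bind the private balance to the publicly known commitment.
+    assert(std::hash::pedersen_hash([balance as Field]) == balance_commitment);
+    // Constrain the transfer to never exceed the available balance.
+    assert(balance >= transfer_amount);
+}
+```
+
+Here `balance` plays the role of a private input supplied through _Prover.toml_, while `transfer_amount` and `balance_commitment` are public inputs that the verifier (for example, a verifier smart contract) checks the proof against.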
diff --git a/docs/docs/getting_started/tooling/_category_.json b/docs/docs/getting_started/tooling/_category_.json new file mode 100644 index 00000000000..dff520ebc41 --- /dev/null +++ b/docs/docs/getting_started/tooling/_category_.json @@ -0,0 +1,6 @@ +{ + "position": 3, + "label": "Tooling", + "collapsible": true, + "collapsed": true +} diff --git a/docs/docs/getting_started/tooling/index.mdx b/docs/docs/getting_started/tooling/index.mdx new file mode 100644 index 00000000000..ac480f3c9f5 --- /dev/null +++ b/docs/docs/getting_started/tooling/index.mdx @@ -0,0 +1,38 @@ +--- +title: Tooling +Description: This section provides information about the various tools and utilities available for Noir development. It covers the Noir playground, IDE tools, Codespaces, and community projects. +Keywords: [Noir, Development, Playground, IDE Tools, Language Service Provider, VS Code Extension, Codespaces, noir-starter, Community Projects, Awesome Noir Repository, Developer Tooling] +--- + +Noir is meant to be easy to develop with. For that reason, a number of utilities have been put together to ease the development process as much as feasible in the zero-knowledge world. + +## Playground + +The Noir playground is an easy way to test small ideas, share snippets, and integrate in other websites. You can access it at [play.noir-lang.org](https://play.noir-lang.org). + +## IDE tools + +When you install Nargo, you're also installing a Language Service Provider (LSP), which can be used by IDEs to provide syntax highlighting, codelens, warnings, and more. + +The easiest way to use these tools is by installing the [Noir VS Code extension](https://marketplace.visualstudio.com/items?itemName=noir-lang.vscode-noir). + +## Codespaces + +Some Noir repos have leveraged Codespaces in order to ease the development process. You can visit the [noir-starter](https://github.com/noir-lang/noir-starter) for an example. + + + +## GitHub Actions + +You can use `noirup` with GitHub Actions for CI/CD and automated testing. It is as simple as +installing `noirup` and running tests in your GitHub Action `yml` file. + +See the +[config file in the Noir repo](https://github.com/TomAFrench/noir-hashes/blob/master/.github/workflows/noir.yml) for an example usage. + +## Community projects + +As an open-source project, Noir has received many contributions over time. Some of them are related with developer tooling, and you can see some of them in [Awesome Noir repository](https://github.com/noir-lang/awesome-noir#dev-tools) diff --git a/docs/docs/getting_started/tooling/language_server.md b/docs/docs/getting_started/tooling/language_server.md new file mode 100644 index 00000000000..81e0356ef8a --- /dev/null +++ b/docs/docs/getting_started/tooling/language_server.md @@ -0,0 +1,43 @@ +--- +title: Language Server +description: Learn about the Noir Language Server, how to install the components, and configuration that may be required. +keywords: [Nargo, Language Server, LSP, VSCode, Visual Studio Code] +sidebar_position: 0 +--- + +This section helps you install and configure the Noir Language Server. + +The Language Server Protocol (LSP) has two components, the [Server](#language-server) and the [Client](#language-client). Below we describe each in the context of Noir. + +## Language Server + +The Server component is provided by the Nargo command line tool that you installed at the beginning of this guide. +As long as Nargo is installed and you've used it to run other commands in this guide, it should be good to go! 
+ +If you'd like to verify that the `nargo lsp` command is available, you can run `nargo --help` and look for `lsp` in the list of commands. If you see it, you're using a version of Noir with LSP support. + +## Language Client + +The Client component is usually an editor plugin that launches the Server. It communicates LSP messages between the editor and the Server. For example, when you save a file, the Client will alert the Server, so it can try to compile the project and report any errors. + +Currently, Noir provides a Language Client for Visual Studio Code via the [vscode-noir](https://github.com/noir-lang/vscode-noir) extension. You can install it via the [Visual Studio Marketplace](https://marketplace.visualstudio.com/items?itemName=noir-lang.vscode-noir). + +> **Note:** Noir's Language Server Protocol support currently assumes users' VSCode workspace root to be the same as users' Noir project root (i.e. where Nargo.toml lies). +> +> If LSP features seem to be missing / malfunctioning, make sure you are opening your Noir project directly (instead of as a sub-folder) in your VSCode instance. + +When your language server is running correctly and the VSCode plugin is installed, you should see handy codelens buttons for compilation, measuring circuit size, execution, and tests: + +![Compile and Execute](@site/static/img/codelens_compile_execute.png) +![Run test](@site/static/img/codelens_run_test.png) + +You should also see your tests in the `testing` panel: + +![Testing panel](@site/static/img/codelens_testing_panel.png) + +### Configuration + +- **Noir: Enable LSP** - If checked, the extension will launch the Language Server via `nargo lsp` and communicate with it. +- **Noir: Nargo Flags** - Additional flags may be specified if you require them to be added when the extension calls `nargo lsp`. +- **Noir: Nargo Path** - An absolute path to a Nargo binary with the `lsp` command. This may be useful if Nargo is not within the `PATH` of your editor. +- **Noir > Trace: Server** - Setting this to `"messages"` or `"verbose"` will log LSP messages between the Client and Server. Useful for debugging. diff --git a/docs/docs/getting_started/tooling/testing.md b/docs/docs/getting_started/tooling/testing.md new file mode 100644 index 00000000000..d3e0c522473 --- /dev/null +++ b/docs/docs/getting_started/tooling/testing.md @@ -0,0 +1,62 @@ +--- +title: Testing in Noir +description: Learn how to use Nargo to test your Noir program in a quick and easy way +keywords: [Nargo, testing, Noir, compile, test] +sidebar_position: 1 +--- + +You can test your Noir programs using Noir circuits. + +Nargo will automatically compile and run any functions which have the decorator `#[test]` on them if +you run `nargo test`. + +For example if you have a program like: + +```rust +fn add(x: u64, y: u64) -> u64 { + x + y +} +#[test] +fn test_add() { + assert(add(2,2) == 4); + assert(add(0,1) == 1); + assert(add(1,0) == 1); +} +``` + +Running `nargo test` will test that the `test_add` function can be executed while satisfying all +the constraints which allows you to test that add returns the expected values. Test functions can't +have any arguments currently. + +### Test fail + +You can write tests that are expected to fail by using the decorator `#[test(should_fail)]`. 
For example:
+
+```rust
+fn add(x: u64, y: u64) -> u64 {
+    x + y
+}
+#[test(should_fail)]
+fn test_add() {
+    assert(add(2,2) == 5);
+}
+```
+
+You can be more specific and make it fail with a specific reason by using `should_fail_with = "<the failure reason>"`:
+
+```rust
+fn main(african_swallow_avg_speed : Field) {
+    assert(african_swallow_avg_speed == 65, "What is the airspeed velocity of an unladen swallow");
+}
+
+#[test]
+fn test_king_arthur() {
+    main(65);
+}
+
+#[test(should_fail_with = "What is the airspeed velocity of an unladen swallow")]
+fn test_bridgekeeper() {
+    main(32);
+}
+
+```
diff --git a/docs/docs/how_to/_category_.json b/docs/docs/how_to/_category_.json
new file mode 100644
index 00000000000..23b560f610b
--- /dev/null
+++ b/docs/docs/how_to/_category_.json
@@ -0,0 +1,5 @@
+{
+  "position": 1,
+  "collapsible": true,
+  "collapsed": true
+}
diff --git a/docs/docs/how_to/how-to-recursion.md b/docs/docs/how_to/how-to-recursion.md
new file mode 100644
index 00000000000..f60aa3ff2d9
--- /dev/null
+++ b/docs/docs/how_to/how-to-recursion.md
@@ -0,0 +1,184 @@
+---
+title: How to use recursion on NoirJS
+description: Learn how to implement recursion with NoirJS, a powerful tool for creating smart contracts on the EVM blockchain. This guide assumes familiarity with NoirJS, solidity verifiers, and the Barretenberg proving backend. Discover how to generate both final and intermediate proofs using `noir_js` and `backend_barretenberg`.
+keywords:
+  [
+    "NoirJS",
+    "EVM blockchain",
+    "smart contracts",
+    "recursion",
+    "solidity verifiers",
+    "Barretenberg backend",
+    "noir_js",
+    "backend_barretenberg",
+    "intermediate proofs",
+    "final proofs",
+    "nargo compile",
+    "json import",
+    "recursive circuit",
+    "recursive app"
+  ]
+sidebar_position: 1
+---
+
+This guide shows you how to use recursive proofs in your NoirJS app. For the sake of clarity, it is assumed that:
+
+- You already have a NoirJS app. If you don't, please visit the [NoirJS tutorial](../tutorials/noirjs_app.md) and the [reference](../reference/NoirJS/noir_js/index.md).
+- You are familiar with what recursive proofs are and have read the [recursion explainer](../explainers/explainer-recursion.md)
+- You already built a recursive circuit following [the reference](../noir/standard_library/recursion.md), and understand how it works.
+
+It is also assumed that you're not using `noir_wasm` for compilation, and instead you've used [`nargo compile`](../reference/nargo_commands.md) to generate the `json` you're now importing into your project. However, the guide should work just the same if you're using `noir_wasm`.
+
+:::info
+
+As you've read in the [explainer](../explainers/explainer-recursion.md), a recursive proof is an intermediate proof. This means that it doesn't necessarily generate the final step that makes it verifiable in a smart contract. However, it is easy to verify within another circuit.
+
+While "standard" usage of NoirJS packages abstracts final proofs, it currently lacks the necessary interface to abstract away intermediate proofs. This means that these proofs need to be created by using the backend directly.
+
+In short:
+
+- `noir_js` generates *only* final proofs
+- `backend_barretenberg` generates both types of proofs
+
+:::
+
+In a standard recursive app, you're also dealing with at least two circuits.
For the purpose of this guide, we will assume these two: + +- `main`: a circuit of type `assert(x != y)` +- `recursive`: a circuit that verifies `main` + +For a full example on how recursive proofs work, please refer to the [noir-examples](https://github.com/noir/noir-examples) repository. We will *not* be using it as a reference for this guide. + +## Step 1: Setup + +In a common NoirJS app, you need to instantiate a backend with something like `const backend = new Backend(circuit)`. Then you feed it to the `noir_js` interface. + +For recursion, this doesn't happen, and the only need for `noir_js` is only to `execute` a circuit and get its witness and return value. Everything else is not interfaced, so it needs to happen on the `backend` object. + +It is also recommended that you instantiate the backend with as many threads as possible, to allow for maximum concurrency: + +```js +const backend = new Backend(circuit, { threads: 8 }) +``` + +:::tip +You can use the [`os.cpus()`](https://nodejs.org/api/os.html#oscpus) object in `nodejs` or [`navigator.hardwareConcurrency`](https://developer.mozilla.org/en-US/docs/Web/API/Navigator/hardwareConcurrency) on the browser to make the most out of those glorious cpu cores +::: + +## Step 2: Generating the witness and the proof for `main` + +After instantiating the backend, you should also instantiate `noir_js`. We will use it to execute the circuit and get the witness. + +```js +const noir = new Noir(circuit, backend) +const { witness } = noir.execute(input) +``` + +With this witness, you are now able to generate the intermediate proof for the main circuit: + +```js +const { proof, publicInputs } = await backend.generateIntermediateProof(witness) +``` + +:::warning + +Always keep in mind what is actually happening on your development process, otherwise you'll quickly become confused about what circuit we are actually running and why! + +In this case, you can imagine that Alice (running the `main` circuit) is proving something to Bob (running the `recursive` circuit), and Bob is verifying her proof within his proof. + +With this in mind, it becomes clear that our intermediate proof is the one *meant to be verified within another circuit*, so it must be Alice's. Actually, the only final proof in this theoretical scenario would be the last one, sent on-chain. + +::: + +## Step 3 - Verification and proof artifacts + +Optionally, you are able to verify the intermediate proof: + +```js +const verified = await backend.verifyIntermediateProof({ proof, publicInputs }) +``` + +This can be useful to make sure our intermediate proof was correctly generated. But the real goal is to do it within another circuit. For that, we need to generate the intermediate artifacts: + +```js +const { proofAsFields, vkAsFields, vkHash } = await backend.generateIntermediateProofArtifacts( { publicInputs, proof }, publicInputsCount) +``` + +This call takes the public inputs and the proof, but also the public inputs count. While this is easily retrievable by simply counting the `publicInputs` length, the backend interface doesn't currently abstract it away. + +:::info + +The `proofAsFields` has a constant size `[Field; 93]`. However, currently the backend doesn't remove the public inputs from the proof when converting it. + +This means that if your `main` circuit has two public inputs, then you should also modify the recursive circuit to accept a proof with the public inputs appended. 
This means that in our example, since `y` is a public input, our `proofAsFields` is of type `[Field; 94]`. + +Verification keys in Barretenberg are always of size 114. + +::: + +:::warning + +One common mistake is to forget *who* makes this call. + +In a situation where Alice is generating the `main` proof, if she generates the proof artifacts and sends them to Bob, which gladly takes them as true, this would mean Alice could prove anything! + +Instead, Bob needs to make sure *he* extracts the proof artifacts, using his own instance of the `main` circuit backend. This way, Alice has to provide a valid proof for the correct `main` circuit. + +::: + +## Step 4 - Recursive proof generation + +With the artifacts, generating a recursive proof is no different from a normal proof. You simply use the `backend` (with the recursive circuit) to generate it: + +```js +const recursiveInputs = { + verification_key: vkAsFields, // array of length 114 + proof: proofAsFields, // array of length 93 + size of public inputs + publicInputs: [mainInput.y], // using the example above, where `y` is the only public input + key_hash: vkHash, + input_aggregation_object: Array(16).fill(0) // this circuit is verifying a non-recursive proof, so there's no input aggregation object: just use zero +} + +const { witness, returnValue } = noir.execute(recursiveInputs) // we're executing the recursive circuit now! +const { proof, publicInputs } = backend.generateFinalProof(witness) +const verified = backend.verifyFinalProof({ proof, publicInputs }) +``` + +You can obviously chain this proof into another proof. In fact, if you're using recursive proofs, you're probably interested of using them this way! In that case, you should keep in mind the `returnValue`, as it will contain the `input_aggregation_object` for the next proof. + +:::tip + +Managing circuits and "who does what" can be confusing. To make sure your naming is consistent, you can keep them in an object. For example: + +```js +const circuits = { +main: mainJSON, +recursive: recursiveJSON +} +const backends = { +main: new BarretenbergBackend(circuits.main), +recursive: new BarretenbergBackend(circuits.recursive) +} +const noir_programs = { +main: new Noir(circuits.main, backends.main), +recursive: new Noir(circuits.recursive, backends.recursive) +} +``` + +This allows you to neatly call exactly the method you want without conflicting names: + +```js +// Alice runs this 👇 +const { witness: mainWitness } = await noir_programs.main.execute(input) +const proof = await backends.main.generateIntermediateProof(mainWitness) + +// Bob runs this 👇 +const verified = await backends.main.verifyIntermediateProof(proof) +const { proofAsFields, vkAsFields, vkHash } = await backends.main.generateIntermediateProofArtifacts( + proof, + numPublicInputs, +); +const recursiveProof = await noir_programs.recursive.generateFinalProof(recursiveInputs) +``` + +::: diff --git a/docs/docs/how_to/merkle-proof.mdx b/docs/docs/how_to/merkle-proof.mdx new file mode 100644 index 00000000000..34074659ac1 --- /dev/null +++ b/docs/docs/how_to/merkle-proof.mdx @@ -0,0 +1,48 @@ +--- +title: Prove Merkle Tree Membership +description: + Learn how to use merkle membership proof in Noir to prove that a given leaf is a member of a + merkle tree with a specified root, at a given index. 
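+
+As a closing reference point, the `recursive` circuit assumed throughout this guide could look roughly like the sketch below. The array sizes mirror the ones used above (a 114-field verification key, a 93-field proof plus one public input, and a 16-field aggregation object), but the exact `std::verify_proof` signature belongs to the standard library's recursion API and may differ between Noir versions, so treat this as an illustration and defer to [the reference](../noir/standard_library/recursion.md) for the canonical version:
+
+```rust
+use dep::std;
+
+// Hypothetical sketch of the `recursive` circuit: it verifies a proof of the
+// `main` circuit (one public input, `y`) inside another proof and returns the
+// output aggregation object so the result can itself be verified recursively.
+fn main(
+    verification_key : [Field; 114],
+    proof : [Field; 94],
+    public_inputs : [Field; 1],
+    key_hash : Field,
+    input_aggregation_object : [Field; 16],
+) -> pub [Field; 16] {
+    std::verify_proof(
+        verification_key.as_slice(),
+        proof.as_slice(),
+        public_inputs.as_slice(),
+        key_hash,
+        input_aggregation_object
+    )
+}
+```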
+keywords: + [merkle proof, merkle membership proof, Noir, rust, hash function, Pedersen, sha256, merkle tree] +--- + +Let's walk through an example of a merkle membership proof in Noir that proves that a given leaf is +in a merkle tree. + +```rust +use dep::std; + +fn main(message : [Field; 62], index : Field, hashpath : [Field; 40], root : Field) { + let leaf = std::hash::hash_to_field(message); + let merkle_root = std::merkle::compute_merkle_root(leaf, index, hashpath); + assert(merkle_root == root); +} + +``` + +The message is hashed using `hash_to_field`. The specific hash function that is being used is chosen +by the backend. The only requirement is that this hash function can heuristically be used as a +random oracle. If only collision resistance is needed, then one can call `std::hash::pedersen_hash` +instead. + +```rust +let leaf = std::hash::hash_to_field(message); +``` + +The leaf is then passed to a compute_merkle_root function with the root, index and hashpath. The returned root can then be asserted to be the same as the provided root. + +```rust +let merkle_root = std::merkle::compute_merkle_root(leaf, index, hashpath); +assert (merkle_root == root); +``` + +> **Note:** It is possible to re-implement the merkle tree implementation without standard library. +> However, for most usecases, it is enough. In general, the standard library will always opt to be +> as conservative as possible, while striking a balance with efficiency. + +An example, the merkle membership proof, only requires a hash function that has collision +resistance, hence a hash function like Pedersen is allowed, which in most cases is more efficient +than the even more conservative sha256. + +[View an example on the starter repo](https://github.com/noir-lang/noir-examples/blob/3ea09545cabfa464124ec2f3ea8e60c608abe6df/stealthdrop/circuits/src/main.nr#L20) diff --git a/docs/docs/how_to/solidity_verifier.md b/docs/docs/how_to/solidity_verifier.md new file mode 100644 index 00000000000..8022b0e5f20 --- /dev/null +++ b/docs/docs/how_to/solidity_verifier.md @@ -0,0 +1,130 @@ +--- +title: Generate a Solidity Verifier +description: + Learn how to run the verifier as a smart contract on the blockchain. Compile a Solidity verifier + contract for your Noir program and deploy it on any EVM blockchain acting as a verifier smart + contract. Read more to find out +keywords: + [ + solidity verifier, + smart contract, + blockchain, + compiler, + plonk_vk.sol, + EVM blockchain, + verifying Noir programs, + proving backend, + Barretenberg, + ] +sidebar_position: 0 +--- + +For certain applications, it may be desirable to run the verifier as a smart contract instead of on +a local machine. + +Compile a Solidity verifier contract for your Noir program by running: + +```sh +nargo codegen-verifier +``` + +A new `contract` folder would then be generated in your project directory, containing the Solidity +file `plonk_vk.sol`. It can be deployed on any EVM blockchain acting as a verifier smart contract. + +> **Note:** It is possible to compile verifier contracts of Noir programs for other smart contract +> platforms as long as the proving backend supplies an implementation. +> +> Barretenberg, the default proving backend for Nargo, supports compilation of verifier contracts in +> Solidity only for the time being. 
+ +## Verify + +To verify a proof using the Solidity verifier contract, call the `verify` function with the +following signature: + +```solidity +function verify(bytes calldata _proof, bytes32[] calldata _publicInputs) external view returns (bool) +``` + +You can see an example of how the `verify` function is called in the example zk voting application [here](https://github.com/noir-lang/noir-examples/blob/33e598c257e2402ea3a6b68dd4c5ad492bce1b0a/foundry-voting/src/zkVote.sol#L35): + +```solidity +function castVote(bytes calldata proof, uint proposalId, uint vote, bytes32 nullifierHash) public returns (bool) { + // ... + bytes32[] memory publicInputs = new bytes32[](4); + publicInputs[0] = merkleRoot; + publicInputs[1] = bytes32(proposalId); + publicInputs[2] = bytes32(vote); + publicInputs[3] = nullifierHash; + require(verifier.verify(proof, publicInputs), "Invalid proof"); +``` + +### Public Inputs + +:::tip + +A circuit doesn't have the concept of a return value. Return values are just syntactic sugar in +Noir. + +Under the hood, the return value is passed as an input to the circuit and is checked at the end of +the circuit program. + +::: + +The verifier contract uses the output (return) value of a Noir program as a public input. So if you +have the following function + +```rust +fn main( + // Public inputs + pubkey_x: pub Field, + pubkey_y: pub Field, + // Private inputs + priv_key: Field, +) -> pub Field +``` + +then `verify` in `plonk_vk.sol` will expect 3 public inputs. Passing two inputs will result in an +error like `Reason: PUBLIC_INPUT_COUNT_INVALID(3, 2)`. + +In this case the 3 inputs to `verify` would be ordered as `[pubkey_x, pubkey_y, return]`. + +#### Struct inputs + +Consider the following program: + +```rust +struct Type1 { + val1: Field, + val2: Field, +} + +struct Nested { + t1: Type1, + is_true: bool, +} + +fn main(x: pub Field, nested: pub Nested, y: pub Field) { + //... +} +``` + +Structs will be flattened so that the array of inputs is 1-dimensional array. The order of these inputs would be flattened to: `[x, nested.t1.val1, nested.t1.val2, nested.is_true, y]` + +## Noir for EVM chains + +You can currently deploy the Solidity verifier contracts to most EVM compatible chains. EVM chains that have been tested and are known to work include: + +- Optimism +- Arbitrum +- Polygon PoS +- Scroll +- Celo + +Other EVM chains should work, but have not been tested directly by our team. If you test any other chains, please open a PR on this page to update the list. See [this doc](https://github.com/noir-lang/noir-starter/tree/main/with-foundry#testing-on-chain) for more info about testing verifier contracts on different EVM chains. + +### Unsupported chains + +Unfortunately not all "EVM" chains are supported. + +**zkSync** and the **Polygon zkEVM** do _not_ currently support proof verification via Solidity verifier contracts. They are missing the bn256 precompile contract that the verifier contract requires. Once these chains support this precompile, they may work. diff --git a/docs/docs/how_to/using-devcontainers.mdx b/docs/docs/how_to/using-devcontainers.mdx new file mode 100644 index 00000000000..727ec6ca667 --- /dev/null +++ b/docs/docs/how_to/using-devcontainers.mdx @@ -0,0 +1,110 @@ +--- +title: Developer Containers and Codespaces +description: "Learn how to set up a devcontainer in your GitHub repository for a seamless coding experience with Codespaces. Follow our easy 8-step guide to create your own Noir environment without installing Nargo locally." 
+keywords: ["Devcontainer", "Codespaces", "GitHub", "Noir Environment", "Docker Image", "Development Environment", "Remote Coding", "GitHub Codespaces", "Noir Programming", "Nargo", "VSCode Extensions", "Noirup"]
+sidebar_position: 1
+---
+
+Adding a developer container configuration file to your Noir project is one of the easiest ways to unlock coding in the browser.
+
+## What's a devcontainer after all?
+
+A [Developer Container](https://containers.dev/) (devcontainer for short) is a Docker image that comes preloaded with the tools, extensions, and dependencies you need to quickly get started or continue a project, without having to install Nargo locally. Think of it as a development environment in a box.
+
+There are many advantages to this:
+
+- It's platform and architecture agnostic
+- You don't need to have an IDE installed, or Nargo, or use a terminal at all
+- It's safer to use on a public machine or public network
+
+One of the best ways of using devcontainers is... not using your machine at all, for maximum control, performance, and ease of use.
+Enter Codespaces.
+
+## Codespaces
+
+If a devcontainer is just a Docker image, then what stops you from provisioning a `p3dn.24xlarge` AWS EC2 instance with 92 vCPUs and 768 GiB RAM and using it to prove your 10-gate SNARK proof?
+
+Nothing! Except perhaps the $30-40 per hour it will cost you.
+
+The problem is that provisioning takes time, and I bet you don't want to see the AWS console every time you want to code something real quick.
+
+Fortunately, there's an easy and free way to get a decent remote machine ready and loaded in less than 2 minutes: Codespaces. [Codespaces is a GitHub feature](https://github.com/features/codespaces) that allows you to code in a remote machine by using devcontainers, and it's pretty cool:
+
+- You can start coding Noir in less than a minute
+- It uses the resources of a remote machine, so you can code on your grandma's phone if need be
+- It makes it easy to share work with your frens
+- It's fully reusable; you can stop and restart whenever you need to
+
+:::info
+
+Don't take out your wallet just yet. Free GitHub accounts get about [15-60 hours of coding](https://github.com/features/codespaces) for free per month, depending on the size of your provisioned machine.
+
+:::
+
+## Tell me it's _actually_ easy
+
+It is!
+
+GitHub comes with a default codespace and you can use it to code your own devcontainer. That's exactly what we will be doing in this guide.
+
+
+
+8 simple steps:
+
+#### 1. Create a new repository on GitHub.
+
+#### 2. Click "Start coding with Codespaces". This will use the default image.
+
+#### 3. Create a folder called `.devcontainer` in the root of your repository.
+
+#### 4. Create a Dockerfile in that folder, and paste the following code:
+
+```docker
+FROM --platform=linux/amd64 node:lts-bookworm-slim
+SHELL ["/bin/bash", "-c"]
+RUN apt update && apt install -y curl bash git tar gzip libc++-dev
+RUN curl -L https://raw.githubusercontent.com/noir-lang/noirup/main/install | bash
+ENV PATH="/root/.nargo/bin:$PATH"
+RUN noirup
+ENTRYPOINT ["nargo"]
+```
+#### 5. Create a file called `devcontainer.json` in the same folder, and paste the following code:
+
+```json
+{
+  "name": "Noir on Codespaces",
+  "build": {
+    "context": ".",
+    "dockerfile": "Dockerfile"
+  },
+  "customizations": {
+    "vscode": {
+      "extensions": ["noir-lang.vscode-noir"]
+    }
+  }
+}
+```
+#### 6. Commit and push your changes
+
+This will pull the new image and build it, so it could take a minute or so
+
+#### 8. Done!
+Just wait for the build to finish, and there's your easy Noir environment. + + +Refer to [noir-starter](https://github.com/noir-lang/noir-starter/) as an example of how devcontainers can be used together with codespaces. + + + +## How do I use it? + +Using the codespace is obviously much easier than setting it up. +Just navigate to your repository and click "Code" -> "Open with Codespaces". It should take a few seconds to load, and you're ready to go. + +:::info + +If you really like the experience, you can add a badge to your readme, links to existing codespaces, and more. +Check out the [official docs](https://docs.github.com/en/codespaces/setting-up-your-project-for-codespaces/setting-up-your-repository/facilitating-quick-creation-and-resumption-of-codespaces) for more info. diff --git a/docs/docs/index.md b/docs/docs/index.md index 75e1abf2932..ab8c2f8acd2 100644 --- a/docs/docs/index.md +++ b/docs/docs/index.md @@ -1,9 +1,8 @@ --- -title: Introducing Noir +title: Noir description: - Learn about the public alpha release of Noir, a domain specific language heavily influenced by - Rust that compiles to an intermediate language which can be compiled to an arithmetic circuit or a - rank-1 constraint system. + Learn about the public alpha release of Noir, a domain specific language heavily influenced by Rust that compiles to + an intermediate language which can be compiled to an arithmetic circuit or a rank-1 constraint system. keywords: [ Noir, @@ -18,83 +17,69 @@ keywords: Proving System, Smart Contract Language, ] -slug: / +sidebar_position: 0 --- -## What is Noir? +## What's new about Noir? -Noir is a Domain Specific Language for SNARK proving systems. It has been designed to use any ACIR compatible proving system. +Noir, a domain-specific language crafted for SNARK proving systems, stands out with its simplicity, flexibility, +and robust capabilities. Unlike conventional approaches that compile directly to a fixed NP-complete language, +Noir takes a two-pronged path. First, Noir compiles to an adaptable intermediate language known as ACIR. -It's design choices are influenced heavily by Rust and focuses on a simple, familiar syntax. +From there, depending on a given project's needs, ACIR can be further compiled into an arithmetic circuit for integration with Aztec's +barretenberg backend, or transformed into a rank-1 constraint system suitable for R1CS backends like Arkworks' Marlin +backend (among others). -## Who is Noir for? +This innovative design introduces unique challenges; however, this approach also strategically separates the programming language from the +backend. Noir's approach echoes the modular philosophy of LLVM, offering developers a versatile toolkit for cryptographic +programming. -Noir can be used for a variety of purposes. +## Who is Noir for? ### Solidity Developers -Noir currently includes a command to create a Solidity contract which verifies your Noir program. This will -be modularised in the future; however, as of the alpha, you can use the [`nargo codegen-verifier`](./nargo/commands#nargo-codegen-verifier) command to create -a verifier contract. +Noir streamlines the creation of Solidity contracts that interface with SNARK systems. +[`Utilize the nargo codegen-verifier`](./reference/nargo_commands.md#nargo-codegen-verifier) command to construct verifier +contracts efficiently. While the current alpha version offers this as a direct feature, future updates aim +to modularize this process for even greater ease of use. 
+ +Noir currently includes a command to create a Solidity contract which verifies your Noir program. This will be +modularized in the future; however, as of the alpha, you can use the `nargo codegen-verifier` command to create a verifier contract. ### Protocol Developers -As a protocol developer, you may not want to use the Aztec backend due to it not being a fit for -your stack, or maybe you simply want to use a different proving system. Since Noir does not compile -to a specific proof system, it is possible for protocol developers to replace the PLONK-based -proving system with a different proving system altogether. +Should the Aztec backend not align with your existing tech stack, or if you're inclined to integrate alternative +proving systems, Noir's agnostic compilation to a proof-agnostic intermediate language offers unmatched flexibility. +This allows protocol engineers the freedom to substitute the default PLONK-based system with an alternative of their +choice, tailoring the proving system to their specific needs. ### Blockchain developers -As a blockchain developer, you will be constrained by parameters set by your blockchain (for example, the -proving system and smart contract language has been pre-defined). In order for you to use Noir in -your blockchain, a proving system backend and a smart contract interface -must be implemented for it. - -## What's new about Noir? - -Noir is simple and flexible in its design, as it does not compile immediately to a fixed -NP-complete language. Instead, Noir compiles to an intermediate language (ACIR), which itself can be compiled -to an arithmetic circuit (if choosing to target Aztec's barretenberg backend) or a rank-1 constraint system (if choosing to target an R1CS backend like Arkwork's Marlin backend, or others). - -This in itself brings up a few challenges within the design process, but allows one to decouple the programming language completely from the backend. This is similar in theory to LLVM. - -## Current Features - -Compiler: - -- Module System -- For expressions -- Arrays -- Bit Operations -- Binary operations (<, <=, >, >=, +, -, \*, /, %) [See documentation for an extensive list] -- Unsigned integers -- If statements -- Structures and Tuples -- Generics - -ACIR Supported OPCODES: - -- Sha256 -- Blake2s -- Schnorr signature verification -- MerkleMembership -- Pedersen Commitment -- Pedersen Hash -- HashToField +Blockchain developers often face environmental constraints, such as predetermined proving systems and smart contract +languages. Noir addresses this by enabling the implementation of custom proving system backends and smart contract +interfaces, ensuring seamless integration with your blockchain's architecture, and expanding the horizons for innovation +within your projects. ## Libraries -Noir does not currently have an official package manager. You can find a list of available Noir libraries in the [awesome-noir repo here](https://github.com/noir-lang/awesome-noir#libraries). +Noir does not currently have an official package manager. You can find a list of some of the available Noir libraries in the +[awesome-noir repo here](https://github.com/noir-lang/awesome-noir#libraries). 
Some libraries that are available today include: - [Standard Library](https://github.com/noir-lang/noir/tree/master/noir_stdlib) - the Noir Standard Library -- [Ethereum Storage Proof Verification](https://github.com/aragonzkresearch/noir-trie-proofs) - a library that contains the primitives necessary for RLP decoding (in the form of look-up table construction) and Ethereum state and storage proof verification (or verification of any trie proof involving 32-byte long keys) -- [BigInt](https://github.com/shuklaayush/noir-bigint) - a library that provides a custom BigUint56 data type, allowing for computations on large unsigned integers -- [ECrecover](https://github.com/colinnielsen/ecrecover-noir/tree/main) - a library to verify an ECDSA signature and return the source Ethereum address -- [Sparse Merkle Tree Verifier](https://github.com/vocdoni/smtverifier-noir/tree/main) - a library for verification of sparse Merkle trees -- [Signed Int](https://github.com/resurgencelabs/signed_int) - a library for accessing a custom Signed Integer data type, allowing access to negative numbers on Noir -- [Fraction](https://github.com/resurgencelabs/fraction) - a library for accessing fractional number data type in Noir, allowing results that aren't whole numbers - -See the section on [dependencies](./modules_packages_crates/dependencies) for more information. +- [Ethereum Storage Proof Verification](https://github.com/aragonzkresearch/noir-trie-proofs) - a library that contains + the primitives necessary for RLP decoding (in the form of look-up table construction) and Ethereum state and storage + proof verification (or verification of any trie proof involving 32-byte long keys) +- [BigInt](https://github.com/shuklaayush/noir-bigint) - a library that provides a custom BigUint56 data type, allowing + for computations on large unsigned integers +- [ECrecover](https://github.com/colinnielsen/ecrecover-noir/tree/main) - a library to verify an ECDSA signature and + return the source Ethereum address +- [Sparse Merkle Tree Verifier](https://github.com/vocdoni/smtverifier-noir/tree/main) - a library for verification of + sparse Merkle trees +- [Signed Int](https://github.com/resurgencelabs/signed_int) - a library for accessing a custom Signed Integer data + type, allowing access to negative numbers on Noir +- [Fraction](https://github.com/resurgencelabs/fraction) - a library for accessing fractional number data type in Noir, + allowing results that aren't whole numbers + +See the section on [dependencies](noir/modules_packages_crates/dependencies.md) for more information. diff --git a/docs/docs/language_concepts/01_functions.md b/docs/docs/language_concepts/01_functions.md deleted file mode 100644 index 47cdea0cf04..00000000000 --- a/docs/docs/language_concepts/01_functions.md +++ /dev/null @@ -1,225 +0,0 @@ ---- -title: Functions -description: - Learn how to declare functions and methods in Noir, a programming language with Rust semantics. - This guide covers parameter declaration, return types, call expressions, and more. -keywords: [Noir, Rust, functions, methods, parameter declaration, return types, call expressions] ---- - -Functions in Noir follow the same semantics of Rust, though Noir does not support early returns. - -To declare a function the `fn` keyword is used. - -```rust -fn foo() {} -``` - -By default, functions are visible only within the package they are defined. 
To make them visible outside of that package (for example, as part of a [library](../modules_packages_crates/crates_and_packages.md#libraries)), you should mark them as `pub`: - -```rust -pub fn foo() {} -``` - -You can also restrict the visibility of the function to only the crate it was defined in, by specifying `pub(crate)`: - -```rust -pub(crate) fn foo() {} //foo can only be called within its crate -``` - -All parameters in a function must have a type and all types are known at compile time. The parameter -is pre-pended with a colon and the parameter type. Multiple parameters are separated using a comma. - -```rust -fn foo(x : Field, y : pub Field){} -``` - -The return type of a function can be stated by using the `->` arrow notation. The function below -states that the foo function must return a `Field`. If the function returns no value, then the arrow -is omitted. - -```rust -fn foo(x : Field, y : pub Field) -> Field { - x + y -} -``` - -Note that a `return` keyword is unneeded in this case - the last expression in a function's body is -returned. - -## Main function - -If you're writing a binary, the `main` function is the starting point of your program. You can pass all types of expressions to it, as long as they have a fixed size at compile time: - -```rust -fn main(x : Field) // this is fine: passing a Field -fn main(x : [Field; 2]) // this is also fine: passing a Field with known size at compile-time -fn main(x : (Field, bool)) // 👌: passing a (Field, bool) tuple means size 2 -fn main(x : str<5>) // this is fine, as long as you pass a string of size 5 - -fn main(x : Vec) // can't compile, has variable size -fn main(x : [Field]) // can't compile, has variable size -fn main(....// i think you got it by now -``` - -Keep in mind [tests](../nargo/02_testing.md) don't differentiate between `main` and any other function. The following snippet passes tests, but won't compile or prove: - -```rust -fn main(x : [Field]) { - assert(x[0] == 1); -} - -#[test] -fn test_one() { - main([1, 2]); -} -``` - -```bash -$ nargo test -[testing] Running 1 test functions -[testing] Testing test_one... ok -[testing] All tests passed - -$ nargo check -The application panicked (crashed). -Message: Cannot have variable sized arrays as a parameter to main -``` - -## Call Expressions - -Calling a function in Noir is executed by using the function name and passing in the necessary -arguments. - -Below we show how to call the `foo` function from the `main` function using a call expression: - -```rust -fn main(x : Field, y : Field) { - let z = foo(x); -} - -fn foo(x : Field) -> Field { - x + x -} -``` - -## Methods - -You can define methods in Noir on any struct type in scope. - -```rust -struct MyStruct { - foo: Field, - bar: Field, -} - -impl MyStruct { - fn new(foo: Field) -> MyStruct { - MyStruct { - foo, - bar: 2, - } - } - - fn sum(self) -> Field { - self.foo + self.bar - } -} - -fn main() { - let s = MyStruct::new(40); - assert(s.sum() == 42); -} -``` - -Methods are just syntactic sugar for functions, so if we wanted to we could also call `sum` as -follows: - -```rust -assert(MyStruct::sum(s) == 42); -``` - -It is also possible to specialize which method is chosen depending on the [generic](./06_generics.md) type that is used. 
In this example, the `foo` function returns different values depending on its type: - -```rust -struct Foo {} - -impl Foo { - fn foo(self) -> Field { 1 } -} - -impl Foo { - fn foo(self) -> Field { 2 } -} - -fn main() { - let f1: Foo = Foo{}; - let f2: Foo = Foo{}; - assert(f1.foo() + f2.foo() == 3); -} -``` - -Also note that impls with the same method name defined in them cannot overlap. For example, if we already have `foo` defined for `Foo` and `Foo` like we do above, we cannot also define `foo` in an `impl Foo` since it would be ambiguous which version of `foo` to choose. - -```rust -// Including this impl in the same project as the above snippet would -// cause an overlapping impls error -impl Foo { - fn foo(self) -> Field { 3 } -} -``` - -## Lambdas - -Lambdas are anonymous functions. They follow the syntax of Rust - `|arg1, arg2, ..., argN| return_expression`. - -```rust -let add_50 = |val| val + 50; -assert(add_50(100) == 150); -``` - -See [Lambdas](./08_lambdas.md) for more details. - -## Attributes - -Attributes are metadata that can be applied to a function, using the following syntax: `#[attribute(value)]`. - -Supported attributes include: - -- **builtin**: the function is implemented by the compiler, for efficiency purposes. -- **deprecated**: mark the function as _deprecated_. Calling the function will generate a warning: `warning: use of deprecated function` -- **field**: Used to enable conditional compilation of code depending on the field size. See below for more details -- **oracle**: mark the function as _oracle_; meaning it is an external unconstrained function, implemented in noir_js. See [Unconstrained](./05_unconstrained.md) and [NoirJS](../noir_js/noir_js.md) for more details. -- **test**: mark the function as unit tests. See [Tests](../nargo/02_testing.md) for more details - -### Field Attribute - -The field attribute defines which field the function is compatible for. The function is conditionally compiled, under the condition that the field attribute matches the Noir native field. -The field can be defined implicitly, by using the name of the elliptic curve usually associated to it - for instance bn254, bls12_381 - or explicitly by using the field (prime) order, in decimal or hexadecimal form. -As a result, it is possible to define multiple versions of a function with each version specialized for a different field attribute. This can be useful when a function requires different parameters depending on the underlying elliptic curve. - -Example: we define the function `foo()` three times below. Once for the default Noir bn254 curve, once for the field $\mathbb F_{23}$, which will normally never be used by Noir, and once again for the bls12_381 curve. - -```rust -#[field(bn254)] -fn foo() -> u32 { - 1 -} - -#[field(23)] -fn foo() -> u32 { - 2 -} - -// This commented code would not compile as foo would be defined twice because it is the same field as bn254 -// #[field(21888242871839275222246405745257275088548364400416034343698204186575808495617)] -// fn foo() -> u32 { -// 2 -// } - -#[field(bls12_381)] -fn foo() -> u32 { - 3 -} -``` - -If the field name is not known to Noir, it will discard the function. Field names are case insensitive. 
diff --git a/docs/docs/language_concepts/03_ops.md b/docs/docs/language_concepts/03_ops.md deleted file mode 100644 index da02b126059..00000000000 --- a/docs/docs/language_concepts/03_ops.md +++ /dev/null @@ -1,97 +0,0 @@ ---- -title: Logical Operations -description: - Learn about the supported arithmetic and logical operations in the Noir programming language. - Discover how to perform operations on private input types, integers, and booleans. -keywords: - [ - Noir programming language, - supported operations, - arithmetic operations, - logical operations, - predicate operators, - bitwise operations, - short-circuiting, - backend, - ] ---- - -# Operations - -## Table of Supported Operations - -| Operation | Description | Requirements | -| :-------- | :------------------------------------------------------------: | -------------------------------------: | -| + | Adds two private input types together | Types must be private input | -| - | Subtracts two private input types together | Types must be private input | -| \* | Multiplies two private input types together | Types must be private input | -| / | Divides two private input types together | Types must be private input | -| ^ | XOR two private input types together | Types must be integer | -| & | AND two private input types together | Types must be integer | -| \| | OR two private input types together | Types must be integer | -| << | Left shift an integer by another integer amount | Types must be integer | -| >> | Right shift an integer by another integer amount | Types must be integer | -| ! | Bitwise not of a value | Type must be integer or boolean | -| < | returns a bool if one value is less than the other | Upper bound must have a known bit size | -| <= | returns a bool if one value is less than or equal to the other | Upper bound must have a known bit size | -| > | returns a bool if one value is more than the other | Upper bound must have a known bit size | -| >= | returns a bool if one value is more than or equal to the other | Upper bound must have a known bit size | -| == | returns a bool if one value is equal to the other | Both types must not be constants | -| != | returns a bool if one value is not equal to the other | Both types must not be constants | - -### Predicate Operators - -`<,<=, !=, == , >, >=` are known as predicate/comparison operations because they compare two values. -This differs from the operations such as `+` where the operands are used in _computation_. - -### Bitwise Operations Example - -```rust -fn main(x : Field) { - let y = x as u32; - let z = y & y; -} -``` - -`z` is implicitly constrained to be the result of `y & y`. The `&` operand is used to denote bitwise -`&`. - -> `x & x` would not compile as `x` is a `Field` and not an integer type. - -### Logical Operators - -Noir has no support for the logical operators `||` and `&&`. This is because encoding the -short-circuiting that these operators require can be inefficient for Noir's backend. Instead you can -use the bitwise operators `|` and `&` which operate indentically for booleans, just without the -short-circuiting. - -```rust -let my_val = 5; - -let mut flag = 1; -if (my_val > 6) | (my_val == 0) { - flag = 0; -} -assert(flag == 1); - -if (my_val != 10) & (my_val < 50) { - flag = 0; -} -assert(flag == 0); -``` - -### Shorthand operators - -Noir shorthand operators for most of the above operators, namely `+=, -=, *=, /=, %=, &=, |=, ^=, <<=`, and `>>=`. These allow for more concise syntax. 
For example: - -```rust -let mut i = 0; -i = i + 1; -``` - -could be written as: - -```rust -let mut i = 0; -i += 1; -``` diff --git a/docs/docs/language_concepts/05_unconstrained.md b/docs/docs/language_concepts/05_unconstrained.md deleted file mode 100644 index 6b621eda3eb..00000000000 --- a/docs/docs/language_concepts/05_unconstrained.md +++ /dev/null @@ -1,96 +0,0 @@ ---- -title: Unconstrained Functions -description: "Learn about what unconstrained functions in Noir are, how to use them and when you'd want to." - -keywords: [Noir programming language, unconstrained, open] ---- - - - -Unconstrained functions are functions which do not constrain any of the included computation and allow for non-determinisitic computation. - -## Why? - -Zero-knowledge (ZK) domain-specific languages (DSL) enable developers to generate ZK proofs from their programs by compiling code down to the constraints of an NP complete language (such as R1CS or PLONKish languages). However, the hard bounds of a constraint system can be very limiting to the functionality of a ZK DSL. - -Enabling a circuit language to perform unconstrained execution is a powerful tool. Said another way, unconstrained execution lets developers generate witnesses from code that does not generate any constraints. Being able to execute logic outside of a circuit is critical for both circuit performance and constructing proofs on information that is external to a circuit. - -Fetching information from somewhere external to a circuit can also be used to enable developers to improve circuit efficiency. - -A ZK DSL does not just prove computation, but proves that some computation was handled correctly. Thus, it is necessary that when we switch from performing some operation directly inside of a circuit to inside of an unconstrained environment that the appropriate constraints are still laid down elsewhere in the circuit. - -## Example - -An in depth example might help drive the point home. This example comes from the excellent [post](https://discord.com/channels/1113924620781883405/1124022445054111926/1128747641853972590) by Tom in the Noir Discord. - -Let's look at how we can optimize a function to turn a `u72` into an array of `u8`s. - -```rust -fn main(num: u72) -> pub [u8; 8] { - let mut out: [u8; 8] = [0; 8]; - for i in 0..8 { - out[i] = (num >> (56 - (i * 8)) as u72 & 0xff) as u8; - } - - out -} -``` - -``` -Total ACIR opcodes generated for language PLONKCSat { width: 3 }: 91 -Backend circuit size: 3619 -``` - -A lot of the operations in this function are optimized away by the compiler (all the bit-shifts turn into divisions by constants). However we can save a bunch of gates by casting to u8 a bit earlier. This automatically truncates the bit-shifted value to fit in a u8 which allows us to remove the XOR against 0xff. This saves us ~480 gates in total. - -```rust -fn main(num: u72) -> pub [u8; 8] { - let mut out: [u8; 8] = [0; 8]; - for i in 0..8 { - out[i] = (num >> (56 - (i * 8)) as u8; - } - - out -} -``` - -``` -Total ACIR opcodes generated for language PLONKCSat { width: 3 }: 75 -Backend circuit size: 3143 -``` - -Those are some nice savings already but we can do better. This code is all constrained so we're proving every step of calculating out using num, but we don't actually care about how we calculate this, just that it's correct. This is where brillig comes in. - -It turns out that truncating a u72 into a u8 is hard to do inside a snark, each time we do as u8 we lay down 4 ACIR opcodes which get converted into multiple gates. 
It's actually much easier to calculate num from out than the other way around. All we need to do is multiply each element of out by a constant and add them all together, both relatively easy operations inside a snark. - -We can then run u72_to_u8 as unconstrained brillig code in order to calculate out, then use that result in our constrained function and assert that if we were to do the reverse calculation we'd get back num. This looks a little like the below: - -```rust -fn main(num: u72) -> pub [u8; 8] { - let out = u72_to_u8(num); - - let mut reconstructed_num: u72 = 0; - for i in 0..8 { - reconstructed_num += (out[i] as u72 << (56 - (8 * i))); - } - assert(num == reconstructed_num); - out -} - -unconstrained fn u72_to_u8(num: u72) -> [u8; 8] { - let mut out: [u8; 8] = [0; 8]; - for i in 0..8 { - out[i] = (num >> (56 - (i * 8))) as u8; - } - out -} -``` - -``` -Total ACIR opcodes generated for language PLONKCSat { width: 3 }: 78 -Backend circuit size: 2902 -``` - -This ends up taking off another ~250 gates from our circuit! We've ended up with more ACIR opcodes than before but they're easier for the backend to prove (resulting in fewer gates). - -Generally we want to use brillig whenever there's something that's easy to verify but hard to compute within the circuit. For example, if you wanted to calculate a square root of a number it'll be a much better idea to calculate this in brillig and then assert that if you square the result you get back your number. diff --git a/docs/docs/language_concepts/07_mutability.md b/docs/docs/language_concepts/07_mutability.md deleted file mode 100644 index 4641521b1d9..00000000000 --- a/docs/docs/language_concepts/07_mutability.md +++ /dev/null @@ -1,92 +0,0 @@ ---- -title: Mutability -description: - Learn about mutable variables, constants, and globals in Noir programming language. Discover how - to declare, modify, and use them in your programs. -keywords: [noir programming language, mutability in noir, mutable variables, constants, globals] ---- - -Variables in noir can be declared mutable via the `mut` keyword. Mutable variables can be reassigned -to via an assignment expression. - -```rust -let x = 2; -x = 3; // error: x must be mutable to be assigned to - -let mut y = 3; -let y = 4; // OK -``` - -The `mut` modifier can also apply to patterns: - -```rust -let (a, mut b) = (1, 2); -a = 11; // error: a must be mutable to be assigned to -b = 12; // OK - -let mut (c, d) = (3, 4); -c = 13; // OK -d = 14; // OK - -// etc. -let MyStruct { x: mut y } = MyStruct { x: a }; -// y is now in scope -``` - -Note that mutability in noir is local and everything is passed by value, so if a called function -mutates its parameters then the parent function will keep the old value of the parameters. - -```rust -fn main() -> Field { - let x = 3; - helper(x); - x // x is still 3 -} - -fn helper(mut x: i32) { - x = 4; -} -``` - -## Comptime Values - -:::warning - -The 'comptime' keyword was removed in version 0.10. The comptime keyword and syntax are currently still kept and parsed for backwards compatibility, but are now deprecated and will issue a warning when used. `comptime` has been removed because it is no longer needed for accessing arrays. - -::: - -## Globals - -Noir also supports global variables. However, they must be known at compile-time. The global type can also be inferred by the compiler entirely. Globals can also be used to specify array -annotations for function parameters and can be imported from submodules. 
- -```rust -global N: Field = 5; // Same as `global N: Field = 5` - -fn main(x : Field, y : [Field; N]) { - let res = x * N; - - assert(res == y[0]); - - let res2 = x * mysubmodule::N; - assert(res != res2); -} - -mod mysubmodule { - use dep::std; - - global N: Field = 10; - - fn my_helper() -> Field { - let x = N; - x - } -} -``` - -## Why only local mutability? - -Witnesses in a proving system are immutable in nature. Noir aims to _closely_ mirror this setting -without applying additional overhead to the user. Modeling a mutable reference is not as -straightforward as on conventional architectures and would incur some possibly unexpected overhead. diff --git a/docs/docs/language_concepts/data_types.md b/docs/docs/language_concepts/data_types.md deleted file mode 100644 index d546cc463a8..00000000000 --- a/docs/docs/language_concepts/data_types.md +++ /dev/null @@ -1,96 +0,0 @@ ---- -title: Data Types -description: - Get a clear understanding of the two categories of Noir data types - primitive types and compound - types. Learn about their characteristics, differences, and how to use them in your Noir - programming. -keywords: - [ - noir, - data types, - primitive types, - compound types, - private types, - public types, - ] ---- - -Every value in Noir has a type, which determines which operations are valid for it. - -All values in Noir are fundamentally composed of `Field` elements. For a more approachable -developing experience, abstractions are added on top to introduce different data types in Noir. - -Noir has two category of data types: primitive types (e.g. `Field`, integers, `bool`) and compound -types that group primitive types (e.g. arrays, tuples, structs). Each value can either be private or -public. - -## Private & Public Types - -A **private value** is known only to the Prover, while a **public value** is known by both the -Prover and Verifier. Mark values as `private` when the value should only be known to the prover. All -primitive types (including individual fields of compound types) in Noir are private by default, and -can be marked public when certain values are intended to be revealed to the Verifier. - -> **Note:** For public values defined in Noir programs paired with smart contract verifiers, once -> the proofs are verified on-chain the values can be considered known to everyone that has access to -> that blockchain. - -Public data types are treated no differently to private types apart from the fact that their values -will be revealed in proofs generated. Simply changing the value of a public type will not change the -circuit (where the same goes for changing values of private types as well). - -_Private values_ are also referred to as _witnesses_ sometimes. - -> **Note:** The terms private and public when applied to a type (e.g. `pub Field`) have a different -> meaning than when applied to a function (e.g. `pub fn foo() {}`). -> -> The former is a visibility modifier for the Prover to interpret if a value should be made known to -> the Verifier, while the latter is a visibility modifier for the compiler to interpret if a -> function should be made accessible to external Noir programs like in other languages. - -### pub Modifier - -All data types in Noir are private by default. Types are explicitly declared as public using the -`pub` modifier: - -```rust -fn main(x : Field, y : pub Field) -> pub Field { - x + y -} -``` - -In this example, `x` is **private** while `y` and `x + y` (the return value) are **public**. 
Note -that visibility is handled **per variable**, so it is perfectly valid to have one input that is -private and another that is public. - -> **Note:** Public types can only be declared through parameters on `main`. - -## Type Aliases - -A type alias is a new name for an existing type. Type aliases are declared with the keyword `type`: - -```rust -type Id = u8; - -fn main() { - let id: Id = 1; - let zero: u8 = 0; - assert(zero + 1 == id); -} -``` - -Type aliases can also be used with [generics](./06_generics.md): - -```rust -type Id = Size; - -fn main() { - let id: Id = 1; - let zero: u32 = 0; - assert(zero + 1 == id); -} -``` - -### BigInt - -You can acheive BigInt functionality using the [Noir BigInt](https://github.com/shuklaayush/noir-bigint) library. diff --git a/docs/docs/language_concepts/data_types/00_fields.md b/docs/docs/language_concepts/data_types/00_fields.md deleted file mode 100644 index 658a0441ffb..00000000000 --- a/docs/docs/language_concepts/data_types/00_fields.md +++ /dev/null @@ -1,165 +0,0 @@ ---- -title: Fields -description: - Dive deep into the Field data type in Noir. Understand its methods, practical examples, and best practices to effectively use Fields in your Noir programs. -keywords: - [ - noir, - field type, - methods, - examples, - best practices, - ] ---- - -The field type corresponds to the native field type of the proving backend. - -The size of a Noir field depends on the elliptic curve's finite field for the proving backend -adopted. For example, a field would be a 254-bit integer when paired with the default backend that -spans the Grumpkin curve. - -Fields support integer arithmetic and are often used as the default numeric type in Noir: - -```rust -fn main(x : Field, y : Field) { - let z = x + y; -} -``` - -`x`, `y` and `z` are all private fields in this example. Using the `let` keyword we defined a new -private value `z` constrained to be equal to `x + y`. - -If proving efficiency is of priority, fields should be used as a default for solving problems. -Smaller integer types (e.g. `u64`) incur extra range constraints. - -## Methods - -After declaring a Field, you can use these common methods on it: - -### to_le_bits - -Transforms the field into an array of bits, Little Endian. - -```rust -fn to_le_bits(_x : Field, _bit_size: u32) -> [u1; N] -``` - -example: - -```rust -fn main() { - let field = 2; - let bits = field.to_le_bits(32); -} -``` - -### to_be_bits - -Transforms the field into an array of bits, Big Endian. 
- -```rust -fn to_be_bits(_x : Field, _bit_size: u32) -> [u1; N] -``` - -example: - -```rust -fn main() { - let field = 2; - let bits = field.to_be_bits(32); -} -``` - -### to_le_bytes - -Transforms into an array of bytes, Little Endian - -```rust -fn to_le_bytes(_x : Field, byte_size: u32) -> [u8] -``` - -example: - -```rust -fn main() { - let field = 2; - let bytes = field.to_le_bytes(4); -} -``` - -### to_be_bytes - -Transforms into an array of bytes, Big Endian - -```rust -fn to_be_bytes(_x : Field, byte_size: u32) -> [u8] -``` - -example: - -```rust -fn main() { - let field = 2; - let bytes = field.to_be_bytes(4); -} -``` - -### to_le_radix - -Decomposes into a vector over the specified base, Little Endian - -```rust -fn to_le_radix(_x : Field, _radix: u32, _result_len: u32) -> [u8] -``` - -example: - -```rust -fn main() { - let field = 2; - let radix = field.to_le_radix(256, 4); -} -``` - -### to_be_radix - -Decomposes into a vector over the specified base, Big Endian - -```rust -fn to_be_radix(_x : Field, _radix: u32, _result_len: u32) -> [u8] -``` - -example: - -```rust -fn main() { - let field = 2; - let radix = field.to_be_radix(256, 4); -} -``` - -### pow_32 - -Returns the value to the power of the specified exponent - -```rust -fn pow_32(self, exponent: Field) -> Field -``` - -example: - -```rust -fn main() { - let field = 2 - let pow = field.pow_32(4); - assert(pow == 16); -} -``` - -### sgn0 - -Parity of (prime) Field element, i.e. sgn0(x mod p) = 0 if x ∈ {0, ..., p-1} is even, otherwise sgn0(x mod p) = 1. - -```rust -fn sgn0(self) -> u1 -``` diff --git a/docs/docs/migration_notes.md b/docs/docs/migration_notes.md index e87eb1feaba..9f27230a1a0 100644 --- a/docs/docs/migration_notes.md +++ b/docs/docs/migration_notes.md @@ -16,7 +16,7 @@ To update, please make sure this field in `Nargo.toml` matches the output of `na ## ≥0.14 -The index of the [for loops](./language_concepts/02_control_flow.md#loops) is now of type `u64` instead of `Field`. An example refactor would be: +The index of the [for loops](noir/concepts/control_flow.md#loops) is now of type `u64` instead of `Field`. An example refactor would be: ```rust for i in 0..10 { @@ -50,7 +50,7 @@ If you are using the default barretenberg backend, simply run: nargo prove ``` -with you Noir program. +with your Noir program. This will trigger the download and installation of the latest version of barretenberg compatible with your Nargo in use. @@ -78,7 +78,7 @@ nargo backend install acvm-backend-barretenberg https://github.com/noir-lang/bar This downloads and installs a specific bb.js based version of barretenberg binary from GitHub. -The gzipped filed is running this bash script: , where we need to gzip it as the Nargo currently expect the backend to be zipped up. +The gzipped file is running [this bash script](https://github.com/noir-lang/barretenberg-js-binary/blob/master/run-bb-js.sh), where we need to gzip it as the Nargo currently expect the backend to be zipped up. 
Then run: diff --git a/docs/docs/modules_packages_crates/crates_and_packages.md b/docs/docs/modules_packages_crates/crates_and_packages.md deleted file mode 100644 index fb83a33d94e..00000000000 --- a/docs/docs/modules_packages_crates/crates_and_packages.md +++ /dev/null @@ -1,42 +0,0 @@ ---- -title: Crates and Packages -description: Learn how to use Crates and Packages in your Noir project -keywords: [Nargo, dependencies, package management, crates, package] ---- - -## Crates - -A crate is the smallest amount of code that the Noir compiler considers at a time. -Crates can contain modules, and the modules may be defined in other files that get compiled with the crate, as we’ll see in the coming sections. - -### Crate Types - -A Noir crate can come in several forms: binaries, libraries or contracts. - -#### Binaries - -_Binary crates_ are programs which you can compile to an ACIR circuit which you can then create proofs against. Each must have a function called `main` that defines the ACIR circuit which is to be proved. - -#### Libraries - -_Library crates_ don't have a `main` function and they don't compile down to ACIR. Instead they define functionality intended to be shared with multiple projects, and eventually included in a binary crate. - -#### Contracts - -Contract crates are similar to binary crates in that they compile to ACIR which you can create proofs against. They are different in that they do not have a single `main` function, but are a collection of functions to be deployed to the [Aztec network](https://aztec.network). You can learn more about the technical details of Aztec in the [monorepo](https://github.com/AztecProtocol/aztec-packages) or contract [examples](https://github.com/AztecProtocol/aztec-packages/tree/master/yarn-project/noir-contracts/src/contracts). - -### Crate Root - -Every crate has a root, which is the source file that the compiler starts, this is also known as the root module. The Noir compiler does not enforce any conditions on the name of the file which is the crate root, however if you are compiling via Nargo the crate root must be called `lib.nr` or `main.nr` for library or binary crates respectively. - -## Packages - -A Nargo _package_ is a collection of one of more crates that provides a set of functionality. A package must include a Nargo.toml file. - -A package _must_ contain either a library or a binary crate, but not both. - -### Differences from Cargo Packages - -One notable difference between Rust's Cargo and Noir's Nargo is that while Cargo allows a package to contain an unlimited number of binary crates and a single library crate, Nargo currently only allows a package to contain a single crate. - -In future this restriction may be lifted to allow a Nargo package to contain both a binary and library crate or multiple binary crates. diff --git a/docs/docs/modules_packages_crates/dependencies.md b/docs/docs/modules_packages_crates/dependencies.md deleted file mode 100644 index 75f95aaa305..00000000000 --- a/docs/docs/modules_packages_crates/dependencies.md +++ /dev/null @@ -1,123 +0,0 @@ ---- -title: Dependencies -description: - Learn how to specify and manage dependencies in Nargo, allowing you to upload packages to GitHub - and use them easily in your project. -keywords: [Nargo, dependencies, GitHub, package management, versioning] ---- - -Nargo allows you to upload packages to GitHub and use them as dependencies. 
- -## Specifying a dependency - -Specifying a dependency requires a tag to a specific commit and the git url to the url containing -the package. - -Currently, there are no requirements on the tag contents. If requirements are added, it would follow -semver 2.0 guidelines. - -> Note: Without a `tag` , there would be no versioning and dependencies would change each time you -> compile your project. - -For example, to add the [ecrecover-noir library](https://github.com/colinnielsen/ecrecover-noir) to your project, add it to `Nargo.toml`: - -```toml -# Nargo.toml - -[dependencies] -ecrecover = {tag = "v0.8.0", git = "https://github.com/colinnielsen/ecrecover-noir"} -``` - -If the module is in a subdirectory, you can define a subdirectory in your git repository, for example: - -```toml -# Nargo.toml - -[dependencies] -easy_private_token_contract = {tag ="v0.1.0-alpha62", git = "https://github.com/AztecProtocol/aztec-packages", directory = "yarn-project/noir-contracts/src/contracts/easy_private_token_contract"} -``` - -## Specifying a local dependency - -You can also specify dependencies that are local to your machine. - -For example, this file structure has a library and binary crate - -```tree -├── binary_crate -│   ├── Nargo.toml -│   └── src -│   └── main.nr -└── liba - ├── Nargo.toml - └── src - └── lib.nr -``` - -Inside of the binary crate, you can specify: - -```toml -# Nargo.toml - -[dependencies] -libA = { path = "../liba" } -``` - -## Importing dependencies - -You can import a dependency to a Noir file using the following syntax. For example, to import the -ecrecover-noir library and local liba referenced above: - -```rust -use dep::ecrecover; -use dep::libA; -``` - -You can also import only the specific parts of dependency that you want to use, like so: - -```rust -use dep::std::hash::sha256; -use dep::std::scalar_mul::fixed_base_embedded_curve; -``` - -Lastly, as demonstrated in the -[elliptic curve example](../standard_library/cryptographic_primitives/ec_primitives#examples), you -can import multiple items in the same line by enclosing them in curly braces: - -```rust -use dep::std::ec::tecurve::affine::{Curve, Point}; -``` - -We don't have a way to consume libraries from inside a [workspace](./workspaces) as external dependencies right now. - -Inside a workspace, these are consumed as `{ path = "../to_lib" }` dependencies in Nargo.toml. - -## Dependencies of Dependencies - -Note that when you import a dependency, you also get access to all of the dependencies of that package. - -For example, the [phy_vector](https://github.com/resurgencelabs/phy_vector) library imports an [fraction](https://github.com/resurgencelabs/fraction) library. If you're importing the phy_vector library, then you can access the functions in fractions library like so: - -```rust -use dep::phy_vector; - -fn main(x : Field, y : pub Field) { - //... - let f = phy_vector::fraction::toFraction(true, 2, 1); - //... -} -``` - -## Available Libraries - -Noir does not currently have an official package manager. You can find a list of available Noir libraries in the [awesome-noir repo here](https://github.com/noir-lang/awesome-noir#libraries). 
- -Some libraries that are available today include: - -- [Standard Library](https://github.com/noir-lang/noir/tree/master/noir_stdlib) - the Noir Standard Library -- [Ethereum Storage Proof Verification](https://github.com/aragonzkresearch/noir-trie-proofs) - a library that contains the primitives necessary for RLP decoding (in the form of look-up table construction) and Ethereum state and storage proof verification (or verification of any trie proof involving 32-byte long keys) -- [BigInt](https://github.com/shuklaayush/noir-bigint) - a library that provides a custom BigUint56 data type, allowing for computations on large unsigned integers -- [ECrecover](https://github.com/colinnielsen/ecrecover-noir/tree/main) - a library to verify an ECDSA signature and return the source Ethereum address -- [Sparse Merkle Tree Verifier](https://github.com/vocdoni/smtverifier-noir/tree/main) - a library for verification of sparse Merkle trees -- [Signed Int](https://github.com/resurgencelabs/signed_int) - a library for accessing a custom Signed Integer data type, allowing access to negative numbers on Noir -- [Fraction](https://github.com/resurgencelabs/fraction) - a library for accessing fractional number data type in Noir, allowing results that aren't whole numbers diff --git a/docs/docs/modules_packages_crates/modules.md b/docs/docs/modules_packages_crates/modules.md deleted file mode 100644 index 147c9b284e8..00000000000 --- a/docs/docs/modules_packages_crates/modules.md +++ /dev/null @@ -1,104 +0,0 @@ ---- -title: Modules -description: - Learn how to organize your files using modules in Noir, following the same convention as Rust's - module system. Examples included. -keywords: [Noir, Rust, modules, organizing files, sub-modules] ---- - -Noir's module system follows the same convention as the _newer_ version of Rust's module system. - -## Purpose of Modules - -Modules are used to organise files. Without modules all of your code would need to live in a single -file. In Noir, the compiler does not automatically scan all of your files to detect modules. This -must be done explicitly by the developer. - -## Examples - -### Importing a module in the crate root - -Filename : `src/main.nr` - -```rust -mod foo; - -fn main() { - foo::hello_world(); -} -``` - -Filename : `src/foo.nr` - -```rust -fn from_foo() {} -``` - -In the above snippet, the crate root is the `src/main.nr` file. The compiler sees the module -declaration `mod foo` which prompts it to look for a foo.nr file. - -Visually this module hierarchy looks like the following : - -``` -crate - ├── main - │ - └── foo - └── from_foo - -``` - -### Importing a module throughout the tree - -All modules are accessible from the `crate::` namespace. - -``` -crate - ├── bar - ├── foo - └── main - -``` - -In the above snippet, if `bar` would like to use functions in `foo`, it can do so by `use crate::foo::function_name`. - -### Sub-modules - -Filename : `src/main.nr` - -```rust -mod foo; - -fn main() { - foo::from_foo(); -} -``` - -Filename : `src/foo.nr` - -```rust -mod bar; -fn from_foo() {} -``` - -Filename : `src/foo/bar.nr` - -```rust -fn from_bar() {} -``` - -In the above snippet, we have added an extra module to the module tree; `bar`. `bar` is a submodule -of `foo` hence we declare bar in `foo.nr` with `mod bar`. 
Since `foo` is not the crate root, the -compiler looks for the file associated with the `bar` module in `src/foo/bar.nr` - -Visually the module hierarchy looks as follows: - -``` -crate - ├── main - │ - └── foo - ├── from_foo - └── bar - └── from_bar -``` diff --git a/docs/docs/nargo/02_testing.md b/docs/docs/nargo/02_testing.md deleted file mode 100644 index da767274efd..00000000000 --- a/docs/docs/nargo/02_testing.md +++ /dev/null @@ -1,61 +0,0 @@ ---- -title: Testing in Noir -description: Learn how to use Nargo to test your Noir program in a quick and easy way -keywords: [Nargo, testing, Noir, compile, test] ---- - -You can test your Noir programs using Noir circuits. - -Nargo will automatically compile and run any functions which have the decorator `#[test]` on them if -you run `nargo test`. - -For example if you have a program like: - -```rust -fn add(x: u64, y: u64) -> u64 { - x + y -} -#[test] -fn test_add() { - assert(add(2,2) == 4); - assert(add(0,1) == 1); - assert(add(1,0) == 1); -} -``` - -Running `nargo test` will test that the `test_add` function can be executed while satisfying the all -the contraints which allows you to test that add returns the expected values. Test functions can't -have any arguments currently. - -### Test fail - -You can write tests that are expected to fail by using the decorator `#[test(should_fail)]`. For example: - -```rust -fn add(x: u64, y: u64) -> u64 { - x + y -} -#[test(should_fail)] -fn test_add() { - assert(add(2,2) == 5); -} -``` - -You can be more specific and make it fail with a specific reason by using `should_fail_with = "`: - -```rust -fn main(african_swallow_avg_speed : Field) { - assert(african_swallow_avg_speed == 65, "What is the airspeed velocity of an unladen swallow"); -} - -#[test] -fn test_king_arthur() { - main(65); -} - -#[test(should_fail_with = "What is the airspeed velocity of an unladen swallow")] -fn test_bridgekeeper() { - main(32); -} - -``` diff --git a/docs/docs/noir/concepts/_category_.json b/docs/docs/noir/concepts/_category_.json new file mode 100644 index 00000000000..7da08f8a8c5 --- /dev/null +++ b/docs/docs/noir/concepts/_category_.json @@ -0,0 +1,6 @@ +{ + "label": "Concepts", + "position": 0, + "collapsible": true, + "collapsed": true +} \ No newline at end of file diff --git a/docs/docs/noir/concepts/assert.md b/docs/docs/noir/concepts/assert.md new file mode 100644 index 00000000000..c5f9aff139c --- /dev/null +++ b/docs/docs/noir/concepts/assert.md @@ -0,0 +1,27 @@ +--- +title: Assert Function +description: + Learn about the assert function in Noir, which can be used to explicitly constrain the predicate or + comparison expression that follows to be true, and what happens if the expression is false at + runtime. +keywords: [Noir programming language, assert statement, predicate expression, comparison expression] +sidebar_position: 4 +--- + +Noir includes a special `assert` function which will explicitly constrain the predicate/comparison +expression that follows to be true. If this expression is false at runtime, the program will fail to +be proven. Example: + +```rust +fn main(x : Field, y : Field) { + assert(x == y); +} +``` + +You can optionally provide a message to be logged when the assertion fails: + +```rust +assert(x == y, "x and y are not equal"); +``` + +> Assertions only work for predicate operations, such as `==`. If there's any ambiguity on the operation, the program will fail to compile. For example, it is unclear if `assert(x + y)` would check for `x + y == 0` or simply would return `true`. 
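+
+A minimal sketch of that last point: wrapping the arithmetic in an explicit comparison keeps the assertion a boolean predicate (optionally with a failure message), whereas passing the raw arithmetic would not compile:
+
+```rust
+fn main(x : Field, y : Field) {
+    // ok: the comparison yields a boolean predicate for assert to constrain
+    assert(x + y == 0, "x and y do not sum to zero");
+
+    // would not compile: `x + y` is a Field, not a boolean predicate
+    // assert(x + y);
+}
+```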
diff --git a/docs/docs/noir/concepts/comments.md b/docs/docs/noir/concepts/comments.md new file mode 100644 index 00000000000..f76ab49094b --- /dev/null +++ b/docs/docs/noir/concepts/comments.md @@ -0,0 +1,33 @@ +--- +title: Comments +description: + Learn how to write comments in Noir programming language. A comment is a line of code that is + ignored by the compiler, but it can be read by programmers. Single-line and multi-line comments + are supported in Noir. +keywords: [Noir programming language, comments, single-line comments, multi-line comments] +sidebar_position: 9 +--- + +A comment is a line in your codebase which the compiler ignores, however it can be read by +programmers. + +Here is a single line comment: + +```rust +// This is a comment and is ignored +``` + +`//` is used to tell the compiler to ignore the rest of the line. + +Noir also supports multi-line block comments. Start a block comment with `/*` and end the block with `*/`. + +Noir does not natively support doc comments. You may be able to use [Rust doc comments](https://doc.rust-lang.org/reference/comments.html) in your code to leverage some Rust documentation build tools with Noir code. + +```rust +/* + This is a block comment describing a complex function. +*/ +fn main(x : Field, y : pub Field) { + assert(x != y); +} +``` diff --git a/docs/docs/noir/concepts/control_flow.md b/docs/docs/noir/concepts/control_flow.md new file mode 100644 index 00000000000..4ce65236db3 --- /dev/null +++ b/docs/docs/noir/concepts/control_flow.md @@ -0,0 +1,45 @@ +--- +title: Control Flow +description: + Learn how to use loops and if expressions in the Noir programming language. Discover the syntax + and examples for for loops and if-else statements. +keywords: [Noir programming language, loops, for loop, if-else statements, Rust syntax] +sidebar_position: 2 +--- + +## Loops + +Noir has one kind of loop: the `for` loop. `for` loops allow you to repeat a block of code multiple +times. + +The following block of code between the braces is run 10 times. + +```rust +for i in 0..10 { + // do something +}; +``` + +The index for loops is of type `u64`. + +## If Expressions + +Noir supports `if-else` statements. The syntax is most similar to Rust's where it is not required +for the statement's conditional to be surrounded by parentheses. + +```rust +let a = 0; +let mut x: u32 = 0; + +if a == 0 { + if a != 0 { + x = 6; + } else { + x = 2; + } +} else { + x = 5; + assert(x == 5); +} +assert(x == 2); +``` diff --git a/docs/docs/noir/concepts/data_bus.md b/docs/docs/noir/concepts/data_bus.md new file mode 100644 index 00000000000..6c7e9b60891 --- /dev/null +++ b/docs/docs/noir/concepts/data_bus.md @@ -0,0 +1,21 @@ +--- +title: Data Bus +sidebar_position: 12 +--- +**Disclaimer** this feature is experimental, do not use it! + +The data bus is an optimization that the backend can use to make recursion more efficient. +In order to use it, you must define some inputs of the program entry points (usually the `main()` +function) with the `call_data` modifier, and the return values with the `return_data` modifier. +These modifiers are incompatible with `pub` and `mut` modifiers. + +## Example + +```rust +fn main(mut x: u32, y: call_data u32, z: call_data [u32;4] ) -> return_data u32 { + let a = z[x]; + a+y +} +``` + +As a result, both call_data and return_data will be treated as private inputs and encapsulated into a read-only array each, for the backend to process. 
diff --git a/docs/docs/noir/concepts/data_types/_category_.json b/docs/docs/noir/concepts/data_types/_category_.json new file mode 100644 index 00000000000..5d694210bbf --- /dev/null +++ b/docs/docs/noir/concepts/data_types/_category_.json @@ -0,0 +1,5 @@ +{ + "position": 0, + "collapsible": true, + "collapsed": true +} diff --git a/docs/docs/noir/concepts/data_types/arrays.md b/docs/docs/noir/concepts/data_types/arrays.md new file mode 100644 index 00000000000..075d39dadd4 --- /dev/null +++ b/docs/docs/noir/concepts/data_types/arrays.md @@ -0,0 +1,245 @@ +--- +title: Arrays +description: + Dive into the Array data type in Noir. Grasp its methods, practical examples, and best practices for efficiently using Arrays in your Noir code. +keywords: + [ + noir, + array type, + methods, + examples, + indexing, + ] +sidebar_position: 4 +--- + +An array is one way of grouping together values into one compound type. Array types can be inferred +or explicitly specified via the syntax `[; ]`: + +```rust +fn main(x : Field, y : Field) { + let my_arr = [x, y]; + let your_arr: [Field; 2] = [x, y]; +} +``` + +Here, both `my_arr` and `your_arr` are instantiated as an array containing two `Field` elements. + +Array elements can be accessed using indexing: + +```rust +fn main() { + let a = [1, 2, 3, 4, 5]; + + let first = a[0]; + let second = a[1]; +} +``` + +All elements in an array must be of the same type (i.e. homogeneous). That is, an array cannot group +a `Field` value and a `u8` value together for example. + +You can write mutable arrays, like: + +```rust +fn main() { + let mut arr = [1, 2, 3, 4, 5]; + assert(arr[0] == 1); + + arr[0] = 42; + assert(arr[0] == 42); +} +``` + +You can instantiate a new array of a fixed size with the same value repeated for each element. The following example instantiates an array of length 32 where each element is of type Field and has the value 0. + +```rust +let array: [Field; 32] = [0; 32]; +``` + +Like in Rust, arrays in Noir are a fixed size. However, if you wish to convert an array to a [slice](./slices), you can just call `as_slice` on your array: + +```rust +let array: [Field; 32] = [0; 32]; +let sl = array.as_slice() +``` + +You can define multidimensional arrays: + +```rust +let array : [[Field; 2]; 2]; +let element = array[0][0]; +``` + +## Types + +You can create arrays of primitive types or structs. There is not yet support for nested arrays +(arrays of arrays) or arrays of structs that contain arrays. + +## Methods + +For convenience, the STD provides some ready-to-use, common methods for arrays: + +### len + +Returns the length of an array + +```rust +fn len(_array: [T; N]) -> comptime Field +``` + +example + +```rust +fn main() { + let array = [42, 42]; + assert(array.len() == 2); +} +``` + +### sort + +Returns a new sorted array. The original array remains untouched. Notice that this function will +only work for arrays of fields or integers, not for any arbitrary type. This is because the sorting +logic it uses internally is optimized specifically for these values. If you need a sort function to +sort any type, you should use the function `sort_via` described below. 
+ +```rust +fn sort(_array: [T; N]) -> [T; N] +``` + +example + +```rust +fn main() { + let arr = [42, 32]; + let sorted = arr.sort(); + assert(sorted == [32, 42]); +} +``` + +### sort_via + +Sorts the array with a custom comparison function + +```rust +fn sort_via(mut a: [T; N], ordering: fn(T, T) -> bool) -> [T; N] +``` + +example + +```rust +fn main() { + let arr = [42, 32] + let sorted_ascending = arr.sort_via(|a, b| a < b); + assert(sorted_ascending == [32, 42]); // verifies + + let sorted_descending = arr.sort_via(|a, b| a > b); + assert(sorted_descending == [32, 42]); // does not verify +} +``` + +### map + +Applies a function to each element of the array, returning a new array containing the mapped elements. + +```rust +fn map(f: fn(T) -> U) -> [U; N] +``` + +example + +```rust +let a = [1, 2, 3]; +let b = a.map(|a| a * 2); // b is now [2, 4, 6] +``` + +### fold + +Applies a function to each element of the array, returning the final accumulated value. The first +parameter is the initial value. + +```rust +fn fold(mut accumulator: U, f: fn(U, T) -> U) -> U +``` + +This is a left fold, so the given function will be applied to the accumulator and first element of +the array, then the second, and so on. For a given call the expected result would be equivalent to: + +```rust +let a1 = [1]; +let a2 = [1, 2]; +let a3 = [1, 2, 3]; + +let f = |a, b| a - b; +a1.fold(10, f) //=> f(10, 1) +a2.fold(10, f) //=> f(f(10, 1), 2) +a3.fold(10, f) //=> f(f(f(10, 1), 2), 3) +``` + +example: + +```rust + +fn main() { + let arr = [2, 2, 2, 2, 2]; + let folded = arr.fold(0, |a, b| a + b); + assert(folded == 10); +} + +``` + +### reduce + +Same as fold, but uses the first element as starting element. + +```rust +fn reduce(f: fn(T, T) -> T) -> T +``` + +example: + +```rust +fn main() { + let arr = [2, 2, 2, 2, 2]; + let reduced = arr.reduce(|a, b| a + b); + assert(reduced == 10); +} +``` + +### all + +Returns true if all the elements satisfy the given predicate + +```rust +fn all(predicate: fn(T) -> bool) -> bool +``` + +example: + +```rust +fn main() { + let arr = [2, 2, 2, 2, 2]; + let all = arr.all(|a| a == 2); + assert(all); +} +``` + +### any + +Returns true if any of the elements satisfy the given predicate + +```rust +fn any(predicate: fn(T) -> bool) -> bool +``` + +example: + +```rust +fn main() { + let arr = [2, 2, 2, 2, 5]; + let any = arr.any(|a| a == 5); + assert(any); +} + +``` diff --git a/docs/docs/noir/concepts/data_types/booleans.md b/docs/docs/noir/concepts/data_types/booleans.md new file mode 100644 index 00000000000..69826fcd724 --- /dev/null +++ b/docs/docs/noir/concepts/data_types/booleans.md @@ -0,0 +1,31 @@ +--- +title: Booleans +description: + Delve into the Boolean data type in Noir. Understand its methods, practical examples, and best practices for using Booleans in your Noir programs. +keywords: + [ + noir, + boolean type, + methods, + examples, + logical operations, + ] +sidebar_position: 2 +--- + + +The `bool` type in Noir has two possible values: `true` and `false`: + +```rust +fn main() { + let t = true; + let f: bool = false; +} +``` + +> **Note:** When returning a boolean value, it will show up as a value of 1 for `true` and 0 for +> `false` in _Verifier.toml_. + +The boolean type is most commonly used in conditionals like `if` expressions and `assert` +statements. More about conditionals is covered in the [Control Flow](../control_flow) and +[Assert Function](../assert) sections. 
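+
+As a small sketch of typical usage, a comparison produces a `bool` which can then drive an `if` expression or an `assert`:
+
+```rust
+fn main(x : Field, y : Field) {
+    // comparisons evaluate to `bool`
+    let equal: bool = x == y;
+
+    if equal {
+        assert(x == y);
+    } else {
+        assert(x != y);
+    }
+}
+```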
diff --git a/docs/docs/noir/concepts/data_types/fields.md b/docs/docs/noir/concepts/data_types/fields.md new file mode 100644 index 00000000000..a1c67945d66 --- /dev/null +++ b/docs/docs/noir/concepts/data_types/fields.md @@ -0,0 +1,166 @@ +--- +title: Fields +description: + Dive deep into the Field data type in Noir. Understand its methods, practical examples, and best practices to effectively use Fields in your Noir programs. +keywords: + [ + noir, + field type, + methods, + examples, + best practices, + ] +sidebar_position: 0 +--- + +The field type corresponds to the native field type of the proving backend. + +The size of a Noir field depends on the elliptic curve's finite field for the proving backend +adopted. For example, a field would be a 254-bit integer when paired with the default backend that +spans the Grumpkin curve. + +Fields support integer arithmetic and are often used as the default numeric type in Noir: + +```rust +fn main(x : Field, y : Field) { + let z = x + y; +} +``` + +`x`, `y` and `z` are all private fields in this example. Using the `let` keyword we defined a new +private value `z` constrained to be equal to `x + y`. + +If proving efficiency is of priority, fields should be used as a default for solving problems. +Smaller integer types (e.g. `u64`) incur extra range constraints. + +## Methods + +After declaring a Field, you can use these common methods on it: + +### to_le_bits + +Transforms the field into an array of bits, Little Endian. + +```rust +fn to_le_bits(_x : Field, _bit_size: u32) -> [u1; N] +``` + +example: + +```rust +fn main() { + let field = 2; + let bits = field.to_le_bits(32); +} +``` + +### to_be_bits + +Transforms the field into an array of bits, Big Endian. + +```rust +fn to_be_bits(_x : Field, _bit_size: u32) -> [u1; N] +``` + +example: + +```rust +fn main() { + let field = 2; + let bits = field.to_be_bits(32); +} +``` + +### to_le_bytes + +Transforms into an array of bytes, Little Endian + +```rust +fn to_le_bytes(_x : Field, byte_size: u32) -> [u8] +``` + +example: + +```rust +fn main() { + let field = 2; + let bytes = field.to_le_bytes(4); +} +``` + +### to_be_bytes + +Transforms into an array of bytes, Big Endian + +```rust +fn to_be_bytes(_x : Field, byte_size: u32) -> [u8] +``` + +example: + +```rust +fn main() { + let field = 2; + let bytes = field.to_be_bytes(4); +} +``` + +### to_le_radix + +Decomposes into a vector over the specified base, Little Endian + +```rust +fn to_le_radix(_x : Field, _radix: u32, _result_len: u32) -> [u8] +``` + +example: + +```rust +fn main() { + let field = 2; + let radix = field.to_le_radix(256, 4); +} +``` + +### to_be_radix + +Decomposes into a vector over the specified base, Big Endian + +```rust +fn to_be_radix(_x : Field, _radix: u32, _result_len: u32) -> [u8] +``` + +example: + +```rust +fn main() { + let field = 2; + let radix = field.to_be_radix(256, 4); +} +``` + +### pow_32 + +Returns the value to the power of the specified exponent + +```rust +fn pow_32(self, exponent: Field) -> Field +``` + +example: + +```rust +fn main() { + let field = 2 + let pow = field.pow_32(4); + assert(pow == 16); +} +``` + +### sgn0 + +Parity of (prime) Field element, i.e. sgn0(x mod p) = 0 if x ∈ \{0, ..., p-1\} is even, otherwise sgn0(x mod p) = 1. 
+ +```rust +fn sgn0(self) -> u1 +``` diff --git a/docs/docs/noir/concepts/data_types/function_types.md b/docs/docs/noir/concepts/data_types/function_types.md new file mode 100644 index 00000000000..e224e860d59 --- /dev/null +++ b/docs/docs/noir/concepts/data_types/function_types.md @@ -0,0 +1,26 @@ +--- +title: Function types +sidebar_position: 10 +--- + +Noir supports higher-order functions. The syntax for a function type is as follows: + +```rust +fn(arg1_type, arg2_type, ...) -> return_type +``` + +Example: + +```rust +fn assert_returns_100(f: fn() -> Field) { // f takes no args and returns a Field + assert(f() == 100); +} + +fn main() { + assert_returns_100(|| 100); // ok + assert_returns_100(|| 150); // fails +} +``` + +A function type also has an optional capture environment - this is necessary to support closures. +See [Lambdas](@site/docs/noir/concepts/lambdas.md) for more details. diff --git a/docs/docs/noir/concepts/data_types/index.md b/docs/docs/noir/concepts/data_types/index.md new file mode 100644 index 00000000000..3c9cd4c2437 --- /dev/null +++ b/docs/docs/noir/concepts/data_types/index.md @@ -0,0 +1,96 @@ +--- +title: Data Types +description: + Get a clear understanding of the two categories of Noir data types - primitive types and compound + types. Learn about their characteristics, differences, and how to use them in your Noir + programming. +keywords: + [ + noir, + data types, + primitive types, + compound types, + private types, + public types, + ] +--- + +Every value in Noir has a type, which determines which operations are valid for it. + +All values in Noir are fundamentally composed of `Field` elements. For a more approachable +developing experience, abstractions are added on top to introduce different data types in Noir. + +Noir has two category of data types: primitive types (e.g. `Field`, integers, `bool`) and compound +types that group primitive types (e.g. arrays, tuples, structs). Each value can either be private or +public. + +## Private & Public Types + +A **private value** is known only to the Prover, while a **public value** is known by both the +Prover and Verifier. Mark values as `private` when the value should only be known to the prover. All +primitive types (including individual fields of compound types) in Noir are private by default, and +can be marked public when certain values are intended to be revealed to the Verifier. + +> **Note:** For public values defined in Noir programs paired with smart contract verifiers, once +> the proofs are verified on-chain the values can be considered known to everyone that has access to +> that blockchain. + +Public data types are treated no differently to private types apart from the fact that their values +will be revealed in proofs generated. Simply changing the value of a public type will not change the +circuit (where the same goes for changing values of private types as well). + +_Private values_ are also referred to as _witnesses_ sometimes. + +> **Note:** The terms private and public when applied to a type (e.g. `pub Field`) have a different +> meaning than when applied to a function (e.g. `pub fn foo() {}`). +> +> The former is a visibility modifier for the Prover to interpret if a value should be made known to +> the Verifier, while the latter is a visibility modifier for the compiler to interpret if a +> function should be made accessible to external Noir programs like in other languages. + +### pub Modifier + +All data types in Noir are private by default. 
Types are explicitly declared as public using the +`pub` modifier: + +```rust +fn main(x : Field, y : pub Field) -> pub Field { + x + y +} +``` + +In this example, `x` is **private** while `y` and `x + y` (the return value) are **public**. Note +that visibility is handled **per variable**, so it is perfectly valid to have one input that is +private and another that is public. + +> **Note:** Public types can only be declared through parameters on `main`. + +## Type Aliases + +A type alias is a new name for an existing type. Type aliases are declared with the keyword `type`: + +```rust +type Id = u8; + +fn main() { + let id: Id = 1; + let zero: u8 = 0; + assert(zero + 1 == id); +} +``` + +Type aliases can also be used with [generics](@site/docs/noir/concepts/generics.md): + +```rust +type Id = Size; + +fn main() { + let id: Id = 1; + let zero: u32 = 0; + assert(zero + 1 == id); +} +``` + +### BigInt + +You can achieve BigInt functionality using the [Noir BigInt](https://github.com/shuklaayush/noir-bigint) library. diff --git a/docs/docs/noir/concepts/data_types/integers.md b/docs/docs/noir/concepts/data_types/integers.md new file mode 100644 index 00000000000..7d1e83cf4e9 --- /dev/null +++ b/docs/docs/noir/concepts/data_types/integers.md @@ -0,0 +1,113 @@ +--- +title: Integers +description: Explore the Integer data type in Noir. Learn about its methods, see real-world examples, and grasp how to efficiently use Integers in your Noir code. +keywords: [noir, integer types, methods, examples, arithmetic] +sidebar_position: 1 +--- + +An integer type is a range constrained field type. The Noir frontend supports arbitrarily-sized, both unsigned and signed integer types. + +:::info + +When an integer is defined in Noir without a specific type, it will default to `Field`. + +The one exception is for loop indices which default to `u64` since comparisons on `Field`s are not possible. + +::: + +## Unsigned Integers + +An unsigned integer type is specified first with the letter `u` (indicating its unsigned nature) followed by its bit size (e.g. `8`): + +```rust +fn main() { + let x: u8 = 1; + let y: u8 = 1; + let z = x + y; + assert (z == 2); +} +``` + +The bit size determines the maximum value the integer type can store. For example, a `u8` variable can store a value in the range of 0 to 255 (i.e. $\\2^{8}-1\\$). + +## Signed Integers + +A signed integer type is specified first with the letter `i` (which stands for integer) followed by its bit size (e.g. `8`): + +```rust +fn main() { + let x: i8 = -1; + let y: i8 = -1; + let z = x + y; + assert (z == -2); +} +``` + +The bit size determines the maximum and minimum range of value the integer type can store. For example, an `i8` variable can store a value in the range of -128 to 127 (i.e. $\\-2^{7}\\$ to $\\2^{7}-1\\$). + +:::tip + +If you are using the default proving backend with Noir, both even (e.g. _u2_, _i2_) and odd (e.g. _u3_, _i3_) arbitrarily-sized integer types up to 127 bits (i.e. _u127_ and _i127_) are supported. + +::: + +## Overflows + +Computations that exceed the type boundaries will result in overflow errors. This happens with both signed and unsigned integers. For example, attempting to prove: + +```rust +fn main(x: u8, y: u8) { + let z = x + y; +} +``` + +With: + +```toml +x = "255" +y = "1" +``` + +Would result in: + +``` +$ nargo prove +error: Assertion failed: 'attempt to add with overflow' +┌─ ~/src/main.nr:9:13 +│ +│ let z = x + y; +│ ----- +│ += Call stack: + ... 
+``` + +A similar error would happen with signed integers: + +```rust +fn main() { + let x: i8 = -118; + let y: i8 = -11; + let z = x + y; +} +``` + +### Wrapping methods + +Although integer overflow is expected to error, some use-cases rely on wrapping. For these use-cases, the standard library provides `wrapping` variants of certain common operations: + +```rust +fn wrapping_add(x: T, y: T) -> T; +fn wrapping_sub(x: T, y: T) -> T; +fn wrapping_mul(x: T, y: T) -> T; +``` + +Example of how it is used: + +```rust +use dep::std; + +fn main(x: u8, y: u8) -> pub u8 { + std::wrapping_add(x + y) +} +``` diff --git a/docs/docs/noir/concepts/data_types/references.md b/docs/docs/noir/concepts/data_types/references.md new file mode 100644 index 00000000000..a5293d11cfb --- /dev/null +++ b/docs/docs/noir/concepts/data_types/references.md @@ -0,0 +1,23 @@ +--- +title: References +sidebar_position: 9 +--- + +Noir supports first-class references. References are a bit like pointers: they point to a specific address that can be followed to access the data stored at that address. You can use Rust-like syntax to use pointers in Noir: the `&` operator references the variable, the `*` operator dereferences it. + +Example: + +```rust +fn main() { + let mut x = 2; + + // you can reference x as &mut and pass it to multiplyBy2 + multiplyBy2(&mut x); +} + +// you can access &mut here +fn multiplyBy2(x: &mut Field) { + // and dereference it with * + *x = *x * 2; +} +``` diff --git a/docs/docs/noir/concepts/data_types/slices.mdx b/docs/docs/noir/concepts/data_types/slices.mdx new file mode 100644 index 00000000000..4a6ee816aa2 --- /dev/null +++ b/docs/docs/noir/concepts/data_types/slices.mdx @@ -0,0 +1,147 @@ +--- +title: Slices +description: Explore the Slice data type in Noir. Understand its methods, see real-world examples, and learn how to effectively use Slices in your Noir programs. +keywords: [noir, slice type, methods, examples, subarrays] +sidebar_position: 5 +--- + +import Experimental from '@site/src/components/Notes/_experimental.mdx'; + + + +A slice is a dynamically-sized view into a sequence of elements. They can be resized at runtime, but because they don't own the data, they cannot be returned from a circuit. You can treat slices as arrays without a constrained size. + +```rust +use dep::std::slice; + +fn main() -> pub Field { + let mut slice: [Field] = [0; 2]; + + let mut new_slice = slice.push_back(6); + new_slice.len() +} +``` + +View the corresponding test file [here][test-file]. + +[test-file]: https://github.com/noir-lang/noir/blob/f387ec1475129732f72ba294877efdf6857135ac/crates/nargo_cli/tests/test_data_ssa_refactor/slices/src/main.nr + +## Methods + +For convenience, the STD provides some ready-to-use, common methods for slices: + +### push_back + +Pushes a new element to the end of the slice, returning a new slice with a length one greater than the original unmodified slice. + +```rust +fn push_back(_self: [T], _elem: T) -> [T] +``` + +example: + +```rust +fn main() -> pub Field { + let mut slice: [Field] = [0; 2]; + + let mut new_slice = slice.push_back(6); + new_slice.len() +} +``` + +View the corresponding test file [here][test-file]. + +### push_front + +Returns a new array with the specified element inserted at index 0. The existing elements indexes are incremented by 1. 
+ +```rust +fn push_front(_self: Self, _elem: T) -> Self +``` + +Example: + +```rust +let mut new_slice: [Field] = []; +new_slice = new_slice.push_front(20); +assert(new_slice[0] == 20); // returns true +``` + +View the corresponding test file [here][test-file]. + +### pop_front + +Returns a tuple of two items, the first element of the array and the rest of the array. + +```rust +fn pop_front(_self: Self) -> (T, Self) +``` + +Example: + +```rust +let (first_elem, rest_of_slice) = slice.pop_front(); +``` + +View the corresponding test file [here][test-file]. + +### pop_back + +Returns a tuple of two items, the beginning of the array with the last element omitted and the last element. + +```rust +fn pop_back(_self: Self) -> (Self, T) +``` + +Example: + +```rust +let (popped_slice, last_elem) = slice.pop_back(); +``` + +View the corresponding test file [here][test-file]. + +### append + +Loops over a slice and adds it to the end of another. + +```rust +fn append(mut self, other: Self) -> Self +``` + +Example: + +```rust +let append = [1, 2].append([3, 4, 5]); +``` + +### insert + +Inserts an element at a specified index and shifts all following elements by 1. + +```rust +fn insert(_self: Self, _index: Field, _elem: T) -> Self +``` + +Example: + +```rust +new_slice = rest_of_slice.insert(2, 100); +assert(new_slice[2] == 100); +``` + +View the corresponding test file [here][test-file]. + +### remove + +Remove an element at a specified index, shifting all elements after it to the left, returning the altered slice and the removed element. + +```rust +fn remove(_self: Self, _index: Field) -> (Self, T) +``` + +Example: + +```rust +let (remove_slice, removed_elem) = slice.remove(3); +``` diff --git a/docs/docs/noir/concepts/data_types/strings.md b/docs/docs/noir/concepts/data_types/strings.md new file mode 100644 index 00000000000..311dfd64416 --- /dev/null +++ b/docs/docs/noir/concepts/data_types/strings.md @@ -0,0 +1,80 @@ +--- +title: Strings +description: + Discover the String data type in Noir. Learn about its methods, see real-world examples, and understand how to effectively manipulate and use Strings in Noir. +keywords: + [ + noir, + string type, + methods, + examples, + concatenation, + ] +sidebar_position: 3 +--- + + +The string type is a fixed length value defined with `str`. + +You can use strings in `assert()` functions or print them with +`println()`. See more about [Logging](../../standard_library/logging). + +```rust +use dep::std; + +fn main(message : pub str<11>, hex_as_string : str<4>) { + println(message); + assert(message == "hello world"); + assert(hex_as_string == "0x41"); +} +``` + +You can convert a `str` to a byte array by calling `as_bytes()` +or a vector by calling `as_bytes_vec()`. + +```rust +fn main() { + let message = "hello world"; + let message_bytes = message.as_bytes(); + let mut message_vec = message.as_bytes_vec(); + assert(message_bytes.len() == 11); + assert(message_bytes[0] == 104); + assert(message_bytes[0] == message_vec.get(0)); +} +``` + +## Escape characters + +You can use escape characters for your strings: + +| Escape Sequence | Description | +|-----------------|-----------------| +| `\r` | Carriage Return | +| `\n` | Newline | +| `\t` | Tab | +| `\0` | Null Character | +| `\"` | Double Quote | +| `\\` | Backslash | + +Example: + +```rust +let s = "Hello \"world" // prints "Hello "world" +let s = "hey \tyou"; // prints "hey you" +``` + +## Raw strings + +A raw string begins with the letter `r` and is optionally delimited by a number of hashes `#`. 
+ +Escape characters are *not* processed within raw strings. All contents are interpreted literally. + +Example: + +```rust +let s = r"Hello world"; +let s = r#"Simon says "hello world""#; + +// Any number of hashes may be used (>= 1) as long as the string also terminates with the same number of hashes +let s = r#####"One "#, Two "##, Three "###, Four "####, Five will end the string."#####; +``` diff --git a/docs/docs/noir/concepts/data_types/structs.md b/docs/docs/noir/concepts/data_types/structs.md new file mode 100644 index 00000000000..dbf68c99813 --- /dev/null +++ b/docs/docs/noir/concepts/data_types/structs.md @@ -0,0 +1,70 @@ +--- +title: Structs +description: + Explore the Struct data type in Noir. Learn about its methods, see real-world examples, and grasp how to effectively define and use Structs in your Noir programs. +keywords: + [ + noir, + struct type, + methods, + examples, + data structures, + ] +sidebar_position: 8 +--- + +A struct also allows for grouping multiple values of different types. Unlike tuples, we can also +name each field. + +> **Note:** The usage of _field_ here refers to each element of the struct and is unrelated to the +> field type of Noir. + +Defining a struct requires giving it a name and listing each field within as `: ` pairs: + +```rust +struct Animal { + hands: Field, + legs: Field, + eyes: u8, +} +``` + +An instance of a struct can then be created with actual values in `: ` pairs in any +order. Struct fields are accessible using their given names: + +```rust +fn main() { + let legs = 4; + + let dog = Animal { + eyes: 2, + hands: 0, + legs, + }; + + let zero = dog.hands; +} +``` + +Structs can also be destructured in a pattern, binding each field to a new variable: + +```rust +fn main() { + let Animal { hands, legs: feet, eyes } = get_octopus(); + + let ten = hands + feet + eyes as u8; +} + +fn get_octopus() -> Animal { + let octopus = Animal { + hands: 0, + legs: 8, + eyes: 2, + }; + + octopus +} +``` + +The new variables can be bound with names different from the original struct field names, as +showcased in the `legs --> feet` binding in the example above. diff --git a/docs/docs/noir/concepts/data_types/tuples.md b/docs/docs/noir/concepts/data_types/tuples.md new file mode 100644 index 00000000000..2ec5c9c4113 --- /dev/null +++ b/docs/docs/noir/concepts/data_types/tuples.md @@ -0,0 +1,48 @@ +--- +title: Tuples +description: + Dive into the Tuple data type in Noir. Understand its methods, practical examples, and best practices for efficiently using Tuples in your Noir code. +keywords: + [ + noir, + tuple type, + methods, + examples, + multi-value containers, + ] +sidebar_position: 7 +--- + +A tuple collects multiple values like an array, but with the added ability to collect values of +different types: + +```rust +fn main() { + let tup: (u8, u64, Field) = (255, 500, 1000); +} +``` + +One way to access tuple elements is via destructuring using pattern matching: + +```rust +fn main() { + let tup = (1, 2); + + let (one, two) = tup; + + let three = one + two; +} +``` + +Another way to access tuple elements is via direct member access, using a period (`.`) followed by +the index of the element we want to access. 
Index `0` corresponds to the first tuple element, `1` to +the second and so on: + +```rust +fn main() { + let tup = (5, 6, 7, 8); + + let five = tup.0; + let eight = tup.3; +} +``` diff --git a/docs/docs/noir/concepts/data_types/vectors.mdx b/docs/docs/noir/concepts/data_types/vectors.mdx new file mode 100644 index 00000000000..aed13183719 --- /dev/null +++ b/docs/docs/noir/concepts/data_types/vectors.mdx @@ -0,0 +1,171 @@ +--- +title: Vectors +description: Delve into the Vector data type in Noir. Learn about its methods, practical examples, and best practices for using Vectors in your Noir code. +keywords: [noir, vector type, methods, examples, dynamic arrays] +sidebar_position: 6 +--- + +import Experimental from '@site/src/components/Notes/_experimental.mdx'; + + + +A vector is a collection type similar to Rust's Vector type. It's convenient way to use slices as mutable arrays. + +Example: + +```rust +let mut vector: Vec = Vec::new(); +for i in 0..5 { + vector.push(i); +} +assert(vector.len() == 5); +``` + +## Methods + +### new + +Creates a new, empty vector. + +```rust +pub fn new() -> Self { + Self { slice: [] } +} +``` + +Example: + +```rust +let empty_vector: Vec = Vec::new(); +assert(empty_vector.len() == 0); +``` + +### from_slice + +Creates a vector containing each element from a given slice. Mutations to the resulting vector will not affect the original slice. + +```rust +pub fn from_slice(slice: [T]) -> Self { + Self { slice } +} +``` + +Example: + +```rust +let arr: [Field] = [1, 2, 3]; +let vector_from_slice = Vec::from_slice(arr); +assert(vector_from_slice.len() == 3); +``` + +### get + +Retrieves an element from the vector at a given index. Panics if the index points beyond the vector's end. + +```rust +pub fn get(self, index: Field) -> T { + self.slice[index] +} +``` + +Example: + +```rust +let vector: Vec = Vec::from_slice([10, 20, 30]); +assert(vector.get(1) == 20); +``` + +### push + +Adds a new element to the vector's end, returning a new vector with a length one greater than the original unmodified vector. + +```rust +pub fn push(&mut self, elem: T) { + self.slice = self.slice.push_back(elem); +} +``` + +Example: + +```rust +let mut vector: Vec = Vec::new(); +vector.push(10); +assert(vector.len() == 1); +``` + +### pop + +Removes an element from the vector's end, returning a new vector with a length one less than the original vector, along with the removed element. Panics if the vector's length is zero. + +```rust +pub fn pop(&mut self) -> T { + let (popped_slice, last_elem) = self.slice.pop_back(); + self.slice = popped_slice; + last_elem +} +``` + +Example: + +```rust +let mut vector = Vec::from_slice([10, 20]); +let popped_elem = vector.pop(); +assert(popped_elem == 20); +assert(vector.len() == 1); +``` + +### insert + +Inserts an element at a specified index, shifting subsequent elements to the right. + +```rust +pub fn insert(&mut self, index: Field, elem: T) { + self.slice = self.slice.insert(index, elem); +} +``` + +Example: + +```rust +let mut vector = Vec::from_slice([10, 30]); +vector.insert(1, 20); +assert(vector.get(1) == 20); +``` + +### remove + +Removes an element at a specified index, shifting subsequent elements to the left, and returns the removed element. 
+ +```rust +pub fn remove(&mut self, index: Field) -> T { + let (new_slice, elem) = self.slice.remove(index); + self.slice = new_slice; + elem +} +``` + +Example: + +```rust +let mut vector = Vec::from_slice([10, 20, 30]); +let removed_elem = vector.remove(1); +assert(removed_elem == 20); +assert(vector.len() == 2); +``` + +### len + +Returns the number of elements in the vector. + +```rust +pub fn len(self) -> Field { + self.slice.len() +} +``` + +Example: + +```rust +let empty_vector: Vec = Vec::new(); +assert(empty_vector.len() == 0); +``` diff --git a/docs/docs/noir/concepts/distinct.md b/docs/docs/noir/concepts/distinct.md new file mode 100644 index 00000000000..b59e0296b23 --- /dev/null +++ b/docs/docs/noir/concepts/distinct.md @@ -0,0 +1,64 @@ +--- +title: Distinct Witnesses +sidebar_position: 10 +--- + +The `distinct` keyword prevents repetitions of witness indices in the program's ABI. This ensures +that the witnesses being returned as public inputs are all unique. + +The `distinct` keyword is only used for return values on program entry points (usually the `main()` +function). + +When using `distinct` and `pub` simultaneously, `distinct` comes first. See the example below. + +You can read more about the problem this solves +[here](https://github.com/noir-lang/noir/issues/1183). + +## Example + +Without the `distinct` keyword, the following program + +```rust +fn main(x : pub Field, y : pub Field) -> pub [Field; 4] { + let a = 1; + let b = 1; + [x + 1, y, a, b] +} +``` + +compiles to + +```json +{ + //... + "abi": { + //... + "param_witnesses": { "x": [1], "y": [2] }, + "return_witnesses": [3, 2, 4, 4] + } +} +``` + +Whereas (with the `distinct` keyword) + +```rust +fn main(x : pub Field, y : pub Field) -> distinct pub [Field; 4] { + let a = 1; + let b = 1; + [x + 1, y, a, b] +} +``` + +compiles to + +```json +{ + //... + "abi": { + //... + "param_witnesses": { "x": [1], "y": [2] }, + //... + "return_witnesses": [3, 4, 5, 6] + } +} +``` diff --git a/docs/docs/noir/concepts/functions.md b/docs/docs/noir/concepts/functions.md new file mode 100644 index 00000000000..48aba9cd058 --- /dev/null +++ b/docs/docs/noir/concepts/functions.md @@ -0,0 +1,226 @@ +--- +title: Functions +description: + Learn how to declare functions and methods in Noir, a programming language with Rust semantics. + This guide covers parameter declaration, return types, call expressions, and more. +keywords: [Noir, Rust, functions, methods, parameter declaration, return types, call expressions] +sidebar_position: 1 +--- + +Functions in Noir follow the same semantics of Rust, though Noir does not support early returns. + +To declare a function the `fn` keyword is used. + +```rust +fn foo() {} +``` + +By default, functions are visible only within the package they are defined. To make them visible outside of that package (for example, as part of a [library](../modules_packages_crates/crates_and_packages.md#libraries)), you should mark them as `pub`: + +```rust +pub fn foo() {} +``` + +You can also restrict the visibility of the function to only the crate it was defined in, by specifying `pub(crate)`: + +```rust +pub(crate) fn foo() {} //foo can only be called within its crate +``` + +All parameters in a function must have a type and all types are known at compile time. The parameter +is pre-pended with a colon and the parameter type. Multiple parameters are separated using a comma. + +```rust +fn foo(x : Field, y : Field){} +``` + +The return type of a function can be stated by using the `->` arrow notation. 
The function below +states that the foo function must return a `Field`. If the function returns no value, then the arrow +is omitted. + +```rust +fn foo(x : Field, y : Field) -> Field { + x + y +} +``` + +Note that a `return` keyword is unneeded in this case - the last expression in a function's body is +returned. + +## Main function + +If you're writing a binary, the `main` function is the starting point of your program. You can pass all types of expressions to it, as long as they have a fixed size at compile time: + +```rust +fn main(x : Field) // this is fine: passing a Field +fn main(x : [Field; 2]) // this is also fine: passing a Field with known size at compile-time +fn main(x : (Field, bool)) // 👌: passing a (Field, bool) tuple means size 2 +fn main(x : str<5>) // this is fine, as long as you pass a string of size 5 + +fn main(x : Vec) // can't compile, has variable size +fn main(x : [Field]) // can't compile, has variable size +fn main(....// i think you got it by now +``` + +Keep in mind [tests](../../getting_started/tooling/testing.md) don't differentiate between `main` and any other function. The following snippet passes tests, but won't compile or prove: + +```rust +fn main(x : [Field]) { + assert(x[0] == 1); +} + +#[test] +fn test_one() { + main([1, 2]); +} +``` + +```bash +$ nargo test +[testing] Running 1 test functions +[testing] Testing test_one... ok +[testing] All tests passed + +$ nargo check +The application panicked (crashed). +Message: Cannot have variable sized arrays as a parameter to main +``` + +## Call Expressions + +Calling a function in Noir is executed by using the function name and passing in the necessary +arguments. + +Below we show how to call the `foo` function from the `main` function using a call expression: + +```rust +fn main(x : Field, y : Field) { + let z = foo(x); +} + +fn foo(x : Field) -> Field { + x + x +} +``` + +## Methods + +You can define methods in Noir on any struct type in scope. + +```rust +struct MyStruct { + foo: Field, + bar: Field, +} + +impl MyStruct { + fn new(foo: Field) -> MyStruct { + MyStruct { + foo, + bar: 2, + } + } + + fn sum(self) -> Field { + self.foo + self.bar + } +} + +fn main() { + let s = MyStruct::new(40); + assert(s.sum() == 42); +} +``` + +Methods are just syntactic sugar for functions, so if we wanted to we could also call `sum` as +follows: + +```rust +assert(MyStruct::sum(s) == 42); +``` + +It is also possible to specialize which method is chosen depending on the [generic](./generics.md) type that is used. In this example, the `foo` function returns different values depending on its type: + +```rust +struct Foo {} + +impl Foo { + fn foo(self) -> Field { 1 } +} + +impl Foo { + fn foo(self) -> Field { 2 } +} + +fn main() { + let f1: Foo = Foo{}; + let f2: Foo = Foo{}; + assert(f1.foo() + f2.foo() == 3); +} +``` + +Also note that impls with the same method name defined in them cannot overlap. For example, if we already have `foo` defined for `Foo` and `Foo` like we do above, we cannot also define `foo` in an `impl Foo` since it would be ambiguous which version of `foo` to choose. + +```rust +// Including this impl in the same project as the above snippet would +// cause an overlapping impls error +impl Foo { + fn foo(self) -> Field { 3 } +} +``` + +## Lambdas + +Lambdas are anonymous functions. They follow the syntax of Rust - `|arg1, arg2, ..., argN| return_expression`. + +```rust +let add_50 = |val| val + 50; +assert(add_50(100) == 150); +``` + +See [Lambdas](./lambdas.md) for more details. 
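+
+As a further sketch (not taken from the linked page), lambdas are most often passed straight to higher-order functions, such as the array methods `map` and `fold` described earlier:
+
+```rust
+fn main() {
+    let xs = [1, 2, 3];
+
+    // each method receives a lambda as its argument
+    let doubled = xs.map(|x| x * 2);
+    let sum = doubled.fold(0, |acc, x| acc + x);
+
+    assert(doubled == [2, 4, 6]);
+    assert(sum == 12);
+}
+```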
+ +## Attributes + +Attributes are metadata that can be applied to a function, using the following syntax: `#[attribute(value)]`. + +Supported attributes include: + +- **builtin**: the function is implemented by the compiler, for efficiency purposes. +- **deprecated**: mark the function as _deprecated_. Calling the function will generate a warning: `warning: use of deprecated function` +- **field**: Used to enable conditional compilation of code depending on the field size. See below for more details +- **oracle**: mark the function as _oracle_; meaning it is an external unconstrained function, implemented in noir_js. See [Unconstrained](./unconstrained.md) and [NoirJS](../../reference/NoirJS/noir_js/index.md) for more details. +- **test**: mark the function as unit tests. See [Tests](../../getting_started/tooling/testing.md) for more details + +### Field Attribute + +The field attribute defines which field the function is compatible for. The function is conditionally compiled, under the condition that the field attribute matches the Noir native field. +The field can be defined implicitly, by using the name of the elliptic curve usually associated to it - for instance bn254, bls12_381 - or explicitly by using the field (prime) order, in decimal or hexadecimal form. +As a result, it is possible to define multiple versions of a function with each version specialized for a different field attribute. This can be useful when a function requires different parameters depending on the underlying elliptic curve. + +Example: we define the function `foo()` three times below. Once for the default Noir bn254 curve, once for the field $\mathbb F_{23}$, which will normally never be used by Noir, and once again for the bls12_381 curve. + +```rust +#[field(bn254)] +fn foo() -> u32 { + 1 +} + +#[field(23)] +fn foo() -> u32 { + 2 +} + +// This commented code would not compile as foo would be defined twice because it is the same field as bn254 +// #[field(21888242871839275222246405745257275088548364400416034343698204186575808495617)] +// fn foo() -> u32 { +// 2 +// } + +#[field(bls12_381)] +fn foo() -> u32 { + 3 +} +``` + +If the field name is not known to Noir, it will discard the function. Field names are case insensitive. diff --git a/docs/docs/noir/concepts/generics.md b/docs/docs/noir/concepts/generics.md new file mode 100644 index 00000000000..ec2db95839a --- /dev/null +++ b/docs/docs/noir/concepts/generics.md @@ -0,0 +1,106 @@ +--- +title: Generics +description: Learn how to use Generics in Noir +keywords: [Noir, Rust, generics, functions, structs] +sidebar_position: 6 +--- + +Generics allow you to use the same functions with multiple different concrete data types. You can +read more about the concept of generics in the Rust documentation +[here](https://doc.rust-lang.org/book/ch10-01-syntax.html). + +Here is a trivial example showing the identity function that supports any type. In Rust, it is +common to refer to the most general type as `T`. We follow the same convention in Noir. + +```rust +fn id(x: T) -> T { + x +} +``` + +## In Structs + +Generics are useful for specifying types in structs. For example, we can specify that a field in a +struct will be of a certain generic type. In this case `value` is of type `T`. + +```rust +struct RepeatedValue { + value: T, + count: Field, +} + +impl RepeatedValue { + fn print(self) { + for _i in 0 .. 
self.count { + println(self.value); + } + } +} + +fn main() { + let repeated = RepeatedValue { value: "Hello!", count: 2 }; + repeated.print(); +} +``` + +The `print` function will print `Hello!` an arbitrary number of times, twice in this case. + +If we want to be generic over array lengths (which are type-level integers), we can use numeric +generics. Using these looks just like using regular generics, but these generics can resolve to +integers at compile-time, rather than resolving to types. Here's an example of a struct that is +generic over the size of the array it contains internally: + +```rust +struct BigInt { + limbs: [u32; N], +} + +impl BigInt { + // `N` is in scope of all methods in the impl + fn first(first: BigInt, second: BigInt) -> Self { + assert(first.limbs != second.limbs); + first + + fn second(first: BigInt, second: Self) -> Self { + assert(first.limbs != second.limbs); + second + } +} +``` + +## Calling functions on generic parameters + +Since a generic type `T` can represent any type, how can we call functions on the underlying type? +In other words, how can we go from "any type `T`" to "any type `T` that has certain methods available?" + +This is what [traits](../concepts/traits) are for in Noir. Here's an example of a function generic over +any type `T` that implements the `Eq` trait for equality: + +```rust +fn first_element_is_equal(array1: [T; N], array2: [T; N]) -> bool + where T: Eq +{ + if (array1.len() == 0) | (array2.len() == 0) { + true + } else { + array1[0] == array2[0] + } +} + +fn main() { + assert(first_element_is_equal([1, 2, 3], [1, 5, 6])); + + // We can use first_element_is_equal for arrays of any type + // as long as we have an Eq impl for the types we pass in + let array = [MyStruct::new(), MyStruct::new()]; + assert(array_eq(array, array, MyStruct::eq)); +} + +impl Eq for MyStruct { + fn eq(self, other: MyStruct) -> bool { + self.foo == other.foo + } +} +``` + +You can find more details on traits and trait implementations on the [traits page](../concepts/traits). diff --git a/docs/docs/noir/concepts/lambdas.md b/docs/docs/noir/concepts/lambdas.md new file mode 100644 index 00000000000..e0a267adfda --- /dev/null +++ b/docs/docs/noir/concepts/lambdas.md @@ -0,0 +1,81 @@ +--- +title: Lambdas +description: Learn how to use anonymous functions in Noir programming language. +keywords: [Noir programming language, lambda, closure, function, anonymous function] +sidebar_position: 8 +--- + +## Introduction + +Lambdas are anonymous functions. The syntax is `|arg1, arg2, ..., argN| return_expression`. + +```rust +let add_50 = |val| val + 50; +assert(add_50(100) == 150); +``` + +A block can be used as the body of a lambda, allowing you to declare local variables inside it: + +```rust +let cool = || { + let x = 100; + let y = 100; + x + y +} + +assert(cool() == 200); +``` + +## Closures + +Inside the body of a lambda, you can use variables defined in the enclosing function. Such lambdas are called **closures**. 
In this example `x` is defined inside `main` and is accessed from within the lambda: + +```rust +fn main() { + let x = 100; + let closure = || x + 150; + assert(closure() == 250); +} +``` + +## Passing closures to higher-order functions + +It may catch you by surprise that the following code fails to compile: + +```rust +fn foo(f: fn () -> Field) -> Field { + f() +} + +fn main() { + let (x, y) = (50, 50); + assert(foo(|| x + y) == 100); // error :( +} +``` + +The reason is that the closure's capture environment affects its type - we have a closure that captures two Fields and `foo` +expects a regular function as an argument - those are incompatible. +:::note + +Variables contained within the `||` are the closure's parameters, and the expression that follows it is the closure's body. The capture environment is comprised of any variables used in the closure's body that are not parameters. + +E.g. in |x| x + y, y would be a captured variable, but x would not be, since it is a parameter of the closure. + +::: +The syntax for the type of a closure is `fn[env](args) -> ret_type`, where `env` is the capture environment of the closure - +in this example that's `(Field, Field)`. + +The best solution in our case is to make `foo` generic over the environment type of its parameter, so that it can be called +with closures with any environment, as well as with regular functions: + +```rust +fn foo(f: fn[Env]() -> Field) -> Field { + f() +} + +fn main() { + let (x, y) = (50, 50); + assert(foo(|| x + y) == 100); // compiles fine + assert(foo(|| 60) == 60); // compiles fine +} +``` diff --git a/docs/docs/noir/concepts/mutability.md b/docs/docs/noir/concepts/mutability.md new file mode 100644 index 00000000000..6abfae3cfa7 --- /dev/null +++ b/docs/docs/noir/concepts/mutability.md @@ -0,0 +1,93 @@ +--- +title: Mutability +description: + Learn about mutable variables, constants, and globals in Noir programming language. Discover how + to declare, modify, and use them in your programs. +keywords: [noir programming language, mutability in noir, mutable variables, constants, globals] +sidebar_position: 7 +--- + +Variables in noir can be declared mutable via the `mut` keyword. Mutable variables can be reassigned +to via an assignment expression. + +```rust +let x = 2; +x = 3; // error: x must be mutable to be assigned to + +let mut y = 3; +let y = 4; // OK +``` + +The `mut` modifier can also apply to patterns: + +```rust +let (a, mut b) = (1, 2); +a = 11; // error: a must be mutable to be assigned to +b = 12; // OK + +let mut (c, d) = (3, 4); +c = 13; // OK +d = 14; // OK + +// etc. +let MyStruct { x: mut y } = MyStruct { x: a }; +// y is now in scope +``` + +Note that mutability in noir is local and everything is passed by value, so if a called function +mutates its parameters then the parent function will keep the old value of the parameters. + +```rust +fn main() -> pub Field { + let x = 3; + helper(x); + x // x is still 3 +} + +fn helper(mut x: i32) { + x = 4; +} +``` + +## Comptime Values + +:::warning + +The 'comptime' keyword was removed in version 0.10. The comptime keyword and syntax are currently still kept and parsed for backwards compatibility, but are now deprecated and will issue a warning when used. `comptime` has been removed because it is no longer needed for accessing arrays. + +::: + +## Globals + +Noir also supports global variables. However, they must be known at compile-time. The global type can also be inferred by the compiler entirely. 
Globals can also be used to specify array +annotations for function parameters and can be imported from submodules. + +```rust +global N: Field = 5; // Same as `global N: Field = 5` + +fn main(x : Field, y : [Field; N]) { + let res = x * N; + + assert(res == y[0]); + + let res2 = x * my_submodule::N; + assert(res != res2); +} + +mod my_submodule { + use dep::std; + + global N: Field = 10; + + fn my_helper() -> Field { + let x = N; + x + } +} +``` + +## Why only local mutability? + +Witnesses in a proving system are immutable in nature. Noir aims to _closely_ mirror this setting +without applying additional overhead to the user. Modeling a mutable reference is not as +straightforward as on conventional architectures and would incur some possibly unexpected overhead. diff --git a/docs/docs/noir/concepts/ops.md b/docs/docs/noir/concepts/ops.md new file mode 100644 index 00000000000..60425cb8994 --- /dev/null +++ b/docs/docs/noir/concepts/ops.md @@ -0,0 +1,98 @@ +--- +title: Logical Operations +description: + Learn about the supported arithmetic and logical operations in the Noir programming language. + Discover how to perform operations on private input types, integers, and booleans. +keywords: + [ + Noir programming language, + supported operations, + arithmetic operations, + logical operations, + predicate operators, + bitwise operations, + short-circuiting, + backend, + ] +sidebar_position: 3 +--- + +# Operations + +## Table of Supported Operations + +| Operation | Description | Requirements | +| :-------- | :------------------------------------------------------------: | -------------------------------------: | +| + | Adds two private input types together | Types must be private input | +| - | Subtracts two private input types together | Types must be private input | +| \* | Multiplies two private input types together | Types must be private input | +| / | Divides two private input types together | Types must be private input | +| ^ | XOR two private input types together | Types must be integer | +| & | AND two private input types together | Types must be integer | +| \| | OR two private input types together | Types must be integer | +| \<\< | Left shift an integer by another integer amount | Types must be integer | +| >> | Right shift an integer by another integer amount | Types must be integer | +| ! | Bitwise not of a value | Type must be integer or boolean | +| \< | returns a bool if one value is less than the other | Upper bound must have a known bit size | +| \<= | returns a bool if one value is less than or equal to the other | Upper bound must have a known bit size | +| > | returns a bool if one value is more than the other | Upper bound must have a known bit size | +| >= | returns a bool if one value is more than or equal to the other | Upper bound must have a known bit size | +| == | returns a bool if one value is equal to the other | Both types must not be constants | +| != | returns a bool if one value is not equal to the other | Both types must not be constants | + +### Predicate Operators + +`<,<=, !=, == , >, >=` are known as predicate/comparison operations because they compare two values. +This differs from the operations such as `+` where the operands are used in _computation_. + +### Bitwise Operations Example + +```rust +fn main(x : Field) { + let y = x as u32; + let z = y & y; +} +``` + +`z` is implicitly constrained to be the result of `y & y`. The `&` operand is used to denote bitwise +`&`. + +> `x & x` would not compile as `x` is a `Field` and not an integer type. 
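+
+As a small illustrative sketch (assuming the value is first cast to a fixed-width integer), the bitwise and shift operators from the table can then be combined freely:
+
+```rust
+fn main(x: Field) {
+    // cast to an integer type before using bitwise operators
+    let y = x as u32;
+
+    let masked = y & 0xff;     // bitwise AND
+    let shifted = masked << 1; // left shift
+
+    // shifting left by one doubles the masked value (no overflow, since masked <= 255)
+    assert(shifted == masked * 2);
+}
+```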
+ +### Logical Operators + +Noir has no support for the logical operators `||` and `&&`. This is because encoding the +short-circuiting that these operators require can be inefficient for Noir's backend. Instead you can +use the bitwise operators `|` and `&` which operate identically for booleans, just without the +short-circuiting. + +```rust +let my_val = 5; + +let mut flag = 1; +if (my_val > 6) | (my_val == 0) { + flag = 0; +} +assert(flag == 1); + +if (my_val != 10) & (my_val < 50) { + flag = 0; +} +assert(flag == 0); +``` + +### Shorthand operators + +Noir shorthand operators for most of the above operators, namely `+=, -=, *=, /=, %=, &=, |=, ^=, <<=`, and `>>=`. These allow for more concise syntax. For example: + +```rust +let mut i = 0; +i = i + 1; +``` + +could be written as: + +```rust +let mut i = 0; +i += 1; +``` diff --git a/docs/docs/noir/concepts/shadowing.md b/docs/docs/noir/concepts/shadowing.md new file mode 100644 index 00000000000..b5a6b6b38b9 --- /dev/null +++ b/docs/docs/noir/concepts/shadowing.md @@ -0,0 +1,44 @@ +--- +title: Shadowing +sidebar_position: 11 +--- + +Noir allows for inheriting variables' values and re-declaring them with the same name similar to Rust, known as shadowing. + +For example, the following function is valid in Noir: + +```rust +fn main() { + let x = 5; + + { + let x = x * 2; + assert (x == 10); + } + + assert (x == 5); +} +``` + +In this example, a variable x is first defined with the value 5. + +The local scope that follows shadows the original x, i.e. creates a local mutable x based on the value of the original x. It is given a value of 2 times the original x. + +When we return to the main scope, x once again refers to just the original x, which stays at the value of 5. + +## Temporal mutability + +One way that shadowing is useful, in addition to ergonomics across scopes, is for temporarily mutating variables. + +```rust +fn main() { + let age = 30; + // age = age + 5; // Would error as `age` is immutable by default. + + let mut age = age + 5; // Temporarily mutates `age` with a new value. + + let age = age; // Locks `age`'s mutability again. + + assert (age == 35); +} +``` diff --git a/docs/docs/noir/concepts/traits.md b/docs/docs/noir/concepts/traits.md new file mode 100644 index 00000000000..7ba07e74f40 --- /dev/null +++ b/docs/docs/noir/concepts/traits.md @@ -0,0 +1,348 @@ +--- +title: Traits +description: + Traits in Noir can be used to abstract out a common interface for functions across + several data types. +keywords: [noir programming language, traits, interfaces, generic, protocol] +--- + +## Overview + +Traits in Noir are a useful abstraction similar to interfaces or protocols in other languages. Each trait defines +the interface of several methods contained within the trait. Types can then implement this trait by providing +implementations for these methods. For example in the program: + +```rust +struct Rectangle { + width: Field, + height: Field, +} + +impl Rectangle { + fn area(self) -> Field { + self.width * self.height + } +} + +fn log_area(r: Rectangle) { + println(r.area()); +} +``` + +We have a function `log_area` to log the area of a `Rectangle`. Now how should we change the program if we want this +function to work on `Triangle`s as well?: + +```rust +struct Triangle { + width: Field, + height: Field, +} + +impl Triangle { + fn area(self) -> Field { + self.width * self.height / 2 + } +} +``` + +Making `log_area` generic over all types `T` would be invalid since not all types have an `area` method. 
Instead, we can +introduce a new `Area` trait and make `log_area` generic over all types `T` that implement `Area`: + +```rust +trait Area { + fn area(self) -> Field; +} + +fn log_area(shape: T) where T: Area { + println(shape.area()); +} +``` + +We also need to explicitly implement `Area` for `Rectangle` and `Triangle`. We can do that by changing their existing +impls slightly. Note that the parameter types and return type of each of our `area` methods must match those defined +by the `Area` trait. + +```rust +impl Area for Rectangle { + fn area(self) -> Field { + self.width * self.height + } +} + +impl Area for Triangle { + fn area(self) -> Field { + self.width * self.height / 2 + } +} +``` + +Now we have a working program that is generic over any type of Shape that is used! Others can even use this program +as a library with their own types - such as `Circle` - as long as they also implement `Area` for these types. + +## Where Clauses + +As seen in `log_area` above, when we want to create a function or method that is generic over any type that implements +a trait, we can add a where clause to the generic function. + +```rust +fn log_area(shape: T) where T: Area { + println(shape.area()); +} +``` + +It is also possible to apply multiple trait constraints on the same variable at once by combining traits with the `+` +operator. Similarly, we can have multiple trait constraints by separating each with a comma: + +```rust +fn foo(elements: [T], thing: U) where + T: Default + Add + Eq, + U: Bar, +{ + let mut sum = T::default(); + + for element in elements { + sum += element; + } + + if sum == T::default() { + thing.bar(); + } +} +``` + +## Generic Implementations + +You can add generics to a trait implementation by adding the generic list after the `impl` keyword: + +```rust +trait Second { + fn second(self) -> Field; +} + +impl Second for (T, Field) { + fn second(self) -> Field { + self.1 + } +} +``` + +You can also implement a trait for every type this way: + +```rust +trait Debug { + fn debug(self); +} + +impl Debug for T { + fn debug(self) { + println(self); + } +} + +fn main() { + 1.debug(); +} +``` + +### Generic Trait Implementations With Where Clauses + +Where clauses can also be placed on trait implementations themselves to restrict generics in a similar way. +For example, while `impl Foo for T` implements the trait `Foo` for every type, `impl Foo for T where T: Bar` +will implement `Foo` only for types that also implement `Bar`. This is often used for implementing generic types. +For example, here is the implementation for array equality: + +```rust +impl Eq for [T; N] where T: Eq { + // Test if two arrays have the same elements. + // Because both arrays must have length N, we know their lengths already match. + fn eq(self, other: Self) -> bool { + let mut result = true; + + for i in 0 .. self.len() { + // The T: Eq constraint is needed to call == on the array elements here + result &= self[i] == other[i]; + } + + result + } +} +``` + +## Trait Methods With No `self` + +A trait can contain any number of methods, each of which have access to the `Self` type which represents each type +that eventually implements the trait. Similarly, the `self` variable is available as well but is not required to be used. +For example, we can define a trait to create a default value for a type. 
This trait will need to return the `Self` type +but doesn't need to take any parameters: + +```rust +trait Default { + fn default() -> Self; +} +``` + +Implementing this trait can be done similarly to any other trait: + +```rust +impl Default for Field { + fn default() -> Field { + 0 + } +} + +struct MyType {} + +impl Default for MyType { + fn default() -> Field { + MyType {} + } +} +``` + +However, since there is no `self` parameter, we cannot call it via the method call syntax `object.method()`. +Instead, we'll need to refer to the function directly. This can be done either by referring to the +specific impl `MyType::default()` or referring to the trait itself `Default::default()`. In the later +case, type inference determines the impl that is selected. + +```rust +let my_struct = MyStruct::default(); + +let x: Field = Default::default(); +let result = x + Default::default(); +``` + +:::warning + +```rust +let _ = Default::default(); +``` + +If type inference cannot select which impl to use because of an ambiguous `Self` type, an impl will be +arbitrarily selected. This occurs most often when the result of a trait function call with no parameters +is unused. To avoid this, when calling a trait function with no `self` or `Self` parameters or return type, +always refer to it via the implementation type's namespace - e.g. `MyType::default()`. +This is set to change to an error in future Noir versions. + +::: + +## Default Method Implementations + +A trait can also have default implementations of its methods by giving a body to the desired functions. +Note that this body must be valid for all types that may implement the trait. As a result, the only +valid operations on `self` will be operations valid for any type or other operations on the trait itself. + +```rust +trait Numeric { + fn add(self, other: Self) -> Self; + + // Default implementation of double is (self + self) + fn double(self) -> Self { + self.add(self) + } +} +``` + +When implementing a trait with default functions, a type may choose to implement only the required functions: + +```rust +impl Numeric for Field { + fn add(self, other: Field) -> Field { + self + other + } +} +``` + +Or it may implement the optional methods as well: + +```rust +impl Numeric for u32 { + fn add(self, other: u32) -> u32 { + self + other + } + + fn double(self) -> u32 { + self * 2 + } +} +``` + +## Impl Specialization + +When implementing traits for a generic type it is possible to implement the trait for only a certain combination +of generics. This can be either as an optimization or because those specific generics are required to implement the trait. + +```rust +trait Sub { + fn sub(self, other: Self) -> Self; +} + +struct NonZero { + value: T, +} + +impl Sub for NonZero { + fn sub(self, other: Self) -> Self { + let value = self.value - other.value; + assert(value != 0); + NonZero { value } + } +} +``` + +## Overlapping Implementations + +Overlapping implementations are disallowed by Noir to ensure Noir's decision on which impl to select is never ambiguous. +This means if a trait `Foo` is already implemented +by a type `Bar` for all `T`, then we cannot also have a separate impl for `Bar` (or any other +type argument). Similarly, if there is an impl for all `T` such as `impl Debug for T`, we cannot create +any more impls to `Debug` for other types since it would be ambiguous which impl to choose for any given +method call. 
+ +```rust +trait Trait {} + +// Previous impl defined here +impl Trait for (A, B) {} + +// error: Impl for type `(Field, Field)` overlaps with existing impl +impl Trait for (Field, Field) {} +``` + +## Trait Coherence + +Another restriction on trait implementations is coherence. This restriction ensures other crates cannot create +impls that may overlap with other impls, even if several unrelated crates are used as dependencies in the same +program. + +The coherence restriction is: to implement a trait, either the trait itself or the object type must be declared +in the crate the impl is in. + +In practice this often comes up when using types provided by libraries. If a library provides a type `Foo` that does +not implement a trait in the standard library such as `Default`, you may not `impl Default for Foo` in your own crate. +While restrictive, this prevents later issues or silent changes in the program if the `Foo` library later added its +own impl for `Default`. If you are a user of the `Foo` library in this scenario and need a trait not implemented by the +library your choices are to either submit a patch to the library or use the newtype pattern. + +### The Newtype Pattern + +The newtype pattern gets around the coherence restriction by creating a new wrapper type around the library type +that we cannot create `impl`s for. Since the new wrapper type is defined in our current crate, we can create +impls for any trait we need on it. + +```rust +struct Wrapper { + foo: dep::some_library::Foo, +} + +impl Default for Wrapper { + fn default() -> Wrapper { + Wrapper { + foo: dep::some_library::Foo::new(), + } + } +} +``` + +Since we have an impl for our own type, the behavior of this code will not change even if `some_library` is updated +to provide its own `impl Default for Foo`. The downside of this pattern is that it requires extra wrapping and +unwrapping of values when converting to and from the `Wrapper` and `Foo` types. diff --git a/docs/docs/noir/concepts/unconstrained.md b/docs/docs/noir/concepts/unconstrained.md new file mode 100644 index 00000000000..6b3424f7993 --- /dev/null +++ b/docs/docs/noir/concepts/unconstrained.md @@ -0,0 +1,95 @@ +--- +title: Unconstrained Functions +description: "Learn about what unconstrained functions in Noir are, how to use them and when you'd want to." + +keywords: [Noir programming language, unconstrained, open] +sidebar_position: 5 +--- + +Unconstrained functions are functions which do not constrain any of the included computation and allow for non-deterministic computation. + +## Why? + +Zero-knowledge (ZK) domain-specific languages (DSL) enable developers to generate ZK proofs from their programs by compiling code down to the constraints of an NP complete language (such as R1CS or PLONKish languages). However, the hard bounds of a constraint system can be very limiting to the functionality of a ZK DSL. + +Enabling a circuit language to perform unconstrained execution is a powerful tool. Said another way, unconstrained execution lets developers generate witnesses from code that does not generate any constraints. Being able to execute logic outside of a circuit is critical for both circuit performance and constructing proofs on information that is external to a circuit. + +Fetching information from somewhere external to a circuit can also be used to enable developers to improve circuit efficiency. + +A ZK DSL does not just prove computation, but proves that some computation was handled correctly. 
Thus, it is necessary that when we switch from performing some operation directly inside of a circuit to inside of an unconstrained environment that the appropriate constraints are still laid down elsewhere in the circuit. + +## Example + +An in depth example might help drive the point home. This example comes from the excellent [post](https://discord.com/channels/1113924620781883405/1124022445054111926/1128747641853972590) by Tom in the Noir Discord. + +Let's look at how we can optimize a function to turn a `u72` into an array of `u8`s. + +```rust +fn main(num: u72) -> pub [u8; 8] { + let mut out: [u8; 8] = [0; 8]; + for i in 0..8 { + out[i] = (num >> (56 - (i * 8)) as u72 & 0xff) as u8; + } + + out +} +``` + +``` +Total ACIR opcodes generated for language PLONKCSat { width: 3 }: 91 +Backend circuit size: 3619 +``` + +A lot of the operations in this function are optimized away by the compiler (all the bit-shifts turn into divisions by constants). However we can save a bunch of gates by casting to u8 a bit earlier. This automatically truncates the bit-shifted value to fit in a u8 which allows us to remove the XOR against 0xff. This saves us ~480 gates in total. + +```rust +fn main(num: u72) -> pub [u8; 8] { + let mut out: [u8; 8] = [0; 8]; + for i in 0..8 { + out[i] = (num >> (56 - (i * 8)) as u8; + } + + out +} +``` + +``` +Total ACIR opcodes generated for language PLONKCSat { width: 3 }: 75 +Backend circuit size: 3143 +``` + +Those are some nice savings already but we can do better. This code is all constrained so we're proving every step of calculating out using num, but we don't actually care about how we calculate this, just that it's correct. This is where brillig comes in. + +It turns out that truncating a u72 into a u8 is hard to do inside a snark, each time we do as u8 we lay down 4 ACIR opcodes which get converted into multiple gates. It's actually much easier to calculate num from out than the other way around. All we need to do is multiply each element of out by a constant and add them all together, both relatively easy operations inside a snark. + +We can then run u72_to_u8 as unconstrained brillig code in order to calculate out, then use that result in our constrained function and assert that if we were to do the reverse calculation we'd get back num. This looks a little like the below: + +```rust +fn main(num: u72) -> pub [u8; 8] { + let out = u72_to_u8(num); + + let mut reconstructed_num: u72 = 0; + for i in 0..8 { + reconstructed_num += (out[i] as u72 << (56 - (8 * i))); + } + assert(num == reconstructed_num); + out +} + +unconstrained fn u72_to_u8(num: u72) -> [u8; 8] { + let mut out: [u8; 8] = [0; 8]; + for i in 0..8 { + out[i] = (num >> (56 - (i * 8))) as u8; + } + out +} +``` + +``` +Total ACIR opcodes generated for language PLONKCSat { width: 3 }: 78 +Backend circuit size: 2902 +``` + +This ends up taking off another ~250 gates from our circuit! We've ended up with more ACIR opcodes than before but they're easier for the backend to prove (resulting in fewer gates). + +Generally we want to use brillig whenever there's something that's easy to verify but hard to compute within the circuit. For example, if you wanted to calculate a square root of a number it'll be a much better idea to calculate this in brillig and then assert that if you square the result you get back your number. 
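+
+Below is a minimal sketch of that square-root pattern; the `sqrt_hint` helper, its naive search and its bound are illustrative rather than part of the standard library, and the input is assumed to be a perfect square:
+
+```rust
+fn main(n: u32) -> pub u32 {
+    // compute the root outside the circuit...
+    let root = sqrt_hint(n);
+
+    // ...then constrain only the cheap check: squaring the hint reproduces the input
+    assert(root * root == n);
+    root
+}
+
+// runs as Brillig, so none of this search adds constraints
+unconstrained fn sqrt_hint(n: u32) -> u32 {
+    let mut root: u32 = 0;
+    // naive linear search; acceptable here because unconstrained code is not proven
+    for i in 0..65536 {
+        let candidate = i as u32;
+        if candidate * candidate <= n {
+            root = candidate;
+        }
+    }
+    root
+}
+```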
diff --git a/docs/docs/noir/modules_packages_crates/_category_.json b/docs/docs/noir/modules_packages_crates/_category_.json
new file mode 100644
index 00000000000..1debcfe7675
--- /dev/null
+++ b/docs/docs/noir/modules_packages_crates/_category_.json
@@ -0,0 +1,6 @@
+{
+  "label": "Modules, Packages and Crates",
+  "position": 2,
+  "collapsible": true,
+  "collapsed": true
+}
diff --git a/docs/docs/noir/modules_packages_crates/crates_and_packages.md b/docs/docs/noir/modules_packages_crates/crates_and_packages.md
new file mode 100644
index 00000000000..760a463094c
--- /dev/null
+++ b/docs/docs/noir/modules_packages_crates/crates_and_packages.md
@@ -0,0 +1,43 @@
+---
+title: Crates and Packages
+description: Learn how to use Crates and Packages in your Noir project
+keywords: [Nargo, dependencies, package management, crates, package]
+sidebar_position: 0
+---
+
+## Crates
+
+A crate is the smallest amount of code that the Noir compiler considers at a time.
+Crates can contain modules, and the modules may be defined in other files that get compiled with the crate, as we’ll see in the coming sections.
+
+### Crate Types
+
+A Noir crate can come in several forms: binaries, libraries or contracts.
+
+#### Binaries
+
+_Binary crates_ are programs which you can compile to an ACIR circuit which you can then create proofs against. Each must have a function called `main` that defines the ACIR circuit which is to be proved.
+
+#### Libraries
+
+_Library crates_ don't have a `main` function and they don't compile down to ACIR. Instead, they define functionality intended to be shared with multiple projects, and eventually included in a binary crate.
+
+#### Contracts
+
+Contract crates are similar to binary crates in that they compile to ACIR which you can create proofs against. They are different in that they do not have a single `main` function, but are a collection of functions to be deployed to the [Aztec network](https://aztec.network). You can learn more about the technical details of Aztec in the [monorepo](https://github.com/AztecProtocol/aztec-packages) or contract [examples](https://github.com/AztecProtocol/aztec-packages/tree/master/yarn-project/noir-contracts/contracts).
+
+### Crate Root
+
+Every crate has a root, which is the source file that the compiler starts from; this is also known as the root module. The Noir compiler does not enforce any conditions on the name of the file which is the crate root, however if you are compiling via Nargo the crate root must be called `lib.nr` or `main.nr` for library or binary crates respectively.
+
+## Packages
+
+A Nargo _package_ is a collection of one or more crates that provides a set of functionality. A package must include a Nargo.toml file; a minimal example is shown at the end of this page.
+
+A package _must_ contain either a library or a binary crate, but not both.
+
+### Differences from Cargo Packages
+
+One notable difference between Rust's Cargo and Noir's Nargo is that while Cargo allows a package to contain an unlimited number of binary crates and a single library crate, Nargo currently only allows a package to contain a single crate.
+
+In future this restriction may be lifted to allow a Nargo package to contain both a binary and library crate or multiple binary crates.
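+
+For illustration, a minimal `Nargo.toml` for a binary package could look like the following sketch (the exact fields and the `compiler_version` value here are placeholders and may differ between Nargo versions):
+
+```toml
+[package]
+name = "my_program"
+type = "bin"
+authors = [""]
+compiler_version = "0.19.4"
+
+[dependencies]
+```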
diff --git a/docs/docs/noir/modules_packages_crates/dependencies.md b/docs/docs/noir/modules_packages_crates/dependencies.md new file mode 100644 index 00000000000..a37dc401b7d --- /dev/null +++ b/docs/docs/noir/modules_packages_crates/dependencies.md @@ -0,0 +1,124 @@ +--- +title: Dependencies +description: + Learn how to specify and manage dependencies in Nargo, allowing you to upload packages to GitHub + and use them easily in your project. +keywords: [Nargo, dependencies, GitHub, package management, versioning] +sidebar_position: 1 +--- + +Nargo allows you to upload packages to GitHub and use them as dependencies. + +## Specifying a dependency + +Specifying a dependency requires a tag to a specific commit and the git url to the url containing +the package. + +Currently, there are no requirements on the tag contents. If requirements are added, it would follow +semver 2.0 guidelines. + +> Note: Without a `tag` , there would be no versioning and dependencies would change each time you +> compile your project. + +For example, to add the [ecrecover-noir library](https://github.com/colinnielsen/ecrecover-noir) to your project, add it to `Nargo.toml`: + +```toml +# Nargo.toml + +[dependencies] +ecrecover = {tag = "v0.8.0", git = "https://github.com/colinnielsen/ecrecover-noir"} +``` + +If the module is in a subdirectory, you can define a subdirectory in your git repository, for example: + +```toml +# Nargo.toml + +[dependencies] +easy_private_token_contract = {tag ="v0.1.0-alpha62", git = "https://github.com/AztecProtocol/aztec-packages", directory = "yarn-project/noir-contracts/contracts/easy_private_token_contract"} +``` + +## Specifying a local dependency + +You can also specify dependencies that are local to your machine. + +For example, this file structure has a library and binary crate + +```tree +├── binary_crate +│   ├── Nargo.toml +│   └── src +│   └── main.nr +└── lib_a + ├── Nargo.toml + └── src + └── lib.nr +``` + +Inside of the binary crate, you can specify: + +```toml +# Nargo.toml + +[dependencies] +lib_a = { path = "../lib_a" } +``` + +## Importing dependencies + +You can import a dependency to a Noir file using the following syntax. For example, to import the +ecrecover-noir library and local lib_a referenced above: + +```rust +use dep::ecrecover; +use dep::lib_a; +``` + +You can also import only the specific parts of dependency that you want to use, like so: + +```rust +use dep::std::hash::sha256; +use dep::std::scalar_mul::fixed_base_embedded_curve; +``` + +Lastly, as demonstrated in the +[elliptic curve example](../standard_library/cryptographic_primitives/ec_primitives#examples), you +can import multiple items in the same line by enclosing them in curly braces: + +```rust +use dep::std::ec::tecurve::affine::{Curve, Point}; +``` + +We don't have a way to consume libraries from inside a [workspace](./workspaces) as external dependencies right now. + +Inside a workspace, these are consumed as `{ path = "../to_lib" }` dependencies in Nargo.toml. + +## Dependencies of Dependencies + +Note that when you import a dependency, you also get access to all of the dependencies of that package. + +For example, the [phy_vector](https://github.com/resurgencelabs/phy_vector) library imports an [fraction](https://github.com/resurgencelabs/fraction) library. If you're importing the phy_vector library, then you can access the functions in fractions library like so: + +```rust +use dep::phy_vector; + +fn main(x : Field, y : pub Field) { + //... 
+ let f = phy_vector::fraction::toFraction(true, 2, 1); + //... +} +``` + +## Available Libraries + +Noir does not currently have an official package manager. You can find a list of available Noir libraries in the [awesome-noir repo here](https://github.com/noir-lang/awesome-noir#libraries). + +Some libraries that are available today include: + +- [Standard Library](https://github.com/noir-lang/noir/tree/master/noir_stdlib) - the Noir Standard Library +- [Ethereum Storage Proof Verification](https://github.com/aragonzkresearch/noir-trie-proofs) - a library that contains the primitives necessary for RLP decoding (in the form of look-up table construction) and Ethereum state and storage proof verification (or verification of any trie proof involving 32-byte long keys) +- [BigInt](https://github.com/shuklaayush/noir-bigint) - a library that provides a custom BigUint56 data type, allowing for computations on large unsigned integers +- [ECrecover](https://github.com/colinnielsen/ecrecover-noir/tree/main) - a library to verify an ECDSA signature and return the source Ethereum address +- [Sparse Merkle Tree Verifier](https://github.com/vocdoni/smtverifier-noir/tree/main) - a library for verification of sparse Merkle trees +- [Signed Int](https://github.com/resurgencelabs/signed_int) - a library for accessing a custom Signed Integer data type, allowing access to negative numbers on Noir +- [Fraction](https://github.com/resurgencelabs/fraction) - a library for accessing fractional number data type in Noir, allowing results that aren't whole numbers diff --git a/docs/docs/noir/modules_packages_crates/modules.md b/docs/docs/noir/modules_packages_crates/modules.md new file mode 100644 index 00000000000..ae822a1cff4 --- /dev/null +++ b/docs/docs/noir/modules_packages_crates/modules.md @@ -0,0 +1,105 @@ +--- +title: Modules +description: + Learn how to organize your files using modules in Noir, following the same convention as Rust's + module system. Examples included. +keywords: [Noir, Rust, modules, organizing files, sub-modules] +sidebar_position: 2 +--- + +Noir's module system follows the same convention as the _newer_ version of Rust's module system. + +## Purpose of Modules + +Modules are used to organize files. Without modules all of your code would need to live in a single +file. In Noir, the compiler does not automatically scan all of your files to detect modules. This +must be done explicitly by the developer. + +## Examples + +### Importing a module in the crate root + +Filename : `src/main.nr` + +```rust +mod foo; + +fn main() { + foo::hello_world(); +} +``` + +Filename : `src/foo.nr` + +```rust +fn from_foo() {} +``` + +In the above snippet, the crate root is the `src/main.nr` file. The compiler sees the module +declaration `mod foo` which prompts it to look for a foo.nr file. + +Visually this module hierarchy looks like the following : + +``` +crate + ├── main + │ + └── foo + └── from_foo + +``` + +### Importing a module throughout the tree + +All modules are accessible from the `crate::` namespace. + +``` +crate + ├── bar + ├── foo + └── main + +``` + +In the above snippet, if `bar` would like to use functions in `foo`, it can do so by `use crate::foo::function_name`. 
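+
+For example, a sketch of what `src/bar.nr` could contain (reusing the `from_foo` function from the earlier snippet):
+
+```rust
+// src/bar.nr: reach the sibling module `foo` through the crate root.
+use crate::foo::from_foo;
+
+fn from_bar() {
+    from_foo();
+}
+```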
+ +### Sub-modules + +Filename : `src/main.nr` + +```rust +mod foo; + +fn main() { + foo::from_foo(); +} +``` + +Filename : `src/foo.nr` + +```rust +mod bar; +fn from_foo() {} +``` + +Filename : `src/foo/bar.nr` + +```rust +fn from_bar() {} +``` + +In the above snippet, we have added an extra module to the module tree; `bar`. `bar` is a submodule +of `foo` hence we declare bar in `foo.nr` with `mod bar`. Since `foo` is not the crate root, the +compiler looks for the file associated with the `bar` module in `src/foo/bar.nr` + +Visually the module hierarchy looks as follows: + +``` +crate + ├── main + │ + └── foo + ├── from_foo + └── bar + └── from_bar +``` diff --git a/docs/docs/noir/modules_packages_crates/workspaces.md b/docs/docs/noir/modules_packages_crates/workspaces.md new file mode 100644 index 00000000000..67a1dafa372 --- /dev/null +++ b/docs/docs/noir/modules_packages_crates/workspaces.md @@ -0,0 +1,40 @@ +--- +title: Workspaces +sidebar_position: 3 +--- + +Workspaces are a feature of nargo that allow you to manage multiple related Noir packages in a single repository. A workspace is essentially a group of related projects that share common build output directories and configurations. + +Each Noir project (with it's own Nargo.toml file) can be thought of as a package. Each package is expected to contain exactly one "named circuit", being the "name" defined in Nargo.toml with the program logic defined in `./src/main.nr`. + +For a project with the following structure: + +```tree +├── crates +│   ├── a +│   │   ├── Nargo.toml +│   │   └── src +│   │   └── main.nr +│   └── b +│   ├── Nargo.toml +│   └── src +│   └── main.nr +├── Nargo.toml +└── Prover.toml +``` + +You can define a workspace in Nargo.toml like so: + +```toml +[workspace] +members = ["crates/a", "crates/b"] +default-member = "crates/a" +``` + +`members` indicates which packages are included in the workspace. As such, all member packages of a workspace will be processed when the `--workspace` flag is used with various commands or if a `default-member` is not specified. + +`default-member` indicates which package various commands process by default. + +Libraries can be defined in a workspace. Inside a workspace, these are consumed as `{ path = "../to_lib" }` dependencies in Nargo.toml. + +Inside a workspace, these are consumed as `{ path = "../to_lib" }` dependencies in Nargo.toml. diff --git a/docs/docs/noir/standard_library/_category_.json b/docs/docs/noir/standard_library/_category_.json new file mode 100644 index 00000000000..af04c0933fd --- /dev/null +++ b/docs/docs/noir/standard_library/_category_.json @@ -0,0 +1,6 @@ +{ + "label": "Standard Library", + "position": 1, + "collapsible": true, + "collapsed": true +} diff --git a/docs/docs/noir/standard_library/black_box_fns.md b/docs/docs/noir/standard_library/black_box_fns.md new file mode 100644 index 00000000000..4b1efbd17de --- /dev/null +++ b/docs/docs/noir/standard_library/black_box_fns.md @@ -0,0 +1,45 @@ +--- +title: Black Box Functions +description: Black box functions are functions in Noir that rely on backends implementing support for specialized constraints. +keywords: [noir, black box functions] +--- + +Black box functions are functions in Noir that rely on backends implementing support for specialized constraints. This makes certain zk-snark unfriendly computations cheaper than if they were implemented in Noir. + +:::warning + +It is likely that not all backends will support a particular black box function. 
+ +::: + +Because it is not guaranteed that all backends will support black box functions, it is possible that certain Noir programs won't compile against a particular backend if they use an unsupported black box function. It is possible to fallback to less efficient implementations written in Noir/ACIR in some cases. + +Black box functions are specified with the `#[foreign(black_box_fn)]` attribute. For example, the SHA256 function in the Noir [source code](https://github.com/noir-lang/noir/blob/v0.5.1/noir_stdlib/src/hash.nr) looks like: + +```rust +#[foreign(sha256)] +fn sha256(_input : [u8; N]) -> [u8; 32] {} +``` + +## Function list + +Here is a list of the current black box functions that are supported by UltraPlonk: + +- AES +- [SHA256](./cryptographic_primitives/hashes#sha256) +- [Schnorr signature verification](./cryptographic_primitives/schnorr) +- [Blake2s](./cryptographic_primitives/hashes#blake2s) +- [Pedersen Hash](./cryptographic_primitives/hashes#pedersen_hash) +- [Pedersen Commitment](./cryptographic_primitives/hashes#pedersen_commitment) +- [ECDSA signature verification](./cryptographic_primitives/ecdsa_sig_verification) +- [Fixed base scalar multiplication](./cryptographic_primitives/scalar) +- [Compute merkle root](./merkle_trees#compute_merkle_root) +- AND +- XOR +- RANGE +- [Keccak256](./cryptographic_primitives/hashes#keccak256) +- [Recursive proof verification](./recursion) + +Most black box functions are included as part of the Noir standard library, however `AND`, `XOR` and `RANGE` are used as part of the Noir language syntax. For instance, using the bitwise operator `&` will invoke the `AND` black box function. To ensure compatibility across backends, the ACVM has fallback implementations of `AND`, `XOR` and `RANGE` defined in its standard library which it can seamlessly fallback to if the backend doesn't support them. + +You can view the black box functions defined in the ACVM code [here](https://github.com/noir-lang/noir/blob/master/acvm-repo/acir/src/circuit/black_box_functions.rs). diff --git a/docs/docs/noir/standard_library/cryptographic_primitives/_category_.json b/docs/docs/noir/standard_library/cryptographic_primitives/_category_.json new file mode 100644 index 00000000000..5d694210bbf --- /dev/null +++ b/docs/docs/noir/standard_library/cryptographic_primitives/_category_.json @@ -0,0 +1,5 @@ +{ + "position": 0, + "collapsible": true, + "collapsed": true +} diff --git a/docs/docs/noir/standard_library/cryptographic_primitives/ec_primitives.md b/docs/docs/noir/standard_library/cryptographic_primitives/ec_primitives.md new file mode 100644 index 00000000000..8d573adb3be --- /dev/null +++ b/docs/docs/noir/standard_library/cryptographic_primitives/ec_primitives.md @@ -0,0 +1,102 @@ +--- +title: Elliptic Curve Primitives +keywords: [cryptographic primitives, Noir project] +sidebar_position: 4 +--- + +Data structures and methods on them that allow you to carry out computations involving elliptic +curves over the (mathematical) field corresponding to `Field`. For the field currently at our +disposal, applications would involve a curve embedded in BN254, e.g. the +[Baby Jubjub curve](https://eips.ethereum.org/EIPS/eip-2494). + +## Data structures + +### Elliptic curve configurations + +(`std::ec::{tecurve,montcurve,swcurve}::{affine,curvegroup}::Curve`), i.e. 
the specific elliptic +curve you want to use, which would be specified using any one of the methods +`std::ec::{tecurve,montcurve,swcurve}::{affine,curvegroup}::new` which take the coefficients in the +defining equation together with a generator point as parameters. You can find more detail in the +comments in +[`noir_stdlib/src/ec.nr`](https://github.com/noir-lang/noir/blob/master/noir_stdlib/src/ec.nr), but +the gist of it is that the elliptic curves of interest are usually expressed in one of the standard +forms implemented here (Twisted Edwards, Montgomery and Short Weierstraß), and in addition to that, +you could choose to use `affine` coordinates (Cartesian coordinates - the usual (x,y) - possibly +together with a point at infinity) or `curvegroup` coordinates (some form of projective coordinates +requiring more coordinates but allowing for more efficient implementations of elliptic curve +operations). Conversions between all of these forms are provided, and under the hood these +conversions are done whenever an operation is more efficient in a different representation (or a +mixed coordinate representation is employed). + +### Points + +(`std::ec::{tecurve,montcurve,swcurve}::{affine,curvegroup}::Point`), i.e. points lying on the +elliptic curve. For a curve configuration `c` and a point `p`, it may be checked that `p` +does indeed lie on `c` by calling `c.contains(p1)`. + +## Methods + +(given a choice of curve representation, e.g. use `std::ec::tecurve::affine::Curve` and use +`std::ec::tecurve::affine::Point`) + +- The **zero element** is given by `Point::zero()`, and we can verify whether a point `p: Point` is + zero by calling `p.is_zero()`. +- **Equality**: Points `p1: Point` and `p2: Point` may be checked for equality by calling + `p1.eq(p2)`. +- **Addition**: For `c: Curve` and points `p1: Point` and `p2: Point` on the curve, adding these two + points is accomplished by calling `c.add(p1,p2)`. +- **Negation**: For a point `p: Point`, `p.negate()` is its negation. +- **Subtraction**: For `c` and `p1`, `p2` as above, subtracting `p2` from `p1` is accomplished by + calling `c.subtract(p1,p2)`. +- **Scalar multiplication**: For `c` as above, `p: Point` a point on the curve and `n: Field`, + scalar multiplication is given by `c.mul(n,p)`. If instead `n :: [u1; N]`, i.e. `n` is a bit + array, the `bit_mul` method may be used instead: `c.bit_mul(n,p)` +- **Multi-scalar multiplication**: For `c` as above and arrays `n: [Field; N]` and `p: [Point; N]`, + multi-scalar multiplication is given by `c.msm(n,p)`. +- **Coordinate representation conversions**: The `into_group` method converts a point or curve + configuration in the affine representation to one in the CurveGroup representation, and + `into_affine` goes in the other direction. +- **Curve representation conversions**: `tecurve` and `montcurve` curves and points are equivalent + and may be converted between one another by calling `into_montcurve` or `into_tecurve` on their + configurations or points. `swcurve` is more general and a curve c of one of the other two types + may be converted to this representation by calling `c.into_swcurve()`, whereas a point `p` lying + on the curve given by `c` may be mapped to its corresponding `swcurve` point by calling + `c.map_into_swcurve(p)`. +- **Map-to-curve methods**: The Elligator 2 method of mapping a field element `n: Field` into a + `tecurve` or `montcurve` with configuration `c` may be called as `c.elligator2_map(n)`. 
For all of + the curve configurations, the SWU map-to-curve method may be called as `c.swu_map(z,n)`, where + `z: Field` depends on `Field` and `c` and must be chosen by the user (the conditions it needs to + satisfy are specified in the comments + [here](https://github.com/noir-lang/noir/blob/master/noir_stdlib/src/ec.nr)). + +## Examples + +The +[ec_baby_jubjub test](https://github.com/noir-lang/noir/blob/master/crates/nargo_cli/tests/test_data/ec_baby_jubjub/src/main.nr) +illustrates all of the above primitives on various forms of the Baby Jubjub curve. A couple of more +interesting examples in Noir would be: + +Public-key cryptography: Given an elliptic curve and a 'base point' on it, determine the public key +from the private key. This is a matter of using scalar multiplication. In the case of Baby Jubjub, +for example, this code would do: + +```rust +use dep::std::ec::tecurve::affine::{Curve, Point}; + +fn bjj_pub_key(priv_key: Field) -> Point +{ + + let bjj = Curve::new(168700, 168696, G::new(995203441582195749578291179787384436505546430278305826713579947235728471134,5472060717959818805561601436314318772137091100104008585924551046643952123905)); + + let base_pt = Point::new(5299619240641551281634865583518297030282874472190772894086521144482721001553, 16950150798460657717958625567821834550301663161624707787222815936182638968203); + + bjj.mul(priv_key,base_pt) +} +``` + +This would come in handy in a Merkle proof. + +- EdDSA signature verification: This is a matter of combining these primitives with a suitable hash + function. See + [feat(stdlib): EdDSA sig verification noir#1136](https://github.com/noir-lang/noir/pull/1136) for + the case of Baby Jubjub and the Poseidon hash function. diff --git a/docs/docs/noir/standard_library/cryptographic_primitives/ecdsa_sig_verification.mdx b/docs/docs/noir/standard_library/cryptographic_primitives/ecdsa_sig_verification.mdx new file mode 100644 index 00000000000..1376c51dfde --- /dev/null +++ b/docs/docs/noir/standard_library/cryptographic_primitives/ecdsa_sig_verification.mdx @@ -0,0 +1,46 @@ +--- +title: ECDSA Signature Verification +description: Learn about the cryptographic primitives regarding ECDSA over the secp256k1 and secp256r1 curves +keywords: [cryptographic primitives, Noir project, ecdsa, secp256k1, secp256r1, signatures] +sidebar_position: 3 +--- + +import BlackBoxInfo from '@site/src/components/Notes/_blackbox.mdx'; + +Noir supports ECDSA signatures verification over the secp256k1 and secp256r1 curves. 
+ +## ecdsa_secp256k1::verify_signature + +Verifier for ECDSA Secp256k1 signatures + +```rust +fn verify_signature(_public_key_x : [u8; 32], _public_key_y : [u8; 32], _signature: [u8; 64], _message: [u8]) -> bool +``` + +example: + +```rust +fn main(hashed_message : [u8;32], pub_key_x : [u8;32], pub_key_y : [u8;32], signature : [u8;64]) { + let valid_signature = std::ecdsa_secp256k1::verify_signature(pub_key_x, pub_key_y, signature, hashed_message); + assert(valid_signature); +} +``` + +## ecdsa_secp256r1::verify_signature + +Verifier for ECDSA Secp256r1 signatures + +```rust +fn verify_signature(_public_key_x : [u8; 32], _public_key_y : [u8; 32], _signature: [u8; 64], _message: [u8]) -> bool +``` + +example: + +```rust +fn main(hashed_message : [u8;32], pub_key_x : [u8;32], pub_key_y : [u8;32], signature : [u8;64]) { + let valid_signature = std::ecdsa_secp256r1::verify_signature(pub_key_x, pub_key_y, signature, hashed_message); + assert(valid_signature); +} +``` + + diff --git a/docs/docs/noir/standard_library/cryptographic_primitives/eddsa.mdx b/docs/docs/noir/standard_library/cryptographic_primitives/eddsa.mdx new file mode 100644 index 00000000000..a9c10da6c06 --- /dev/null +++ b/docs/docs/noir/standard_library/cryptographic_primitives/eddsa.mdx @@ -0,0 +1,18 @@ +--- +title: EdDSA Verification +description: Learn about the cryptographic primitives regarding EdDSA +keywords: [cryptographic primitives, Noir project, eddsa, signatures] +sidebar_position: 5 +--- + +import BlackBoxInfo from '@site/src/components/Notes/_blackbox.mdx'; + +## eddsa::eddsa_poseidon_verify + +Verifier for EdDSA signatures + +```rust +fn eddsa_poseidon_verify(public_key_x : Field, public_key_y : Field, signature_s: Field, signature_r8_x: Field, signature_r8_y: Field, message: Field) -> bool +``` + + diff --git a/docs/docs/noir/standard_library/cryptographic_primitives/hashes.mdx b/docs/docs/noir/standard_library/cryptographic_primitives/hashes.mdx new file mode 100644 index 00000000000..3c5f7f79603 --- /dev/null +++ b/docs/docs/noir/standard_library/cryptographic_primitives/hashes.mdx @@ -0,0 +1,167 @@ +--- +title: Hash methods +description: + Learn about the cryptographic primitives ready to use for any Noir project, including sha256, + blake2s, pedersen, mimc_bn254 and mimc +keywords: + [cryptographic primitives, Noir project, sha256, blake2s, pedersen, mimc_bn254, mimc, hash] +sidebar_position: 0 +--- + +import BlackBoxInfo from '@site/src/components/Notes/_blackbox.mdx'; + +## sha256 + +Given an array of bytes, returns the resulting sha256 hash. + +```rust +fn sha256(_input : [u8]) -> [u8; 32] +``` + +example: + +```rust +fn main() { + let x = [163, 117, 178, 149]; // some random bytes + let hash = std::hash::sha256(x); +} +``` + + + +## blake2s + +Given an array of bytes, returns an array with the Blake2 hash + +```rust +fn blake2s(_input : [u8]) -> [u8; 32] +``` + +example: + +```rust +fn main() { + let x = [163, 117, 178, 149]; // some random bytes + let hash = std::hash::blake2s(x); +} +``` + + + +## pedersen_hash + +Given an array of Fields, returns the Pedersen hash. + +```rust +fn pedersen_hash(_input : [Field]) -> Field +``` + +example: + +```rust +fn main() { + let x = [163, 117, 178, 149]; // some random bytes + let hash = std::hash::pedersen_hash(x); +} +``` + + + + + +## pedersen_commitment + +Given an array of Fields, returns the Pedersen commitment. 
+
+```rust
+fn pedersen_commitment(_input : [Field]) -> [Field; 2]
+```
+
+example:
+
+```rust
+fn main() {
+    let x = [163, 117, 178, 149]; // some random bytes
+    let commitment = std::hash::pedersen_commitment(x);
+}
+```
+
+
+
+## keccak256
+
+Given an array of bytes (`u8`), returns the resulting keccak hash as an array of 32 bytes
+(`[u8; 32]`). Specify a `message_size` to hash only the first `message_size` bytes
+of the input.
+
+```rust
+fn keccak256(_input : [u8; N], _message_size: u32) -> [u8; 32]
+```
+
+example:
+
+```rust
+fn main() {
+    let x = [163, 117, 178, 149]; // some random bytes
+    let message_size = 4;
+    let hash = std::hash::keccak256(x, message_size);
+}
+```
+
+
+
+## poseidon
+
+Given an array of Fields, returns a new Field with the Poseidon Hash. Mind that you need to specify
+how many inputs there are to your Poseidon function.
+
+```rust
+// example for hash_1; hash_2 accepts an array of length 2, etc.
+fn hash_1(input: [Field; 1]) -> Field
+```
+
+example:
+
+```rust
+fn main()
+{
+    let hash_2 = std::hash::poseidon::bn254::hash_2([1, 2]);
+    assert(hash_2 == 0x115cc0f5e7d690413df64c6b9662e9cf2a3617f2743245519e19607a4417189a);
+}
+```
+
+## mimc_bn254 and mimc
+
+`mimc_bn254` is `mimc`, but with hardcoded parameters for the BN254 curve. You can use it by
+providing an array of Fields, and it returns a Field with the hash. You can use the `mimc` method if
+you're willing to input your own constants:
+
+```rust
+fn mimc(x: Field, k: Field, constants: [Field; N], exp : Field) -> Field
+```
+
+otherwise, use the `mimc_bn254` method:
+
+```rust
+fn mimc_bn254(array: [Field; N]) -> Field
+```
+
+example:
+
+```rust
+fn main() {
+    let x = [163, 117, 178, 149]; // some random bytes
+    let hash = std::hash::mimc::mimc_bn254(x);
+}
+```
+
+## hash_to_field
+
+```rust
+fn hash_to_field(_input : [Field; N]) -> Field {}
+```
+
+Calculates the `blake2s` hash of the inputs and returns the hash modulo the field modulus to return
+a value which can be represented as a `Field`.
+
diff --git a/docs/docs/noir/standard_library/cryptographic_primitives/index.md b/docs/docs/noir/standard_library/cryptographic_primitives/index.md
new file mode 100644
index 00000000000..650f30165d5
--- /dev/null
+++ b/docs/docs/noir/standard_library/cryptographic_primitives/index.md
@@ -0,0 +1,14 @@
+---
+title: Cryptographic Primitives
+description:
+  Learn about the cryptographic primitives ready to use for any Noir project
+keywords:
+  [
+    cryptographic primitives,
+    Noir project,
+  ]
+---
+
+The Noir team is progressively adding new cryptographic primitives to the standard library. Reach out for news, or if you would be interested in adding more of these calculations to Noir.
+
+Some methods are provided by the Aztec backend rather than being implemented in Noir. When using other backends, these methods may or may not be supplied.
diff --git a/docs/docs/noir/standard_library/cryptographic_primitives/scalar.mdx b/docs/docs/noir/standard_library/cryptographic_primitives/scalar.mdx new file mode 100644 index 00000000000..aa4fb8cbaed --- /dev/null +++ b/docs/docs/noir/standard_library/cryptographic_primitives/scalar.mdx @@ -0,0 +1,28 @@ +--- +title: Scalar multiplication +description: See how you can perform scalar multiplications over a fixed base in Noir +keywords: [cryptographic primitives, Noir project, scalar multiplication] +sidebar_position: 1 +--- + +import BlackBoxInfo from '@site/src/components/Notes/_blackbox.mdx'; + +## scalar_mul::fixed_base_embedded_curve + +Performs scalar multiplication over the embedded curve whose coordinates are defined by the +configured noir field. For the BN254 scalar field, this is BabyJubJub or Grumpkin. + +```rust +fn fixed_base_embedded_curve(_input : Field) -> [Field; 2] +``` + +example + +```rust +fn main(x : Field) { + let scal = std::scalar_mul::fixed_base_embedded_curve(x); + println(scal); +} +``` + + diff --git a/docs/docs/noir/standard_library/cryptographic_primitives/schnorr.mdx b/docs/docs/noir/standard_library/cryptographic_primitives/schnorr.mdx new file mode 100644 index 00000000000..7a2c9c20226 --- /dev/null +++ b/docs/docs/noir/standard_library/cryptographic_primitives/schnorr.mdx @@ -0,0 +1,38 @@ +--- +title: Schnorr Signatures +description: Learn how you can verify Schnorr signatures using Noir +keywords: [cryptographic primitives, Noir project, schnorr, signatures] +sidebar_position: 2 +--- + +import BlackBoxInfo from '@site/src/components/Notes/_blackbox.mdx'; + +## schnorr::verify_signature + +Verifier for Schnorr signatures over the embedded curve (for BN254 it is Grumpkin). + +```rust +fn verify_signature(_public_key_x: Field, _public_key_y: Field, _signature: [u8; 64], _message: [u8]) -> bool +``` + +where `_signature` can be generated like so using the npm package +[@noir-lang/barretenberg](https://www.npmjs.com/package/@noir-lang/barretenberg) + +```js +const { BarretenbergWasm } = require('@noir-lang/barretenberg/dest/wasm'); +const { Schnorr } = require('@noir-lang/barretenberg/dest/crypto/schnorr'); + +... + +const barretenberg = await BarretenbergWasm.new(); +const schnorr = new Schnorr(barretenberg); +const pubKey = schnorr.computePublicKey(privateKey); +const message = ... +const signature = Array.from( + schnorr.constructSignature(hash, privateKey).toBuffer() +); + +... +``` + + diff --git a/docs/docs/noir/standard_library/logging.md b/docs/docs/noir/standard_library/logging.md new file mode 100644 index 00000000000..db75ef9f86f --- /dev/null +++ b/docs/docs/noir/standard_library/logging.md @@ -0,0 +1,78 @@ +--- +title: Logging +description: + Learn how to use the println statement for debugging in Noir with this tutorial. Understand the + basics of logging in Noir and how to implement it in your code. +keywords: + [ + noir logging, + println statement, + print statement, + debugging in noir, + noir std library, + logging tutorial, + basic logging in noir, + noir logging implementation, + noir debugging techniques, + rust, + ] +--- + +The standard library provides two familiar statements you can use: `println` and `print`. Despite being a limited implementation of rust's `println!` and `print!` macros, these constructs can be useful for debugging. 
+ +You can print the output of both statements in your Noir code by using the `nargo execute` command or the `--show-output` flag when using `nargo test` (provided there are print statements in your tests). + +It is recommended to use `nargo execute` if you want to debug failing constraints with `println` or `print` statements. This is due to every input in a test being a constant rather than a witness, so we issue an error during compilation while we only print during execution (which comes after compilation). Neither `println`, nor `print` are callable for failed constraints caught at compile time. + +Both `print` and `println` are generic functions which can work on integers, fields, strings, and even structs or expressions. Note however, that slices are currently unsupported. For example: + +```rust +struct Person { + age: Field, + height: Field, +} + +fn main(age: Field, height: Field) { + let person = Person { + age: age, + height: height, + }; + println(person); + println(age + height); + println("Hello world!"); +} +``` + +You can print different types in the same statement (including strings) with a type called `fmtstr`. It can be specified in the same way as a normal string, just prepended with an "f" character: + +```rust + let fmt_str = f"i: {i}, j: {j}"; + println(fmt_str); + + let s = myStruct { y: x, x: y }; + println(s); + + println(f"i: {i}, s: {s}"); + + println(x); + println([x, y]); + + let foo = fooStruct { my_struct: s, foo: 15 }; + println(f"s: {s}, foo: {foo}"); + + println(15); // prints 0x0f, implicit Field + println(-1 as u8); // prints 255 + println(-1 as i8); // prints -1 +``` + +Examples shown above are interchangeable between the two `print` statements: + +```rust +let person = Person { age : age, height : height }; + +println(person); +print(person); + +println("Hello world!"); // Prints with a newline at the end of the input +print("Hello world!"); // Prints the input and keeps cursor on the same line +``` diff --git a/docs/docs/noir/standard_library/merkle_trees.md b/docs/docs/noir/standard_library/merkle_trees.md new file mode 100644 index 00000000000..fa488677884 --- /dev/null +++ b/docs/docs/noir/standard_library/merkle_trees.md @@ -0,0 +1,58 @@ +--- +title: Merkle Trees +description: Learn about Merkle Trees in Noir with this tutorial. Explore the basics of computing a merkle root using a proof, with examples. +keywords: + [ + Merkle trees in Noir, + Noir programming language, + check membership, + computing root from leaf, + Noir Merkle tree implementation, + Merkle tree tutorial, + Merkle tree code examples, + Noir libraries, + pedersen hash., + ] +--- + +## compute_merkle_root + +Returns the root of the tree from the provided leaf and its hash path, using a [Pedersen hash](./cryptographic_primitives/hashes.mdx#pedersen_hash). 
+ +```rust +fn compute_merkle_root(leaf : Field, index : Field, hash_path: [Field]) -> Field +``` + +example: + +```rust +/** + // these values are for this example only + index = "0" + priv_key = "0x000000000000000000000000000000000000000000000000000000616c696365" + secret = "0x1929ea3ab8d9106a899386883d9428f8256cfedb3c4f6b66bf4aa4d28a79988f" + note_hash_path = [ + "0x1e61bdae0f027b1b2159e1f9d3f8d00fa668a952dddd822fda80dc745d6f65cc", + "0x0e4223f3925f98934393c74975142bd73079ab0621f4ee133cee050a3c194f1a", + "0x2fd7bb412155bf8693a3bd2a3e7581a679c95c68a052f835dddca85fa1569a40" + ] + */ +fn main(index: Field, priv_key: Field, secret: Field, note_hash_path: [Field; 3]) { + + let pubkey = std::scalar_mul::fixed_base_embedded_curve(priv_key); + let pubkey_x = pubkey[0]; + let pubkey_y = pubkey[1]; + let note_commitment = std::hash::pedersen([pubkey_x, pubkey_y, secret]); + + let root = std::merkle::compute_merkle_root(note_commitment[0], index, note_hash_path); + println(root); +} +``` + +To check merkle tree membership: + +1. Include a merkle root as a program input. +2. Compute the merkle root of a given leaf, index and hash path. +3. Assert the merkle roots are equal. + +For more info about merkle trees, see the Wikipedia [page](https://en.wikipedia.org/wiki/Merkle_tree). diff --git a/docs/docs/noir/standard_library/options.md b/docs/docs/noir/standard_library/options.md new file mode 100644 index 00000000000..970c9cfbf11 --- /dev/null +++ b/docs/docs/noir/standard_library/options.md @@ -0,0 +1,97 @@ +--- +title: Option Type +--- + +The `Option` type is a way to express that a value might be present (`Some(T))` or absent (`None`). It's a safer way to handle potential absence of values, compared to using nulls in many other languages. + +```rust +struct Option { + None, + Some(T), +} +``` + +The `Option` type, already imported into your Noir program, can be used directly: + +```rust +fn main() { + let none = Option::none(); + let some = Option::some(3); +} +``` + +See [this test](https://github.com/noir-lang/noir/blob/5cbfb9c4a06c8865c98ff2b594464b037d821a5c/crates/nargo_cli/tests/test_data/option/src/main.nr) for a more comprehensive set of examples of each of the methods described below. + +## Methods + +### none + +Constructs a none value. + +### some + +Constructs a some wrapper around a given value. + +### is_none + +Returns true if the Option is None. + +### is_some + +Returns true of the Option is Some. + +### unwrap + +Asserts `self.is_some()` and returns the wrapped value. + +### unwrap_unchecked + +Returns the inner value without asserting `self.is_some()`. This method can be useful within an if condition when we already know that `option.is_some()`. If the option is None, there is no guarantee what value will be returned, only that it will be of type T for an `Option`. + +### unwrap_or + +Returns the wrapped value if `self.is_some()`. Otherwise, returns the given default value. + +### unwrap_or_else + +Returns the wrapped value if `self.is_some()`. Otherwise, calls the given function to return a default value. + +### map + +If self is `Some(x)`, this returns `Some(f(x))`. Otherwise, this returns `None`. + +### map_or + +If self is `Some(x)`, this returns `f(x)`. Otherwise, this returns the given default value. + +### map_or_else + +If self is `Some(x)`, this returns `f(x)`. Otherwise, this returns `default()`. + +### and + +Returns None if self is None. Otherwise, this returns `other`. + +### and_then + +If self is None, this returns None. 
Otherwise, this calls the given function with the Some value contained within self, and returns the result of that call. In some languages this function is called `flat_map` or `bind`. + +### or + +If self is Some, return self. Otherwise, return `other`. + +### or_else + +If self is Some, return self. Otherwise, return `default()`. + +### xor + +If only one of the two Options is Some, return that option. Otherwise, if both options are Some or both are None, None is returned. + +### filter + +Returns `Some(x)` if self is `Some(x)` and `predicate(x)` is true. Otherwise, this returns `None`. + +### flatten + +Flattens an `Option>` into a `Option`. This returns `None` if the outer Option is None. Otherwise, this returns the inner Option. diff --git a/docs/docs/noir/standard_library/recursion.md b/docs/docs/noir/standard_library/recursion.md new file mode 100644 index 00000000000..67962082a8f --- /dev/null +++ b/docs/docs/noir/standard_library/recursion.md @@ -0,0 +1,90 @@ +--- +title: Recursive Proofs +description: Learn about how to write recursive proofs in Noir. +keywords: [recursion, recursive proofs, verification_key, aggregation object, verify_proof] +--- + +Noir supports recursively verifying proofs, meaning you verify the proof of a Noir program in another Noir program. This enables creating proofs of arbitrary size by doing step-wise verification of smaller components of a large proof. + +The `verify_proof` function takes a verification key, proof and public inputs for a zk program, as well as a key hash and an input aggregation object. The key hash is used to check the validity of the verification key and the input aggregation object is required by some proving systems. The `verify_proof` function returns an output aggregation object that can then be fed into future iterations of the proof verification if required. + +```rust +#[foreign(verify_proof)] +fn verify_proof(_verification_key : [Field], _proof : [Field], _public_input : Field, _key_hash : Field, _input_aggregation_object : [Field]) -> [Field] {} +``` + +:::info + +This is a black box function. Read [this section](./black_box_fns) to learn more about black box functions in Noir. + +::: + +## Example usage + +```rust +use dep::std; + +fn main( + verification_key : [Field; 114], + proof : [Field; 94], + public_inputs : [Field; 1], + key_hash : Field, + input_aggregation_object : [Field; 16], + proof_b : [Field; 94], +) -> pub [Field; 16] { + let output_aggregation_object_a = std::verify_proof( + verification_key.as_slice(), + proof.as_slice(), + public_inputs.as_slice(), + key_hash, + input_aggregation_object + ); + + let output_aggregation_object = std::verify_proof( + verification_key.as_slice(), + proof_b.as_slice(), + public_inputs.as_slice(), + key_hash, + output_aggregation_object_a + ); + + let mut output = [0; 16]; + for i in 0..16 { + output[i] = output_aggregation_object[i]; + } + output +} +``` + +## Parameters + +### `verification_key` + +The verification key for the zk program that is being verified. + +### `proof` + +The proof for the zk program that is being verified. + +### `public_inputs` + +These represent the public inputs of the proof we are verifying. They should be checked against in the circuit after construction of a new aggregation state. + +### `key_hash` + +A key hash is used to check the validity of the verification key. The circuit implementing this opcode can use this hash to ensure that the key provided to the circuit matches the key produced by the circuit creator. 
+ +### `input_aggregation_object` + +An aggregation object is blob of data that the top-level verifier must run some proof system specific algorithm on to complete verification. The size is proof system specific and will be set by the backend integrating this opcode. The input aggregation object is only not `None` when we are verifying a previous recursive aggregation in the current circuit. If this is the first recursive aggregation there is no input aggregation object. It is left to the backend to determine how to handle when there is no input aggregation object. + +## Return value + +### `output_aggregation_object` + +This is the result of a recursive aggregation and is what will be fed into the next verifier. +The next verifier can either perform a final verification (returning true or false) or perform another recursive aggregation where this output aggregation object will be the input aggregation object of the next recursive aggregation. + +## Example + +You can see an example of how to do recursive proofs in [this example recursion demo repo](https://github.com/noir-lang/noir-examples/tree/master/recursion). diff --git a/docs/docs/noir/standard_library/traits.md b/docs/docs/noir/standard_library/traits.md new file mode 100644 index 00000000000..f2960ca5080 --- /dev/null +++ b/docs/docs/noir/standard_library/traits.md @@ -0,0 +1,284 @@ +--- +title: Traits +description: Noir's stdlib provides a few commonly used traits. +keywords: [traits, trait, interface, protocol, default, add, eq] +--- + +## `std::default` + +### `std::default::Default` + +```rust +trait Default { + fn default() -> Self; +} +``` + +Constructs a default value of a type. + +Implementations: +```rust +impl Default for Field { .. } + +impl Default for i8 { .. } +impl Default for i16 { .. } +impl Default for i32 { .. } +impl Default for i64 { .. } + +impl Default for u8 { .. } +impl Default for u16 { .. } +impl Default for u32 { .. } +impl Default for u64 { .. } + +impl Default for () { .. } +impl Default for bool { .. } + +impl Default for [T; N] + where T: Default { .. } + +impl Default for (A, B) + where A: Default, B: Default { .. } + +impl Default for (A, B, C) + where A: Default, B: Default, C: Default { .. } + +impl Default for (A, B, C, D) + where A: Default, B: Default, C: Default, D: Default { .. } + +impl Default for (A, B, C, D, E) + where A: Default, B: Default, C: Default, D: Default, E: Default { .. } +``` + +For primitive integer types, the return value of `default` is `0`. Container +types such as arrays are filled with default values of their element type. + +## `std::cmp` + +### `std::cmp::Eq` + +```rust +trait Eq { + fn eq(self, other: Self) -> bool; +} +``` +Returns `true` if `self` is equal to `other`. Implementing this trait on a type +allows the type to be used with `==` and `!=`. + +Implementations: +```rust +impl Eq for Field { .. } + +impl Eq for i8 { .. } +impl Eq for i16 { .. } +impl Eq for i32 { .. } +impl Eq for i64 { .. } + +impl Eq for u8 { .. } +impl Eq for u16 { .. } +impl Eq for u32 { .. } +impl Eq for u64 { .. } + +impl Eq for () { .. } +impl Eq for bool { .. } + +impl Eq for [T; N] + where T: Eq { .. } + +impl Eq for (A, B) + where A: Eq, B: Eq { .. } + +impl Eq for (A, B, C) + where A: Eq, B: Eq, C: Eq { .. } + +impl Eq for (A, B, C, D) + where A: Eq, B: Eq, C: Eq, D: Eq { .. } + +impl Eq for (A, B, C, D, E) + where A: Eq, B: Eq, C: Eq, D: Eq, E: Eq { .. 
} +``` + +### `std::cmp::Cmp` + +```rust +trait Cmp { + fn cmp(self, other: Self) -> Ordering; +} +``` + +`a.cmp(b)` compares two values returning `Ordering::less()` if `a < b`, +`Ordering::equal()` if `a == b`, or `Ordering::greater()` if `a > b`. +Implementing this trait on a type allows `<`, `<=`, `>`, and `>=` to be +used on values of the type. + +Implementations: + +```rust +impl Ord for u8 { .. } +impl Ord for u16 { .. } +impl Ord for u32 { .. } +impl Ord for u64 { .. } + +impl Ord for i8 { .. } +impl Ord for i16 { .. } +impl Ord for i32 { .. } + +impl Ord for i64 { .. } + +impl Ord for () { .. } +impl Ord for bool { .. } + +impl Ord for [T; N] + where T: Ord { .. } + +impl Ord for (A, B) + where A: Ord, B: Ord { .. } + +impl Ord for (A, B, C) + where A: Ord, B: Ord, C: Ord { .. } + +impl Ord for (A, B, C, D) + where A: Ord, B: Ord, C: Ord, D: Ord { .. } + +impl Ord for (A, B, C, D, E) + where A: Ord, B: Ord, C: Ord, D: Ord, E: Ord { .. } +``` + +## `std::ops` + +### `std::ops::Add`, `std::ops::Sub`, `std::ops::Mul`, and `std::ops::Div` + +These traits abstract over addition, subtraction, multiplication, and division respectively. +Implementing these traits for a given type will also allow that type to be used with the corresponding operator +for that trait (`+` for Add, etc) in addition to the normal method names. + +```rust +trait Add { + fn add(self, other: Self) -> Self; +} + +trait Sub { + fn sub(self, other: Self) -> Self; +} + +trait Mul { + fn mul(self, other: Self) -> Self; +} + +trait Div { + fn div(self, other: Self) -> Self; +} +``` + +The implementations block below is given for the `Add` trait, but the same types that implement +`Add` also implement `Sub`, `Mul`, and `Div`. + +Implementations: +```rust +impl Add for Field { .. } + +impl Add for i8 { .. } +impl Add for i16 { .. } +impl Add for i32 { .. } +impl Add for i64 { .. } + +impl Add for u8 { .. } +impl Add for u16 { .. } +impl Add for u32 { .. } +impl Add for u64 { .. } +``` + +### `std::ops::Rem` + +```rust +trait Rem { + fn rem(self, other: Self) -> Self; +} +``` + +`Rem::rem(a, b)` is the remainder function returning the result of what is +left after dividing `a` and `b`. Implementing `Rem` allows the `%` operator +to be used with the implementation type. + +Unlike other numeric traits, `Rem` is not implemented for `Field`. + +Implementations: +```rust +impl Rem for u8 { fn rem(self, other: u8) -> u8 { self % other } } +impl Rem for u16 { fn rem(self, other: u16) -> u16 { self % other } } +impl Rem for u32 { fn rem(self, other: u32) -> u32 { self % other } } +impl Rem for u64 { fn rem(self, other: u64) -> u64 { self % other } } + +impl Rem for i8 { fn rem(self, other: i8) -> i8 { self % other } } +impl Rem for i16 { fn rem(self, other: i16) -> i16 { self % other } } +impl Rem for i32 { fn rem(self, other: i32) -> i32 { self % other } } +impl Rem for i64 { fn rem(self, other: i64) -> i64 { self % other } } +``` + +### `std::ops::{ BitOr, BitAnd, BitXor }` + +```rust +trait BitOr { + fn bitor(self, other: Self) -> Self; +} + +trait BitAnd { + fn bitand(self, other: Self) -> Self; +} + +trait BitXor { + fn bitxor(self, other: Self) -> Self; +} +``` + +Traits for the bitwise operations `|`, `&`, and `^`. + +Implementing `BitOr`, `BitAnd` or `BitXor` for a type allows the `|`, `&`, or `^` operator respectively +to be used with the type. + +The implementations block below is given for the `BitOr` trait, but the same types that implement +`BitOr` also implement `BitAnd` and `BitXor`. 
+ +Implementations: +```rust +impl BitOr for bool { fn bitor(self, other: bool) -> bool { self | other } } + +impl BitOr for u8 { fn bitor(self, other: u8) -> u8 { self | other } } +impl BitOr for u16 { fn bitor(self, other: u16) -> u16 { self | other } } +impl BitOr for u32 { fn bitor(self, other: u32) -> u32 { self | other } } +impl BitOr for u64 { fn bitor(self, other: u64) -> u64 { self | other } } + +impl BitOr for i8 { fn bitor(self, other: i8) -> i8 { self | other } } +impl BitOr for i16 { fn bitor(self, other: i16) -> i16 { self | other } } +impl BitOr for i32 { fn bitor(self, other: i32) -> i32 { self | other } } +impl BitOr for i64 { fn bitor(self, other: i64) -> i64 { self | other } } +``` + +### `std::ops::{ Shl, Shr }` + +```rust +trait Shl { + fn shl(self, other: Self) -> Self; +} + +trait Shr { + fn shr(self, other: Self) -> Self; +} +``` + +Traits for a bit shift left and bit shift right. + +Implementing `Shl` for a type allows the left shift operator (`<<`) to be used with the implementation type. +Similarly, implementing `Shr` allows the right shift operator (`>>`) to be used with the type. + +Note that bit shifting is not currently implemented for signed types. + +The implementations block below is given for the `Shl` trait, but the same types that implement +`Shl` also implement `Shr`. + +Implementations: +```rust +impl Shl for u8 { fn shl(self, other: u8) -> u8 { self << other } } +impl Shl for u16 { fn shl(self, other: u16) -> u16 { self << other } } +impl Shl for u32 { fn shl(self, other: u32) -> u32 { self << other } } +impl Shl for u64 { fn shl(self, other: u64) -> u64 { self << other } } +``` diff --git a/docs/docs/standard_library/zeroed.md b/docs/docs/noir/standard_library/zeroed.md similarity index 100% rename from docs/docs/standard_library/zeroed.md rename to docs/docs/noir/standard_library/zeroed.md diff --git a/docs/docs/reference/NoirJS/backend_barretenberg/.nojekyll b/docs/docs/reference/NoirJS/backend_barretenberg/.nojekyll new file mode 100644 index 00000000000..e2ac6616add --- /dev/null +++ b/docs/docs/reference/NoirJS/backend_barretenberg/.nojekyll @@ -0,0 +1 @@ +TypeDoc added this file to prevent GitHub Pages from using Jekyll. You can turn off this behavior by setting the `githubPages` option to false. 
\ No newline at end of file diff --git a/docs/docs/reference/NoirJS/backend_barretenberg/classes/BarretenbergBackend.md b/docs/docs/reference/NoirJS/backend_barretenberg/classes/BarretenbergBackend.md new file mode 100644 index 00000000000..5cbe9421b92 --- /dev/null +++ b/docs/docs/reference/NoirJS/backend_barretenberg/classes/BarretenbergBackend.md @@ -0,0 +1,185 @@ +# BarretenbergBackend + +## Implements + +- [`Backend`](../interfaces/Backend.md) + +## Constructors + +### new BarretenbergBackend(acirCircuit, options) + +```ts +new BarretenbergBackend(acirCircuit, options): BarretenbergBackend +``` + +#### Parameters + +| Parameter | Type | +| :------ | :------ | +| `acirCircuit` | [`CompiledCircuit`](../type-aliases/CompiledCircuit.md) | +| `options` | [`BackendOptions`](../type-aliases/BackendOptions.md) | + +#### Returns + +[`BarretenbergBackend`](BarretenbergBackend.md) + +## Methods + +### destroy() + +```ts +destroy(): Promise +``` + +#### Returns + +`Promise`\<`void`\> + +#### Implementation of + +[`Backend`](../interfaces/Backend.md).[`destroy`](../interfaces/Backend.md#destroy) + +#### Description + +Destroys the backend + +*** + +### generateFinalProof() + +```ts +generateFinalProof(decompressedWitness): Promise +``` + +#### Parameters + +| Parameter | Type | +| :------ | :------ | +| `decompressedWitness` | `Uint8Array` | + +#### Returns + +`Promise`\<[`ProofData`](../type-aliases/ProofData.md)\> + +#### Implementation of + +[`Backend`](../interfaces/Backend.md).[`generateFinalProof`](../interfaces/Backend.md#generatefinalproof) + +#### Description + +Generates a final proof (not meant to be verified in another circuit) + +*** + +### generateIntermediateProof() + +```ts +generateIntermediateProof(witness): Promise +``` + +#### Parameters + +| Parameter | Type | +| :------ | :------ | +| `witness` | `Uint8Array` | + +#### Returns + +`Promise`\<[`ProofData`](../type-aliases/ProofData.md)\> + +#### Implementation of + +[`Backend`](../interfaces/Backend.md).[`generateIntermediateProof`](../interfaces/Backend.md#generateintermediateproof) + +#### Example + +```typescript +const intermediateProof = await backend.generateIntermediateProof(witness); +``` + +*** + +### generateIntermediateProofArtifacts() + +```ts +generateIntermediateProofArtifacts(proofData, numOfPublicInputs): Promise +``` + +#### Parameters + +| Parameter | Type | Default value | +| :------ | :------ | :------ | +| `proofData` | [`ProofData`](../type-aliases/ProofData.md) | `undefined` | +| `numOfPublicInputs` | `number` | `0` | + +#### Returns + +`Promise`\<`object`\> + +#### Implementation of + +[`Backend`](../interfaces/Backend.md).[`generateIntermediateProofArtifacts`](../interfaces/Backend.md#generateintermediateproofartifacts) + +#### Example + +```typescript +const artifacts = await backend.generateIntermediateProofArtifacts(proof, numOfPublicInputs); +``` + +*** + +### verifyFinalProof() + +```ts +verifyFinalProof(proofData): Promise +``` + +#### Parameters + +| Parameter | Type | +| :------ | :------ | +| `proofData` | [`ProofData`](../type-aliases/ProofData.md) | + +#### Returns + +`Promise`\<`boolean`\> + +#### Implementation of + +[`Backend`](../interfaces/Backend.md).[`verifyFinalProof`](../interfaces/Backend.md#verifyfinalproof) + +#### Description + +Verifies a final proof + +*** + +### verifyIntermediateProof() + +```ts +verifyIntermediateProof(proofData): Promise +``` + +#### Parameters + +| Parameter | Type | +| :------ | :------ | +| `proofData` | [`ProofData`](../type-aliases/ProofData.md) | + 
+#### Returns + +`Promise`\<`boolean`\> + +#### Implementation of + +[`Backend`](../interfaces/Backend.md).[`verifyIntermediateProof`](../interfaces/Backend.md#verifyintermediateproof) + +#### Example + +```typescript +const isValidIntermediate = await backend.verifyIntermediateProof(proof); +``` + +*** + +Generated using [typedoc-plugin-markdown](https://www.npmjs.com/package/typedoc-plugin-markdown) and [TypeDoc](https://typedoc.org/) diff --git a/docs/docs/reference/NoirJS/backend_barretenberg/index.md b/docs/docs/reference/NoirJS/backend_barretenberg/index.md new file mode 100644 index 00000000000..bfbecb52864 --- /dev/null +++ b/docs/docs/reference/NoirJS/backend_barretenberg/index.md @@ -0,0 +1,45 @@ +# backend_barretenberg + +## Exports + +### Classes + +| Class | Description | +| :------ | :------ | +| [BarretenbergBackend](classes/BarretenbergBackend.md) | - | + +### Interfaces + +| Interface | Description | +| :------ | :------ | +| [Backend](interfaces/Backend.md) | - | + +### Type Aliases + +| Type alias | Description | +| :------ | :------ | +| [BackendOptions](type-aliases/BackendOptions.md) | - | +| [CompiledCircuit](type-aliases/CompiledCircuit.md) | - | +| [ProofData](type-aliases/ProofData.md) | - | + +## Functions + +### flattenPublicInputs() + +```ts +flattenPublicInputs(publicInputs): string[] +``` + +#### Parameters + +| Parameter | Type | +| :------ | :------ | +| `publicInputs` | `WitnessMap` | + +#### Returns + +`string`[] + +*** + +Generated using [typedoc-plugin-markdown](https://www.npmjs.com/package/typedoc-plugin-markdown) and [TypeDoc](https://typedoc.org/) diff --git a/docs/docs/reference/NoirJS/backend_barretenberg/interfaces/Backend.md b/docs/docs/reference/NoirJS/backend_barretenberg/interfaces/Backend.md new file mode 100644 index 00000000000..3eb9645c8d2 --- /dev/null +++ b/docs/docs/reference/NoirJS/backend_barretenberg/interfaces/Backend.md @@ -0,0 +1,132 @@ +# Backend + +## Methods + +### destroy() + +```ts +destroy(): Promise +``` + +#### Returns + +`Promise`\<`void`\> + +#### Description + +Destroys the backend + +*** + +### generateFinalProof() + +```ts +generateFinalProof(decompressedWitness): Promise +``` + +#### Parameters + +| Parameter | Type | +| :------ | :------ | +| `decompressedWitness` | `Uint8Array` | + +#### Returns + +`Promise`\<[`ProofData`](../type-aliases/ProofData.md)\> + +#### Description + +Generates a final proof (not meant to be verified in another circuit) + +*** + +### generateIntermediateProof() + +```ts +generateIntermediateProof(decompressedWitness): Promise +``` + +#### Parameters + +| Parameter | Type | +| :------ | :------ | +| `decompressedWitness` | `Uint8Array` | + +#### Returns + +`Promise`\<[`ProofData`](../type-aliases/ProofData.md)\> + +#### Description + +Generates an intermediate proof (meant to be verified in another circuit) + +*** + +### generateIntermediateProofArtifacts() + +```ts +generateIntermediateProofArtifacts(proofData, numOfPublicInputs): Promise +``` + +#### Parameters + +| Parameter | Type | +| :------ | :------ | +| `proofData` | [`ProofData`](../type-aliases/ProofData.md) | +| `numOfPublicInputs` | `number` | + +#### Returns + +`Promise`\<`object`\> + +#### Description + +Retrieves the artifacts from a proof in the Field format + +*** + +### verifyFinalProof() + +```ts +verifyFinalProof(proofData): Promise +``` + +#### Parameters + +| Parameter | Type | +| :------ | :------ | +| `proofData` | [`ProofData`](../type-aliases/ProofData.md) | + +#### Returns + +`Promise`\<`boolean`\> + +#### 
Description + +Verifies a final proof + +*** + +### verifyIntermediateProof() + +```ts +verifyIntermediateProof(proofData): Promise +``` + +#### Parameters + +| Parameter | Type | +| :------ | :------ | +| `proofData` | [`ProofData`](../type-aliases/ProofData.md) | + +#### Returns + +`Promise`\<`boolean`\> + +#### Description + +Verifies an intermediate proof + +*** + +Generated using [typedoc-plugin-markdown](https://www.npmjs.com/package/typedoc-plugin-markdown) and [TypeDoc](https://typedoc.org/) diff --git a/docs/docs/reference/NoirJS/backend_barretenberg/type-aliases/BackendOptions.md b/docs/docs/reference/NoirJS/backend_barretenberg/type-aliases/BackendOptions.md new file mode 100644 index 00000000000..266ade75d17 --- /dev/null +++ b/docs/docs/reference/NoirJS/backend_barretenberg/type-aliases/BackendOptions.md @@ -0,0 +1,19 @@ +# BackendOptions + +```ts +type BackendOptions: object; +``` + +## Description + +An options object, currently only used to specify the number of threads to use. + +## Type declaration + +| Member | Type | Description | +| :------ | :------ | :------ | +| `threads` | `number` | **Description**

Number of threads | + +*** + +Generated using [typedoc-plugin-markdown](https://www.npmjs.com/package/typedoc-plugin-markdown) and [TypeDoc](https://typedoc.org/) diff --git a/docs/docs/reference/NoirJS/backend_barretenberg/type-aliases/CompiledCircuit.md b/docs/docs/reference/NoirJS/backend_barretenberg/type-aliases/CompiledCircuit.md new file mode 100644 index 00000000000..34e0dd04205 --- /dev/null +++ b/docs/docs/reference/NoirJS/backend_barretenberg/type-aliases/CompiledCircuit.md @@ -0,0 +1,20 @@ +# CompiledCircuit + +```ts +type CompiledCircuit: object; +``` + +## Description + +The representation of a compiled circuit + +## Type declaration + +| Member | Type | Description | +| :------ | :------ | :------ | +| `abi` | `Abi` | **Description**

ABI representation of the circuit | +| `bytecode` | `string` | **Description**

The bytecode of the circuit | + +*** + +Generated using [typedoc-plugin-markdown](https://www.npmjs.com/package/typedoc-plugin-markdown) and [TypeDoc](https://typedoc.org/) diff --git a/docs/docs/reference/NoirJS/backend_barretenberg/type-aliases/ProofData.md b/docs/docs/reference/NoirJS/backend_barretenberg/type-aliases/ProofData.md new file mode 100644 index 00000000000..3eb360a78f1 --- /dev/null +++ b/docs/docs/reference/NoirJS/backend_barretenberg/type-aliases/ProofData.md @@ -0,0 +1,20 @@ +# ProofData + +```ts +type ProofData: object; +``` + +## Description + +The representation of a proof + +## Type declaration + +| Member | Type | Description | +| :------ | :------ | :------ | +| `proof` | `Uint8Array` | **Description**

A byte array representing the proof | + | `publicInputs` | `WitnessMap` | **Description**

Public inputs of a proof | + +*** + +Generated using [typedoc-plugin-markdown](https://www.npmjs.com/package/typedoc-plugin-markdown) and [TypeDoc](https://typedoc.org/) diff --git a/docs/docs/reference/NoirJS/backend_barretenberg/typedoc-sidebar.cjs b/docs/docs/reference/NoirJS/backend_barretenberg/typedoc-sidebar.cjs new file mode 100644 index 00000000000..2aaa55bccf6 --- /dev/null +++ b/docs/docs/reference/NoirJS/backend_barretenberg/typedoc-sidebar.cjs @@ -0,0 +1,4 @@ +// @ts-check +/** @type {import('@docusaurus/plugin-content-docs').SidebarsConfig} */ +const typedocSidebar = { items: [{"type":"category","label":"Classes","items":[{"type":"doc","id":"reference/NoirJS/backend_barretenberg/classes/BarretenbergBackend","label":"BarretenbergBackend"}]},{"type":"category","label":"Interfaces","items":[{"type":"doc","id":"reference/NoirJS/backend_barretenberg/interfaces/Backend","label":"Backend"}]},{"type":"category","label":"Type Aliases","items":[{"type":"doc","id":"reference/NoirJS/backend_barretenberg/type-aliases/BackendOptions","label":"BackendOptions"},{"type":"doc","id":"reference/NoirJS/backend_barretenberg/type-aliases/CompiledCircuit","label":"CompiledCircuit"},{"type":"doc","id":"reference/NoirJS/backend_barretenberg/type-aliases/ProofData","label":"ProofData"}]}]}; +module.exports = typedocSidebar.items; \ No newline at end of file diff --git a/docs/docs/reference/NoirJS/noir_js/.nojekyll b/docs/docs/reference/NoirJS/noir_js/.nojekyll new file mode 100644 index 00000000000..e2ac6616add --- /dev/null +++ b/docs/docs/reference/NoirJS/noir_js/.nojekyll @@ -0,0 +1 @@ +TypeDoc added this file to prevent GitHub Pages from using Jekyll. You can turn off this behavior by setting the `githubPages` option to false. \ No newline at end of file diff --git a/docs/docs/reference/NoirJS/noir_js/classes/Noir.md b/docs/docs/reference/NoirJS/noir_js/classes/Noir.md new file mode 100644 index 00000000000..34e20d99684 --- /dev/null +++ b/docs/docs/reference/NoirJS/noir_js/classes/Noir.md @@ -0,0 +1,132 @@ +# Noir + +## Constructors + +### new Noir(circuit, backend) + +```ts +new Noir(circuit, backend?): Noir +``` + +#### Parameters + +| Parameter | Type | +| :------ | :------ | +| `circuit` | [`CompiledCircuit`](../type-aliases/CompiledCircuit.md) | +| `backend`? | `Backend` | + +#### Returns + +[`Noir`](Noir.md) + +## Methods + +### destroy() + +```ts +destroy(): Promise +``` + +#### Returns + +`Promise`\<`void`\> + +#### Description + +Destroys the underlying backend instance. + +#### Example + +```typescript +await noir.destroy(); +``` + +*** + +### execute() + +```ts +execute(inputs, foreignCallHandler?): Promise +``` + +#### Parameters + +| Parameter | Type | +| :------ | :------ | +| `inputs` | [`InputMap`](../type-aliases/InputMap.md) | +| `foreignCallHandler`? | [`ForeignCallHandler`](../type-aliases/ForeignCallHandler.md) | + +#### Returns + +`Promise`\<`object`\> + +#### Description + +Allows to execute a circuit to get its witness and return value. + +#### Example + +```typescript +async execute(inputs) +``` + +*** + +### generateFinalProof() + +```ts +generateFinalProof(inputs, foreignCallHandler?): Promise +``` + +#### Parameters + +| Parameter | Type | +| :------ | :------ | +| `inputs` | [`InputMap`](../type-aliases/InputMap.md) | +| `foreignCallHandler`? | [`ForeignCallHandler`](../type-aliases/ForeignCallHandler.md) | + +#### Returns + +`Promise`\<[`ProofData`](../type-aliases/ProofData.md)\> + +#### Description + +Generates a witness and a proof given an object as input. 
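+
+A minimal usage sketch (the input object below is hypothetical; its keys must match the parameter names of your circuit's `main` function):
+
+```typescript
+// Hypothetical inputs; use the argument names declared in your circuit's main().
+const input = { x: 1, y: 2 };
+const proofData = await noir.generateFinalProof(input);
+console.log(proofData.proof, proofData.publicInputs);
+```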
+ +#### Example + +```typescript +async generateFinalProof(input) +``` + +*** + +### verifyFinalProof() + +```ts +verifyFinalProof(proofData): Promise +``` + +#### Parameters + +| Parameter | Type | +| :------ | :------ | +| `proofData` | [`ProofData`](../type-aliases/ProofData.md) | + +#### Returns + +`Promise`\<`boolean`\> + +#### Description + +Instantiates the verification key and verifies a proof. + +#### Example + +```typescript +async verifyFinalProof(proof) +``` + +*** + +Generated using [typedoc-plugin-markdown](https://www.npmjs.com/package/typedoc-plugin-markdown) and [TypeDoc](https://typedoc.org/) diff --git a/docs/docs/reference/NoirJS/noir_js/functions/and.md b/docs/docs/reference/NoirJS/noir_js/functions/and.md new file mode 100644 index 00000000000..c783283e396 --- /dev/null +++ b/docs/docs/reference/NoirJS/noir_js/functions/and.md @@ -0,0 +1,22 @@ +# and() + +```ts +and(lhs, rhs): string +``` + +Performs a bitwise AND operation between `lhs` and `rhs` + +## Parameters + +| Parameter | Type | Description | +| :------ | :------ | :------ | +| `lhs` | `string` | | +| `rhs` | `string` | | + +## Returns + +`string` + +*** + +Generated using [typedoc-plugin-markdown](https://www.npmjs.com/package/typedoc-plugin-markdown) and [TypeDoc](https://typedoc.org/) diff --git a/docs/docs/reference/NoirJS/noir_js/functions/blake2s256.md b/docs/docs/reference/NoirJS/noir_js/functions/blake2s256.md new file mode 100644 index 00000000000..7882d0da8d5 --- /dev/null +++ b/docs/docs/reference/NoirJS/noir_js/functions/blake2s256.md @@ -0,0 +1,21 @@ +# blake2s256() + +```ts +blake2s256(inputs): Uint8Array +``` + +Calculates the Blake2s256 hash of the input bytes + +## Parameters + +| Parameter | Type | Description | +| :------ | :------ | :------ | +| `inputs` | `Uint8Array` | | + +## Returns + +`Uint8Array` + +*** + +Generated using [typedoc-plugin-markdown](https://www.npmjs.com/package/typedoc-plugin-markdown) and [TypeDoc](https://typedoc.org/) diff --git a/docs/docs/reference/NoirJS/noir_js/functions/ecdsa_secp256k1_verify.md b/docs/docs/reference/NoirJS/noir_js/functions/ecdsa_secp256k1_verify.md new file mode 100644 index 00000000000..5e3cd53e9d3 --- /dev/null +++ b/docs/docs/reference/NoirJS/noir_js/functions/ecdsa_secp256k1_verify.md @@ -0,0 +1,28 @@ +# ecdsa\_secp256k1\_verify() + +```ts +ecdsa_secp256k1_verify( + hashed_msg, + public_key_x_bytes, + public_key_y_bytes, + signature): boolean +``` + +Verifies a ECDSA signature over the secp256k1 curve. + +## Parameters + +| Parameter | Type | Description | +| :------ | :------ | :------ | +| `hashed_msg` | `Uint8Array` | | +| `public_key_x_bytes` | `Uint8Array` | | +| `public_key_y_bytes` | `Uint8Array` | | +| `signature` | `Uint8Array` | | + +## Returns + +`boolean` + +*** + +Generated using [typedoc-plugin-markdown](https://www.npmjs.com/package/typedoc-plugin-markdown) and [TypeDoc](https://typedoc.org/) diff --git a/docs/docs/reference/NoirJS/noir_js/functions/ecdsa_secp256r1_verify.md b/docs/docs/reference/NoirJS/noir_js/functions/ecdsa_secp256r1_verify.md new file mode 100644 index 00000000000..0b20ff68957 --- /dev/null +++ b/docs/docs/reference/NoirJS/noir_js/functions/ecdsa_secp256r1_verify.md @@ -0,0 +1,28 @@ +# ecdsa\_secp256r1\_verify() + +```ts +ecdsa_secp256r1_verify( + hashed_msg, + public_key_x_bytes, + public_key_y_bytes, + signature): boolean +``` + +Verifies a ECDSA signature over the secp256r1 curve. 
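+
+A minimal usage sketch (the byte arrays below are placeholders you would supply yourself):
+
+```typescript
+import { ecdsa_secp256r1_verify } from '@noir-lang/noir_js';
+
+// hashedMsg, pubKeyX, pubKeyY and signature are Uint8Arrays you already hold.
+const isValid = ecdsa_secp256r1_verify(hashedMsg, pubKeyX, pubKeyY, signature);
+```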
+ +## Parameters + +| Parameter | Type | Description | +| :------ | :------ | :------ | +| `hashed_msg` | `Uint8Array` | | +| `public_key_x_bytes` | `Uint8Array` | | +| `public_key_y_bytes` | `Uint8Array` | | +| `signature` | `Uint8Array` | | + +## Returns + +`boolean` + +*** + +Generated using [typedoc-plugin-markdown](https://www.npmjs.com/package/typedoc-plugin-markdown) and [TypeDoc](https://typedoc.org/) diff --git a/docs/docs/reference/NoirJS/noir_js/functions/keccak256.md b/docs/docs/reference/NoirJS/noir_js/functions/keccak256.md new file mode 100644 index 00000000000..d10f155ce86 --- /dev/null +++ b/docs/docs/reference/NoirJS/noir_js/functions/keccak256.md @@ -0,0 +1,21 @@ +# keccak256() + +```ts +keccak256(inputs): Uint8Array +``` + +Calculates the Keccak256 hash of the input bytes + +## Parameters + +| Parameter | Type | Description | +| :------ | :------ | :------ | +| `inputs` | `Uint8Array` | | + +## Returns + +`Uint8Array` + +*** + +Generated using [typedoc-plugin-markdown](https://www.npmjs.com/package/typedoc-plugin-markdown) and [TypeDoc](https://typedoc.org/) diff --git a/docs/docs/reference/NoirJS/noir_js/functions/sha256.md b/docs/docs/reference/NoirJS/noir_js/functions/sha256.md new file mode 100644 index 00000000000..6ba4ecac022 --- /dev/null +++ b/docs/docs/reference/NoirJS/noir_js/functions/sha256.md @@ -0,0 +1,21 @@ +# sha256() + +```ts +sha256(inputs): Uint8Array +``` + +Calculates the SHA256 hash of the input bytes + +## Parameters + +| Parameter | Type | Description | +| :------ | :------ | :------ | +| `inputs` | `Uint8Array` | | + +## Returns + +`Uint8Array` + +*** + +Generated using [typedoc-plugin-markdown](https://www.npmjs.com/package/typedoc-plugin-markdown) and [TypeDoc](https://typedoc.org/) diff --git a/docs/docs/reference/NoirJS/noir_js/functions/xor.md b/docs/docs/reference/NoirJS/noir_js/functions/xor.md new file mode 100644 index 00000000000..8d762b895d3 --- /dev/null +++ b/docs/docs/reference/NoirJS/noir_js/functions/xor.md @@ -0,0 +1,22 @@ +# xor() + +```ts +xor(lhs, rhs): string +``` + +Performs a bitwise XOR operation between `lhs` and `rhs` + +## Parameters + +| Parameter | Type | Description | +| :------ | :------ | :------ | +| `lhs` | `string` | | +| `rhs` | `string` | | + +## Returns + +`string` + +*** + +Generated using [typedoc-plugin-markdown](https://www.npmjs.com/package/typedoc-plugin-markdown) and [TypeDoc](https://typedoc.org/) diff --git a/docs/docs/reference/NoirJS/noir_js/index.md b/docs/docs/reference/NoirJS/noir_js/index.md new file mode 100644 index 00000000000..d600e21b299 --- /dev/null +++ b/docs/docs/reference/NoirJS/noir_js/index.md @@ -0,0 +1,37 @@ +# noir_js + +## Exports + +### Classes + +| Class | Description | +| :------ | :------ | +| [Noir](classes/Noir.md) | - | + +### Type Aliases + +| Type alias | Description | +| :------ | :------ | +| [CompiledCircuit](type-aliases/CompiledCircuit.md) | - | +| [ForeignCallHandler](type-aliases/ForeignCallHandler.md) | A callback which performs an foreign call and returns the response. 
| +| [ForeignCallInput](type-aliases/ForeignCallInput.md) | - | +| [ForeignCallOutput](type-aliases/ForeignCallOutput.md) | - | +| [InputMap](type-aliases/InputMap.md) | - | +| [ProofData](type-aliases/ProofData.md) | - | +| [WitnessMap](type-aliases/WitnessMap.md) | - | + +### Functions + +| Function | Description | +| :------ | :------ | +| [and](functions/and.md) | Performs a bitwise AND operation between `lhs` and `rhs` | +| [blake2s256](functions/blake2s256.md) | Calculates the Blake2s256 hash of the input bytes | +| [ecdsa\_secp256k1\_verify](functions/ecdsa_secp256k1_verify.md) | Verifies a ECDSA signature over the secp256k1 curve. | +| [ecdsa\_secp256r1\_verify](functions/ecdsa_secp256r1_verify.md) | Verifies a ECDSA signature over the secp256r1 curve. | +| [keccak256](functions/keccak256.md) | Calculates the Keccak256 hash of the input bytes | +| [sha256](functions/sha256.md) | Calculates the SHA256 hash of the input bytes | +| [xor](functions/xor.md) | Performs a bitwise XOR operation between `lhs` and `rhs` | + +*** + +Generated using [typedoc-plugin-markdown](https://www.npmjs.com/package/typedoc-plugin-markdown) and [TypeDoc](https://typedoc.org/) diff --git a/docs/docs/reference/NoirJS/noir_js/type-aliases/CompiledCircuit.md b/docs/docs/reference/NoirJS/noir_js/type-aliases/CompiledCircuit.md new file mode 100644 index 00000000000..34e0dd04205 --- /dev/null +++ b/docs/docs/reference/NoirJS/noir_js/type-aliases/CompiledCircuit.md @@ -0,0 +1,20 @@ +# CompiledCircuit + +```ts +type CompiledCircuit: object; +``` + +## Description + +The representation of a compiled circuit + +## Type declaration + +| Member | Type | Description | +| :------ | :------ | :------ | +| `abi` | `Abi` | **Description**

ABI representation of the circuit | +| `bytecode` | `string` | **Description**

The bytecode of the circuit | + +*** + +Generated using [typedoc-plugin-markdown](https://www.npmjs.com/package/typedoc-plugin-markdown) and [TypeDoc](https://typedoc.org/) diff --git a/docs/docs/reference/NoirJS/noir_js/type-aliases/ForeignCallHandler.md b/docs/docs/reference/NoirJS/noir_js/type-aliases/ForeignCallHandler.md new file mode 100644 index 00000000000..812b8b16481 --- /dev/null +++ b/docs/docs/reference/NoirJS/noir_js/type-aliases/ForeignCallHandler.md @@ -0,0 +1,24 @@ +# ForeignCallHandler + +```ts +type ForeignCallHandler: (name, inputs) => Promise; +``` + +A callback which performs an foreign call and returns the response. + +## Parameters + +| Parameter | Type | Description | +| :------ | :------ | :------ | +| `name` | `string` | The identifier for the type of foreign call being performed. | +| `inputs` | [`ForeignCallInput`](ForeignCallInput.md)[] | An array of hex encoded inputs to the foreign call. | + +## Returns + +`Promise`\<[`ForeignCallOutput`](ForeignCallOutput.md)[]\> + +outputs - An array of hex encoded outputs containing the results of the foreign call. + +*** + +Generated using [typedoc-plugin-markdown](https://www.npmjs.com/package/typedoc-plugin-markdown) and [TypeDoc](https://typedoc.org/) diff --git a/docs/docs/reference/NoirJS/noir_js/type-aliases/ForeignCallInput.md b/docs/docs/reference/NoirJS/noir_js/type-aliases/ForeignCallInput.md new file mode 100644 index 00000000000..dd95809186a --- /dev/null +++ b/docs/docs/reference/NoirJS/noir_js/type-aliases/ForeignCallInput.md @@ -0,0 +1,9 @@ +# ForeignCallInput + +```ts +type ForeignCallInput: string[]; +``` + +*** + +Generated using [typedoc-plugin-markdown](https://www.npmjs.com/package/typedoc-plugin-markdown) and [TypeDoc](https://typedoc.org/) diff --git a/docs/docs/reference/NoirJS/noir_js/type-aliases/ForeignCallOutput.md b/docs/docs/reference/NoirJS/noir_js/type-aliases/ForeignCallOutput.md new file mode 100644 index 00000000000..b71fb78a946 --- /dev/null +++ b/docs/docs/reference/NoirJS/noir_js/type-aliases/ForeignCallOutput.md @@ -0,0 +1,9 @@ +# ForeignCallOutput + +```ts +type ForeignCallOutput: string | string[]; +``` + +*** + +Generated using [typedoc-plugin-markdown](https://www.npmjs.com/package/typedoc-plugin-markdown) and [TypeDoc](https://typedoc.org/) diff --git a/docs/docs/reference/NoirJS/noir_js/type-aliases/InputMap.md b/docs/docs/reference/NoirJS/noir_js/type-aliases/InputMap.md new file mode 100644 index 00000000000..c714e999d93 --- /dev/null +++ b/docs/docs/reference/NoirJS/noir_js/type-aliases/InputMap.md @@ -0,0 +1,13 @@ +# InputMap + +```ts +type InputMap: object; +``` + +## Index signature + + \[`key`: `string`\]: `InputValue` + +*** + +Generated using [typedoc-plugin-markdown](https://www.npmjs.com/package/typedoc-plugin-markdown) and [TypeDoc](https://typedoc.org/) diff --git a/docs/docs/reference/NoirJS/noir_js/type-aliases/ProofData.md b/docs/docs/reference/NoirJS/noir_js/type-aliases/ProofData.md new file mode 100644 index 00000000000..3eb360a78f1 --- /dev/null +++ b/docs/docs/reference/NoirJS/noir_js/type-aliases/ProofData.md @@ -0,0 +1,20 @@ +# ProofData + +```ts +type ProofData: object; +``` + +## Description + +The representation of a proof + +## Type declaration + +| Member | Type | Description | +| :------ | :------ | :------ | +| `proof` | `Uint8Array` | **Description**

A byte array representing the proof | + | `publicInputs` | `WitnessMap` | **Description**

Public inputs of a proof | + +*** + +Generated using [typedoc-plugin-markdown](https://www.npmjs.com/package/typedoc-plugin-markdown) and [TypeDoc](https://typedoc.org/) diff --git a/docs/docs/reference/NoirJS/noir_js/type-aliases/WitnessMap.md b/docs/docs/reference/NoirJS/noir_js/type-aliases/WitnessMap.md new file mode 100644 index 00000000000..258c46f9d0c --- /dev/null +++ b/docs/docs/reference/NoirJS/noir_js/type-aliases/WitnessMap.md @@ -0,0 +1,9 @@ +# WitnessMap + +```ts +type WitnessMap: Map; +``` + +*** + +Generated using [typedoc-plugin-markdown](https://www.npmjs.com/package/typedoc-plugin-markdown) and [TypeDoc](https://typedoc.org/) diff --git a/docs/docs/reference/NoirJS/noir_js/typedoc-sidebar.cjs b/docs/docs/reference/NoirJS/noir_js/typedoc-sidebar.cjs new file mode 100644 index 00000000000..fe2629ddc9f --- /dev/null +++ b/docs/docs/reference/NoirJS/noir_js/typedoc-sidebar.cjs @@ -0,0 +1,4 @@ +// @ts-check +/** @type {import('@docusaurus/plugin-content-docs').SidebarsConfig} */ +const typedocSidebar = { items: [{"type":"category","label":"Classes","items":[{"type":"doc","id":"reference/NoirJS/noir_js/classes/Noir","label":"Noir"}]},{"type":"category","label":"Type Aliases","items":[{"type":"doc","id":"reference/NoirJS/noir_js/type-aliases/CompiledCircuit","label":"CompiledCircuit"},{"type":"doc","id":"reference/NoirJS/noir_js/type-aliases/ForeignCallHandler","label":"ForeignCallHandler"},{"type":"doc","id":"reference/NoirJS/noir_js/type-aliases/ForeignCallInput","label":"ForeignCallInput"},{"type":"doc","id":"reference/NoirJS/noir_js/type-aliases/ForeignCallOutput","label":"ForeignCallOutput"},{"type":"doc","id":"reference/NoirJS/noir_js/type-aliases/InputMap","label":"InputMap"},{"type":"doc","id":"reference/NoirJS/noir_js/type-aliases/ProofData","label":"ProofData"},{"type":"doc","id":"reference/NoirJS/noir_js/type-aliases/WitnessMap","label":"WitnessMap"}]},{"type":"category","label":"Functions","items":[{"type":"doc","id":"reference/NoirJS/noir_js/functions/and","label":"and"},{"type":"doc","id":"reference/NoirJS/noir_js/functions/blake2s256","label":"blake2s256"},{"type":"doc","id":"reference/NoirJS/noir_js/functions/ecdsa_secp256k1_verify","label":"ecdsa_secp256k1_verify"},{"type":"doc","id":"reference/NoirJS/noir_js/functions/ecdsa_secp256r1_verify","label":"ecdsa_secp256r1_verify"},{"type":"doc","id":"reference/NoirJS/noir_js/functions/keccak256","label":"keccak256"},{"type":"doc","id":"reference/NoirJS/noir_js/functions/sha256","label":"sha256"},{"type":"doc","id":"reference/NoirJS/noir_js/functions/xor","label":"xor"}]}]}; +module.exports = typedocSidebar.items; \ No newline at end of file diff --git a/docs/docs/reference/_category_.json b/docs/docs/reference/_category_.json new file mode 100644 index 00000000000..5b6a20a609a --- /dev/null +++ b/docs/docs/reference/_category_.json @@ -0,0 +1,5 @@ +{ + "position": 4, + "collapsible": true, + "collapsed": true +} diff --git a/docs/docs/reference/nargo_commands.md b/docs/docs/reference/nargo_commands.md new file mode 100644 index 00000000000..ff3dee8973f --- /dev/null +++ b/docs/docs/reference/nargo_commands.md @@ -0,0 +1,250 @@ +--- +title: Nargo +description: + Noir CLI Commands for Noir Prover and Verifier to create, execute, prove and verify programs, + generate Solidity verifier smart contract and compile into JSON file containing ACIR + representation and ABI of circuit. 
+keywords: + [ + Nargo, + Noir CLI, + Noir Prover, + Noir Verifier, + generate Solidity verifier, + compile JSON file, + ACIR representation, + ABI of circuit, + TypeScript, + ] +sidebar_position: 0 +--- + +## General options + +| Option | Description | +| -------------------- | -------------------------------------------------- | +| `--show-ssa` | Emit debug information for the intermediate SSA IR | +| `--deny-warnings` | Quit execution when warnings are emitted | +| `--silence-warnings` | Suppress warnings | +| `-h, --help` | Print help | + +## `nargo help [subcommand]` + +Prints the list of available commands or specific information of a subcommand. + +_Arguments_ + +| Argument | Description | +| -------------- | -------------------------------------------- | +| `` | The subcommand whose help message to display | + +## `nargo backend` + +Installs and selects custom backends used to generate and verify proofs. + +### Commands + +| Command | Description | +| ----------- | --------------------------------------------------------- | +| `current` | Prints the name of the currently active backend | +| `ls` | Prints the list of currently installed backends | +| `use` | Select the backend to use | +| `install` | Install a new backend from a URL | +| `uninstall` | Uninstalls a backend | +| `help` | Print this message or the help of the given subcommand(s) | + +### Options + +| Option | Description | +| ------------ | ----------- | +| `-h, --help` | Print help | + +## `nargo check` + +Generate the `Prover.toml` and `Verifier.toml` files for specifying prover and verifier in/output +values of the Noir program respectively. + +### Options + +| Option | Description | +| --------------------- | ------------------------------------- | +| `--package ` | The name of the package to check | +| `--workspace` | Check all packages in the workspace | +| `--print-acir` | Display the ACIR for compiled circuit | +| `--deny-warnings` | Treat all warnings as errors | +| `--silence-warnings` | Suppress warnings | +| `-h, --help` | Print help | + +### `nargo codegen-verifier` + +Generate a Solidity verifier smart contract for the program. + +### Options + +| Option | Description | +| --------------------- | ------------------------------------- | +| `--package ` | The name of the package to codegen | +| `--workspace` | Codegen all packages in the workspace | +| `--print-acir` | Display the ACIR for compiled circuit | +| `--deny-warnings` | Treat all warnings as errors | +| `--silence-warnings` | Suppress warnings | +| `-h, --help` | Print help | + +## `nargo compile` + +Compile the program into a JSON build artifact file containing the ACIR representation and the ABI +of the circuit. This build artifact can then be used to generate and verify proofs. + +You can also use "build" as an alias for compile (e.g. `nargo build`). + +### Options + +| Option | Description | +| --------------------- | ------------------------------------------------------------ | +| `--package ` | The name of the package to compile | +| `--workspace` | Compile all packages in the workspace | +| `--print-acir` | Display the ACIR for compiled circuit | +| `--deny-warnings` | Treat all warnings as errors | +| `--silence-warnings` | Suppress warnings | +| `-h, --help` | Print help | + +## `nargo new ` + +Creates a new Noir project in a new folder. 
+ +**Arguments** + +| Argument | Description | +| -------- | -------------------------------- | +| `` | The path to save the new project | + +### Options + +| Option | Description | +| --------------- | ----------------------------------------------------- | +| `--name ` | Name of the package [default: package directory name] | +| `--lib` | Use a library template | +| `--bin` | Use a binary template [default] | +| `--contract` | Use a contract template | +| `-h, --help` | Print help | + +## `nargo init` + +Creates a new Noir project in the current directory. + +### Options + +| Option | Description | +| --------------- | ----------------------------------------------------- | +| `--name ` | Name of the package [default: current directory name] | +| `--lib` | Use a library template | +| `--bin` | Use a binary template [default] | +| `--contract` | Use a contract template | +| `-h, --help` | Print help | + +## `nargo execute [WITNESS_NAME]` + +Runs the Noir program and prints its return value. + +**Arguments** + +| Argument | Description | +| ---------------- | ----------------------------------------- | +| `[WITNESS_NAME]` | Write the execution witness to named file | + +### Options + +| Option | Description | +| --------------------------------- | ------------------------------------------------------------------------------------ | +| `-p, --prover-name ` | The name of the toml file which contains the inputs for the prover [default: Prover] | +| `--package ` | The name of the package to execute | +| `--workspace` | Execute all packages in the workspace | +| `--print-acir` | Display the ACIR for compiled circuit | +| `--deny-warnings` | Treat all warnings as errors | +| `--silence-warnings` | Suppress warnings | +| `-h, --help` | Print help | + +_Usage_ + +The inputs to the circuit are read from the `Prover.toml` file generated by `nargo check`, which +must be filled in. + +To save the witness to file, run the command with a value for the `WITNESS_NAME` argument. A +`.tr` file will then be saved in the `./target` folder. + +## `nargo prove` + +Creates a proof for the program. + +### Options + +| Option | Description | +| ------------------------------------- | ---------------------------------------------------------------------------------------- | +| `-p, --prover-name ` | The name of the toml file which contains the inputs for the prover [default: Prover] | +| `-v, --verifier-name ` | The name of the toml file which contains the inputs for the verifier [default: Verifier] | +| `--verify` | Verify proof after proving | +| `--package ` | The name of the package to prove | +| `--workspace` | Prove all packages in the workspace | +| `--print-acir` | Display the ACIR for compiled circuit | +| `--deny-warnings` | Treat all warnings as errors | +| `--silence-warnings` | Suppress warnings | +| `-h, --help` | Print help | + +## `nargo verify` + +Given a proof and a program, verify whether the proof is valid. 
+ +### Options + +| Option | Description | +| ------------------------------------- | ---------------------------------------------------------------------------------------- | +| `-v, --verifier-name ` | The name of the toml file which contains the inputs for the verifier [default: Verifier] | +| `--package ` | The name of the package to verify | +| `--workspace` | Verify all packages in the workspace | +| `--print-acir` | Display the ACIR for compiled circuit | +| `--deny-warnings` | Treat all warnings as errors | +| `--silence-warnings` | Suppress warnings | +| `-h, --help` | Print help | + +## `nargo test [TEST_NAME]` + +Nargo will automatically compile and run any functions which have the decorator `#[test]` on them if +you run `nargo test`. To print `println` statements in tests, use the `--show-output` flag. + +Takes an optional `--exact` flag which allows you to select tests based on an exact name. + +See an example on the [testing page](../getting_started/tooling/testing.md). + +### Options + +| Option | Description | +| --------------------- | -------------------------------------- | +| `--show-output` | Display output of `println` statements | +| `--exact` | Only run tests that match exactly | +| `--package ` | The name of the package to test | +| `--workspace` | Test all packages in the workspace | +| `--print-acir` | Display the ACIR for compiled circuit | +| `--deny-warnings` | Treat all warnings as errors | +| `--silence-warnings` | Suppress warnings | +| `-h, --help` | Print help | + +## `nargo info` + +Prints a table containing the information of the package. + +Currently the table provide + +1. The number of ACIR opcodes +2. The final number gates in the circuit used by a backend + +If the file contains a contract the table will provide the +above information about each function of the contract. + +## `nargo lsp` + +Start a long-running Language Server process that communicates over stdin/stdout. +Usually this command is not run by a user, but instead will be run by a Language Client, such as [vscode-noir](https://github.com/noir-lang/vscode-noir). + +## `nargo fmt` + +Automatically formats your Noir source code based on the default formatting settings. diff --git a/docs/docs/standard_library/black_box_fns.md b/docs/docs/standard_library/black_box_fns.md deleted file mode 100644 index 1dfabfe8f22..00000000000 --- a/docs/docs/standard_library/black_box_fns.md +++ /dev/null @@ -1,46 +0,0 @@ ---- -title: Black Box Functions -description: Black box functions are functions in Noir that rely on backends implementing support for specialized constraints. -keywords: [noir, black box functions] ---- - -Black box functions are functions in Noir that rely on backends implementing support for specialized constraints. This makes certain zk-snark unfriendly computations cheaper than if they were implemented in Noir. - -:::warning - -It is likely that not all backends will support a particular black box function. - -::: - -Because it is not guaranteed that all backends will support black box functions, it is possible that certain Noir programs won't compile against a particular backend if they use an unsupported black box function. It is possible to fallback to less efficient implementations written in Noir/ACIR in some cases. - -Black box functions are specified with the `#[foreign(black_box_fn)]` attribute. 
For example, the SHA256 function in the Noir [source code](https://github.com/noir-lang/noir/blob/v0.5.1/noir_stdlib/src/hash.nr) looks like: - -```rust -#[foreign(sha256)] -fn sha256(_input : [u8; N]) -> [u8; 32] {} -``` - -## Function list - -Here is a list of the current black box functions that are supported by UltraPlonk: - -- AES -- [SHA256](./cryptographic_primitives/hashes#sha256) -- [Schnorr signature verification](./cryptographic_primitives/schnorr) -- [Blake2s](./cryptographic_primitives/hashes#blake2s) -- [Pedersen Hash](./cryptographic_primitives/hashes#pedersen_hash) -- [Pedersen Commitment](./cryptographic_primitives/hashes#pedersen_commitment) -- [HashToField128Security](./cryptographic_primitives/hashes#hash_to_field) -- [ECDSA signature verification](./cryptographic_primitives/ecdsa_sig_verification) -- [Fixed base scalar multiplication](./cryptographic_primitives/scalar) -- [Compute merkle root](./merkle_trees#compute_merkle_root) -- AND -- XOR -- RANGE -- [Keccak256](./cryptographic_primitives/hashes#keccak256) -- [Recursive proof verification](./recursion) - -Most black box functions are included as part of the Noir standard library, however `AND`, `XOR` and `RANGE` are used as part of the Noir language syntax. For instance, using the bitwise operator `&` will invoke the `AND` black box function. To ensure compatibility across backends, the ACVM has fallback implementations of `AND`, `XOR` and `RANGE` defined in its standard library which it can seamlessly fallback to if the backend doesn't support them. - -You can view the black box functions defined in the ACVM code [here](https://github.com/noir-lang/acvm/blob/acir-v0.12.0/acir/src/circuit/black_box_functions.rs). diff --git a/docs/docs/standard_library/logging.md b/docs/docs/standard_library/logging.md deleted file mode 100644 index 7e2fd9b9aff..00000000000 --- a/docs/docs/standard_library/logging.md +++ /dev/null @@ -1,62 +0,0 @@ ---- -title: Logging -description: - Learn how to use the println statement for debugging in Noir with this tutorial. Understand the - basics of logging in Noir and how to implement it in your code. -keywords: - [ - noir logging, - println statement, - debugging in noir, - noir std library, - logging tutorial, - basic logging in noir, - noir logging implementation, - noir debugging techniques, - rust, - ] ---- - -The standard library provides a familiar `println` statement you can use. Despite being a limited -implementation of rust's `println!` macro, this construct can be useful for debugging. - -You can print the output of println statements in your Noir code by using the `nargo execute` command or the `--show-output` flag when using `nargo test` (provided there are println statements in your tests). - -It is recommended to use `nargo execute` if you want to debug failing constrains with `println` statements. This is due to every input in a test being a constant rather than a witness, so we issue an error during compilation while we only print during execution (which comes after compilation). `println` will not work for failed constraints caught at compile time. - -The `println` statement is unconstrained, so it works for outputting integers, fields, strings, and even structs or expressions. 
For example: - -```rust -use dep::std; - -struct Person { - age : Field, - height : Field, -} - -fn main(age : Field, height : Field) { - let person = Person { age : age, height : height }; - std::println(person); - std::println(age + height); - std::println("Hello world!"); -} - -``` - -You can print multiple different types in the same statement and string as well as a new "fmtstr" type. A `fmtstr` can be specified in the same way as a normal string it just should be prepended with an "f" character: - -```rust - let fmt_str = f"i: {i}, j: {j}"; - std::println(fmt_str); - - let s = myStruct { y: x, x: y }; - std::println(s); - - std::println(f"i: {i}, s: {s}"); - - std::println(x); - std::println([x, y]); - - let foo = fooStruct { my_struct: s, foo: 15 }; - std::println(f"s: {s}, foo: {foo}"); -``` diff --git a/docs/docs/tutorials/noirjs_app.md b/docs/docs/tutorials/noirjs_app.md new file mode 100644 index 00000000000..4293aa053fc --- /dev/null +++ b/docs/docs/tutorials/noirjs_app.md @@ -0,0 +1,278 @@ +--- +title: Building a web app with NoirJS +description: Learn how to setup a new app that uses Noir to generate and verify zero-knowledge SNARK proofs in a typescript or javascript environment. +keywords: [how to, guide, javascript, typescript, noir, barretenberg, zero-knowledge, proofs, app] +sidebar_position: 0 +--- + +NoirJS is a set of packages meant to work both in a browser and a server environment. In this tutorial, we will build a simple web app using them. From here, you should get an idea on how to proceed with your own Noir projects! + +You can find the complete app code for this guide [here](https://github.com/noir-lang/tiny-noirjs-app). + +## Setup + +:::note + +Feel free to use whatever versions, just keep in mind that Nargo and the NoirJS packages are meant to be in sync. For example, Nargo 0.19.x matches `noir_js@0.19.x`, etc. + +In this guide, we will be pinned to 0.19.4. + +::: + +Before we start, we want to make sure we have Node and Nargo installed. + +We start by opening a terminal and executing `node --version`. If we don't get an output like `v20.10.0`, that means node is not installed. Let's do that by following the handy [nvm guide](https://github.com/nvm-sh/nvm?tab=readme-ov-file#install--update-script). + +As for `Nargo`, we can follow the the [Nargo guide](../getting_started/installation/index.md) to install it. If you're lazy, just paste this on a terminal and run `noirup`: + +```sh +curl -L https://raw.githubusercontent.com/noir-lang/noirup/main/install | bash +``` + +Easy enough. Onwards! + +## Our project + +ZK is a powerful technology. An app that doesn't reveal one of the inputs to *anyone* is almost unbelievable, yet Noir makes it as easy as a single line of code. + +In fact, it's so simple that it comes nicely packaged in `nargo`. Let's do that! + +### Nargo + +Run: + +```nargo new circuit``` + +And... That's about it. Your program is ready to be compiled and run. + +To compile, let's `cd` into the `circuit` folder to enter our project, and call: + +```nargo compile``` + +This compiles our circuit into `json` format and add it to a new `target` folder. + +:::info + +At this point in the tutorial, your folder structure should look like this: + +```tree +. 
+└── circuit <---- our working directory + ├── Nargo.toml + ├── src + │ └── main.nr + └── target + └── circuit.json +``` + +::: + +### Node and Vite + +If you want to explore Nargo, feel free to go on a side-quest now and follow the steps in the +[getting started](../getting_started/create_a_project) guide. However, we want our app to run on the browser, so we need Vite. + +Vite is a powerful tool to generate static websites. While it provides all kinds of features, let's just go barebones with some good old vanilla JS. + +To do this this, go back to the previous folder (`cd ..`) and create a new vite project by running `npm create vite` and choosing "Vanilla" and "Javascript". + +You should see `vite-project` appear in your root folder. This seems like a good time to `cd` into it and install our NoirJS packages: + +```bash +npm i @noir-lang/backend_barretenberg@0.19.4 @noir-lang/noir_js@0.19.4 +``` + +:::info + +At this point in the tutorial, your folder structure should look like this: + +```tree +. +└── circuit + └── ...etc... +└── vite-project <---- our working directory + └── ...etc... +``` + +::: + +#### Some cleanup + +`npx create vite` is amazing but it creates a bunch of files we don't really need for our simple example. Actually, let's just delete everything except for `index.html`, `main.js` and `package.json`. I feel lighter already. + +![my heart is ready for you, noir.js](../../static/img/memes/titanic.jpeg) + +## HTML + +Our app won't run like this, of course. We need some working HTML, at least. Let's open our broken-hearted `index.html` and replace everything with this code snippet: + +```html + + + + + + +

+<!DOCTYPE html>
+<html lang="en">
+  <head>
+    <title>Noir app</title>
+  </head>
+  <body>
+    <script type="module" src="/main.js"></script>
+    <h1>Noir app</h1>
+    <div>
+      <input id="guessInput" type="number" placeholder="Guess the number" />
+      <button id="submitGuess">Submit Guess</button>
+    </div>
+    <div id="logs">
+      <h2>Logs</h2>
+    </div>
+    <div id="results">
+      <h2>Proof</h2>
+    </div>
+  </body>
+</html>
+ + +``` + +It *could* be a beautiful UI... Depending on which universe you live in. + +## Some good old vanilla Javascript + +Our love for Noir needs undivided attention, so let's just open `main.js` and delete everything (this is where the romantic scenery becomes a bit creepy). + +Start by pasting in this boilerplate code: + +```js +const setup = async () => { + await Promise.all([ + import("@noir-lang/noirc_abi").then(module => + module.default(new URL("@noir-lang/noirc_abi/web/noirc_abi_wasm_bg.wasm", import.meta.url).toString()) + ), + import("@noir-lang/acvm_js").then(module => + module.default(new URL("@noir-lang/acvm_js/web/acvm_js_bg.wasm", import.meta.url).toString()) + ) + ]); +} + +function display(container, msg) { + const c = document.getElementById(container); + const p = document.createElement('p'); + p.textContent = msg; + c.appendChild(p); +} + +document.getElementById('submitGuess').addEventListener('click', async () => { + try { + // here's where love happens + } catch(err) { + display("logs", "Oh 💔 Wrong guess") + } +}); + +``` + +The display function doesn't do much. We're simply manipulating our website to see stuff happening. For example, if the proof fails, it will simply log a broken heart 😢 + +As for the `setup` function, it's just a sad reminder that dealing with `wasm` on the browser is not as easy as it should. Just copy, paste, and forget. + +:::info + +At this point in the tutorial, your folder structure should look like this: + +```tree +. +└── circuit + └── ...same as above +└── vite-project + ├── main.js + ├── package.json + └── index.html +``` + +You'll see other files and folders showing up (like `package-lock.json`, `node_modules`) but you shouldn't have to care about those. + +::: + +## Some NoirJS + +We're starting with the good stuff now. If you've compiled the circuit as described above, you should have a `json` file we want to import at the very top of our `main.js` file: + +```ts +import circuit from '../circuit/target/circuit.json'; +``` + +[Noir is backend-agnostic](../index.md#whats-new-about-noir). We write Noir, but we also need a proving backend. That's why we need to import and instantiate the two dependencies we installed above: `BarretenbergBackend` and `Noir`. Let's import them right below: + +```js +import { BarretenbergBackend } from '@noir-lang/backend_barretenberg'; +import { Noir } from '@noir-lang/noir_js'; +``` + +And instantiate them inside our try-catch block: + +```ts +// try { +const backend = new BarretenbergBackend(circuit); +const noir = new Noir(circuit, backend); +// } +``` + +:::note + +For the remainder of the tutorial, everything will be happening inside the `try` block + +::: + +## Our app + +Now for the app itself. We're capturing whatever is in the input when people press the submit button. Just add this: + +```js +const x = parseInt(document.getElementById('guessInput').value); +const input = { x, y: 2 }; +``` + +Now we're ready to prove stuff! Let's feed some inputs to our circuit and calculate the proof: + +```js +await setup(); // let's squeeze our wasm inits here + +display('logs', 'Generating proof... ⌛'); +const proof = await noir.generateFinalProof(input); +display('logs', 'Generating proof... ✅'); +display('results', proof.proof); +``` + +You're probably eager to see stuff happening, so go and run your app now! + +From your terminal, run `npm run dev`. If it doesn't open a browser for you, just visit `localhost:5173`. You should now see the worst UI ever, with an ugly input. 
+ +![Getting Started 0](@site/static/img/noir_getting_started_1.png) + +Now, our circuit says `fn main(x: Field, y: pub Field)`. This means only the `y` value is public, and it's hardcoded above: `input = { x, y: 2 }`. In other words, you won't need to send your secret`x` to the verifier! + +By inputting any number other than 2 in the input box and clicking "submit", you should get a valid proof. Otherwise the proof won't even generate correctly. By the way, if you're human, you shouldn't be able to understand anything on the "proof" box. That's OK. We like you, human ❤️. + +## Verifying + +Time to celebrate, yes! But we shouldn't trust machines so blindly. Let's add these lines to see our proof being verified: + +```js +display('logs', 'Verifying proof... ⌛'); +const verification = await noir.verifyFinalProof(proof); +if (verification) display('logs', 'Verifying proof... ✅'); +``` + +You have successfully generated a client-side Noir web app! + +![coded app without math knowledge](../../static/img/memes/flextape.jpeg) + +## Further Reading + +You can see how noirjs is used in a full stack Next.js hardhat application in the [noir-starter repo here](https://github.com/noir-lang/noir-starter/tree/main/next-hardhat). The example shows how to calculate a proof in the browser and verify it with a deployed Solidity verifier contract from noirjs. + +You should also check out the more advanced examples in the [noir-examples repo](https://github.com/noir-lang/noir-examples), where you'll find reference usage for some cool apps. diff --git a/docs/docusaurus.config.js b/docs/docusaurus.config.js deleted file mode 100644 index 8f62df3d0e9..00000000000 --- a/docs/docusaurus.config.js +++ /dev/null @@ -1,215 +0,0 @@ -// @ts-check -// Note: type annotations allow type checking and IDEs autocompletion - -const lightCodeTheme = require('prism-react-renderer/themes/github'); -const darkCodeTheme = require('prism-react-renderer/themes/dracula'); - -const math = require('remark-math'); -const katex = require('rehype-katex'); - -/** @type {import('@docusaurus/types').Config} */ -const config = { - title: 'Noir Documentation', - tagline: 'The Universal ZK Circuit Language', - favicon: 'img/favicon.ico', - url: 'https://noir-lang.org', - // Set the // pathname under which your site is served - // For GitHub pages deployment, it is often '//' - baseUrl: '/', - onBrokenLinks: 'throw', - onBrokenMarkdownLinks: 'throw', - - // Even if you don't use internalization, you can use this field to set useful - // metadata like html lang. For example, if your site is Chinese, you may want - // to replace "en" with "zh-Hans". 
- i18n: { - defaultLocale: 'en', - locales: ['en'], - }, - - presets: [ - [ - '@docusaurus/preset-classic', - { - // gtag: { - // trackingID: 'G-SZQHEQZK3L', - // anonymizeIP: true, - // }, - docs: { - sidebarPath: require.resolve('./sidebars.js'), - routeBasePath: '/', - remarkPlugins: [math], - rehypePlugins: [katex], - versions: { - current: { - label: 'dev', - path: 'dev', - }, - }, - editUrl: ({ versionDocsDirPath, docPath }) => - `https://github.com/noir-lang/noir/edit/master/docs/${versionDocsDirPath}/${docPath}`, - }, - blog: false, - theme: { - customCss: require.resolve('./src/css/custom.css'), - }, - }, - ], - ], - - themeConfig: - /** @type {import('@docusaurus/preset-classic').ThemeConfig} */ - { - // Replace with your project's social card - navbar: { - logo: { - alt: 'Noir Logo', - src: 'img/logo.svg', - srcDark: 'img/logoDark.svg', - href: '/', - }, - items: [ - { - href: 'https://github.com/noir-lang/docs', - label: 'GitHub', - position: 'right', - }, - { - type: 'docsVersionDropdown', - position: 'left', - dropdownActiveClassDisabled: true, - }, - ], - }, - metadata: [ - { - name: 'Noir', - content: 'noir, programming, language, documentation, zk, zero-knowledge, l2, crypto, layer2, ethereum', - }, - ], - footer: { - style: 'dark', - links: [ - { - title: 'Community', - items: [ - { - label: 'Noir Forum', - href: 'https://discourse.aztec.network/c/noir/7', - }, - { - label: 'Twitter', - href: 'https://twitter.com/NoirLang', - }, - { - label: 'Discord', - href: 'https://discord.gg/JtqzkdeQ6G', - }, - ], - }, - { - title: 'Code', - items: [ - { - label: 'Noir GitHub', - href: 'https://github.com/noir-lang', - }, - { - label: 'Docs GitHub', - href: 'https://github.com/noir-lang/docs', - }, - ], - }, - ], - copyright: `Noir will be dual licensed under MIT/Apache (Version 2.0).`, - }, - prism: { - theme: lightCodeTheme, - darkTheme: darkCodeTheme, - additionalLanguages: ['rust', 'powershell', 'solidity', 'toml'], - }, - stylesheets: [ - { - href: 'https://cdn.jsdelivr.net/npm/katex@0.13.24/dist/katex.min.css', - type: 'text/css', - integrity: 'sha384-odtC+0UGzzFL/6PNoE8rX/SPcQDXBJ+uRepguP4QkPCm2LBxH3FA3y+fKSiJ+AmM', - crossorigin: 'anonymous', - }, - ], - algolia: { - // The application ID provided by Algolia - appId: '97APAVUL6H', - - // Public API key: it is safe to commit it - apiKey: 'b9b94d2f1c58f7d509f0bc1f13b381fb', - - indexName: 'noir-lang', - }, - }, - plugins: [ - [ - 'docusaurus-plugin-typedoc', - { - id: 'noir_js', - entryPoints: ['../tooling/noir_js/src/index.ts'], - tsconfig: '../tooling/noir_js/tsconfig.json', - entryPointStrategy: 'resolve', - out: 'docs/noir_js/reference/noir_js', - plugin: ['typedoc-plugin-markdown'], - name: 'Noir JS', - disableSources: true, - excludePrivate: true, - - sidebar: { - filteredIds: ['noir_js/reference/noir_js/index'], - }, - readme: 'none', - hidePageHeader: true, - hideBreadcrumbs: true, - hideInPageTOC: true, - useCodeBlocks: true, - typeDeclarationFormat: 'table', - propertiesFormat: 'table', - parametersFormat: 'table', - enumMembersFormat: 'table', - indexFormat: 'table', - outputFileStrategy: 'members', - memberPageTitle: '{name}', - membersWithOwnFile: ['Interface', 'Class', 'TypeAlias', 'Function'], - }, - ], - [ - 'docusaurus-plugin-typedoc', - { - id: 'noir_js_backend_barretenberg', - entryPoints: ['../tooling/noir_js_backend_barretenberg/src/index.ts'], - tsconfig: '../tooling/noir_js_backend_barretenberg/tsconfig.json', - entryPointStrategy: 'resolve', - out: 'docs/noir_js/reference/backend_barretenberg', - plugin: 
['typedoc-plugin-markdown'], - name: 'Backend Barretenberg', - disableSources: true, - excludePrivate: true, - - sidebar: { - filteredIds: ['noir_js/reference/backend_barretenberg/index'], - }, - readme: 'none', - hidePageHeader: true, - hideBreadcrumbs: true, - hideInPageTOC: true, - useCodeBlocks: true, - typeDeclarationFormat: 'table', - propertiesFormat: 'table', - parametersFormat: 'table', - enumMembersFormat: 'table', - indexFormat: 'table', - outputFileStrategy: 'members', - memberPageTitle: '{name}', - membersWithOwnFile: ['Interface', 'Class', 'TypeAlias'], - }, - ], - ], -}; - -module.exports = config; diff --git a/docs/docusaurus.config.ts b/docs/docusaurus.config.ts new file mode 100644 index 00000000000..aacc318f5be --- /dev/null +++ b/docs/docusaurus.config.ts @@ -0,0 +1,216 @@ +/* eslint-disable @typescript-eslint/no-var-requires */ +import type { Config } from '@docusaurus/types'; + +const { themes } = require('prism-react-renderer'); +const lightTheme = themes.github; +const darkTheme = themes.dracula; + +import math from 'remark-math'; +import katex from 'rehype-katex'; + +export default { + title: 'Noir Documentation', + tagline: 'The Universal ZK Circuit Language', + favicon: 'img/favicon.ico', + url: 'https://noir-lang.org', + baseUrl: '/', + onBrokenLinks: 'throw', + onBrokenMarkdownLinks: 'throw', + i18n: { + defaultLocale: 'en', + locales: ['en'], + }, + + presets: [ + [ + '@docusaurus/preset-classic', + { + docs: { + sidebarPath: './sidebars.js', + routeBasePath: '/docs', + remarkPlugins: [math], + rehypePlugins: [katex], + versions: { + current: { + label: 'dev', + path: 'dev', + }, + }, + editUrl: ({ versionDocsDirPath, docPath }) => + `https://github.com/noir-lang/noir/edit/master/docs/${versionDocsDirPath}/${docPath}`, + }, + blog: false, + theme: { + customCss: ['./src/css/custom.css', './src/css/sidebar.css'], + }, + }, + ], + ], + + themeConfig: { + colorMode: { + respectPrefersColorScheme: true, + }, + navbar: { + logo: { + alt: 'Noir Logo', + src: 'img/logo.svg', + srcDark: 'img/logoDark.svg', + href: '/', + }, + items: [ + { + href: 'https://github.com/noir-lang/noir/tree/master/docs', + label: 'GitHub', + position: 'right', + }, + { + type: 'docsVersionDropdown', + position: 'left', + dropdownActiveClassDisabled: true, + }, + ], + }, + metadata: [ + { + name: 'Noir', + content: 'noir, programming, language, documentation, zk, zero-knowledge, l2, crypto, layer2, ethereum', + }, + ], + footer: { + style: 'dark', + links: [ + { + title: 'Community', + items: [ + { + label: 'Noir Forum', + href: 'https://discourse.aztec.network/c/noir/7', + }, + { + label: 'Twitter', + href: 'https://twitter.com/NoirLang', + }, + { + label: 'Discord', + href: 'https://discord.gg/JtqzkdeQ6G', + }, + ], + }, + { + title: 'Code', + items: [ + { + label: 'Noir GitHub', + href: 'https://github.com/noir-lang', + }, + { + label: 'Docs GitHub', + href: 'https://github.com/noir-lang/docs', + }, + ], + }, + ], + copyright: `Noir will be dual licensed under MIT/Apache (Version 2.0).`, + }, + prism: { + theme: lightTheme, + darkTheme: darkTheme, + additionalLanguages: ['rust', 'powershell', 'solidity', 'toml', 'json', 'bash', 'docker'], + }, + stylesheets: [ + { + href: 'https://cdn.jsdelivr.net/npm/katex@0.13.24/dist/katex.min.css', + type: 'text/css', + integrity: 'sha384-odtC+0UGzzFL/6PNoE8rX/SPcQDXBJ+uRepguP4QkPCm2LBxH3FA3y+fKSiJ+AmM', + crossorigin: 'anonymous', + }, + ], + algolia: { + // The application ID provided by Algolia + appId: '97APAVUL6H', + + // Public API key: it is 
safe to commit it + apiKey: 'b9b94d2f1c58f7d509f0bc1f13b381fb', + + indexName: 'noir-lang', + }, + }, + plugins: [ + () => ({ + name: 'resolve-react', + configureWebpack() { + return { + optimization: { + innerGraph: false, + }, + }; + }, + }), + [ + 'docusaurus-plugin-typedoc', + { + id: 'noir_js', + entryPoints: ['../tooling/noir_js/src/index.ts'], + tsconfig: '../tooling/noir_js/tsconfig.json', + entryPointStrategy: 'resolve', + out: 'docs/reference/NoirJS/noir_js', + plugin: ['typedoc-plugin-markdown'], + name: 'noir_js', + disableSources: true, + excludePrivate: true, + skipErrorChecking: true, + sidebar: { + filteredIds: ['reference/NoirJS/noir_js/index'], + }, + readme: 'none', + hidePageHeader: true, + hideBreadcrumbs: true, + hideInPageTOC: true, + useCodeBlocks: true, + typeDeclarationFormat: 'table', + propertiesFormat: 'table', + parametersFormat: 'table', + enumMembersFormat: 'table', + indexFormat: 'table', + outputFileStrategy: 'members', + memberPageTitle: '{name}', + membersWithOwnFile: ['Interface', 'Class', 'TypeAlias', 'Function'], + }, + ], + [ + 'docusaurus-plugin-typedoc', + { + id: 'noir_js_backend_barretenberg', + entryPoints: ['../tooling/noir_js_backend_barretenberg/src/index.ts'], + tsconfig: '../tooling/noir_js_backend_barretenberg/tsconfig.json', + entryPointStrategy: 'resolve', + out: 'docs/reference/NoirJS/backend_barretenberg', + plugin: ['typedoc-plugin-markdown'], + name: 'backend_barretenberg', + disableSources: true, + excludePrivate: true, + skipErrorChecking: true, + sidebar: { + filteredIds: ['reference/NoirJS/backend_barretenberg/index'], + }, + readme: 'none', + hidePageHeader: true, + hideBreadcrumbs: true, + hideInPageTOC: true, + useCodeBlocks: true, + typeDeclarationFormat: 'table', + propertiesFormat: 'table', + parametersFormat: 'table', + enumMembersFormat: 'table', + indexFormat: 'table', + outputFileStrategy: 'members', + memberPageTitle: '{name}', + membersWithOwnFile: ['Interface', 'Class', 'TypeAlias'], + }, + ], + ], + markdown: { + format: 'detect', + }, +} satisfies Config; diff --git a/docs/link-check.config.json b/docs/link-check.config.json new file mode 100644 index 00000000000..68059476958 --- /dev/null +++ b/docs/link-check.config.json @@ -0,0 +1,7 @@ +{ + "ignorePatterns": [ + { + "pattern": "^[^\/]+\/[^\/].*$|^\/[^\/].*$" + } + ] +} diff --git a/docs/package.json b/docs/package.json index 09f8d718b56..1fa4ab79b85 100644 --- a/docs/package.json +++ b/docs/package.json @@ -3,33 +3,45 @@ "version": "0.0.0", "private": true, "scripts": { - "start": "docusaurus start", - "build": "docusaurus build", - "setStable": "node ./scripts/setStable.js" + "start": "yarn version::stables && docusaurus start", + "build": "yarn version::stables && docusaurus build", + "version::stables": "ts-node ./scripts/setStable.ts", + "serve": "serve build" }, "dependencies": { - "@docusaurus/core": "^2.4.0", - "@docusaurus/plugin-google-gtag": "^2.4.0", - "@docusaurus/preset-classic": "^2.4.0", + "@docusaurus/core": "^3.0.1", + "@docusaurus/preset-classic": "^3.0.1", "@easyops-cn/docusaurus-search-local": "^0.35.0", - "@mdx-js/react": "^1.6.22", + "@mdx-js/react": "^3.0.0", + "@noir-lang/noir_js": "workspace:*", + "@noir-lang/noirc_abi": "workspace:*", + "@noir-lang/types": "workspace:*", + "@signorecello/noir_playground": "^0.6.0", "axios": "^1.4.0", "clsx": "^1.2.1", - "docusaurus-plugin-typedoc": "1.0.0-next.18", "hast-util-is-element": "^1.1.0", - "prism-react-renderer": "^1.3.5", - "react": "^17.0.2", - "react-dom": "^17.0.2", - "rehype-katex": 
"^5.0.0", - "remark-math": "^3.0.1", + "prism-react-renderer": "^2.1.0", + "react": "^18.2.0", + "react-dom": "^18.2.0", + "react-spinners": "^0.13.8", + "rehype-katex": "^7.0.0", + "remark-math": "^6.0.0" + }, + "devDependencies": { + "@docusaurus/module-type-aliases": "^3.0.1", + "@docusaurus/tsconfig": "^3.0.1", + "@docusaurus/types": "^3.0.1", + "@types/prettier": "^3", + "docusaurus-plugin-typedoc": "1.0.0-next.18", + "eslint-plugin-prettier": "^5.0.0", + "prettier": "3.0.3", + "serve": "^14.2.1", + "ts-node": "^10.9.1", "typedoc": "^0.25.0", "typedoc-plugin-frontmatter": "^0.0.2", "typedoc-plugin-markdown": "4.0.0-next.25", "typedoc-plugin-merge-modules": "^5.1.0", - "typescript": "^5.2.2" - }, - "devDependencies": { - "@docusaurus/module-type-aliases": "^2.4.0" + "typescript": "~5.2.2" }, "browserslist": { "production": [ @@ -44,6 +56,6 @@ ] }, "engines": { - "node": ">=16.14" + "node": ">=18-0" } } diff --git a/docs/scripts/setStable.js b/docs/scripts/setStable.js deleted file mode 100644 index 4bbe283f4be..00000000000 --- a/docs/scripts/setStable.js +++ /dev/null @@ -1,42 +0,0 @@ -/* eslint-disable */ -const fs = require('fs'); -const path = require('path'); -const axios = require('axios'); -const { release } = require('os'); - -const IGNORE_VERSIONS = ['0.16.0']; -const NUMBER_OF_VERSIONS_TO_SHOW = 4; - -async function main() { - const versionsFile = path.join(__dirname, '../versions.json'); - - const axiosOpts = { - params: { per_page: 100 }, - }; - - console.log(process.env.GITHUB_TOKEN); - // cool if you have a GITHUB_TOKEN because of rate limiting - // but fine if you don't - if (process.env.GITHUB_TOKEN) axiosOpts.headers = { Authorization: `token ${process.env.GITHUB_TOKEN}` }; - - const { data } = await axios.get('https://api.github.com/repos/noir-lang/noir/releases', axiosOpts); - - const all = data.map((release) => release.tag_name); - console.log('All versions: ', all); - const aztecs = data.filter((release) => release.tag_name.includes('aztec')).map((release) => release.tag_name); - console.log('Removing aztecs: ', aztecs); - const prereleases = data.filter((release) => !release.prerelease).map((release) => release.tag_name); - console.log('Removing prereleases: ', prereleases); - - const stables = data - .filter((release) => !release.prerelease && !release.tag_name.includes('aztec')) - .filter((release) => !IGNORE_VERSIONS.includes(release.tag_name.replace('v', ''))) - .map((release) => release.tag_name) - .slice(0, NUMBER_OF_VERSIONS_TO_SHOW); - - console.log('Stables: ', stables); - - fs.writeFileSync(versionsFile, JSON.stringify(stables, null, 2)); -} - -main(); diff --git a/docs/scripts/setStable.ts b/docs/scripts/setStable.ts new file mode 100644 index 00000000000..0f86c4afd59 --- /dev/null +++ b/docs/scripts/setStable.ts @@ -0,0 +1,55 @@ +/* eslint-disable @typescript-eslint/no-var-requires */ +const fs = require('fs'); +const path = require('path'); +const axios = require('axios'); + +const GITHUB_PAGES = 3; +const IGNORE_VERSIONS = ['0.16.0']; +const NUMBER_OF_VERSIONS_TO_SHOW = 2; + +async function main() { + const axiosOpts = { + params: { per_page: 100 }, + headers: {}, + }; + + if (process.env.GITHUB_TOKEN) axiosOpts.headers = { Authorization: `token ${process.env.GITHUB_TOKEN}` }; + + let stables = []; + console.log('Retrieved versions:'); + + for (let i = 0; i < GITHUB_PAGES; i++) { + const { data } = await axios.get(`https://api.github.com/repos/noir-lang/noir/releases?page=${i + 1}`, axiosOpts); + + console.log(data.map((release) => release.tag_name)); 
+ stables.push( + ...data + .filter( + (release) => + !release.prerelease && !release.tag_name.includes('aztec') && !release.tag_name.includes('aztec'), + ) + .filter((release) => !IGNORE_VERSIONS.includes(release.tag_name.replace('v', ''))) + .map((release) => release.tag_name), + ); + } + + stables = stables.slice(0, NUMBER_OF_VERSIONS_TO_SHOW); + + console.log('Filtered down to stables: ', stables); + + const onlyLatestPatches = []; + const minorsSet = new Set(stables.map((el) => el.split('.')[1])); + for (const minor of minorsSet) { + const minorVersions = stables.filter((el) => el.split('.')[1] === minor); + const max = minorVersions.reduce((prev, current) => { + return prev > current ? prev : current; + }); + onlyLatestPatches.push(max); + } + + console.log('Only latest patches: ', onlyLatestPatches); + + fs.writeFileSync(path.resolve(__dirname, '../versions.json'), JSON.stringify(onlyLatestPatches, null, 2)); +} + +main(); diff --git a/docs/sidebars.js b/docs/sidebars.js index 3fd391cf09c..f1e79ba9ebc 100644 --- a/docs/sidebars.js +++ b/docs/sidebars.js @@ -1,145 +1,75 @@ -/** - * Creating a sidebar enables you to: - - create an ordered group of docs - - render a sidebar for each doc of that group - - provide next/previous navigation - - The sidebars can be generated from the filesystem, or explicitly defined here. - - Create as many sidebars as you want. - */ - -// @ts-check - /** @type {import('@docusaurus/plugin-content-docs').SidebarsConfig} */ -const sidebars = { +export default { sidebar: [ { type: 'doc', id: 'index', - label: 'Noir', }, { type: 'category', label: 'Getting Started', - items: [{ type: 'autogenerated', dirName: 'getting_started' }], + items: [ + { + type: 'autogenerated', + dirName: 'getting_started', + }, + ], }, { type: 'category', - label: 'Examples', - items: [{ type: 'autogenerated', dirName: 'examples' }], + label: 'The Noir Language', + items: [ + { + type: 'autogenerated', + dirName: 'noir', + }, + ], }, { - type: 'category', - label: 'Nargo', - items: [{ type: 'autogenerated', dirName: 'nargo' }], + type: 'html', + value: '
', + defaultStyle: true, }, { type: 'category', - label: 'Language Concepts', + label: 'How To Guides', items: [ { - type: 'category', - label: 'Data Types', - link: { - type: 'doc', - id: 'language_concepts/data_types', - }, - items: [ - { - type: 'autogenerated', - dirName: 'language_concepts/data_types', - }, - ], + type: 'autogenerated', + dirName: 'how_to', }, - 'language_concepts/functions', - 'language_concepts/control_flow', - 'language_concepts/ops', - 'language_concepts/assert', - 'language_concepts/unconstrained', - 'language_concepts/generics', - 'language_concepts/mutability', - 'language_concepts/lambdas', - 'language_concepts/comments', - 'language_concepts/distinct', - 'language_concepts/shadowing', ], }, { type: 'category', - label: 'Noir Standard Library', + label: 'Explainers', items: [ { - type: 'category', - label: 'Cryptographic Primitives', - link: { - type: 'doc', - id: 'standard_library/cryptographic_primitives', - }, - items: [ - { - type: 'autogenerated', - dirName: 'standard_library/cryptographic_primitives', - }, - ], + type: 'autogenerated', + dirName: 'explainers', }, - 'standard_library/recursion', - 'standard_library/logging', - 'standard_library/merkle_trees', - 'standard_library/zeroed', - 'standard_library/black_box_fns', - 'standard_library/options', ], }, { type: 'category', - label: 'Modules, Packages and Crates', - items: [{ type: 'autogenerated', dirName: 'modules_packages_crates' }], - }, - { - type: 'category', - label: 'NoirJS', - link: { - type: 'doc', - id: 'noir_js/noir_js', - }, + label: 'Tutorials', items: [ { - type: 'category', - label: 'Guides', - items: [ - { - type: 'autogenerated', - dirName: 'noir_js/getting_started', - }, - ], - }, - { - type: 'category', - label: 'Reference', - items: [ - { - type: 'category', - label: 'Noir JS', - link: { - type: 'doc', - id: 'noir_js/reference/noir_js/index', - }, - items: require('./docs/noir_js/reference/noir_js/typedoc-sidebar.cjs'), - }, - { - type: 'category', - label: 'Backend Barretenberg', - link: { - type: 'doc', - id: 'noir_js/reference/backend_barretenberg/index', - }, - items: require('./docs/noir_js/reference/backend_barretenberg/typedoc-sidebar.cjs'), - }, - ], + type: 'autogenerated', + dirName: 'tutorials', }, ], }, + { + type: 'category', + label: 'Reference', + items: [{ type: 'autogenerated', dirName: 'reference' }], + }, + { + type: 'html', + value: '
', + defaultStyle: true, + }, { type: 'doc', id: 'migration_notes', @@ -147,5 +77,3 @@ const sidebars = { }, ], }; - -module.exports = sidebars; diff --git a/docs/src/components/GithubCode/index.js b/docs/src/components/GithubCode/index.js deleted file mode 100644 index 69ba17ef791..00000000000 --- a/docs/src/components/GithubCode/index.js +++ /dev/null @@ -1,63 +0,0 @@ -import React, { useEffect, useState } from 'react'; -import axios from 'axios'; -import Highlight, { defaultProps } from "prism-react-renderer"; -import github from 'prism-react-renderer/themes/github'; -//import vsDark from 'prism-react-renderer/themes/vsDark'; - -const GitHubCode = ({ owner, repo, branch = 'master', filePath, language, startLine = 1, endLine = Infinity }) => { - const [code, setCode] = useState(''); - const [response, setResponse] = useState(''); - - useEffect(() => { - const fetchCode = async () => { - const url = `https://api.github.com/repos/${owner}/${repo}/contents/${filePath}?ref=${branch}` - try { - const response = await axios.get(url); - const content = response.data.content; - const decodedContent = atob(content); // Decode Base64 content - - const lines = decodedContent.split('\n'); - const desiredLines = lines.slice(startLine - 1, endLine).join('\n').trimEnd(); - - setResponse(response); - setCode(desiredLines); - } catch (error) { - console.error('Failed to fetch GitHub code:', error); - } - }; - - fetchCode(); - }, [owner, repo, branch, filePath, startLine, endLine]); - - const highlightedCode = ( - - {({ className, style, tokens, getLineProps, getTokenProps }) => ( -
-                    <pre className={className} style={style}>
-                        {tokens.map((line, i) => (
-                            <div {...getLineProps({ line, key: i })}>
-                                {/* uncomment for line numbers */}
-                                {/* <span>{i + 1}</span> */}
-                                {line.map((token, key) => (
-                                    <span {...getTokenProps({ token, key })} />
-                                ))}
-                            </div>
-                        ))}
-                    </pre>
-                    {
-                        response.data?.html_url ? <a href={response.data.html_url} target="_blank">Link to source code.</a> : ''
-                    }
-                </>
-            )}
-        </Highlight>
- ) - - return highlightedCode; -}; - -export default GitHubCode; \ No newline at end of file diff --git a/docs/src/components/HomepageFeatures/index.js b/docs/src/components/HomepageFeatures/index.js deleted file mode 100644 index 78f410ba688..00000000000 --- a/docs/src/components/HomepageFeatures/index.js +++ /dev/null @@ -1,64 +0,0 @@ -import React from 'react'; -import clsx from 'clsx'; -import styles from './styles.module.css'; - -const FeatureList = [ - { - title: 'Easy to Use', - Svg: require('@site/static/img/undraw_docusaurus_mountain.svg').default, - description: ( - <> - Docusaurus was designed from the ground up to be easily installed and - used to get your website up and running quickly. - - ), - }, - { - title: 'Focus on What Matters', - Svg: require('@site/static/img/undraw_docusaurus_tree.svg').default, - description: ( - <> - Docusaurus lets you focus on your docs, and we'll do the chores. Go - ahead and move your docs into the docs directory. - - ), - }, - { - title: 'Powered by React', - Svg: require('@site/static/img/undraw_docusaurus_react.svg').default, - description: ( - <> - Extend or customize your website layout by reusing React. Docusaurus can - be extended while reusing the same header and footer. - - ), - }, -]; - -function Feature({Svg, title, description}) { - return ( -
-    <div className={clsx('col col--4')}>
-      <div className="text--center">
-        <Svg className={styles.featureSvg} role="img" />
-      </div>
-      <div className="text--center padding-horiz--md">
-        <h3>{title}</h3>
-        <p>{description}</p>
-      </div>
-    </div>
-  );
-}
-
-export default function HomepageFeatures() {
-  return (
-    <section className={styles.features}>
-      <div className="container">
-        <div className="row">
-          {FeatureList.map((props, idx) => (
-            <Feature key={idx} {...props} />
-          ))}
-        </div>
-      </div>
-    </section>
- ); -} diff --git a/docs/src/components/HomepageFeatures/styles.module.css b/docs/src/components/HomepageFeatures/styles.module.css deleted file mode 100644 index b248eb2e5de..00000000000 --- a/docs/src/components/HomepageFeatures/styles.module.css +++ /dev/null @@ -1,11 +0,0 @@ -.features { - display: flex; - align-items: center; - padding: 2rem 0; - width: 100%; -} - -.featureSvg { - height: 200px; - width: 200px; -} diff --git a/docs/src/css/custom.css b/docs/src/css/custom.css index c1faa9d3139..c96e9df9832 100644 --- a/docs/src/css/custom.css +++ b/docs/src/css/custom.css @@ -6,45 +6,47 @@ /* You can override the default Infima variables here. */ :root { - --ifm-color-primary: #9f3fff; - --ifm-color-primary-dark: #2f1f49; - --ifm-color-primary-darker: #2f1f49; - --ifm-color-primary-darkest: #2f1f49; - --ifm-color-primary-light: #9f3fff; - --ifm-color-primary-lighter: #9f3fff; - --ifm-color-primary-lightest: #9f3fff; + --ifm-color-primary: #514167; + --ifm-color-primary-dark: #493a5d; + --ifm-color-primary-darker: #453758; + --ifm-color-primary-darkest: #392d48; + --ifm-color-primary-light: #594871; + --ifm-color-primary-lighter: #5d4b76; + --ifm-color-primary-lightest: #695486; --search-local-highlight-color: #2f1f49; --ifm-menu-color-background-active: #f6f8fa; --ifm-code-font-size: 95%; - --ifm-breadcrumb-color-active: white; - --ifm-breadcrumb-item-background-active: #2f1f49; + --ifm-breadcrumb-color-active: #F6FBFC; + --ifm-breadcrumb-item-background-active: #2f1f49; --ifm-heading-color: #2f1f49; --docusaurus-highlighted-code-line-bg: rgba(0, 0, 0, 0.1); + --ifm-link-color: #B68BE4; } /* For readability concerns, you should choose a lighter palette in dark mode. */ [data-theme='dark'] { - --ifm-color-primary: #f5bda9; - --ifm-color-primary-dark: #f5bda9; - --ifm-color-primary-darker: #f5bda9; - --ifm-color-primary-darkest: #f5bda9; - --ifm-color-primary-light: #f5bda9; - --ifm-color-primary-lighter: #f5bda9; - --ifm-color-primary-lightest: #f5bda9; - - --ifm-heading-color: white; + --ifm-color-primary: #fbc0b4; + --ifm-color-primary-dark: #f99e8b; + --ifm-color-primary-darker: #f88c77; + --ifm-color-primary-darkest: #f45939; + --ifm-color-primary-light: #fde2dd; + --ifm-color-primary-lighter: #fef4f1; + --ifm-color-primary-lightest: #ffffff; + + --ifm-heading-color: #F6FBFC; --ifm-menu-color-background-active: #282a36; --ifm-breadcrumb-color-active: #2f1f49; --ifm-breadcrumb-item-background-active: #f5bda9; --docusaurus-highlighted-code-line-bg: rgba(0, 0, 0, 0.3); + --ifm-link-color: var(--ifm-color-primary); } html[data-theme='dark'] { --search-local-highlight-color: #f5bda9; - --search-local-muted-color: white; + --search-local-muted-color: #F6FBFC; --search-local-hit-active-color: #1b1b1d; - --search-local-hit-color: white; + --search-local-hit-color: #F6FBFC; } [data-theme='dark'] .footer { @@ -65,6 +67,67 @@ html[data-theme='dark'] { --ifm-footer-title-color: #2f1f49; } +[data-theme='light'] #__docusaurus { + background-color: #F6FBFC; +} +[data-theme='dark'] #__docusaurus { + background-color: #161717; +} + .katex-html { display: none; } + +.homepage_layout { + max-width: 800px; + margin: 0 auto; +} + +.homepage_h1 { + display: block; + margin: 40px auto; + text-align: center; +} + +.homepage_p { + display: block; + margin: 0 auto; + text-align: center; +} + +.homepage_cta_container { + display: flex; + justify-content: center; + margin: 0 auto; + text-align: center; + border: none; + width: 50%; +} + +.homepage_cta { + display: block; + margin: 50px auto; + text-align: 
center; +} + +.cards__container { + margin: 0 auto; +} + + +.card__body { + display: flex; + flex-wrap: wrap; +} + +.card__body.align_right { + justify-content: end; + text-align: right; +} + +*, +::before, +::after { + border-width: 0; + border-style: solid; +} diff --git a/docs/src/css/sidebar.css b/docs/src/css/sidebar.css new file mode 100644 index 00000000000..3c03c374058 --- /dev/null +++ b/docs/src/css/sidebar.css @@ -0,0 +1,4 @@ +.divider { + border-top: 2px solid #eee; + margin: 0.5em 0; +} diff --git a/docs/src/pages/index.jsx b/docs/src/pages/index.jsx new file mode 100644 index 00000000000..d5cbfcba977 --- /dev/null +++ b/docs/src/pages/index.jsx @@ -0,0 +1,71 @@ +import React, { lazy, Suspense } from 'react'; +import Layout from '@theme/Layout'; +import Link from '@docusaurus/Link'; + +import headerPic from '../../static/img/homepage_header_pic.png'; +import { BeatLoader } from 'react-spinners'; + +const NoirEditor = lazy(() => import('@signorecello/noir_playground')); + +const Spinner = () => { + return ( +
+ +
+ ); +}; + +export default function Landing() { + const [tryIt, setTryIt] = React.useState(false); + + return ( + +
+
+
+

+ Noir +

+

+ Noir is a Domain Specific Language for SNARK proving systems. It has been designed to use any ACIR + compatible proving system. Its design choices are influenced heavily by Rust and focuses on a simple, + familiar syntax. +

+ + {tryIt && ( + }> + + + + + + )} + + {!tryIt && ( +
+ + + + +
+ )} +
+
+ + ); +} diff --git a/docs/static/img/homepage_header_pic.png b/docs/static/img/homepage_header_pic.png new file mode 100644 index 00000000000..d629e202232 Binary files /dev/null and b/docs/static/img/homepage_header_pic.png differ diff --git a/docs/static/img/memes/flextape.jpeg b/docs/static/img/memes/flextape.jpeg new file mode 100644 index 00000000000..65a8992e985 Binary files /dev/null and b/docs/static/img/memes/flextape.jpeg differ diff --git a/docs/static/img/memes/titanic.jpeg b/docs/static/img/memes/titanic.jpeg new file mode 100644 index 00000000000..a29f06ffb93 Binary files /dev/null and b/docs/static/img/memes/titanic.jpeg differ diff --git a/docs/static/img/noir_getting_started_1.png b/docs/static/img/noir_getting_started_1.png index 9de33296e91..beaee6fd3c9 100644 Binary files a/docs/static/img/noir_getting_started_1.png and b/docs/static/img/noir_getting_started_1.png differ diff --git a/docs/static/video/codespaces_showcase.mp4 b/docs/static/video/codespaces_showcase.mp4 new file mode 100644 index 00000000000..191e87b18dc Binary files /dev/null and b/docs/static/video/codespaces_showcase.mp4 differ diff --git a/docs/static/video/how-tos/devcontainer.mp4 b/docs/static/video/how-tos/devcontainer.mp4 new file mode 100644 index 00000000000..91e14ab2aff Binary files /dev/null and b/docs/static/video/how-tos/devcontainer.mp4 differ diff --git a/docs/tsconfig.json b/docs/tsconfig.json new file mode 100644 index 00000000000..241fcf4b5e3 --- /dev/null +++ b/docs/tsconfig.json @@ -0,0 +1,7 @@ +{ + "extends": "@docusaurus/tsconfig", + "compilerOptions": { + "baseUrl": ".", + "downlevelIteration": true + }, +} diff --git a/docs/versioned_docs/version-v0.10.5/examples/merkle-proof.mdx b/docs/versioned_docs/version-v0.10.5/examples/merkle-proof.mdx deleted file mode 100644 index 6430780817c..00000000000 --- a/docs/versioned_docs/version-v0.10.5/examples/merkle-proof.mdx +++ /dev/null @@ -1,48 +0,0 @@ ---- -title: Merkle Proof Membership -description: - Learn how to use merkle membership proof in Noir to prove that a given leaf is a member of a - merkle tree with a specified root, at a given index. -keywords: - [merkle proof, merkle membership proof, Noir, rust, hash function, Pedersen, sha256, merkle tree] ---- - -Let's walk through an example of a merkle membership proof in Noir that proves that a given leaf is -in a merkle tree. - -```rust -use dep::std; - -fn main(message : [Field; 62], index : Field, hashpath : [Field; 40], root : Field) { - let leaf = std::hash::hash_to_field(message); - let merkle_root = std::merkle::compute_merkle_root(leaf, index, hashpath); - assert(merkle_root == root); -} - -``` - -The message is hashed using `hash_to_field`. The specific hash function that is being used is chosen -by the backend. The only requirement is that this hash function can heuristically be used as a -random oracle. If only collision resistance is needed, then one can call `std::hash::pedersen` -instead. - -```rust -let leaf = std::hash::hash_to_field(message); -``` - -The leaf is then passed to a compute_merkle_root function with the root, index and hashpath. The returned root can then be asserted to be the same as the provided root. - -```rust -let merkle_root = std::merkle::compute_merkle_root(leaf, index, hashpath); -assert (merkle_root == root); -``` - -> **Note:** It is possible to re-implement the merkle tree implementation without standard library. -> However, for most usecases, it is enough. 
In general, the standard library will always opt to be -> as conservative as possible, while striking a balance with efficiency. - -An example, the merkle membership proof, only requires a hash function that has collision -resistance, hence a hash function like Pedersen is allowed, which in most cases is more efficient -than the even more conservative sha256. - -[View an example on the starter repo](https://github.com/noir-lang/noir-examples/blob/3ea09545cabfa464124ec2f3ea8e60c608abe6df/stealthdrop/circuits/src/main.nr#L20) diff --git a/docs/versioned_docs/version-v0.10.5/getting_started/00_nargo_installation.md b/docs/versioned_docs/version-v0.10.5/getting_started/00_nargo_installation.md deleted file mode 100644 index de30869732d..00000000000 --- a/docs/versioned_docs/version-v0.10.5/getting_started/00_nargo_installation.md +++ /dev/null @@ -1,285 +0,0 @@ ---- -title: Nargo Installation -description: - nargo is a command line tool for interacting with Noir programs (e.g. compiling, proving, - verifying and more). Learn how to install and use Nargo for your projects with this comprehensive - guide. -keywords: [Nargo, command line tool, Noir programs, installation guide, how to use Nargo] ---- - -`nargo` is a command line tool for interacting with Noir programs (e.g. compiling, proving, -verifying and more). - -Alternatively, the interactions can also be performed in [TypeScript](../typescript). - -### UltraPlonk - -Nargo versions <0.5.0 of `aztec_backend` and `aztec_wasm_backend` are based on the TurboPlonk -version of Aztec Backend, which lacks efficient implementations of useful primitives (e.g. Keccak256 in 18k constraints, ECDSA verification in 36k constraints) that the UltraPlonk version offers. - -## Installation - -There are four approaches for installing Nargo: - -- [Option 1: Noirup](#option-1-noirup) -- [Option 2: Binaries](#option-2-binaries) -- [Option 3: Install via Nix](#option-3-install-via-nix) -- [Option 4: Compile from Source](#option-4-compile-from-source) - -Optionally you can also install [Noir VS Code extension] for syntax highlighting. - -### Option 1: Noirup - -If you're on OSX or Linux, the easiest way to start using Noir and Nargo is via noirup. Just open a -terminal and run: - -```bash -curl -L https://raw.githubusercontent.com/noir-lang/noirup/main/install | bash -``` - -Close the terminal, open another one, and run - -```bash -noirup -``` - -Done, you should have the latest version working. You can check with `nargo --version`. - -You can also install nightlies, specific versions -or branches, check out the [noirup repository](https://github.com/noir-lang/noirup) for more -information. - -#### GitHub Actions - -You can use `noirup` with GitHub Actions for CI/CD and automated testing. It is as simple as -installing `noirup` and running tests in your GitHub Action `yml` file. - -See the -[config file](https://github.com/TomAFrench/noir-hashes/blob/master/.github/workflows/noir.yml) in -this repo containing hash functions in Noir for an example. - -#### Nightly versions - -To install the nightly version of Noir (updated daily) run: - -```bash -noirup -n -``` - -### Option 2: Binaries - -See [GitHub Releases](https://github.com/noir-lang/noir/releases) for the latest and previous -platform specific binaries. - -#### Step 1 - -Paste and run the following in the terminal to extract and install the binary: - -> **macOS / Linux:** If you are prompted with `Permission denied` when running commands, prepend -> `sudo` and re-run it. 
- -##### macOS (Apple Silicon) - -```bash -mkdir -p $HOME/.nargo/bin && \ -curl -o $HOME/.nargo/bin/nargo-aarch64-apple-darwin.tar.gz -L https://github.com/noir-lang/noir/releases/download/v0.6.0/nargo-aarch64-apple-darwin.tar.gz && \ -tar -xvf $HOME/.nargo/bin/nargo-aarch64-apple-darwin.tar.gz -C $HOME/.nargo/bin/ && \ -echo '\nexport PATH=$PATH:$HOME/.nargo/bin' >> ~/.zshrc && \ -source ~/.zshrc -``` - -##### macOS (Intel) - -```bash -mkdir -p $HOME/.nargo/bin && \ -curl -o $HOME/.nargo/bin/nargo-x86_64-apple-darwin.tar.gz -L https://github.com/noir-lang/noir/releases/download/v0.6.0/nargo-x86_64-apple-darwin.tar.gz && \ -tar -xvf $HOME/.nargo/bin/nargo-x86_64-apple-darwin.tar.gz -C $HOME/.nargo/bin/ && \ -echo '\nexport PATH=$PATH:$HOME/.nargo/bin' >> ~/.zshrc && \ -source ~/.zshrc -``` - -##### Windows (PowerShell) - -Open PowerShell as Administrator and run: - -```powershell -mkdir -f -p "$env:USERPROFILE\.nargo\bin\"; ` -Invoke-RestMethod -Method Get -Uri https://github.com/noir-lang/noir/releases/download/v0.4.1/nargo-x86_64-pc-windows-msvc.zip -Outfile "$env:USERPROFILE\.nargo\bin\nargo-x86_64-pc-windows-msvc.zip"; ` -Expand-Archive -Path "$env:USERPROFILE\.nargo\bin\nargo-x86_64-pc-windows-msvc.zip" -DestinationPath "$env:USERPROFILE\.nargo\bin\"; ` -$Reg = "Registry::HKLM\System\CurrentControlSet\Control\Session Manager\Environment"; ` -$OldPath = (Get-ItemProperty -Path "$Reg" -Name PATH).Path; ` -$NewPath = $OldPath + ’;’ + "$env:USERPROFILE\.nargo\bin\"; ` -Set-ItemProperty -Path "$Reg" -Name PATH –Value "$NewPath"; ` -$env:Path = [System.Environment]::GetEnvironmentVariable("Path","Machine") + ";" + [System.Environment]::GetEnvironmentVariable("Path","User") -``` - -##### Linux (Bash) - -```bash -mkdir -p $HOME/.nargo/bin && \ -curl -o $HOME/.nargo/bin/nargo-x86_64-unknown-linux-gnu.tar.gz -L https://github.com/noir-lang/noir/releases/download/v0.6.0/nargo-x86_64-unknown-linux-gnu.tar.gz && \ -tar -xvf $HOME/.nargo/bin/nargo-x86_64-unknown-linux-gnu.tar.gz -C $HOME/.nargo/bin/ && \ -echo -e '\nexport PATH=$PATH:$HOME/.nargo/bin' >> ~/.bashrc && \ -source ~/.bashrc -``` - -#### Step 2 - -Check if the installation was successful by running `nargo --help`. - -> **macOS:** If you are prompted with an OS alert, right-click and open the _nargo_ executable from -> Finder. Close the new terminal popped up and `nargo` should now be accessible. - -For a successful installation, you should see something similar to the following after running the -command: - -```sh -$ nargo --help - -Noir's package manager - -Usage: nargo - -Commands: - check Checks the constraint system for errors - codegen-verifier Generates a Solidity verifier smart contract for the program - compile Compile the program and its secret execution trace into ACIR format - new Create a new binary project - execute Executes a circuit to calculate its return value - prove Create proof for this program. The proof is returned as a hex encoded string - verify Given a proof and a program, verify whether the proof is valid - test Run the tests for this program - gates Counts the occurrences of different gates in circuit - help Print this message or the help of the given subcommand(s) -``` - -### Option 3: Install via Nix - -Due to the large number of native dependencies, Noir projects can be installed via [Nix](https://nixos.org/). - -#### Installing Nix - -For the best experience, please follow these instructions to setup Nix: - -1. Install Nix following [their guide](https://nixos.org/download.html) for your operating system. 
-2. Create the file `~/.config/nix/nix.conf` with the contents: - -```ini -experimental-features = nix-command -extra-experimental-features = flakes -``` - -#### Install Nargo into your Nix profile - -1. Use `nix profile` to install Nargo - -```sh -nix profile install github:noir-lang/noir -``` - -### Option 4: Compile from Source - -Due to the large number of native dependencies, Noir projects uses [Nix](https://nixos.org/) and [direnv](https://direnv.net/) to streamline the development experience. - -#### Setting up your environment - -For the best experience, please follow these instructions to setup your environment: - -1. Install Nix following [their guide](https://nixos.org/download.html) for your operating system. -2. Create the file `~/.config/nix/nix.conf` with the contents: - -```ini -experimental-features = nix-command -extra-experimental-features = flakes -``` - -3. Install direnv into your Nix profile by running: - -```sh -nix profile install nixpkgs#direnv -``` - -4. Add direnv to your shell following [their guide](https://direnv.net/docs/hook.html). - 1. For bash or zshell, add `eval "$(direnv hook bash)"` or `eval "$(direnv hook zsh)"` to your ~/.bashrc or ~/.zshrc file, respectively. -5. Restart your shell. - -#### Shell & editor experience - -Now that your environment is set up, you can get to work on the project. - -1. Clone the repository, such as: - -```sh -git clone git@github.com:noir-lang/noir -``` - -> Replacing `noir` with whichever repository you want to work on. - -2. Navigate to the directory: - -```sh -cd noir -``` - -> Replacing `noir` with whichever repository you cloned. - -3. You should see a **direnv error** because projects aren't allowed by default. Make sure you've reviewed and trust our `.envrc` file, then you need to run: - -```sh -direnv allow -``` - -4. Now, wait awhile for all the native dependencies to be built. This will take some time and direnv will warn you that it is taking a long time, but we just need to let it run. - -5. Once you are presented with your prompt again, you can start your editor within the project directory (we recommend [VSCode](https://code.visualstudio.com/)): - -```sh -code . -``` - -6. (Recommended) When launching VSCode for the first time, you should be prompted to install our recommended plugins. We highly recommend installing these for the best development experience. - -#### Building and testing - -Assuming you are using `direnv` to populate your environment, building and testing the project can be done -with the typical `cargo build`, `cargo test`, and `cargo clippy` commands. You'll notice that the `cargo` version matches the version we specify in `flake.nix`, which is 1.66.0 at the time of this writing. - -If you want to build the entire project in an isolated sandbox, you can use Nix commands: - -1. `nix build .` (or `nix build . -L` for verbose output) to build the project in a Nix sandbox. -2. `nix flake check` (or `nix flake check -L` for verbose output) to run clippy and tests in a Nix sandbox. - -#### Without `direnv` - -If you have hesitations with using direnv, you can launch a subshell with `nix develop` and then launch your editor from within the subshell. However, if VSCode was already launched in the project directory, the environment won't be updated. - -Advanced: If you aren't using direnv nor launching your editor within the subshell, you can try to install Barretenberg and other global dependencies the package needs. This is an advanced workflow and likely won't receive support! 
- -## Uninstalling Nargo - -### Noirup - -If you installed Noir with `noirup`, you can uninstall Noir by removing the files in `~/.nargo`, `~/nargo` and `~/noir_cache`. - -```bash -rm -r ~/.nargo -rm -r ~/nargo -rm -r ~/noir_cache -``` - -### Nix - -If you installed Noir with Nix or from source, you can remove the binary located at `~/.nix-profile/bin/nargo`. - -```bash -rm ~/.nix-profile/bin/nargo -``` - -[git]: https://git-scm.com/book/en/v2/Getting-Started-Installing-Git -[rust]: https://www.rust-lang.org/tools/install -[noir vs code extension]: https://marketplace.visualstudio.com/items?itemName=noir-lang.vscode-noir -[homebrew]: https://brew.sh/ -[cmake]: https://cmake.org/install/ -[llvm]: https://llvm.org/docs/GettingStarted.html -[openmp]: https://openmp.llvm.org/ -[barretenberg]: https://github.com/AztecProtocol/barretenberg diff --git a/docs/versioned_docs/version-v0.10.5/getting_started/02_breakdown.md b/docs/versioned_docs/version-v0.10.5/getting_started/02_breakdown.md deleted file mode 100644 index 3fa3a35766b..00000000000 --- a/docs/versioned_docs/version-v0.10.5/getting_started/02_breakdown.md +++ /dev/null @@ -1,200 +0,0 @@ ---- -title: Project Breakdown -description: - Learn about the anatomy of a Nargo project, including the purpose of the Prover and Verifier TOML - files, and how to prove and verify your program. -keywords: - [Nargo, Nargo project, Prover.toml, Verifier.toml, proof verification, private asset transfer] ---- - -This section breaks down our hello world program in section _1.2_. We elaborate on the project -structure and what the `prove` and `verify` commands did in the previous section. - -## Anatomy of a Nargo Project - -Upon creating a new project with `nargo new` and building the in/output files with `nargo check` -commands, you would get a minimal Nargo project of the following structure: - - - src - - Prover.toml - - Verifier.toml - - Nargo.toml - -The source directory _src_ holds the source code for your Noir program. By default only a _main.nr_ -file will be generated within it. - -### Prover.toml - -_Prover.toml_ is used for specifying the input values for executing and proving the program. You can specify `toml` files with different names by using the `--prover-name` or `-p` flags, see the [Prover](#provertoml) section below. Optionally you may specify expected output values for prove-time checking as well. - -### Verifier.toml - -_Verifier.toml_ contains public in/output values computed when executing the Noir program. - -### Nargo.toml - -_Nargo.toml_ contains the environmental options of your project. It contains a "package" section and a "dependencies" section. - -Example Nargo.toml: - -```toml -[package] -name = "noirstarter" -type = "bin" -authors = ["Alice"] -compiler_version = "0.9.0" -description = "Getting started with Noir" -entry = "circuit/main.nr" -license = "MIT" - -[dependencies] -ecrecover = {tag = "v0.9.0", git = "https://github.com/colinnielsen/ecrecover-noir.git"} -``` - -Nargo.toml for a [workspace](../modules_packages_crates/workspaces) will look a bit different. 
For example: - -```toml -[workspace] -members = ["crates/a", "crates/b"] -default-member = "crates/a" -``` - -#### Package section - -The package section requires a number of fields including: - -- `name` (**required**) - the name of the package -- `type` (**required**) - can be "bin", "lib", or "contract" to specify whether its a binary, library or Aztec contract -- `authors` (optional) - authors of the project -- `compiler_version` (optional) - specifies the version of the compiler to use. This is not currently enforced by the compiler, but will be in future versions. -- `description` (optional) -- `entry` (optional) - a relative filepath to use as the entry point into your package (overrides the default of `src/lib.nr` or `src/main.nr`) -- `backend` (optional) -- `license` (optional) - -#### Dependencies section - -This is where you will specify any dependencies for your project. See the [Dependencies page](../modules_packages_crates/dependencies) for more info. - -`./proofs/` and `./contract/` directories will not be immediately visible until you create a proof or -verifier contract respectively. - -### main.nr - -The _main.nr_ file contains a `main` method, this method is the entry point into your Noir program. - -In our sample program, _main.nr_ looks like this: - -```rust -fn main(x : Field, y : Field) { - assert(x != y); -} -``` - -The parameters `x` and `y` can be seen as the API for the program and must be supplied by the -prover. Since neither `x` nor `y` is marked as public, the verifier does not supply any inputs, when -verifying the proof. - -The prover supplies the values for `x` and `y` in the _Prover.toml_ file. - -As for the program body, `assert` ensures the satisfaction of the condition (e.g. `x != y`) is -constrained by the proof of the execution of said program (i.e. if the condition was not met, the -verifier would reject the proof as an invalid proof). - -### Prover.toml - -The _Prover.toml_ file is a file which the prover uses to supply his witness values(both private and -public). - -In our hello world program the _Prover.toml_ file looks like this: - -```toml -x = "1" -y = "2" -``` - -When the command `nargo prove` is executed, two processes happen: - -1. Noir creates a proof that `x` which holds the value of `1` and `y` which holds the value of `2` - is not equal. This not equal constraint is due to the line `assert(x != y)`. - -2. Noir creates and stores the proof of this statement in the _proofs_ directory in a file called your-project.proof. So if your project is named "private_voting" (defined in the project Nargo.toml), the proof will be saved at `./proofs/private_voting.proof`. Opening this file will display the proof in hex format. - -#### Arrays of Structs - -The following code shows how to pass an array of structs to a Noir program to generate a proof. - -```rust -// main.nr -struct Foo { - bar: Field, - baz: Field, -} - -fn main(foos: [Foo; 3]) -> pub Field { - foos[2].bar + foos[2].baz -} -``` - -Prover.toml: - -```toml -[[foos]] # foos[0] -bar = 0 -baz = 0 - -[[foos]] # foos[1] -bar = 0 -baz = 0 - -[[foos]] # foos[2] -bar = 1 -baz = 2 -``` - -#### Custom toml files - -You can specify a `toml` file with a different name to use for proving by using the `--prover-name` or `-p` flags. 
- -This command looks for proof inputs in the default **Prover.toml** and generates the proof and saves it at `./proofs/.proof`: - -```bash -nargo prove -``` - -This command looks for proof inputs in the custom **OtherProver.toml** and generates proof and saves it at `./proofs/.proof`: - -```bash -nargo prove -p OtherProver -``` - -## Verifying a Proof - -When the command `nargo verify` is executed, two processes happen: - -1. Noir checks in the _proofs_ directory for a proof file with the project name (eg. test_project.proof) - -2. If that file is found, the proof's validity is checked - -> **Note:** The validity of the proof is linked to the current Noir program; if the program is -> changed and the verifier verifies the proof, it will fail because the proof is not valid for the -> _modified_ Noir program. - -In production, the prover and the verifier are usually two separate entities. A prover would -retrieve the necessary inputs, execute the Noir program, generate a proof and pass it to the -verifier. The verifier would then retrieve the public inputs from usually external sources and -verifies the validity of the proof against it. - -Take a private asset transfer as an example: - -A user on browser as the prover would retrieve private inputs (e.g. the user's private key) and -public inputs (e.g. the user's encrypted balance on-chain), compute the transfer, generate a proof -and submit it to the verifier smart contract. - -The verifier contract would then draw the user's encrypted balance directly from the blockchain and -verify the proof submitted against it. If the verification passes, additional functions in the -verifier contract could trigger (e.g. approve the asset transfer). - -Now that you understand the concepts, you'll probably want some editor feedback while you are writing more complex code. - -In the [next section](language_server), we will explain how to utilize the Noir Language Server. diff --git a/docs/versioned_docs/version-v0.10.5/getting_started/03_language_server.md b/docs/versioned_docs/version-v0.10.5/getting_started/03_language_server.md deleted file mode 100644 index 49bd1d24675..00000000000 --- a/docs/versioned_docs/version-v0.10.5/getting_started/03_language_server.md +++ /dev/null @@ -1,36 +0,0 @@ ---- -title: Language Server -description: - Learn about the Noir Language Server, how to install the components, and configuration that may be required. -keywords: - [Nargo, Language Server, LSP, VSCode, Visual Studio Code] ---- - -This section helps you install and configure the Noir Language Server. - -The Language Server Protocol (LSP) has two components, the [Server](#language-server) and the [Client](#language-client). Below we describe each in the context of Noir. - -## Language Server - -The Server component is provided by the Nargo command line tool that you installed at the beginning of this guide. -As long as Nargo is installed and you've used it to run other commands in this guide, it should be good to go! - -If you'd like to verify that the `nargo lsp` command is available, you can run `nargo --help` and look for `lsp` in the list of commands. If you see it, you're using a version of Noir with LSP support. - -## Language Client - -The Client component is usually an editor plugin that launches the Server. It communicates LSP messages between the editor and the Server. For example, when you save a file, the Client will alert the Server, so it can try to compile the project and report any errors. 
- -Currently, Noir provides a Language Client for Visual Studio Code via the [vscode-noir](https://github.com/noir-lang/vscode-noir) extension. You can install it via the [Visual Studio Marketplace](https://marketplace.visualstudio.com/items?itemName=noir-lang.vscode-noir). - -When you language server is running correctly and the VSCode plugin is installed, you should see handy codelens buttons for compilation, execution, and tests: - -![Compile and Execute](@site/static/img/codelens_compile_execute.png) -![Run test](@site/static/img/codelens_run_test.png) - -### Configuration - -* __Noir: Enable LSP__ - If checked, the extension will launch the Language Server via `nargo lsp` and communicate with it. -* __Noir: Nargo Flags__ - Additional flags may be specified if you require them to be added when the extension calls `nargo lsp`. -* __Noir: Nargo Path__ - An absolute path to a Nargo binary with the `lsp` command. This may be useful if Nargo is not within the `PATH` of your editor. -* __Noir > Trace: Server__ - Setting this to `"messages"` or `"verbose"` will log LSP messages between the Client and Server. Useful for debugging. diff --git a/docs/versioned_docs/version-v0.10.5/index.md b/docs/versioned_docs/version-v0.10.5/index.md deleted file mode 100644 index e56b24bccd8..00000000000 --- a/docs/versioned_docs/version-v0.10.5/index.md +++ /dev/null @@ -1,103 +0,0 @@ ---- -title: Introducing Noir -description: - Learn about the public alpha release of Noir, a domain specific language heavily influenced by - Rust that compiles to an intermediate language which can be compiled to an arithmetic circuit or a - rank-1 constraint system. -keywords: - [ - Noir, - Domain Specific Language, - Rust, - Intermediate Language, - Arithmetic Circuit, - Rank-1 Constraint System, - Ethereum Developers, - Protocol Developers, - Blockchain Developers, - Proving System, - Smart Contract Language, - ] -slug: / ---- - -This version of the book is being released with the public alpha. There will be a lot of features -that are missing in this version, however the syntax and the feel of the language will mostly be -completed. - -## What is Noir? - -Noir is a Domain Specific Language for SNARK proving systems. It has been designed to use any ACIR compatible proving system. - -It's design choices are influenced heavily by Rust and focuses on a simple, familiar syntax. - -## Who is Noir for? - -Noir can be used for a variety of purposes. - -### Solidity Developers - -Noir currently includes a command to create a Solidity contract which verifies your Noir program. This will -be modularised in the future; however, as of the alpha, you can use the [`nargo codegen-verifier`](./nargo/commands#nargo-codegen-verifier) command to create -a verifier contract. - -### Protocol Developers - -As a protocol developer, you may not want to use the Aztec backend due to it not being a fit for -your stack, or maybe you simply want to use a different proving system. Since Noir does not compile -to a specific proof system, it is possible for protocol developers to replace the PLONK-based -proving system with a different proving system altogether. - -### Blockchain developers - -As a blockchain developer, you will be constrained by parameters set by your blockchain (for example, the -proving system and smart contract language has been pre-defined). In order for you to use Noir in -your blockchain, a proving system backend and a smart contract interface -must be implemented for it. - -## What's new about Noir? 
- -Noir is simple and flexible in its design, as it does not compile immediately to a fixed -NP-complete language. Instead, Noir compiles to an intermediate language (ACIR), which itself can be compiled -to an arithmetic circuit (if choosing to target Aztec's barretenberg backend) or a rank-1 constraint system (if choosing to target an R1CS backend like Arkwork's Marlin backend, or others). - -This in itself brings up a few challenges within the design process, but allows one to decouple the programming language completely from the backend. This is similar in theory to LLVM. - -## Current Features - -Compiler: - -- Module System -- For expressions -- Arrays -- Bit Operations -- Binary operations (<, <=, >, >=, +, -, \*, /, %) [See documentation for an extensive list] -- Unsigned integers -- If statements -- Structures and Tuples -- Generics - -ACIR Supported OPCODES: - -- Sha256 -- Blake2s -- Schnorr signature verification -- MerkleMembership -- Pedersen -- HashToField - -## Libraries - -Noir does not currently have an official package manager. You can find a list of available Noir libraries in the [awesome-noir repo here](https://github.com/noir-lang/awesome-noir#libraries). - -Some libraries that are available today include: - -- [Standard Library](https://github.com/noir-lang/noir/tree/master/noir_stdlib) - the Noir Standard Library -- [Ethereum Storage Proof Verification](https://github.com/aragonzkresearch/noir-trie-proofs) - a library that contains the primitives necessary for RLP decoding (in the form of look-up table construction) and Ethereum state and storage proof verification (or verification of any trie proof involving 32-byte long keys) -- [BigInt](https://github.com/shuklaayush/noir-bigint) - a library that provides a custom BigUint56 data type, allowing for computations on large unsigned integers -- [ECrecover](https://github.com/colinnielsen/ecrecover-noir/tree/main) - a library to verify an ECDSA signature and return the source Ethereum address -- [Sparse Merkle Tree Verifier](https://github.com/vocdoni/smtverifier-noir/tree/main) - a library for verification of sparse Merkle trees -- [Signed Int](https://github.com/resurgencelabs/signed_int) - a library for accessing a custom Signed Integer data type, allowing access to negative numbers on Noir -- [Fraction](https://github.com/resurgencelabs/fraction) - a library for accessing fractional number data type in Noir, allowing results that aren't whole numbers - -See the section on [dependencies](./modules_packages_crates/dependencies) for more information. diff --git a/docs/versioned_docs/version-v0.10.5/language_concepts/01_functions.md b/docs/versioned_docs/version-v0.10.5/language_concepts/01_functions.md deleted file mode 100644 index 7cb43c4c5f2..00000000000 --- a/docs/versioned_docs/version-v0.10.5/language_concepts/01_functions.md +++ /dev/null @@ -1,99 +0,0 @@ ---- -title: Functions -description: - Learn how to declare functions and methods in Noir, a programming language with Rust semantics. - This guide covers parameter declaration, return types, call expressions, and more. -keywords: [Noir, Rust, functions, methods, parameter declaration, return types, call expressions] ---- - -Functions in Noir follow the same semantics of Rust, though Noir does not support early returns. - -To declare a function the `fn` keyword is used. - -```rust -fn foo() {} -``` - -All parameters in a function must have a type and all types are known at compile time. The parameter -is pre-pended with a colon and the parameter type. 
Multiple parameters are separated using a comma. - -```rust -fn foo(x : Field, y : pub Field){} -``` - -The return type of a function can be stated by using the `->` arrow notation. The function below -states that the foo function must return a `Field`. If the function returns no value, then the arrow -is omitted. - -```rust -fn foo(x : Field, y : pub Field) -> Field { - x + y -} -``` - -Note that a `return` keyword is unneeded in this case - the last expression in a function's body is -returned. - -## Call Expressions - -Calling a function in Noir is executed by using the function name and passing in the necessary -arguments. - -Below we show how to call the `foo` function from the `main` function using a call expression: - -```rust -fn main(x : Field, y : Field) { - let z = foo(x); -} - -fn foo(x : Field) -> Field { - x + x -} -``` - -## Methods - -You can define methods in Noir on any struct type in scope. - -```rust -struct MyStruct { - foo: Field, - bar: Field, -} - -impl MyStruct { - fn new(foo: Field) -> MyStruct { - MyStruct { - foo, - bar: 2, - } - } - - fn sum(self) -> Field { - self.foo + self.bar - } -} - -fn main() { - let s = MyStruct::new(40); - assert(s.sum() == 42); -} -``` - -Methods are just syntactic sugar for functions, so if we wanted to we could also call `sum` as -follows: - -```rust -assert(MyStruct::sum(s) == 42); -``` - -## Lambdas - -Lambdas are anonymous functions. They follow the syntax of Rust - `|arg1, arg2, ..., argN| return_expression`. - -```rust -let add_50 = |val| val + 50; -assert(add_50(100) == 150); -``` - -See [Lambdas](./08_lambdas.md) for more details. diff --git a/docs/versioned_docs/version-v0.10.5/language_concepts/02_control_flow.md b/docs/versioned_docs/version-v0.10.5/language_concepts/02_control_flow.md deleted file mode 100644 index 691c514d9a8..00000000000 --- a/docs/versioned_docs/version-v0.10.5/language_concepts/02_control_flow.md +++ /dev/null @@ -1,42 +0,0 @@ ---- -title: Control Flow -description: - Learn how to use loops and if expressions in the Noir programming language. Discover the syntax - and examples for for loops and if-else statements. -keywords: [Noir programming language, loops, for loop, if-else statements, Rust syntax] ---- - -## Loops - -Noir has one kind of loop: the `for` loop. `for` loops allow you to repeat a block of code multiple -times. - -The following block of code between the braces is run 10 times. - -```rust -for i in 0..10 { - // do something -}; -``` - -## If Expressions - -Noir supports `if-else` statements. The syntax is most similar to Rust's where it is not required -for the statement's conditional to be surrounded by parentheses. - -```rust -let a = 0; -let mut x: u32 = 0; - -if a == 0 { - if a != 0 { - x = 6; - } else { - x = 2; - } -} else { - x = 5; - assert(x == 5); -} -assert(x == 2); -``` diff --git a/docs/versioned_docs/version-v0.10.5/language_concepts/03_ops.md b/docs/versioned_docs/version-v0.10.5/language_concepts/03_ops.md deleted file mode 100644 index da02b126059..00000000000 --- a/docs/versioned_docs/version-v0.10.5/language_concepts/03_ops.md +++ /dev/null @@ -1,97 +0,0 @@ ---- -title: Logical Operations -description: - Learn about the supported arithmetic and logical operations in the Noir programming language. - Discover how to perform operations on private input types, integers, and booleans. 
-keywords: - [ - Noir programming language, - supported operations, - arithmetic operations, - logical operations, - predicate operators, - bitwise operations, - short-circuiting, - backend, - ] ---- - -# Operations - -## Table of Supported Operations - -| Operation | Description | Requirements | -| :-------- | :------------------------------------------------------------: | -------------------------------------: | -| + | Adds two private input types together | Types must be private input | -| - | Subtracts two private input types together | Types must be private input | -| \* | Multiplies two private input types together | Types must be private input | -| / | Divides two private input types together | Types must be private input | -| ^ | XOR two private input types together | Types must be integer | -| & | AND two private input types together | Types must be integer | -| \| | OR two private input types together | Types must be integer | -| << | Left shift an integer by another integer amount | Types must be integer | -| >> | Right shift an integer by another integer amount | Types must be integer | -| ! | Bitwise not of a value | Type must be integer or boolean | -| < | returns a bool if one value is less than the other | Upper bound must have a known bit size | -| <= | returns a bool if one value is less than or equal to the other | Upper bound must have a known bit size | -| > | returns a bool if one value is more than the other | Upper bound must have a known bit size | -| >= | returns a bool if one value is more than or equal to the other | Upper bound must have a known bit size | -| == | returns a bool if one value is equal to the other | Both types must not be constants | -| != | returns a bool if one value is not equal to the other | Both types must not be constants | - -### Predicate Operators - -`<,<=, !=, == , >, >=` are known as predicate/comparison operations because they compare two values. -This differs from the operations such as `+` where the operands are used in _computation_. - -### Bitwise Operations Example - -```rust -fn main(x : Field) { - let y = x as u32; - let z = y & y; -} -``` - -`z` is implicitly constrained to be the result of `y & y`. The `&` operand is used to denote bitwise -`&`. - -> `x & x` would not compile as `x` is a `Field` and not an integer type. - -### Logical Operators - -Noir has no support for the logical operators `||` and `&&`. This is because encoding the -short-circuiting that these operators require can be inefficient for Noir's backend. Instead you can -use the bitwise operators `|` and `&` which operate indentically for booleans, just without the -short-circuiting. - -```rust -let my_val = 5; - -let mut flag = 1; -if (my_val > 6) | (my_val == 0) { - flag = 0; -} -assert(flag == 1); - -if (my_val != 10) & (my_val < 50) { - flag = 0; -} -assert(flag == 0); -``` - -### Shorthand operators - -Noir shorthand operators for most of the above operators, namely `+=, -=, *=, /=, %=, &=, |=, ^=, <<=`, and `>>=`. These allow for more concise syntax. 
For example: - -```rust -let mut i = 0; -i = i + 1; -``` - -could be written as: - -```rust -let mut i = 0; -i += 1; -``` diff --git a/docs/versioned_docs/version-v0.10.5/language_concepts/04_assert.md b/docs/versioned_docs/version-v0.10.5/language_concepts/04_assert.md deleted file mode 100644 index a25a946123d..00000000000 --- a/docs/versioned_docs/version-v0.10.5/language_concepts/04_assert.md +++ /dev/null @@ -1,34 +0,0 @@ ---- -title: Assert Function -description: - Learn about the assert function in Noir, which can be used to explicitly constrain the predicate or - comparison expression that follows to be true, and what happens if the expression is false at - runtime. -keywords: [Noir programming language, assert statement, predicate expression, comparison expression] ---- - -Noir includes a special `assert` function which will explicitly constrain the predicate/comparison -expression that follows to be true. If this expression is false at runtime, the program will fail to -be proven. - -### Example - -```rust -fn main(x : Field, y : Field) { - assert(x == y); -} -``` - -The above snippet compiles because `==` is a predicate operation. Conversely, the following will not -compile: - -```rust -// INCORRECT - -fn main(x : Field, y : Field) { - assert(x + y); -} -``` - -> The rationale behind this not compiling is due to ambiguity. It is not clear if the above should -> equate to `x + y == 0` or if it should check the truthiness of the result. diff --git a/docs/versioned_docs/version-v0.10.5/language_concepts/05_unconstrained.md b/docs/versioned_docs/version-v0.10.5/language_concepts/05_unconstrained.md deleted file mode 100644 index 6b621eda3eb..00000000000 --- a/docs/versioned_docs/version-v0.10.5/language_concepts/05_unconstrained.md +++ /dev/null @@ -1,96 +0,0 @@ ---- -title: Unconstrained Functions -description: "Learn about what unconstrained functions in Noir are, how to use them and when you'd want to." - -keywords: [Noir programming language, unconstrained, open] ---- - - - -Unconstrained functions are functions which do not constrain any of the included computation and allow for non-determinisitic computation. - -## Why? - -Zero-knowledge (ZK) domain-specific languages (DSL) enable developers to generate ZK proofs from their programs by compiling code down to the constraints of an NP complete language (such as R1CS or PLONKish languages). However, the hard bounds of a constraint system can be very limiting to the functionality of a ZK DSL. - -Enabling a circuit language to perform unconstrained execution is a powerful tool. Said another way, unconstrained execution lets developers generate witnesses from code that does not generate any constraints. Being able to execute logic outside of a circuit is critical for both circuit performance and constructing proofs on information that is external to a circuit. - -Fetching information from somewhere external to a circuit can also be used to enable developers to improve circuit efficiency. - -A ZK DSL does not just prove computation, but proves that some computation was handled correctly. Thus, it is necessary that when we switch from performing some operation directly inside of a circuit to inside of an unconstrained environment that the appropriate constraints are still laid down elsewhere in the circuit. - -## Example - -An in depth example might help drive the point home. 
This example comes from the excellent [post](https://discord.com/channels/1113924620781883405/1124022445054111926/1128747641853972590) by Tom in the Noir Discord. - -Let's look at how we can optimize a function to turn a `u72` into an array of `u8`s. - -```rust -fn main(num: u72) -> pub [u8; 8] { - let mut out: [u8; 8] = [0; 8]; - for i in 0..8 { - out[i] = (num >> (56 - (i * 8)) as u72 & 0xff) as u8; - } - - out -} -``` - -``` -Total ACIR opcodes generated for language PLONKCSat { width: 3 }: 91 -Backend circuit size: 3619 -``` - -A lot of the operations in this function are optimized away by the compiler (all the bit-shifts turn into divisions by constants). However we can save a bunch of gates by casting to u8 a bit earlier. This automatically truncates the bit-shifted value to fit in a u8 which allows us to remove the XOR against 0xff. This saves us ~480 gates in total. - -```rust -fn main(num: u72) -> pub [u8; 8] { - let mut out: [u8; 8] = [0; 8]; - for i in 0..8 { - out[i] = (num >> (56 - (i * 8)) as u8; - } - - out -} -``` - -``` -Total ACIR opcodes generated for language PLONKCSat { width: 3 }: 75 -Backend circuit size: 3143 -``` - -Those are some nice savings already but we can do better. This code is all constrained so we're proving every step of calculating out using num, but we don't actually care about how we calculate this, just that it's correct. This is where brillig comes in. - -It turns out that truncating a u72 into a u8 is hard to do inside a snark, each time we do as u8 we lay down 4 ACIR opcodes which get converted into multiple gates. It's actually much easier to calculate num from out than the other way around. All we need to do is multiply each element of out by a constant and add them all together, both relatively easy operations inside a snark. - -We can then run u72_to_u8 as unconstrained brillig code in order to calculate out, then use that result in our constrained function and assert that if we were to do the reverse calculation we'd get back num. This looks a little like the below: - -```rust -fn main(num: u72) -> pub [u8; 8] { - let out = u72_to_u8(num); - - let mut reconstructed_num: u72 = 0; - for i in 0..8 { - reconstructed_num += (out[i] as u72 << (56 - (8 * i))); - } - assert(num == reconstructed_num); - out -} - -unconstrained fn u72_to_u8(num: u72) -> [u8; 8] { - let mut out: [u8; 8] = [0; 8]; - for i in 0..8 { - out[i] = (num >> (56 - (i * 8))) as u8; - } - out -} -``` - -``` -Total ACIR opcodes generated for language PLONKCSat { width: 3 }: 78 -Backend circuit size: 2902 -``` - -This ends up taking off another ~250 gates from our circuit! We've ended up with more ACIR opcodes than before but they're easier for the backend to prove (resulting in fewer gates). - -Generally we want to use brillig whenever there's something that's easy to verify but hard to compute within the circuit. For example, if you wanted to calculate a square root of a number it'll be a much better idea to calculate this in brillig and then assert that if you square the result you get back your number. diff --git a/docs/versioned_docs/version-v0.10.5/language_concepts/07_mutability.md b/docs/versioned_docs/version-v0.10.5/language_concepts/07_mutability.md deleted file mode 100644 index 4641521b1d9..00000000000 --- a/docs/versioned_docs/version-v0.10.5/language_concepts/07_mutability.md +++ /dev/null @@ -1,92 +0,0 @@ ---- -title: Mutability -description: - Learn about mutable variables, constants, and globals in Noir programming language. 
Discover how - to declare, modify, and use them in your programs. -keywords: [noir programming language, mutability in noir, mutable variables, constants, globals] ---- - -Variables in noir can be declared mutable via the `mut` keyword. Mutable variables can be reassigned -to via an assignment expression. - -```rust -let x = 2; -x = 3; // error: x must be mutable to be assigned to - -let mut y = 3; -let y = 4; // OK -``` - -The `mut` modifier can also apply to patterns: - -```rust -let (a, mut b) = (1, 2); -a = 11; // error: a must be mutable to be assigned to -b = 12; // OK - -let mut (c, d) = (3, 4); -c = 13; // OK -d = 14; // OK - -// etc. -let MyStruct { x: mut y } = MyStruct { x: a }; -// y is now in scope -``` - -Note that mutability in noir is local and everything is passed by value, so if a called function -mutates its parameters then the parent function will keep the old value of the parameters. - -```rust -fn main() -> Field { - let x = 3; - helper(x); - x // x is still 3 -} - -fn helper(mut x: i32) { - x = 4; -} -``` - -## Comptime Values - -:::warning - -The 'comptime' keyword was removed in version 0.10. The comptime keyword and syntax are currently still kept and parsed for backwards compatibility, but are now deprecated and will issue a warning when used. `comptime` has been removed because it is no longer needed for accessing arrays. - -::: - -## Globals - -Noir also supports global variables. However, they must be known at compile-time. The global type can also be inferred by the compiler entirely. Globals can also be used to specify array -annotations for function parameters and can be imported from submodules. - -```rust -global N: Field = 5; // Same as `global N: Field = 5` - -fn main(x : Field, y : [Field; N]) { - let res = x * N; - - assert(res == y[0]); - - let res2 = x * mysubmodule::N; - assert(res != res2); -} - -mod mysubmodule { - use dep::std; - - global N: Field = 10; - - fn my_helper() -> Field { - let x = N; - x - } -} -``` - -## Why only local mutability? - -Witnesses in a proving system are immutable in nature. Noir aims to _closely_ mirror this setting -without applying additional overhead to the user. Modeling a mutable reference is not as -straightforward as on conventional architectures and would incur some possibly unexpected overhead. diff --git a/docs/versioned_docs/version-v0.10.5/language_concepts/data_types.md b/docs/versioned_docs/version-v0.10.5/language_concepts/data_types.md deleted file mode 100644 index d546cc463a8..00000000000 --- a/docs/versioned_docs/version-v0.10.5/language_concepts/data_types.md +++ /dev/null @@ -1,96 +0,0 @@ ---- -title: Data Types -description: - Get a clear understanding of the two categories of Noir data types - primitive types and compound - types. Learn about their characteristics, differences, and how to use them in your Noir - programming. -keywords: - [ - noir, - data types, - primitive types, - compound types, - private types, - public types, - ] ---- - -Every value in Noir has a type, which determines which operations are valid for it. - -All values in Noir are fundamentally composed of `Field` elements. For a more approachable -developing experience, abstractions are added on top to introduce different data types in Noir. - -Noir has two category of data types: primitive types (e.g. `Field`, integers, `bool`) and compound -types that group primitive types (e.g. arrays, tuples, structs). Each value can either be private or -public. 
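For a quick illustration (an arbitrary snippet of ours, not taken from the original page), here is a primitive value alongside a compound value that groups primitives together:

```rust
fn main(x: Field, flag: bool) {
    // `x` and `flag` are primitive values.
    let doubled: Field = x + x;

    // A tuple is a compound type grouping primitive values together.
    let pair: (Field, bool) = (doubled, flag);
    assert(pair.1 == flag);
}
```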
- -## Private & Public Types - -A **private value** is known only to the Prover, while a **public value** is known by both the -Prover and Verifier. Mark values as `private` when the value should only be known to the prover. All -primitive types (including individual fields of compound types) in Noir are private by default, and -can be marked public when certain values are intended to be revealed to the Verifier. - -> **Note:** For public values defined in Noir programs paired with smart contract verifiers, once -> the proofs are verified on-chain the values can be considered known to everyone that has access to -> that blockchain. - -Public data types are treated no differently to private types apart from the fact that their values -will be revealed in proofs generated. Simply changing the value of a public type will not change the -circuit (where the same goes for changing values of private types as well). - -_Private values_ are also referred to as _witnesses_ sometimes. - -> **Note:** The terms private and public when applied to a type (e.g. `pub Field`) have a different -> meaning than when applied to a function (e.g. `pub fn foo() {}`). -> -> The former is a visibility modifier for the Prover to interpret if a value should be made known to -> the Verifier, while the latter is a visibility modifier for the compiler to interpret if a -> function should be made accessible to external Noir programs like in other languages. - -### pub Modifier - -All data types in Noir are private by default. Types are explicitly declared as public using the -`pub` modifier: - -```rust -fn main(x : Field, y : pub Field) -> pub Field { - x + y -} -``` - -In this example, `x` is **private** while `y` and `x + y` (the return value) are **public**. Note -that visibility is handled **per variable**, so it is perfectly valid to have one input that is -private and another that is public. - -> **Note:** Public types can only be declared through parameters on `main`. - -## Type Aliases - -A type alias is a new name for an existing type. Type aliases are declared with the keyword `type`: - -```rust -type Id = u8; - -fn main() { - let id: Id = 1; - let zero: u8 = 0; - assert(zero + 1 == id); -} -``` - -Type aliases can also be used with [generics](./06_generics.md): - -```rust -type Id = Size; - -fn main() { - let id: Id = 1; - let zero: u32 = 0; - assert(zero + 1 == id); -} -``` - -### BigInt - -You can acheive BigInt functionality using the [Noir BigInt](https://github.com/shuklaayush/noir-bigint) library. diff --git a/docs/versioned_docs/version-v0.10.5/language_concepts/data_types/00_fields.md b/docs/versioned_docs/version-v0.10.5/language_concepts/data_types/00_fields.md deleted file mode 100644 index 658a0441ffb..00000000000 --- a/docs/versioned_docs/version-v0.10.5/language_concepts/data_types/00_fields.md +++ /dev/null @@ -1,165 +0,0 @@ ---- -title: Fields -description: - Dive deep into the Field data type in Noir. Understand its methods, practical examples, and best practices to effectively use Fields in your Noir programs. -keywords: - [ - noir, - field type, - methods, - examples, - best practices, - ] ---- - -The field type corresponds to the native field type of the proving backend. - -The size of a Noir field depends on the elliptic curve's finite field for the proving backend -adopted. For example, a field would be a 254-bit integer when paired with the default backend that -spans the Grumpkin curve. 
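As a rough sketch of what this means in practice (assuming the default BN254/Grumpkin backend), arithmetic on `Field` values wraps around the backend's prime modulus `p`:

```rust
fn main() {
    // On a prime field, 0 - 1 evaluates to the largest field element (p - 1),
    // so adding 1 wraps back around to 0.
    let minus_one: Field = 0 - 1;
    assert(minus_one + 1 == 0);
}
```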
- -Fields support integer arithmetic and are often used as the default numeric type in Noir: - -```rust -fn main(x : Field, y : Field) { - let z = x + y; -} -``` - -`x`, `y` and `z` are all private fields in this example. Using the `let` keyword we defined a new -private value `z` constrained to be equal to `x + y`. - -If proving efficiency is of priority, fields should be used as a default for solving problems. -Smaller integer types (e.g. `u64`) incur extra range constraints. - -## Methods - -After declaring a Field, you can use these common methods on it: - -### to_le_bits - -Transforms the field into an array of bits, Little Endian. - -```rust -fn to_le_bits(_x : Field, _bit_size: u32) -> [u1; N] -``` - -example: - -```rust -fn main() { - let field = 2; - let bits = field.to_le_bits(32); -} -``` - -### to_be_bits - -Transforms the field into an array of bits, Big Endian. - -```rust -fn to_be_bits(_x : Field, _bit_size: u32) -> [u1; N] -``` - -example: - -```rust -fn main() { - let field = 2; - let bits = field.to_be_bits(32); -} -``` - -### to_le_bytes - -Transforms into an array of bytes, Little Endian - -```rust -fn to_le_bytes(_x : Field, byte_size: u32) -> [u8] -``` - -example: - -```rust -fn main() { - let field = 2; - let bytes = field.to_le_bytes(4); -} -``` - -### to_be_bytes - -Transforms into an array of bytes, Big Endian - -```rust -fn to_be_bytes(_x : Field, byte_size: u32) -> [u8] -``` - -example: - -```rust -fn main() { - let field = 2; - let bytes = field.to_be_bytes(4); -} -``` - -### to_le_radix - -Decomposes into a vector over the specified base, Little Endian - -```rust -fn to_le_radix(_x : Field, _radix: u32, _result_len: u32) -> [u8] -``` - -example: - -```rust -fn main() { - let field = 2; - let radix = field.to_le_radix(256, 4); -} -``` - -### to_be_radix - -Decomposes into a vector over the specified base, Big Endian - -```rust -fn to_be_radix(_x : Field, _radix: u32, _result_len: u32) -> [u8] -``` - -example: - -```rust -fn main() { - let field = 2; - let radix = field.to_be_radix(256, 4); -} -``` - -### pow_32 - -Returns the value to the power of the specified exponent - -```rust -fn pow_32(self, exponent: Field) -> Field -``` - -example: - -```rust -fn main() { - let field = 2 - let pow = field.pow_32(4); - assert(pow == 16); -} -``` - -### sgn0 - -Parity of (prime) Field element, i.e. sgn0(x mod p) = 0 if x ∈ {0, ..., p-1} is even, otherwise sgn0(x mod p) = 1. - -```rust -fn sgn0(self) -> u1 -``` diff --git a/docs/versioned_docs/version-v0.10.5/language_concepts/data_types/01_integers.md b/docs/versioned_docs/version-v0.10.5/language_concepts/data_types/01_integers.md deleted file mode 100644 index d9c5e20e795..00000000000 --- a/docs/versioned_docs/version-v0.10.5/language_concepts/data_types/01_integers.md +++ /dev/null @@ -1,33 +0,0 @@ ---- -title: Integers -description: - Explore the Integer data type in Noir. Learn about its methods, see real-world examples, and grasp how to efficiently use Integers in your Noir code. -keywords: - [ - noir, - integer types, - methods, - examples, - arithmetic, - ] ---- - -An integer type is a range constrained field type. The Noir frontend currently supports unsigned, -arbitrary-sized integer types. - -An integer type is specified first with the letter `u`, indicating its unsigned nature, followed by -its length in bits (e.g. `32`). 
For example, a `u32` variable can store a value in the range of -$\\([0,2^{32}-1]\\)$: - -```rust -fn main(x : Field, y : u32) { - let z = x as u32 + y; -} -``` - -`x`, `y` and `z` are all private values in this example. However, `x` is a field while `y` and `z` -are unsigned 32-bit integers. If `y` or `z` exceeds the range $\\([0,2^{32}-1]\\)$, proofs created -will be rejected by the verifier. - -> **Note:** The default backend supports both even (e.g. `u16`, `u48`) and odd (e.g. `u5`, `u3`) -> sized integer types. diff --git a/docs/versioned_docs/version-v0.10.5/language_concepts/data_types/03_strings.md b/docs/versioned_docs/version-v0.10.5/language_concepts/data_types/03_strings.md deleted file mode 100644 index ee69853bfba..00000000000 --- a/docs/versioned_docs/version-v0.10.5/language_concepts/data_types/03_strings.md +++ /dev/null @@ -1,43 +0,0 @@ ---- -title: Strings -description: - Discover the String data type in Noir. Learn about its methods, see real-world examples, and understand how to effectively manipulate and use Strings in Noir. -keywords: - [ - noir, - string type, - methods, - examples, - concatenation, - ] ---- - - -The string type is a fixed length value defined with `str`. - -You can use strings in `assert()` functions or print them with -`std::println()`. See more about [Logging](../../standard_library/logging). - -```rust -use dep::std; - -fn main(message : pub str<11>, hex_as_string : str<4>) { - std::println(message); - assert(message == "hello world"); - assert(hex_as_string == "0x41"); -} -``` - -You can convert a `str` to a byte array by calling `as_bytes()` -or a vector by calling `as_bytes_vec()`. - -```rust -fn main() { - let message = "hello world"; - let message_bytes = message.as_bytes(); - let mut message_vec = message.as_bytes_vec(); - assert(message_bytes.len() == 11); - assert(message_bytes[0] == 104); - assert(message_bytes[0] == message_vec.get(0)); -} -``` diff --git a/docs/versioned_docs/version-v0.10.5/language_concepts/data_types/04_arrays.md b/docs/versioned_docs/version-v0.10.5/language_concepts/data_types/04_arrays.md deleted file mode 100644 index f826b39bf39..00000000000 --- a/docs/versioned_docs/version-v0.10.5/language_concepts/data_types/04_arrays.md +++ /dev/null @@ -1,237 +0,0 @@ ---- -title: Arrays -description: - Dive into the Array data type in Noir. Grasp its methods, practical examples, and best practices for efficiently using Arrays in your Noir code. -keywords: - [ - noir, - array type, - methods, - examples, - indexing, - ] ---- - -An array is one way of grouping together values into one compound type. Array types can be inferred -or explicitly specified via the syntax `[; ]`: - -```rust -fn main(x : Field, y : Field) { - let my_arr = [x, y]; - let your_arr: [Field; 2] = [x, y]; -} -``` - -Here, both `my_arr` and `your_arr` are instantiated as an array containing two `Field` elements. - -Array elements can be accessed using indexing: - -```rust -fn main() { - let a = [1, 2, 3, 4, 5]; - - let first = a[0]; - let second = a[1]; -} -``` - -All elements in an array must be of the same type (i.e. homogeneous). That is, an array cannot group -a `Field` value and a `u8` value together for example. - -You can write mutable arrays, like: - -```rust -fn main() { - let mut arr = [1, 2, 3, 4, 5]; - assert(arr[0] == 1); - - arr[0] = 42; - assert(arr[0] == 42); -} -``` - -You can instantiate a new array of a fixed size with the same value repeated for each element. 
The following example instantiates an array of length 32 where each element is of type Field and has the value 0. - -```rust -let array: [Field; 32] = [0; 32]; -``` - -Like in Rust, arrays in Noir are a fixed size. However, if you wish to convert an array to a [slice](./slices), you can just call `as_slice` on your array: - -```rust -let array: [Field; 32] = [0; 32]; -let sl = array.as_slice() -``` - -## Types - -You can create arrays of primitive types or structs. There is not yet support for nested arrays -(arrays of arrays) or arrays of structs that contain arrays. - -## Methods - -For convenience, the STD provides some ready-to-use, common methods for arrays: - -### len - -Returns the length of an array - -```rust -fn len(_array: [T; N]) -> comptime Field -``` - -example - -```rust -fn main() { - let array = [42, 42]; - assert(array.len() == 2); -} -``` - -### sort - -Returns a new sorted array. The original array remains untouched. Notice that this function will -only work for arrays of fields or integers, not for any arbitrary type. This is because the sorting -logic it uses internally is optimized specifically for these values. If you need a sort function to -sort any type, you should use the function `sort_via` described below. - -```rust -fn sort(_array: [T; N]) -> [T; N] -``` - -example - -```rust -fn main() { - let arr = [42, 32]; - let sorted = arr.sort(); - assert(sorted == [32, 42]); -} -``` - -### sort_via - -Sorts the array with a custom comparison function - -```rust -fn sort_via(mut a: [T; N], ordering: fn(T, T) -> bool) -> [T; N] -``` - -example - -```rust -fn main() { - let arr = [42, 32] - let sorted_ascending = arr.sort_via(|a, b| a < b); - assert(sorted_ascending == [32, 42]); // verifies - - let sorted_descending = arr.sort_via(|a, b| a > b); - assert(sorted_descending == [32, 42]); // does not verify -} -``` - -### map - -Applies a function to each element of the array, returning a new array containing the mapped elements. - -```rust -fn map(f: fn(T) -> U) -> [U; N] -``` - -example - -```rust -let a = [1, 2, 3]; -let b = a.map(|a| a * 2); // b is now [2, 4, 6] -``` - -### fold - -Applies a function to each element of the array, returning the final accumulated value. The first -parameter is the initial value. - -```rust -fn fold(mut accumulator: U, f: fn(U, T) -> U) -> U -``` - -This is a left fold, so the given function will be applied to the accumulator and first element of -the array, then the second, and so on. For a given call the expected result would be equivalent to: - -```rust -let a1 = [1]; -let a2 = [1, 2]; -let a3 = [1, 2, 3]; - -let f = |a, b| a - b; -a1.fold(10, f) //=> f(10, 1) -a2.fold(10, f) //=> f(f(10, 1), 2) -a3.fold(10, f) //=> f(f(f(10, 1), 2), 3) -``` - -example: - -```rust - -fn main() { - let arr = [2, 2, 2, 2, 2]; - let folded = arr.fold(0, |a, b| a + b); - assert(folded == 10); -} - -``` - -### reduce - -Same as fold, but uses the first element as starting element. 
- -```rust -fn reduce(f: fn(T, T) -> T) -> T -``` - -example: - -```rust -fn main() { - let arr = [2, 2, 2, 2, 2]; - let reduced = arr.reduce(|a, b| a + b); - assert(reduced == 10); -} -``` - -### all - -Returns true if all the elements satisfy the given predicate - -```rust -fn all(predicate: fn(T) -> bool) -> bool -``` - -example: - -```rust -fn main() { - let arr = [2, 2, 2, 2, 2]; - let all = arr.all(|a| a == 2); - assert(all); -} -``` - -### any - -Returns true if any of the elements satisfy the given predicate - -```rust -fn any(predicate: fn(T) -> bool) -> bool -``` - -example: - -```rust -fn main() { - let arr = [2, 2, 2, 2, 5]; - let any = arr.any(|a| a == 5); - assert(any); -} - -``` diff --git a/docs/versioned_docs/version-v0.10.5/language_concepts/data_types/05_slices.md b/docs/versioned_docs/version-v0.10.5/language_concepts/data_types/05_slices.md deleted file mode 100644 index bc7f5c531e7..00000000000 --- a/docs/versioned_docs/version-v0.10.5/language_concepts/data_types/05_slices.md +++ /dev/null @@ -1,157 +0,0 @@ ---- -title: Slices -description: - Explore the Slice data type in Noir. Understand its methods, see real-world examples, and learn how to effectively use Slices in your Noir programs. -keywords: - [ - noir, - slice type, - methods, - examples, - subarrays, - ] ---- - -:::caution - -This feature is experimental. You should expect it to change in future versions, -cause unexpected behavior, or simply not work at all. - -::: - -A slice is a dynamically-sized view into a sequence of elements. They can be resized at runtime, but because they don't own the data, they cannot be returned from a circuit. You can treat slices as arrays without a constrained size. - -```rust -use dep::std::slice; - -fn main() -> pub Field { - let mut slice: [Field] = [0; 2]; - - let mut new_slice = slice.push_back(6); - new_slice.len() -} -``` - -View the corresponding test file [here]([test-file]. - -[test-file]: https://github.com/noir-lang/noir/blob/f387ec1475129732f72ba294877efdf6857135ac/crates/nargo_cli/tests/test_data_ssa_refactor/slices/src/main.nr - -## Methods - -For convenience, the STD provides some ready-to-use, common methods for slices: - -### push_back - -Pushes a new element to the end of the slice, returning a new slice with a length one greater than the original unmodified slice. - -```rust -fn push_back(_self: [T], _elem: T) -> [T] -``` - -example: - -```rust -fn main() -> pub Field { - let mut slice: [Field] = [0; 2]; - - let mut new_slice = slice.push_back(6); - new_slice.len() -} -``` - -View the corresponding test file [here][test-file]. - -### push_front - -Returns a new array with the specified element inserted at index 0. The existing elements indexes are incremented by 1. - -```rust -fn push_front(_self: Self, _elem: T) -> Self -``` - -Example: - -```rust -let mut new_slice: [Field] = []; -new_slice = new_slice.push_front(20); -assert(new_slice[0] == 20); // returns true -``` - -View the corresponding test file [here][test-file]. - -### pop_front - -Returns a tuple of two items, the first element of the array and the rest of the array. - -```rust -fn pop_front(_self: Self) -> (T, Self) -``` - -Example: - -```rust -let (first_elem, rest_of_slice) = slice.pop_front(); -``` - -View the corresponding test file [here][test-file]. - -### pop_back - -Returns a tuple of two items, the beginning of the array with the last element omitted and the last element. 
- -```rust -fn pop_back(_self: Self) -> (Self, T) -``` - -Example: - -```rust -let (popped_slice, last_elem) = slice.pop_back(); -``` - -View the corresponding test file [here][test-file]. - -### append - -Loops over a slice and adds it to the end of another. - -```rust -fn append(mut self, other: Self) -> Self -``` - -Example: - -```rust -let append = [1, 2].append([3, 4, 5]); -``` - -### insert - -Inserts an element at a specified index and shifts all following elements by 1. - -```rust -fn insert(_self: Self, _index: Field, _elem: T) -> Self -``` - -Example: - -```rust -new_slice = rest_of_slice.insert(2, 100); -assert(new_slice[2] == 100); -``` - -View the corresponding test file [here][test-file]. - -### remove - -Remove an element at a specified index, shifting all elements after it to the left, returning the altered slice and the removed element. - -```rust -fn remove(_self: Self, _index: Field) -> (Self, T) -``` - -Example: - -```rust -let (remove_slice, removed_elem) = slice.remove(3); -``` diff --git a/docs/versioned_docs/version-v0.10.5/language_concepts/data_types/06_vectors.md b/docs/versioned_docs/version-v0.10.5/language_concepts/data_types/06_vectors.md deleted file mode 100644 index c5b74c877f8..00000000000 --- a/docs/versioned_docs/version-v0.10.5/language_concepts/data_types/06_vectors.md +++ /dev/null @@ -1,34 +0,0 @@ ---- -title: Vectors -description: - Delve into the Vector data type in Noir. Learn about its methods, practical examples, and best practices for using Vectors in your Noir code. -keywords: - [ - noir, - vector type, - methods, - examples, - dynamic arrays, - ] ---- - -:::caution - -This feature is experimental. You should expect it to change in future versions, -cause unexpected behavior, or simply not work at all. - -::: - -A vector is a collection type similar to Rust's Vector type. It's convenient way to use slices as mutable arrays. - -Example: - -```rust -use dep::std::collections::vec::Vec; - -let mut vector: Vec = Vec::new(); -for i in 0..5 { - vector.push(i); -} -assert(vector.len() == 5); -``` diff --git a/docs/versioned_docs/version-v0.10.5/modules_packages_crates/crates_and_packages.md b/docs/versioned_docs/version-v0.10.5/modules_packages_crates/crates_and_packages.md deleted file mode 100644 index fb83a33d94e..00000000000 --- a/docs/versioned_docs/version-v0.10.5/modules_packages_crates/crates_and_packages.md +++ /dev/null @@ -1,42 +0,0 @@ ---- -title: Crates and Packages -description: Learn how to use Crates and Packages in your Noir project -keywords: [Nargo, dependencies, package management, crates, package] ---- - -## Crates - -A crate is the smallest amount of code that the Noir compiler considers at a time. -Crates can contain modules, and the modules may be defined in other files that get compiled with the crate, as we’ll see in the coming sections. - -### Crate Types - -A Noir crate can come in several forms: binaries, libraries or contracts. - -#### Binaries - -_Binary crates_ are programs which you can compile to an ACIR circuit which you can then create proofs against. Each must have a function called `main` that defines the ACIR circuit which is to be proved. - -#### Libraries - -_Library crates_ don't have a `main` function and they don't compile down to ACIR. Instead they define functionality intended to be shared with multiple projects, and eventually included in a binary crate. - -#### Contracts - -Contract crates are similar to binary crates in that they compile to ACIR which you can create proofs against. 
They are different in that they do not have a single `main` function, but are a collection of functions to be deployed to the [Aztec network](https://aztec.network). You can learn more about the technical details of Aztec in the [monorepo](https://github.com/AztecProtocol/aztec-packages) or contract [examples](https://github.com/AztecProtocol/aztec-packages/tree/master/yarn-project/noir-contracts/src/contracts). - -### Crate Root - -Every crate has a root, which is the source file that the compiler starts, this is also known as the root module. The Noir compiler does not enforce any conditions on the name of the file which is the crate root, however if you are compiling via Nargo the crate root must be called `lib.nr` or `main.nr` for library or binary crates respectively. - -## Packages - -A Nargo _package_ is a collection of one of more crates that provides a set of functionality. A package must include a Nargo.toml file. - -A package _must_ contain either a library or a binary crate, but not both. - -### Differences from Cargo Packages - -One notable difference between Rust's Cargo and Noir's Nargo is that while Cargo allows a package to contain an unlimited number of binary crates and a single library crate, Nargo currently only allows a package to contain a single crate. - -In future this restriction may be lifted to allow a Nargo package to contain both a binary and library crate or multiple binary crates. diff --git a/docs/versioned_docs/version-v0.10.5/modules_packages_crates/dependencies.md b/docs/versioned_docs/version-v0.10.5/modules_packages_crates/dependencies.md deleted file mode 100644 index 9e6463801b7..00000000000 --- a/docs/versioned_docs/version-v0.10.5/modules_packages_crates/dependencies.md +++ /dev/null @@ -1,123 +0,0 @@ ---- -title: Dependencies -description: - Learn how to specify and manage dependencies in Nargo, allowing you to upload packages to GitHub - and use them easily in your project. -keywords: [Nargo, dependencies, GitHub, package management, versioning] ---- - -Nargo allows you to upload packages to GitHub and use them as dependencies. - -## Specifying a dependency - -Specifying a dependency requires a tag to a specific commit and the git url to the url containing -the package. - -Currently, there are no requirements on the tag contents. If requirements are added, it would follow -semver 2.0 guidelines. - -> Note: Without a `tag` , there would be no versioning and dependencies would change each time you -> compile your project. - -For example, to add the [ecrecover-noir library](https://github.com/colinnielsen/ecrecover-noir) to your project, add it to `Nargo.toml`: - -```toml -# Nargo.toml - -[dependencies] -ecrecover = {tag = "v0.8.0", git = "https://github.com/colinnielsen/ecrecover-noir"} -``` - -If the module is in a subdirectory, you can define a subdirectory in your git repository, for example: - -```toml -# Nargo.toml - -[dependencies] -easy_private_token_contract = {tag ="v0.1.0-alpha62", git = "https://github.com/AztecProtocol/aztec-packages", directory = "yarn-project/noir-contracts/src/contracts/easy_private_token_contract"} -``` - -## Specifying a local dependency - -You can also specify dependencies that are local to your machine. 
- -For example, this file structure has a library and binary crate - -```tree -├── binary_crate -│   ├── Nargo.toml -│   └── src -│   └── main.nr -└── liba - ├── Nargo.toml - └── src - └── lib.nr -``` - -Inside of the binary crate, you can specify: - -```toml -# Nargo.toml - -[dependencies] -libA = { path = "../liba" } -``` - -## Importing dependencies - -You can import a dependency to a Noir file using the following syntax. For example, to import the -ecrecover-noir library and local liba referenced above: - -```rust -use dep::ecrecover; -use dep::libA; -``` - -You can also import only the specific parts of dependency that you want to use, like so: - -```rust -use dep::std::hash::sha256; -use dep::std::scalar_mul::fixed_base; -``` - -Lastly, as demonstrated in the -[elliptic curve example](../standard_library/cryptographic_primitives/ec_primitives#examples), you -can import multiple items in the same line by enclosing them in curly braces: - -```rust -use dep::std::ec::tecurve::affine::{Curve, Point}; -``` - -We don't have a way to consume libraries from inside a [workspace](./workspaces) as external dependencies right now. - -Inside a workspace, these are consumed as `{ path = "../to_lib" }` dependencies in Nargo.toml. - -## Dependencies of Dependencies - -Note that when you import a dependency, you also get access to all of the dependencies of that package. - -For example, the [phy_vector](https://github.com/resurgencelabs/phy_vector) library imports an [fraction](https://github.com/resurgencelabs/fraction) library. If you're importing the phy_vector library, then you can access the functions in fractions library like so: - -```rust -use dep::phy_vector; - -fn main(x : Field, y : pub Field) { - //... - let f = phy_vector::fraction::toFraction(true, 2, 1); - //... -} -``` - -## Available Libraries - -Noir does not currently have an official package manager. You can find a list of available Noir libraries in the [awesome-noir repo here](https://github.com/noir-lang/awesome-noir#libraries). 
- -Some libraries that are available today include: - -- [Standard Library](https://github.com/noir-lang/noir/tree/master/noir_stdlib) - the Noir Standard Library -- [Ethereum Storage Proof Verification](https://github.com/aragonzkresearch/noir-trie-proofs) - a library that contains the primitives necessary for RLP decoding (in the form of look-up table construction) and Ethereum state and storage proof verification (or verification of any trie proof involving 32-byte long keys) -- [BigInt](https://github.com/shuklaayush/noir-bigint) - a library that provides a custom BigUint56 data type, allowing for computations on large unsigned integers -- [ECrecover](https://github.com/colinnielsen/ecrecover-noir/tree/main) - a library to verify an ECDSA signature and return the source Ethereum address -- [Sparse Merkle Tree Verifier](https://github.com/vocdoni/smtverifier-noir/tree/main) - a library for verification of sparse Merkle trees -- [Signed Int](https://github.com/resurgencelabs/signed_int) - a library for accessing a custom Signed Integer data type, allowing access to negative numbers on Noir -- [Fraction](https://github.com/resurgencelabs/fraction) - a library for accessing fractional number data type in Noir, allowing results that aren't whole numbers diff --git a/docs/versioned_docs/version-v0.10.5/modules_packages_crates/modules.md b/docs/versioned_docs/version-v0.10.5/modules_packages_crates/modules.md deleted file mode 100644 index 147c9b284e8..00000000000 --- a/docs/versioned_docs/version-v0.10.5/modules_packages_crates/modules.md +++ /dev/null @@ -1,104 +0,0 @@ ---- -title: Modules -description: - Learn how to organize your files using modules in Noir, following the same convention as Rust's - module system. Examples included. -keywords: [Noir, Rust, modules, organizing files, sub-modules] ---- - -Noir's module system follows the same convention as the _newer_ version of Rust's module system. - -## Purpose of Modules - -Modules are used to organise files. Without modules all of your code would need to live in a single -file. In Noir, the compiler does not automatically scan all of your files to detect modules. This -must be done explicitly by the developer. - -## Examples - -### Importing a module in the crate root - -Filename : `src/main.nr` - -```rust -mod foo; - -fn main() { - foo::hello_world(); -} -``` - -Filename : `src/foo.nr` - -```rust -fn from_foo() {} -``` - -In the above snippet, the crate root is the `src/main.nr` file. The compiler sees the module -declaration `mod foo` which prompts it to look for a foo.nr file. - -Visually this module hierarchy looks like the following : - -``` -crate - ├── main - │ - └── foo - └── from_foo - -``` - -### Importing a module throughout the tree - -All modules are accessible from the `crate::` namespace. - -``` -crate - ├── bar - ├── foo - └── main - -``` - -In the above snippet, if `bar` would like to use functions in `foo`, it can do so by `use crate::foo::function_name`. - -### Sub-modules - -Filename : `src/main.nr` - -```rust -mod foo; - -fn main() { - foo::from_foo(); -} -``` - -Filename : `src/foo.nr` - -```rust -mod bar; -fn from_foo() {} -``` - -Filename : `src/foo/bar.nr` - -```rust -fn from_bar() {} -``` - -In the above snippet, we have added an extra module to the module tree; `bar`. `bar` is a submodule -of `foo` hence we declare bar in `foo.nr` with `mod bar`. 
Since `foo` is not the crate root, the -compiler looks for the file associated with the `bar` module in `src/foo/bar.nr` - -Visually the module hierarchy looks as follows: - -``` -crate - ├── main - │ - └── foo - ├── from_foo - └── bar - └── from_bar -``` diff --git a/docs/versioned_docs/version-v0.10.5/modules_packages_crates/workspaces.md b/docs/versioned_docs/version-v0.10.5/modules_packages_crates/workspaces.md deleted file mode 100644 index eaa09506698..00000000000 --- a/docs/versioned_docs/version-v0.10.5/modules_packages_crates/workspaces.md +++ /dev/null @@ -1,39 +0,0 @@ ---- -title: Workspaces ---- - -Workspaces are a feature of nargo that allow you to manage multiple related Noir packages in a single repository. A workspace is essentially a group of related projects that share common build output directories and configurations. - -Each Noir project (with it's own Nargo.toml file) can be thought of as a package. Each package is expected to contain exactly one "named circuit", being the "name" defined in Nargo.toml with the program logic defined in `./src/main.nr`. - -For a project with the following structure: - -```tree -├── crates -│   ├── a -│   │   ├── Nargo.toml -│   │   └── src -│   │   └── main.nr -│   └── b -│   ├── Nargo.toml -│   └── src -│   └── main.nr -├── Nargo.toml -└── Prover.toml -``` - -You can define a workspace in Nargo.toml like so: - -```toml -[workspace] -members = ["crates/a", "crates/b"] -default-member = "crates/a" -``` - -`members` indicates which packages are included in the workspace. As such, all member packages of a workspace will be processed when the `--workspace` flag is used with various commands or if a `default-member` is not specified. - -`default-member` indicates which package various commands process by default. - -Libraries can be defined in a workspace. We just don't have a way to consume libraries from inside a workspace as external dependencies right now. - -Inside a workspace, these are consumed as `{ path = "../to_lib" }` dependencies in Nargo.toml. diff --git a/docs/versioned_docs/version-v0.10.5/nargo/01_commands.md b/docs/versioned_docs/version-v0.10.5/nargo/01_commands.md deleted file mode 100644 index 425a73a34c8..00000000000 --- a/docs/versioned_docs/version-v0.10.5/nargo/01_commands.md +++ /dev/null @@ -1,223 +0,0 @@ ---- -title: Commands -description: - Noir CLI Commands for Noir Prover and Verifier to create, execute, prove and verify programs, - generate Solidity verifier smart contract and compile into JSON file containing ACIR - representation and ABI of circuit. -keywords: - [ - Nargo, - Noir CLI, - Noir Prover, - Noir Verifier, - generate Solidity verifier, - compile JSON file, - ACIR representation, - ABI of circuit, - TypeScript, - ] ---- - -## General options - -``` -Options: - --show-ssa Emit debug information for the intermediate SSA IR - --deny-warnings Quit execution when warnings are emitted - -h, --help Print help -``` - -## `nargo help [subcommand]` - -Prints the list of available commands or specific information of a subcommand. - -_Arguments_ - -- `` - The subcommand whose help message to display - -## `nargo check` - -Generate the `Prover.toml` and `Verifier.toml` files for specifying prover and verifier in/output -values of the Noir program respectively. 
- -**Options** - -``` - --package The name of the package to check - --workspace Check all packages in the workspace - --print-acir Display the ACIR for compiled circuit - --deny-warnings Treat all warnings as errors --h, --help Print help -``` - -## `nargo codegen-verifier` - -Generate a Solidity verifier smart contract for the program. - -**Options** - -``` - --package The name of the package to codegen - --workspace Codegen all packages in the workspace - --print-acir Display the ACIR for compiled circuit - --deny-warnings Treat all warnings as errors --h, --help Print help -``` - -## `nargo compile` - -Compile the program into a JSON build artifact file containing the ACIR representation and the ABI -of the circuit. This build artifact can then be used to generate and verify proofs. - -You can also use "build" as an alias for compile (e.g. `nargo build`). - -**Options** - -``` - --include-keys Include Proving and Verification keys in the build artifacts - --package The name of the package to compile - --workspace Compile all packages in the workspace - --print-acir Display the ACIR for compiled circuit - --deny-warnings Treat all warnings as errors --h, --help Print help -``` - -## `nargo new ` - -Creates a new Noir project in a new folder. - -**Arguments** - -``` - The path to save the new project -``` - -**Options** - -``` - --name Name of the package [default: package directory name] - --lib Use a library template - --bin Use a binary template [default] - --contract Use a contract template --h, --help Print help -``` - -## `nargo init` - -Creates a new Noir project in the current directory. - -**Options** - -``` - --name Name of the package [default: current directory name] - --lib Use a library template - --bin Use a binary template [default] - --contract Use a contract template --h, --help Print help -``` - -## `nargo execute [WITNESS_NAME]` - -Runs the Noir program and prints its return value. - -**Arguments** - -``` -[WITNESS_NAME] Write the execution witness to named file -``` - -**Options** - -``` --p, --prover-name The name of the toml file which contains the inputs for the prover [default: Prover] - --package The name of the package to execute - --workspace Execute all packages in the workspace - --print-acir Display the ACIR for compiled circuit - --deny-warnings Treat all warnings as errors --h, --help Print help -``` - -_Usage_ - -The inputs to the circuit are read from the `Prover.toml` file generated by `nargo check`, which -must be filled in. - -To save the witness to file, run the command with a value for the `WITNESS_NAME` argument. A -`.tr` file will then be saved in the `./target` folder. - -> **Info:** The `.tr` file is the witness file. The witness file can be considered as program inputs -> parsed for your program's ACIR. -> -> This file can be passed along with circuit's ACIR into a TypeScript project for proving and -> verification. See the [TypeScript](../typescript#proving-and-verifying-externally-compiled-files) -> section to learn more. - -## `nargo prove` - -Creates a proof for the program. 
- -**Options** - -``` --p, --prover-name The name of the toml file which contains the inputs for the prover [default: Prover] --v, --verifier-name The name of the toml file which contains the inputs for the verifier [default: Verifier] - --verify Verify proof after proving - --package The name of the package to prove - --workspace Prove all packages in the workspace - --print-acir Display the ACIR for compiled circuit - --deny-warnings Treat all warnings as errors --h, --help Print help -``` - -## `nargo verify` - -Given a proof and a program, verify whether the proof is valid. - -**Options** - -``` --v, --verifier-name The name of the toml file which contains the inputs for the verifier [default: Verifier] - --package The name of the package verify - --workspace Verify all packages in the workspace - --print-acir Display the ACIR for compiled circuit - --deny-warnings Treat all warnings as errors --h, --help Print help -``` - -## `nargo test [TEST_NAME]` - -Nargo will automatically compile and run any functions which have the decorator `#[test]` on them if -you run `nargo test`. To print `println` statements in tests, use the `--show-output` flag. - -Takes an optional `--exact` flag which allows you to select tests based on an exact name. - -See an example on the [testing page](./testing). - -**Options** - -``` - --show-output Display output of `println` statements - --exact Only run tests that match exactly - --package The name of the package to test - --workspace Test all packages in the workspace - --print-acir Display the ACIR for compiled circuit - --deny-warnings Treat all warnings as errors --h, --help Print help -``` - -## `nargo info` - -Prints a table containing the information of the package. - -Currently the table provide - -1. The number of ACIR opcodes -2. The final number gates in the circuit used by a backend - -If the file contains a contract the table will provide the -above information about each function of the contract. - -## `nargo lsp` - -Start a long-running Language Server process that communicates over stdin/stdout. -Usually this command is not run by a user, but instead will be run by a Language Client, such as [vscode-noir](https://github.com/noir-lang/vscode-noir). diff --git a/docs/versioned_docs/version-v0.10.5/nargo/02_testing.md b/docs/versioned_docs/version-v0.10.5/nargo/02_testing.md deleted file mode 100644 index 106952c44c0..00000000000 --- a/docs/versioned_docs/version-v0.10.5/nargo/02_testing.md +++ /dev/null @@ -1,42 +0,0 @@ ---- -title: Testing in Noir -description: Learn how to use Nargo to test your Noir program in a quick and easy way -keywords: [Nargo, testing, Noir, compile, test] ---- - -You can test your Noir programs using Noir circuits. - -Nargo will automatically compile and run any functions which have the decorator `#[test]` on them if -you run `nargo test`. - -For example if you have a program like: - -```rust -fn add(x: u64, y: u64) -> u64 { - x + y -} -#[test] -fn test_add() { - assert(add(2,2) == 4); - assert(add(0,1) == 1); - assert(add(1,0) == 1); -} -``` - -Running `nargo test` will test that the `test_add` function can be executed while satisfying the all -the contraints which allows you to test that add returns the expected values. Test functions can't -have any arguments currently. - -### Test fail - -You can write tests that are expected to fail by using the decorator `#[test(should_fail)]`. 
For example: - -```rust -fn add(x: u64, y: u64) -> u64 { - x + y -} -#[test(should_fail)] -fn test_add() { - assert(add(2,2) == 5); -} -``` diff --git a/docs/versioned_docs/version-v0.10.5/standard_library/black_box_fns.md b/docs/versioned_docs/version-v0.10.5/standard_library/black_box_fns.md deleted file mode 100644 index c758846b688..00000000000 --- a/docs/versioned_docs/version-v0.10.5/standard_library/black_box_fns.md +++ /dev/null @@ -1,45 +0,0 @@ ---- -title: Black Box Functions -description: Black box functions are functions in Noir that rely on backends implementing support for specialized constraints. -keywords: [noir, black box functions] ---- - -Black box functions are functions in Noir that rely on backends implementing support for specialized constraints. This makes certain zk-snark unfriendly computations cheaper than if they were implemented in Noir. - -:::warning - -It is likely that not all backends will support a particular black box function. - -::: - -Because it is not guaranteed that all backends will support black box functions, it is possible that certain Noir programs won't compile against a particular backend if they use an unsupported black box function. It is possible to fallback to less efficient implementations written in Noir/ACIR in some cases. - -Black box functions are specified with the `#[foreign(black_box_fn)]` attribute. For example, the SHA256 function in the Noir [source code](https://github.com/noir-lang/noir/blob/v0.5.1/noir_stdlib/src/hash.nr) looks like: - -```rust -#[foreign(sha256)] -fn sha256(_input : [u8; N]) -> [u8; 32] {} -``` - -## Function list - -Here is a list of the current black box functions that are supported by UltraPlonk: - -- AES -- [SHA256](./cryptographic_primitives/hashes#sha256) -- [Schnorr signature verification](./cryptographic_primitives/schnorr) -- [Blake2s](./cryptographic_primitives/hashes#blake2s) -- [Pedersen](./cryptographic_primitives/hashes#pedersen) -- [HashToField128Security](./cryptographic_primitives/hashes#hash_to_field) -- [ECDSA signature verification](./cryptographic_primitives/ecdsa_sig_verification) -- [Fixed base scalar multiplication](./cryptographic_primitives/scalar) -- [Compute merkle root](./merkle_trees#compute_merkle_root) -- AND -- XOR -- RANGE -- [Keccak256](./cryptographic_primitives/hashes#keccak256) -- [Recursive proof verification](./recursion) - -Most black box functions are included as part of the Noir standard library, however `AND`, `XOR` and `RANGE` are used as part of the Noir language syntax. For instance, using the bitwise operator `&` will invoke the `AND` black box function. To ensure compatibility across backends, the ACVM has fallback implementations of `AND`, `XOR` and `RANGE` defined in its standard library which it can seamlessly fallback to if the backend doesn't support them. - -You can view the black box functions defined in the ACVM code [here](https://github.com/noir-lang/acvm/blob/acir-v0.12.0/acir/src/circuit/black_box_functions.rs). 
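As a small illustration (our own example, not from the original page), the snippet below uses only ordinary language syntax, yet the `u8` parameters introduce `RANGE` constraints and the `&` operator is lowered to the `AND` black box function:

```rust
fn main(x: u8, y: u8) {
    // `&` compiles down to the AND black box function.
    let masked = x & y;
    // Masking with another u8 can never increase the value.
    assert(masked <= x);
}
```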
diff --git a/docs/versioned_docs/version-v0.10.5/standard_library/cryptographic_primitives/00_hashes.mdx b/docs/versioned_docs/version-v0.10.5/standard_library/cryptographic_primitives/00_hashes.mdx deleted file mode 100644 index 1d9b1c7f09c..00000000000 --- a/docs/versioned_docs/version-v0.10.5/standard_library/cryptographic_primitives/00_hashes.mdx +++ /dev/null @@ -1,146 +0,0 @@ ---- -title: Hash methods -description: - Learn about the cryptographic primitives ready to use for any Noir project, including sha256, - blake2s, pedersen, mimc_bn254 and mimc -keywords: - [cryptographic primitives, Noir project, sha256, blake2s, pedersen, mimc_bn254, mimc, hash] ---- - -import BlackBoxInfo from './common/_blackbox.mdx'; - -## sha256 - -Given an array of bytes, returns the resulting sha256 hash. - -```rust -fn sha256(_input : [u8]) -> [u8; 32] -``` - -example: - -```rust -fn main() { - let x = [163, 117, 178, 149]; // some random bytes - let hash = std::hash::sha256(x); -} -``` - - - -## blake2s - -Given an array of bytes, returns an array with the Blake2 hash - -```rust -fn blake2s(_input : [u8]) -> [u8; 32] -``` - -example: - -```rust -fn main() { - let x = [163, 117, 178, 149]; // some random bytes - let hash = std::hash::blake2s(x); -} -``` - - - -## pedersen - -Given an array of Fields, returns the Pedersen hash. - -```rust -fn pedersen(_input : [Field]) -> [Field; 2] -``` - -example: - -```rust -fn main() { - let x = [163, 117, 178, 149]; // some random bytes - let hash = std::hash::pedersen(x); -} -``` - - - -## keccak256 - -Given an array of bytes (`u8`), returns the resulting keccak hash as an array of 32 bytes -(`[u8; 32]`). Specify a message_size to hash only the first `message_size` bytes -of the input. - -```rust -fn keccak256(_input : [u8; N], _message_size: u32) -> [u8; 32] -``` - -example: - -```rust -fn main() { - let x = [163, 117, 178, 149]; // some random bytes - let message_size = 4; - let hash = std::hash::keccak256(x, message_size); -} -``` - - - -## poseidon - -Given an array of Fields, returns a new Field with the Poseidon Hash. Mind that you need to specify -how many inputs are there to your Poseidon function. - -```rust -// example for hash_1, hash_2 accepts an array of length 2, etc -fn hash_1(input: [Field; 1]) -> Field -``` - -example: - -```rust -fn main() -{ - let hash1 = std::hash::poseidon::bn254::hash_2([1, 2]); - assert(hash1 == 0x115cc0f5e7d690413df64c6b9662e9cf2a3617f2743245519e19607a4417189a); -} -``` - -## mimc_bn254 and mimc - -`mimc_bn254` is `mimc`, but with hardcoded parameters for the BN254 curve. You can use it by -providing an array of Fields, and it returns a Field with the hash. You can use the `mimc` method if -you're willing to input your own constants: - -```rust -fn mimc(x: Field, k: Field, constants: [Field; N], exp : Field) -> Field -``` - -otherwise, use the `mimc_bn254` method: - -```rust -fn mimc_bn254(array: [Field; N]) -> Field -``` - -example: - -```rust - -fn main() { - let x = [163, 117, 178, 149]; // some random bytes - let hash = std::hash::mimc_bn254(x); -} -``` - -## hash_to_field - -```rust -fn hash_to_field(_input : [Field; N]) -> Field {} -``` - -Calculates the `blake2s` hash of the inputs and returns the hash modulo the field modulus to return -a value which can be represented as a `Field`. 
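An illustrative call, assuming it is invoked as `std::hash::hash_to_field` like the other methods on this page (the input values are arbitrary):

```rust
fn main() {
    let inputs = [1, 2, 3];
    let hash = std::hash::hash_to_field(inputs);
}
```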
- - diff --git a/docs/versioned_docs/version-v0.10.5/standard_library/cryptographic_primitives/01_scalar.mdx b/docs/versioned_docs/version-v0.10.5/standard_library/cryptographic_primitives/01_scalar.mdx deleted file mode 100644 index 62265cddb1e..00000000000 --- a/docs/versioned_docs/version-v0.10.5/standard_library/cryptographic_primitives/01_scalar.mdx +++ /dev/null @@ -1,33 +0,0 @@ ---- -title: Scalar multiplication -description: - See how you can perform scalar multiplications over a fixed base in Noir -keywords: - [ - cryptographic primitives, - Noir project, - scalar multiplication, - ] ---- - -import BlackBoxInfo from './common/\_blackbox.mdx'; - -## scalar_mul::fixed_base - -Performs scalar multiplication over the embedded curve whose coordinates are defined by the -configured noir field. For the BN254 scalar field, this is BabyJubJub or Grumpkin. - -```rust -fn fixed_base(_input : Field) -> [Field; 2] -``` - -example - -```rust -fn main(x : Field) { - let scal = std::scalar_mul::fixed_base(x); - std::println(scal); -} -``` - - diff --git a/docs/versioned_docs/version-v0.10.5/standard_library/cryptographic_primitives/02_schnorr.mdx b/docs/versioned_docs/version-v0.10.5/standard_library/cryptographic_primitives/02_schnorr.mdx deleted file mode 100644 index 0e219c0e5ff..00000000000 --- a/docs/versioned_docs/version-v0.10.5/standard_library/cryptographic_primitives/02_schnorr.mdx +++ /dev/null @@ -1,37 +0,0 @@ ---- -title: Schnorr Signatures -description: Learn how you can verify Schnorr signatures using Noir -keywords: [cryptographic primitives, Noir project, schnorr, signatures] ---- - -import BlackBoxInfo from './common/_blackbox.mdx'; - -## schnorr::verify_signature - -Verifier for Schnorr signatures over the embedded curve (for BN254 it is Grumpkin). - -```rust -fn verify_signature(_public_key_x: Field, _public_key_y: Field, _signature: [u8; 64], _message: [u8]) -> bool -``` - -where `_signature` can be generated like so using the npm package -[@noir-lang/barretenberg](https://www.npmjs.com/package/@noir-lang/barretenberg) - -```js -const { BarretenbergWasm } = require('@noir-lang/barretenberg/dest/wasm'); -const { Schnorr } = require('@noir-lang/barretenberg/dest/crypto/schnorr'); - -... - -const barretenberg = await BarretenbergWasm.new(); -const schnorr = new Schnorr(barretenberg); -const pubKey = schnorr.computePublicKey(privateKey); -const message = ... -const signature = Array.from( - schnorr.constructSignature(hash, privateKey).toBuffer() -); - -... -``` - - diff --git a/docs/versioned_docs/version-v0.10.5/standard_library/cryptographic_primitives/03_ecdsa_sig_verification.mdx b/docs/versioned_docs/version-v0.10.5/standard_library/cryptographic_primitives/03_ecdsa_sig_verification.mdx deleted file mode 100644 index 3934a0338d0..00000000000 --- a/docs/versioned_docs/version-v0.10.5/standard_library/cryptographic_primitives/03_ecdsa_sig_verification.mdx +++ /dev/null @@ -1,45 +0,0 @@ ---- -title: ECDSA Signature Verification -description: Learn about the cryptographic primitives regarding ECDSA over the secp256k1 and secp256r1 curves -keywords: [cryptographic primitives, Noir project, ecdsa, secp256k1, secp256r1, signatures] ---- - -import BlackBoxInfo from './common/_blackbox.mdx'; - -Noir supports ECDSA signatures verification over the secp256k1 and secp256r1 curves. 
- -## ecdsa_secp256k1::verify_signature - -Verifier for ECDSA Secp256k1 signatures - -```rust -fn verify_signature(_public_key_x : [u8; 32], _public_key_y : [u8; 32], _signature: [u8; 64], _message: [u8]) -> bool -``` - -example: - -```rust -fn main(hashed_message : [u8;32], pub_key_x : [u8;32], pub_key_y : [u8;32], signature : [u8;64]) { - let valid_signature = std::ecdsa_secp256k1::verify_signature(pub_key_x, pub_key_y, signature, hashed_message); - assert(valid_signature); -} -``` - -## ecdsa_secp256r1::verify_signature - -Verifier for ECDSA Secp256r1 signatures - -```rust -fn verify_signature(_public_key_x : [u8; 32], _public_key_y : [u8; 32], _signature: [u8; 64], _message: [u8]) -> bool -``` - -example: - -```rust -fn main(hashed_message : [u8;32], pub_key_x : [u8;32], pub_key_y : [u8;32], signature : [u8;64]) { - let valid_signature = std::ecdsa_secp256r1::verify_signature(pub_key_x, pub_key_y, signature, hashed_message); - assert(valid_signature); -} -``` - - diff --git a/docs/versioned_docs/version-v0.10.5/standard_library/cryptographic_primitives/05_eddsa.mdx b/docs/versioned_docs/version-v0.10.5/standard_library/cryptographic_primitives/05_eddsa.mdx deleted file mode 100644 index 8f060ed3316..00000000000 --- a/docs/versioned_docs/version-v0.10.5/standard_library/cryptographic_primitives/05_eddsa.mdx +++ /dev/null @@ -1,17 +0,0 @@ ---- -title: EdDSA Verification -description: Learn about the cryptographic primitives regarding EdDSA -keywords: [cryptographic primitives, Noir project, eddsa, signatures] ---- - -import BlackBoxInfo from './common/_blackbox.mdx'; - -## eddsa::eddsa_poseidon_verify - -Verifier for EdDSA signatures - -```rust -fn eddsa_poseidon_verify(public_key_x : Field, public_key_y : Field, signature_s: Field, signature_r8_x: Field, signature_r8_y: Field, message: Field) -> bool -``` - - diff --git a/docs/versioned_docs/version-v0.10.5/standard_library/cryptographic_primitives/common/_blackbox.mdx b/docs/versioned_docs/version-v0.10.5/standard_library/cryptographic_primitives/common/_blackbox.mdx deleted file mode 100644 index 9fe9b48fbff..00000000000 --- a/docs/versioned_docs/version-v0.10.5/standard_library/cryptographic_primitives/common/_blackbox.mdx +++ /dev/null @@ -1,5 +0,0 @@ -:::info - -This is a black box function. Read [this section](../black_box_fns) to learn more about black box functions in Noir. - -::: \ No newline at end of file diff --git a/docs/versioned_docs/version-v0.10.5/standard_library/logging.md b/docs/versioned_docs/version-v0.10.5/standard_library/logging.md deleted file mode 100644 index 7e2fd9b9aff..00000000000 --- a/docs/versioned_docs/version-v0.10.5/standard_library/logging.md +++ /dev/null @@ -1,62 +0,0 @@ ---- -title: Logging -description: - Learn how to use the println statement for debugging in Noir with this tutorial. Understand the - basics of logging in Noir and how to implement it in your code. -keywords: - [ - noir logging, - println statement, - debugging in noir, - noir std library, - logging tutorial, - basic logging in noir, - noir logging implementation, - noir debugging techniques, - rust, - ] ---- - -The standard library provides a familiar `println` statement you can use. Despite being a limited -implementation of rust's `println!` macro, this construct can be useful for debugging. - -You can print the output of println statements in your Noir code by using the `nargo execute` command or the `--show-output` flag when using `nargo test` (provided there are println statements in your tests). 
- -It is recommended to use `nargo execute` if you want to debug failing constrains with `println` statements. This is due to every input in a test being a constant rather than a witness, so we issue an error during compilation while we only print during execution (which comes after compilation). `println` will not work for failed constraints caught at compile time. - -The `println` statement is unconstrained, so it works for outputting integers, fields, strings, and even structs or expressions. For example: - -```rust -use dep::std; - -struct Person { - age : Field, - height : Field, -} - -fn main(age : Field, height : Field) { - let person = Person { age : age, height : height }; - std::println(person); - std::println(age + height); - std::println("Hello world!"); -} - -``` - -You can print multiple different types in the same statement and string as well as a new "fmtstr" type. A `fmtstr` can be specified in the same way as a normal string it just should be prepended with an "f" character: - -```rust - let fmt_str = f"i: {i}, j: {j}"; - std::println(fmt_str); - - let s = myStruct { y: x, x: y }; - std::println(s); - - std::println(f"i: {i}, s: {s}"); - - std::println(x); - std::println([x, y]); - - let foo = fooStruct { my_struct: s, foo: 15 }; - std::println(f"s: {s}, foo: {foo}"); -``` diff --git a/docs/versioned_docs/version-v0.10.5/standard_library/merkle_trees.md b/docs/versioned_docs/version-v0.10.5/standard_library/merkle_trees.md deleted file mode 100644 index 57d8c4a9e4f..00000000000 --- a/docs/versioned_docs/version-v0.10.5/standard_library/merkle_trees.md +++ /dev/null @@ -1,58 +0,0 @@ ---- -title: Merkle Trees -description: Learn about Merkle Trees in Noir with this tutorial. Explore the basics of computing a merkle root using a proof, with examples. -keywords: - [ - Merkle trees in Noir, - Noir programming language, - check membership, - computing root from leaf, - Noir Merkle tree implementation, - Merkle tree tutorial, - Merkle tree code examples, - Noir libraries, - pedersen hash., - ] ---- - -## compute_merkle_root - -Returns the root of the tree from the provided leaf and its hash path, using a [Pedersen hash](cryptographic_primitives/00_hashes.mdx#pedersen). - -```rust -fn compute_merkle_root(leaf : Field, index : Field, hash_path: [Field]) -> Field -``` - -example: - -```rust -/** - // these values are for this example only - index = "0" - priv_key = "0x000000000000000000000000000000000000000000000000000000616c696365" - secret = "0x1929ea3ab8d9106a899386883d9428f8256cfedb3c4f6b66bf4aa4d28a79988f" - note_hash_path = [ - "0x1e61bdae0f027b1b2159e1f9d3f8d00fa668a952dddd822fda80dc745d6f65cc", - "0x0e4223f3925f98934393c74975142bd73079ab0621f4ee133cee050a3c194f1a", - "0x2fd7bb412155bf8693a3bd2a3e7581a679c95c68a052f835dddca85fa1569a40" - ] - */ -fn main(index: Field, priv_key: Field, secret: Field, note_hash_path: [Field; 3]) { - - let pubkey = std::scalar_mul::fixed_base(priv_key); - let pubkey_x = pubkey[0]; - let pubkey_y = pubkey[1]; - let note_commitment = std::hash::pedersen([pubkey_x, pubkey_y, secret]); - - let root = std::merkle::compute_merkle_root(note_commitment[0], index, note_hash_path); - std::println(root); -} -``` - -To check merkle tree membership: - -1. Include a merkle root as a program input. -2. Compute the merkle root of a given leaf, index and hash path. -3. Assert the merkle roots are equal. - -For more info about merkle trees, see the Wikipedia [page](https://en.wikipedia.org/wiki/Merkle_tree). 
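As an illustrative sketch of those three steps (our own example, assuming the Merkle root is supplied as a public input named `root`):

```rust
fn main(root: pub Field, leaf: Field, index: Field, hash_path: [Field; 3]) {
    // Step 2: compute the root implied by the private leaf, index and hash path.
    let computed_root = std::merkle::compute_merkle_root(leaf, index, hash_path);
    // Step 3: assert it matches the publicly known root (step 1: `root` is a program input).
    assert(computed_root == root);
}
```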
diff --git a/docs/versioned_docs/version-v0.10.5/typescript.md b/docs/versioned_docs/version-v0.10.5/typescript.md deleted file mode 100644 index 258a2160e92..00000000000 --- a/docs/versioned_docs/version-v0.10.5/typescript.md +++ /dev/null @@ -1,237 +0,0 @@ ---- -title: Working with TypeScript -description: - Learn how to interact with Noir programs using TypeScript. Follow this tutorial to compile your - program, specify inputs, initialize a prover & verifier, and prove and verify your program. -keywords: [TypeScript, Noir, tutorial, compile, inputs, prover, verifier, proof] ---- - -Interactions with Noir programs can also be performed in TypeScript, which can come in handy when -writing tests or when working in TypeScript-based projects like [Hardhat](https://hardhat.org/). - -You can check the complete code for this tutorial here: [browser with next.js](https://github.com/signorecello/noir-min-browser-example) and [node.js](https://github.com/signorecello/noir-min-nodejs-example). If you want just a browser boilerplate to start with, check out the [noir-starter](https://github.com/noir-lang/noir-starter) for an example implementation. - -:::note - -You may find unexpected errors working with some frameworks such as `vite`. This is due to the -nature of `wasm` files and the way Noir uses web workers. As we figure it out, we suggest using -[Create React App](https://create-react-app.dev/), or [Next.js](https://nextjs.org/) for a quick -start. - -::: - -## Setup - -Make sure you are using Noir version >= 0.10.1. - -You can check your current version by running `nargo --version`. - -See the [Installation page](./getting_started/nargo_installation) for more info. - -We're assuming you're using ES6 and ESM for both browser (for example with React), or nodejs. Install [Node.js](https://nodejs.org/en). Init a new project with `npm init` and add `"type": "module"` to your `package.json`, to let `node` know we're using the new ESM sytem: - -```json -{ - "type": "module" - // the rest of your package.json -} -``` - -Install Noir dependencies in your project by running: - -```bash -npm i @aztec/bb.js@0.3.6 https://git@github.com/noir-lang/acvm-simulator-wasm.git#b9d9ca9dfc5140839f23998d9466307215607c42 fflate ethers@5.7.2 -``` - -This will install the `acvm-simulator` that will generate our witness, and the proving backend barretenberg `bb.js`. - -We're also installing `ethers` because we're too lazy to write a function that pads public inputs with 32bytes, and `fflate` to help us decompress our circuit bytecode. - -Since we're with typescript and using `nodejs` types, we also recommend to install the `@types/node` package, otherwise your IDE will scream at you. - -```bash -npm i --save-dev @types/node -``` - -:::note - -While Noir is in rapid development, some packages could interfere with others. For that reason, you -should use these specified versions. Let us know if for some reason you need to use other ones. - -::: - -As for the circuit, run `nargo init` to create a new Noir project. - -We will use a Standard Noir Example and place it in the `src` folder. This program simply multiplies input `x` with input `y` and returns the result `z`. The verifier doesn't know the value of `x`: we're proving that we know it without making it public. 
- -```rust -// src/main.nr -fn main(x: u32, y: pub u32) -> pub u32 { - let z = x * y; - z -} -``` - -One valid scenario for proving could be `x = 3`, `y = 4` and `return = 12` - -## Compiling - -In order to start proving, we need to compile our circuit into the intermediate representation used by our backend. As of today, you have to do that with `nargo`. Just hop to your circuits folder and run `nargo compile`. - -:::info - -At this time, you need to use a nightly version of nargo. Using [noirup](./getting_started/00_nargo_installation.md#option-1-noirup) you can do this simply by running `noirup -n`. - -::: - -You should have a `json` file in `target/` with your circuit's bytecode. The json file is name based on the project name specified in Nargo.toml, so for a project named "test", it will be at `target/test.json`. You can then import that file normally. - -```ts -import circuit from '../target/test.json' assert { type: 'json' }; -``` - -## Decompressing the circuit - -The compiled circuit comes compressed. We need to decompress it, that's where `fflate` comes in. - -```ts -import { decompressSync } from 'fflate'; - -const acirBuffer = Buffer.from(circuit.bytecode, 'base64'); -const acirBufferUncompressed = decompressSync(acirBuffer); -``` - -From here, it's highly recommended you store `acirBuffer` and `acirBufferUncompressed` close by, as they will be used for witness generation and proving. - -## Initializing ACVM and BB.JS - -:::note - -This step will eventually be abstracted away as Noir tooling matures. For now, you should be fine just literally copy-pasting most of this into your own code. - -::: - -Before proving, `bb.js` needs to be initialized. We need to import some functions and use them - -```ts -import { Crs, newBarretenbergApiAsync, RawBuffer } from '@aztec/bb.js/dest/node/index.js'; - -const api = await newBarretenbergApiAsync(4); - -const [exact, circuitSize, subgroup] = await api.acirGetCircuitSizes(acirBufferUncompressed); -const subgroupSize = Math.pow(2, Math.ceil(Math.log2(circuitSize))); -const crs = await Crs.new(subgroupSize + 1); -await api.commonInitSlabAllocator(subgroupSize); -await api.srsInitSrs(new RawBuffer(crs.getG1Data()), crs.numPoints, new RawBuffer(crs.getG2Data())); - -const acirComposer = await api.acirNewAcirComposer(subgroupSize); -``` - -We should take two very useful objects from here: `api` and `acirComposer`. Make sure to keep these close by! - -:::info - -On the browser, you also need to init the ACVM. You can do that by importing it and calling it like: - -```ts -import initACVM, { executeCircuit, compressWitness } from '@noir-lang/acvm_js'; - -await initACVM(); -// the rest of your code -``` - -::: - -## Generating witnesses - -Witness generation is what allows us to prove with arbitrary inputs (like user inputs on a form, game, etc). In this example, our input is a simple object with our circuit inputs `x`, `y`, and return `z` (fun fact: the return value in Noir is actually a public input!). We're wrapping it in a function, so it can be conveniently called later on. 
- -```ts -import { ethers } from 'ethers'; // I'm lazy so I'm using ethers to pad my input -import { executeCircuit, compressWitness } from '@noir-lang/acvm_js'; - -async function generateWitness(input: any, acirBuffer: Buffer): Promise { - const initialWitness = new Map(); - initialWitness.set(1, ethers.utils.hexZeroPad(`0x${input.x.toString(16)}`, 32)); - initialWitness.set(2, ethers.utils.hexZeroPad(`0x${input.y.toString(16)}`, 32)); - - const witnessMap = await executeCircuit(acirBuffer, initialWitness, () => { - throw Error('unexpected oracle'); - }); - - const witnessBuff = compressWitness(witnessMap); - return witnessBuff; -} -``` - -## Proving - -Finally, we're ready to prove with our backend. Just like with the witness generation, could be useful to wrap it in its own function: - -```ts -async function generateProof(witness: Uint8Array) { - const proof = await api.acirCreateProof( - acirComposer, - acirBufferUncompressed, - decompressSync(witness), - false, - ); - return proof; -} -``` - -## Verifying - -Our backend should also be ready to verify our proof: - -```ts -async function verifyProof(proof: Uint8Array) { - await api.acirInitProvingKey(acirComposer, acirBufferUncompressed); - const verified = await api.acirVerifyProof(acirComposer, proof, false); - return verified; -} -``` - -## Now for the fun part - -Let's call our functions, and destroy our API! - -```ts -const input = { x: 3, y: 4 }; -const witness = await generateWitness(input, acirBuffer); -console.log('Witness generated!'); -const proof = await generateProof(witness); -console.log('Proof generated!'); -await verifyProof(proof); -console.log('Proof verified!'); -api.destroy(); -``` - -You can use [this](https://gist.github.com/critesjosh/6f3ba19fdc9298b24e90ba4f736247dc) tsconfig.json. You can see the script [here](https://gist.github.com/critesjosh/4aa36e87a0cc3f09feaf1febb4d11348). - -## Verifying with Smart Contract - -Alternatively, a verifier smart contract can be generated and used for verifying Noir proofs in -TypeScript as well. - -This could be useful if the Noir program is designed to be decentrally verified and/or make use of -decentralized states and logics that is handled at the smart contract level. - -This assumes you've already ran `nargo codegen-verifier`, got your smart contract, and deployed it with Hardhat, Foundry, or your tool of choice. You can then verify a Noir proof by simply calling it. - -Currently, `bb.js` appends the public inputs to the proof. However, these inputs need to be fed separately to the verifier contract. 
A simple solution is to just slice them from the resulting proof, like this: - -```ts -import { ethers } from 'ethers'; // example using ethers v5 -import artifacts from '../artifacts/circuits/contract/plonk_vk.sol/UltraVerifier.json'; // I compiled using Hardhat, so I'm getting my abi from here - -const verifierAddress = '0x123455'; // your verifier address -const provider = new ethers.providers.Web3Provider(window.ethereum); -const signer = this.provider.getSigner(); - -const contract = new ethers.Contract(verifierAddress, artifacts.abi, signer); - -const publicInputs = proof.slice(0, 32); -const slicedProof = proof.slice(32); -await contract.verify(slicedProof, [publicInputs]); -``` diff --git a/docs/versioned_docs/version-v0.17.0/getting_started/00_nargo_installation.md b/docs/versioned_docs/version-v0.17.0/getting_started/00_nargo_installation.md index 4ff5fc46334..0de5597c213 100644 --- a/docs/versioned_docs/version-v0.17.0/getting_started/00_nargo_installation.md +++ b/docs/versioned_docs/version-v0.17.0/getting_started/00_nargo_installation.md @@ -14,7 +14,7 @@ Alternatively, the interactions can also be performed in [NoirJS](../noir_js/noi ### UltraPlonk -Nargo versions <0.5.0 of `aztec_backend` and `aztec_wasm_backend` are based on the TurboPlonk +Nargo versions \<0.5.0 of `aztec_backend` and `aztec_wasm_backend` are based on the TurboPlonk version of Aztec Backend, which lacks efficient implementations of useful primitives (e.g. Keccak256 in 18k constraints, ECDSA verification in 36k constraints) that the UltraPlonk version offers. ## Installation @@ -140,9 +140,9 @@ Commands: ### Option 3: Compile from Source -Due to the large number of native dependencies, Noir projects uses [Nix](https://nixos.org/) and [direnv](https://direnv.net/) to streamline the development experience. It helps mitigating ssues commonly associated with dependency management, such as conflicts between required package versions for different projects (often referred to as "dependency hell"). +Due to the large number of native dependencies, Noir projects uses [Nix](https://nixos.org/) and [direnv](https://direnv.net/) to streamline the development experience. It helps mitigating issues commonly associated with dependency management, such as conflicts between required package versions for different projects (often referred to as "dependency hell"). -Combined with direnv, which automatically sets or unsets environment variables based on the directory, it further simplifies the development process by seamlessly integrating with the developer's shell, facilitating an efficient and reliable workflow for managing and deploying Noir projects with multiple dependencies. +Combined with direnv, which automatically sets or clears environment variables based on the directory, it further simplifies the development process by seamlessly integrating with the developer's shell, facilitating an efficient and reliable workflow for managing and deploying Noir projects with multiple dependencies. 
#### Setting up your environment diff --git a/docs/versioned_docs/version-v0.17.0/getting_started/02_breakdown.md b/docs/versioned_docs/version-v0.17.0/getting_started/02_breakdown.md index bc0e742fb4e..e7b1f33b339 100644 --- a/docs/versioned_docs/version-v0.17.0/getting_started/02_breakdown.md +++ b/docs/versioned_docs/version-v0.17.0/getting_started/02_breakdown.md @@ -39,7 +39,7 @@ Example Nargo.toml: ```toml [package] -name = "noirstarter" +name = "noir_starter" type = "bin" authors = ["Alice"] compiler_version = "0.9.0" diff --git a/docs/versioned_docs/version-v0.17.0/index.md b/docs/versioned_docs/version-v0.17.0/index.md index 9ebe1d54944..2d5e6f4454f 100644 --- a/docs/versioned_docs/version-v0.17.0/index.md +++ b/docs/versioned_docs/version-v0.17.0/index.md @@ -25,7 +25,7 @@ slug: / Noir is a Domain Specific Language for SNARK proving systems. It has been designed to use any ACIR compatible proving system. -It's design choices are influenced heavily by Rust and focuses on a simple, familiar syntax. +Its design choices are influenced heavily by Rust and focuses on a simple, familiar syntax. ## Who is Noir for? @@ -34,7 +34,7 @@ Noir can be used for a variety of purposes. ### Solidity Developers Noir currently includes a command to create a Solidity contract which verifies your Noir program. This will -be modularised in the future; however, as of the alpha, you can use the [`nargo codegen-verifier`](./nargo/commands#nargo-codegen-verifier) command to create +be modularized in the future; however, as of the alpha, you can use the [`nargo codegen-verifier`](./nargo/commands#nargo-codegen-verifier) command to create a verifier contract. ### Protocol Developers @@ -55,7 +55,7 @@ must be implemented for it. Noir is simple and flexible in its design, as it does not compile immediately to a fixed NP-complete language. Instead, Noir compiles to an intermediate language (ACIR), which itself can be compiled -to an arithmetic circuit (if choosing to target Aztec's barretenberg backend) or a rank-1 constraint system (if choosing to target an R1CS backend like Arkwork's Marlin backend, or others). +to an arithmetic circuit (if choosing to target Aztec's barretenberg backend) or a rank-1 constraint system (if choosing to target an R1CS backend like Arkworks' Marlin backend, or others). This in itself brings up a few challenges within the design process, but allows one to decouple the programming language completely from the backend. This is similar in theory to LLVM. @@ -67,7 +67,7 @@ Compiler: - For expressions - Arrays - Bit Operations -- Binary operations (<, <=, >, >=, +, -, \*, /, %) [See documentation for an extensive list] +- Binary operations (\<, \<=, >, >=, +, -, \*, /, %) [See documentation for an extensive list] - Unsigned integers - If statements - Structures and Tuples diff --git a/docs/versioned_docs/version-v0.17.0/language_concepts/01_functions.md b/docs/versioned_docs/version-v0.17.0/language_concepts/01_functions.md index 47cdea0cf04..5eb22170e54 100644 --- a/docs/versioned_docs/version-v0.17.0/language_concepts/01_functions.md +++ b/docs/versioned_docs/version-v0.17.0/language_concepts/01_functions.md @@ -30,7 +30,7 @@ All parameters in a function must have a type and all types are known at compile is pre-pended with a colon and the parameter type. Multiple parameters are separated using a comma. ```rust -fn foo(x : Field, y : pub Field){} +fn foo(x : Field, y : Field){} ``` The return type of a function can be stated by using the `->` arrow notation. 
The function below @@ -38,7 +38,7 @@ states that the foo function must return a `Field`. If the function returns no v is omitted. ```rust -fn foo(x : Field, y : pub Field) -> Field { +fn foo(x : Field, y : Field) -> Field { x + y } ``` diff --git a/docs/versioned_docs/version-v0.17.0/language_concepts/03_ops.md b/docs/versioned_docs/version-v0.17.0/language_concepts/03_ops.md index da02b126059..0e35ef5e584 100644 --- a/docs/versioned_docs/version-v0.17.0/language_concepts/03_ops.md +++ b/docs/versioned_docs/version-v0.17.0/language_concepts/03_ops.md @@ -29,11 +29,11 @@ keywords: | ^ | XOR two private input types together | Types must be integer | | & | AND two private input types together | Types must be integer | | \| | OR two private input types together | Types must be integer | -| << | Left shift an integer by another integer amount | Types must be integer | +| \<\< | Left shift an integer by another integer amount | Types must be integer | | >> | Right shift an integer by another integer amount | Types must be integer | | ! | Bitwise not of a value | Type must be integer or boolean | -| < | returns a bool if one value is less than the other | Upper bound must have a known bit size | -| <= | returns a bool if one value is less than or equal to the other | Upper bound must have a known bit size | +| \< | returns a bool if one value is less than the other | Upper bound must have a known bit size | +| \<= | returns a bool if one value is less than or equal to the other | Upper bound must have a known bit size | | > | returns a bool if one value is more than the other | Upper bound must have a known bit size | | >= | returns a bool if one value is more than or equal to the other | Upper bound must have a known bit size | | == | returns a bool if one value is equal to the other | Both types must not be constants | @@ -62,7 +62,7 @@ fn main(x : Field) { Noir has no support for the logical operators `||` and `&&`. This is because encoding the short-circuiting that these operators require can be inefficient for Noir's backend. Instead you can -use the bitwise operators `|` and `&` which operate indentically for booleans, just without the +use the bitwise operators `|` and `&` which operate identically for booleans, just without the short-circuiting. ```rust diff --git a/docs/versioned_docs/version-v0.17.0/language_concepts/05_unconstrained.md b/docs/versioned_docs/version-v0.17.0/language_concepts/05_unconstrained.md index 6b621eda3eb..097d6ee9894 100644 --- a/docs/versioned_docs/version-v0.17.0/language_concepts/05_unconstrained.md +++ b/docs/versioned_docs/version-v0.17.0/language_concepts/05_unconstrained.md @@ -7,7 +7,7 @@ keywords: [Noir programming language, unconstrained, open] -Unconstrained functions are functions which do not constrain any of the included computation and allow for non-determinisitic computation. +Unconstrained functions are functions which do not constrain any of the included computation and allow for non-deterministic computation. ## Why? 
diff --git a/docs/versioned_docs/version-v0.17.0/language_concepts/07_mutability.md b/docs/versioned_docs/version-v0.17.0/language_concepts/07_mutability.md index 4641521b1d9..ed3fed820ec 100644 --- a/docs/versioned_docs/version-v0.17.0/language_concepts/07_mutability.md +++ b/docs/versioned_docs/version-v0.17.0/language_concepts/07_mutability.md @@ -37,7 +37,7 @@ Note that mutability in noir is local and everything is passed by value, so if a mutates its parameters then the parent function will keep the old value of the parameters. ```rust -fn main() -> Field { +fn main() -> pub Field { let x = 3; helper(x); x // x is still 3 @@ -69,11 +69,11 @@ fn main(x : Field, y : [Field; N]) { assert(res == y[0]); - let res2 = x * mysubmodule::N; + let res2 = x * my_submodule::N; assert(res != res2); } -mod mysubmodule { +mod my_submodule { use dep::std; global N: Field = 10; diff --git a/docs/versioned_docs/version-v0.17.0/language_concepts/data_types.md b/docs/versioned_docs/version-v0.17.0/language_concepts/data_types.md index d546cc463a8..74f573f7d3f 100644 --- a/docs/versioned_docs/version-v0.17.0/language_concepts/data_types.md +++ b/docs/versioned_docs/version-v0.17.0/language_concepts/data_types.md @@ -93,4 +93,4 @@ fn main() { ### BigInt -You can acheive BigInt functionality using the [Noir BigInt](https://github.com/shuklaayush/noir-bigint) library. +You can achieve BigInt functionality using the [Noir BigInt](https://github.com/shuklaayush/noir-bigint) library. diff --git a/docs/versioned_docs/version-v0.17.0/language_concepts/data_types/00_fields.md b/docs/versioned_docs/version-v0.17.0/language_concepts/data_types/00_fields.md index 658a0441ffb..78d3d2af166 100644 --- a/docs/versioned_docs/version-v0.17.0/language_concepts/data_types/00_fields.md +++ b/docs/versioned_docs/version-v0.17.0/language_concepts/data_types/00_fields.md @@ -158,7 +158,7 @@ fn main() { ### sgn0 -Parity of (prime) Field element, i.e. sgn0(x mod p) = 0 if x ∈ {0, ..., p-1} is even, otherwise sgn0(x mod p) = 1. +Parity of (prime) Field element, i.e. sgn0(x mod p) = 0 if x ∈ \{0, ..., p-1\} is even, otherwise sgn0(x mod p) = 1. ```rust fn sgn0(self) -> u1 diff --git a/docs/versioned_docs/version-v0.17.0/migration_notes.md b/docs/versioned_docs/version-v0.17.0/migration_notes.md index 48a8abcf22e..1a81af04b3a 100644 --- a/docs/versioned_docs/version-v0.17.0/migration_notes.md +++ b/docs/versioned_docs/version-v0.17.0/migration_notes.md @@ -42,7 +42,7 @@ If you are using the default barretenberg backend, simply run: nargo prove ``` -with you Noir program. +with your Noir program. This will trigger the download and installation of the latest version of barretenberg compatible with your Nargo in use. @@ -70,7 +70,7 @@ nargo backend install acvm-backend-barretenberg https://github.com/noir-lang/bar This downloads and installs a specific bb.js based version of barretenberg binary from GitHub. -The gzipped filed is running this bash script: , where we need to gzip it as the Nargo currently expect the backend to be zipped up. +The gzipped file is running [this bash script](https://github.com/noir-lang/barretenberg-js-binary/blob/master/run-bb-js.sh), where we need to gzip it as the Nargo currently expect the backend to be zipped up. 
Then run: diff --git a/docs/versioned_docs/version-v0.17.0/modules_packages_crates/crates_and_packages.md b/docs/versioned_docs/version-v0.17.0/modules_packages_crates/crates_and_packages.md index fb83a33d94e..744de72bb2c 100644 --- a/docs/versioned_docs/version-v0.17.0/modules_packages_crates/crates_and_packages.md +++ b/docs/versioned_docs/version-v0.17.0/modules_packages_crates/crates_and_packages.md @@ -23,7 +23,7 @@ _Library crates_ don't have a `main` function and they don't compile down to ACI #### Contracts -Contract crates are similar to binary crates in that they compile to ACIR which you can create proofs against. They are different in that they do not have a single `main` function, but are a collection of functions to be deployed to the [Aztec network](https://aztec.network). You can learn more about the technical details of Aztec in the [monorepo](https://github.com/AztecProtocol/aztec-packages) or contract [examples](https://github.com/AztecProtocol/aztec-packages/tree/master/yarn-project/noir-contracts/src/contracts). +Contract crates are similar to binary crates in that they compile to ACIR which you can create proofs against. They are different in that they do not have a single `main` function, but are a collection of functions to be deployed to the [Aztec network](https://aztec.network). You can learn more about the technical details of Aztec in the [monorepo](https://github.com/AztecProtocol/aztec-packages) or contract [examples](https://github.com/AztecProtocol/aztec-packages/tree/master/yarn-project/noir-contracts/contracts). ### Crate Root diff --git a/docs/versioned_docs/version-v0.17.0/modules_packages_crates/dependencies.md b/docs/versioned_docs/version-v0.17.0/modules_packages_crates/dependencies.md index 75f95aaa305..e91e73a4c4f 100644 --- a/docs/versioned_docs/version-v0.17.0/modules_packages_crates/dependencies.md +++ b/docs/versioned_docs/version-v0.17.0/modules_packages_crates/dependencies.md @@ -34,7 +34,7 @@ If the module is in a subdirectory, you can define a subdirectory in your git re # Nargo.toml [dependencies] -easy_private_token_contract = {tag ="v0.1.0-alpha62", git = "https://github.com/AztecProtocol/aztec-packages", directory = "yarn-project/noir-contracts/src/contracts/easy_private_token_contract"} +easy_private_token_contract = {tag ="v0.1.0-alpha62", git = "https://github.com/AztecProtocol/aztec-packages", directory = "yarn-project/noir-contracts/contracts/easy_private_token_contract"} ``` ## Specifying a local dependency @@ -48,7 +48,7 @@ For example, this file structure has a library and binary crate │   ├── Nargo.toml │   └── src │   └── main.nr -└── liba +└── lib_a ├── Nargo.toml └── src └── lib.nr @@ -60,17 +60,17 @@ Inside of the binary crate, you can specify: # Nargo.toml [dependencies] -libA = { path = "../liba" } +lib_a = { path = "../lib_a" } ``` ## Importing dependencies You can import a dependency to a Noir file using the following syntax. 
For example, to import the -ecrecover-noir library and local liba referenced above: +ecrecover-noir library and local lib_a referenced above: ```rust use dep::ecrecover; -use dep::libA; +use dep::lib_a; ``` You can also import only the specific parts of dependency that you want to use, like so: diff --git a/docs/versioned_docs/version-v0.17.0/modules_packages_crates/modules.md b/docs/versioned_docs/version-v0.17.0/modules_packages_crates/modules.md index 147c9b284e8..11e60cbf35e 100644 --- a/docs/versioned_docs/version-v0.17.0/modules_packages_crates/modules.md +++ b/docs/versioned_docs/version-v0.17.0/modules_packages_crates/modules.md @@ -10,7 +10,7 @@ Noir's module system follows the same convention as the _newer_ version of Rust' ## Purpose of Modules -Modules are used to organise files. Without modules all of your code would need to live in a single +Modules are used to organize files. Without modules all of your code would need to live in a single file. In Noir, the compiler does not automatically scan all of your files to detect modules. This must be done explicitly by the developer. diff --git a/docs/versioned_docs/version-v0.17.0/nargo/02_testing.md b/docs/versioned_docs/version-v0.17.0/nargo/02_testing.md index da767274efd..5c57ef92705 100644 --- a/docs/versioned_docs/version-v0.17.0/nargo/02_testing.md +++ b/docs/versioned_docs/version-v0.17.0/nargo/02_testing.md @@ -23,8 +23,8 @@ fn test_add() { } ``` -Running `nargo test` will test that the `test_add` function can be executed while satisfying the all -the contraints which allows you to test that add returns the expected values. Test functions can't +Running `nargo test` will test that the `test_add` function can be executed while satisfying all +the constraints which allows you to test that add returns the expected values. Test functions can't have any arguments currently. ### Test fail diff --git a/docs/versioned_docs/version-v0.17.0/noir_js/reference/01_noirjs.md b/docs/versioned_docs/version-v0.17.0/noir_js/reference/01_noirjs.md index d9e5a0c6115..3480fbfedad 100644 --- a/docs/versioned_docs/version-v0.17.0/noir_js/reference/01_noirjs.md +++ b/docs/versioned_docs/version-v0.17.0/noir_js/reference/01_noirjs.md @@ -74,8 +74,8 @@ async execute(inputs) | Return value | Type | Description | | ------------ | --------------------- | --------------------------------------------------- | -| `witness` | Promise | The witness | -| `returnValue` | Promise | The return value | +| `witness` | Promise \ | The witness | +| `returnValue` | Promise \ | The return value | ### Usage @@ -90,7 +90,7 @@ This async method generates a witness and a proof given an object as input. ### Syntax ```js -async generateFinalproof(input) +async generateFinalProof(input) ``` ### Parameters @@ -103,7 +103,7 @@ async generateFinalproof(input) | Return value | Type | Description | | ------------ | --------------------- | --------------------------------------------------- | -| `proof` | Promise | An array with the byte representation of the proof. | +| `proof` | Promise \ | An array with the byte representation of the proof. 
| ### Usage @@ -133,7 +133,7 @@ async verifyFinalProof(proof) | Return value | Type | Description | | ------------ | ------------------ | -------------------------------------------- | -| `verified` | Promise | A boolean for whether the proof was verified | +| `verified` | Promise \ | A boolean for whether the proof was verified | ### Usage diff --git a/docs/versioned_docs/version-v0.17.0/noir_js/reference/02_bb_backend.md b/docs/versioned_docs/version-v0.17.0/noir_js/reference/02_bb_backend.md index 21c2ff32b57..958cabd6289 100644 --- a/docs/versioned_docs/version-v0.17.0/noir_js/reference/02_bb_backend.md +++ b/docs/versioned_docs/version-v0.17.0/noir_js/reference/02_bb_backend.md @@ -41,7 +41,7 @@ constructor(acirCircuit, (numberOfThreads = 1)); | Parameter | Type | Description | | ----------------- | ----------------- | --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -| `acirCircuit` | Object | A circuit represented in a `json` format, containing the ABI and bytecode Tipically obtained by running [`nargo compile`](../../nargo/01_commands.md). This is the same circuit expected to be passed to [the Noir class](01_noirjs.md) | +| `acirCircuit` | Object | A circuit represented in a `json` format, containing the ABI and bytecode typically obtained by running [`nargo compile`](../../nargo/01_commands.md). This is the same circuit expected to be passed to [the Noir class](01_noirjs.md) | | `numberOfThreads` | Number (optional) | The number of threads to be used by the backend. Defaults to 1. | ### Usage @@ -70,7 +70,7 @@ async generateFinalProof(decompressedWitness) | Return value | Type | Description | | ------------ | -------------------- | --------------------------------------------------------- | -| `proof` | Promise | An array with the byte representation of the final proof. | +| `proof` | Promise\ | An array with the byte representation of the final proof. 
| ### Usage @@ -98,7 +98,7 @@ async generateIntermediateProof(witness) | Return value | Type | Description | | ------------ | -------------------- | --------------------------------------------------------------- | -| `proof` | Promise | An array with the byte representation of the intermediate proof | +| `proof` | Promise\ | An array with the byte representation of the intermediate proof | ### Usage @@ -127,7 +127,7 @@ async generateProof(decompressedWitness, makeEasyToVerifyInCircuit) | Return value | Type | Description | | ------------ | -------------------- | -------------------------------------------------- | -| `proof` | Promise | An array with the byte representation of the proof | +| `proof` | Promise\ | An array with the byte representation of the proof | ### Usage @@ -186,7 +186,7 @@ async verifyFinalProof(proof) | Return value | Type | Description | | ------------ | ------------------ | -------------------------------------------- | -| `verified` | Promise | A boolean for whether the proof was verified | +| `verified` | Promise \ | A boolean for whether the proof was verified | ### Usage @@ -214,7 +214,7 @@ async verifyIntermediateProof(proof) | Return value | Type | Description | | ------------ | ------------------ | -------------------------------------------- | -| `verified` | Promise | A boolean for whether the proof was verified | +| `verified` | Promise \ | A boolean for whether the proof was verified | ### Usage diff --git a/docs/versioned_docs/version-v0.17.0/standard_library/black_box_fns.md b/docs/versioned_docs/version-v0.17.0/standard_library/black_box_fns.md index c758846b688..a412de19d06 100644 --- a/docs/versioned_docs/version-v0.17.0/standard_library/black_box_fns.md +++ b/docs/versioned_docs/version-v0.17.0/standard_library/black_box_fns.md @@ -42,4 +42,4 @@ Here is a list of the current black box functions that are supported by UltraPlo Most black box functions are included as part of the Noir standard library, however `AND`, `XOR` and `RANGE` are used as part of the Noir language syntax. For instance, using the bitwise operator `&` will invoke the `AND` black box function. To ensure compatibility across backends, the ACVM has fallback implementations of `AND`, `XOR` and `RANGE` defined in its standard library which it can seamlessly fallback to if the backend doesn't support them. -You can view the black box functions defined in the ACVM code [here](https://github.com/noir-lang/acvm/blob/acir-v0.12.0/acir/src/circuit/black_box_functions.rs). +You can view the black box functions defined in the ACVM code [here](https://github.com/noir-lang/noir/blob/master/acvm-repo/acir/src/circuit/black_box_functions.rs). diff --git a/docs/versioned_docs/version-v0.17.0/standard_library/logging.md b/docs/versioned_docs/version-v0.17.0/standard_library/logging.md index 7e2fd9b9aff..4ba0fe0e656 100644 --- a/docs/versioned_docs/version-v0.17.0/standard_library/logging.md +++ b/docs/versioned_docs/version-v0.17.0/standard_library/logging.md @@ -22,7 +22,7 @@ implementation of rust's `println!` macro, this construct can be useful for debu You can print the output of println statements in your Noir code by using the `nargo execute` command or the `--show-output` flag when using `nargo test` (provided there are println statements in your tests). -It is recommended to use `nargo execute` if you want to debug failing constrains with `println` statements. 
This is due to every input in a test being a constant rather than a witness, so we issue an error during compilation while we only print during execution (which comes after compilation). `println` will not work for failed constraints caught at compile time. +It is recommended to use `nargo execute` if you want to debug failing constraints with `println` statements. This is due to every input in a test being a constant rather than a witness, so we issue an error during compilation while we only print during execution (which comes after compilation). `println` will not work for failed constraints caught at compile time. The `println` statement is unconstrained, so it works for outputting integers, fields, strings, and even structs or expressions. For example: diff --git a/docs/docs/examples/merkle-proof.mdx b/docs/versioned_docs/version-v0.19.0/examples/merkle-proof.mdx similarity index 100% rename from docs/docs/examples/merkle-proof.mdx rename to docs/versioned_docs/version-v0.19.0/examples/merkle-proof.mdx diff --git a/docs/versioned_docs/version-v0.19.0/getting_started/00_nargo_installation.md b/docs/versioned_docs/version-v0.19.0/getting_started/00_nargo_installation.md new file mode 100644 index 00000000000..7d3c88c7693 --- /dev/null +++ b/docs/versioned_docs/version-v0.19.0/getting_started/00_nargo_installation.md @@ -0,0 +1,249 @@ +--- +title: Nargo Installation +description: + nargo is a command line tool for interacting with Noir programs (e.g. compiling, proving, + verifying and more). Learn how to install and use Nargo for your projects with this comprehensive + guide. +keywords: [Nargo, command line tool, Noir programs, installation guide, how to use Nargo] +--- + +`nargo` is a command line tool for interacting with Noir programs (e.g. compiling, proving, +verifying and more). + +Alternatively, the interactions can also be performed in [NoirJS](../noir_js/noir_js.md). + +### UltraPlonk + +Nargo versions \<0.5.0 of `aztec_backend` and `aztec_wasm_backend` are based on the TurboPlonk +version of Aztec Backend, which lacks efficient implementations of useful primitives (e.g. Keccak256 in 18k constraints, ECDSA verification in 36k constraints) that the UltraPlonk version offers. + +## Installation + +There are four approaches for installing Nargo: + +- [Option 1: Noirup](#option-1-noirup) +- [Option 2: Binaries](#option-2-binaries) +- [Option 3: Compile from Source](#option-3-compile-from-source) +- [Option 4: WSL for Windows](#option-4-wsl-for-windows) + +Optionally you can also install [Noir VS Code extension] for syntax highlighting. + +### Option 1: Noirup + +If you're on OSX or Linux, the easiest way to start using Noir and Nargo is via noirup. Just open a +terminal and run: + +```bash +curl -L https://raw.githubusercontent.com/noir-lang/noirup/main/install | bash +``` + +Close the terminal, open another one, and run + +```bash +noirup +``` + +Done, you should have the latest version working. You can check with `nargo --version`. + +You can also install nightlies, specific versions +or branches, check out the [noirup repository](https://github.com/noir-lang/noirup) for more +information. + +#### GitHub Actions + +You can use `noirup` with GitHub Actions for CI/CD and automated testing. It is as simple as +installing `noirup` and running tests in your GitHub Action `yml` file. + +See the +[config file](https://github.com/TomAFrench/noir-hashes/blob/master/.github/workflows/noir.yml) in +this repo containing hash functions in Noir for an example. 
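As a rough sketch of what such a workflow can look like (this is not the linked config file itself; the workflow name, trigger, and the `~/.nargo/bin` install location are assumptions), a job can reuse the `noirup` one-liner above and then run `nargo test`:

```yaml
name: noir-tests
on: [push]

jobs:
  test:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - name: Install noirup and Nargo
        run: |
          curl -L https://raw.githubusercontent.com/noir-lang/noirup/main/install | bash
          export PATH="$HOME/.nargo/bin:$PATH"   # assumed default install location
          noirup
          nargo --version
      - name: Run Noir tests
        run: |
          export PATH="$HOME/.nargo/bin:$PATH"
          nargo test
```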
+ +#### Nightly versions + +To install the nightly version of Noir (updated daily) run: + +```bash +noirup -n +``` + +### Option 2: Binaries + +See [GitHub Releases](https://github.com/noir-lang/noir/releases) for the latest and previous +platform specific binaries. + +#### Step 1 + +Paste and run the following in the terminal to extract and install the binary: + +> **macOS / Linux:** If you are prompted with `Permission denied` when running commands, prepend +> `sudo` and re-run it. + +##### macOS (Apple Silicon) + +```bash +mkdir -p $HOME/.nargo/bin && \ +curl -o $HOME/.nargo/bin/nargo-aarch64-apple-darwin.tar.gz -L https://github.com/noir-lang/noir/releases/download/v0.6.0/nargo-aarch64-apple-darwin.tar.gz && \ +tar -xvf $HOME/.nargo/bin/nargo-aarch64-apple-darwin.tar.gz -C $HOME/.nargo/bin/ && \ +echo '\nexport PATH=$PATH:$HOME/.nargo/bin' >> ~/.zshrc && \ +source ~/.zshrc +``` + +##### macOS (Intel) + +```bash +mkdir -p $HOME/.nargo/bin && \ +curl -o $HOME/.nargo/bin/nargo-x86_64-apple-darwin.tar.gz -L https://github.com/noir-lang/noir/releases/download/v0.6.0/nargo-x86_64-apple-darwin.tar.gz && \ +tar -xvf $HOME/.nargo/bin/nargo-x86_64-apple-darwin.tar.gz -C $HOME/.nargo/bin/ && \ +echo '\nexport PATH=$PATH:$HOME/.nargo/bin' >> ~/.zshrc && \ +source ~/.zshrc +``` + +##### Linux (Bash) + +```bash +mkdir -p $HOME/.nargo/bin && \ +curl -o $HOME/.nargo/bin/nargo-x86_64-unknown-linux-gnu.tar.gz -L https://github.com/noir-lang/noir/releases/download/v0.6.0/nargo-x86_64-unknown-linux-gnu.tar.gz && \ +tar -xvf $HOME/.nargo/bin/nargo-x86_64-unknown-linux-gnu.tar.gz -C $HOME/.nargo/bin/ && \ +echo -e '\nexport PATH=$PATH:$HOME/.nargo/bin' >> ~/.bashrc && \ +source ~/.bashrc +``` + +#### Step 2 + +Check if the installation was successful by running `nargo --help`. + +> **macOS:** If you are prompted with an OS alert, right-click and open the _nargo_ executable from +> Finder. Close the new terminal popped up and `nargo` should now be accessible. + +For a successful installation, you should see something similar to the following after running the +command: + +```sh +$ nargo --help + +Noir's package manager + +Usage: nargo + +Commands: + check Checks the constraint system for errors + codegen-verifier Generates a Solidity verifier smart contract for the program + compile Compile the program and its secret execution trace into ACIR format + new Create a new binary project + execute Executes a circuit to calculate its return value + prove Create proof for this program. The proof is returned as a hex encoded string + verify Given a proof and a program, verify whether the proof is valid + test Run the tests for this program + gates Counts the occurrences of different gates in circuit + help Print this message or the help of the given subcommand(s) +``` + +### Option 3: Compile from Source + +Due to the large number of native dependencies, Noir projects uses [Nix](https://nixos.org/) and [direnv](https://direnv.net/) to streamline the development experience. It helps mitigating issues commonly associated with dependency management, such as conflicts between required package versions for different projects (often referred to as "dependency hell"). + +Combined with direnv, which automatically sets or clears environment variables based on the directory, it further simplifies the development process by seamlessly integrating with the developer's shell, facilitating an efficient and reliable workflow for managing and deploying Noir projects with multiple dependencies. 
+ +#### Setting up your environment + +For the best experience, please follow these instructions to setup your environment: + +1. Install Nix following [their guide](https://nixos.org/download.html) for your operating system. +2. Create the file `~/.config/nix/nix.conf` with the contents: + +```ini +experimental-features = nix-command +extra-experimental-features = flakes +``` + +3. Install direnv into your Nix profile by running: + +```sh +nix profile install nixpkgs#direnv +``` + +4. Add direnv to your shell following [their guide](https://direnv.net/docs/hook.html). + 1. For bash or zshell, add `eval "$(direnv hook bash)"` or `eval "$(direnv hook zsh)"` to your ~/.bashrc or ~/.zshrc file, respectively. +5. Restart your shell. + +#### Shell & editor experience + +Now that your environment is set up, you can get to work on the project. + +1. Clone the repository, such as: + +```sh +git clone git@github.com:noir-lang/noir +``` + +> Replacing `noir` with whichever repository you want to work on. + +2. Navigate to the directory: + +```sh +cd noir +``` + +> Replacing `noir` with whichever repository you cloned. + +3. You should see a **direnv error** because projects aren't allowed by default. Make sure you've reviewed and trust our `.envrc` file, then you need to run: + +```sh +direnv allow +``` + +4. Now, wait awhile for all the native dependencies to be built. This will take some time and direnv will warn you that it is taking a long time, but we just need to let it run. + +5. Once you are presented with your prompt again, you can start your editor within the project directory (we recommend [VSCode](https://code.visualstudio.com/)): + +```sh +code . +``` + +6. (Recommended) When launching VSCode for the first time, you should be prompted to install our recommended plugins. We highly recommend installing these for the best development experience. + +#### Building and testing + +Assuming you are using `direnv` to populate your environment, building and testing the project can be done +with the typical `cargo build`, `cargo test`, and `cargo clippy` commands. You'll notice that the `cargo` version matches the version we specify in `rust-toolchain.toml`, which is 1.71.1 at the time of this writing. + +If you want to build the entire project in an isolated sandbox, you can use Nix commands: + +1. `nix build .` (or `nix build . -L` for verbose output) to build the project in a Nix sandbox. +2. `nix flake check` (or `nix flake check -L` for verbose output) to run clippy and tests in a Nix sandbox. + +#### Without `direnv` + +If you have hesitations with using direnv, you can launch a subshell with `nix develop` and then launch your editor from within the subshell. However, if VSCode was already launched in the project directory, the environment won't be updated. + +Advanced: If you aren't using direnv nor launching your editor within the subshell, you can try to install Barretenberg and other global dependencies the package needs. This is an advanced workflow and likely won't receive support! + +### Option 4: WSL (for Windows) + +The default backend for Noir (Barretenberg) doesn't provide Windows binaries at this time. For that reason, Noir cannot be installed natively. However, it is available by using Windows Subsystem for Linux (WSL). + +Step 1: Follow the instructions [here](https://learn.microsoft.com/en-us/windows/wsl/install) to install and run WSL. + +step 2: Follow the [Noirup instructions](#option-1-noirup). 
+ +## Uninstalling Nargo + +### Noirup + +If you installed Noir with `noirup`, you can uninstall Noir by removing the files in `~/.nargo`, `~/nargo` and `~/noir_cache`. + +```bash +rm -r ~/.nargo +rm -r ~/nargo +rm -r ~/noir_cache +``` + +### Nix + +If you installed Noir with Nix or from source, you can remove the binary located at `~/.nix-profile/bin/nargo`. + +```bash +rm ~/.nix-profile/bin/nargo +``` + +[noir vs code extension]: https://marketplace.visualstudio.com/items?itemName=noir-lang.vscode-noir diff --git a/docs/docs/getting_started/01_hello_world.md b/docs/versioned_docs/version-v0.19.0/getting_started/01_hello_world.md similarity index 100% rename from docs/docs/getting_started/01_hello_world.md rename to docs/versioned_docs/version-v0.19.0/getting_started/01_hello_world.md diff --git a/docs/versioned_docs/version-v0.19.0/getting_started/02_breakdown.md b/docs/versioned_docs/version-v0.19.0/getting_started/02_breakdown.md new file mode 100644 index 00000000000..d28a54a1600 --- /dev/null +++ b/docs/versioned_docs/version-v0.19.0/getting_started/02_breakdown.md @@ -0,0 +1,198 @@ +--- +title: Project Breakdown +description: + Learn about the anatomy of a Nargo project, including the purpose of the Prover and Verifier TOML + files, and how to prove and verify your program. +keywords: + [Nargo, Nargo project, Prover.toml, Verifier.toml, proof verification, private asset transfer] +--- + +This section breaks down our hello world program in section _1.2_. We elaborate on the project +structure and what the `prove` and `verify` commands did in the previous section. + +## Anatomy of a Nargo Project + +Upon creating a new project with `nargo new` and building the in/output files with `nargo check` +commands, you would get a minimal Nargo project of the following structure: + + - src + - Prover.toml + - Verifier.toml + - Nargo.toml + +The source directory _src_ holds the source code for your Noir program. By default only a _main.nr_ +file will be generated within it. + +### Prover.toml + +_Prover.toml_ is used for specifying the input values for executing and proving the program. You can specify `toml` files with different names by using the `--prover-name` or `-p` flags, see the [Prover](#provertoml) section below. Optionally you may specify expected output values for prove-time checking as well. + +### Verifier.toml + +_Verifier.toml_ contains public in/output values computed when executing the Noir program. + +### Nargo.toml + +_Nargo.toml_ contains the environmental options of your project. It contains a "package" section and a "dependencies" section. + +Example Nargo.toml: + +```toml +[package] +name = "noir_starter" +type = "bin" +authors = ["Alice"] +compiler_version = "0.9.0" +description = "Getting started with Noir" +entry = "circuit/main.nr" +license = "MIT" + +[dependencies] +ecrecover = {tag = "v0.9.0", git = "https://github.com/colinnielsen/ecrecover-noir.git"} +``` + +Nargo.toml for a [workspace](../modules_packages_crates/workspaces) will look a bit different. For example: + +```toml +[workspace] +members = ["crates/a", "crates/b"] +default-member = "crates/a" +``` + +#### Package section + +The package section requires a number of fields including: + +- `name` (**required**) - the name of the package +- `type` (**required**) - can be "bin", "lib", or "contract" to specify whether its a binary, library or Aztec contract +- `authors` (optional) - authors of the project +- `compiler_version` - specifies the version of the compiler to use. 
This is enforced by the compiler and follow's [Rust's versioning](https://doc.rust-lang.org/cargo/reference/manifest.html#the-version-field), so a `compiler_version = 0.18.0` will enforce Nargo version 0.18.0, `compiler_version = ^0.18.0` will enforce anything above 0.18.0 but below 0.19.0, etc. For more information, see how [Rust handles these operators](https://docs.rs/semver/latest/semver/enum.Op.html) +- `description` (optional) +- `entry` (optional) - a relative filepath to use as the entry point into your package (overrides the default of `src/lib.nr` or `src/main.nr`) +- `backend` (optional) +- `license` (optional) + +#### Dependencies section + +This is where you will specify any dependencies for your project. See the [Dependencies page](../modules_packages_crates/dependencies) for more info. + +`./proofs/` and `./contract/` directories will not be immediately visible until you create a proof or +verifier contract respectively. + +### main.nr + +The _main.nr_ file contains a `main` method, this method is the entry point into your Noir program. + +In our sample program, _main.nr_ looks like this: + +```rust +fn main(x : Field, y : Field) { + assert(x != y); +} +``` + +The parameters `x` and `y` can be seen as the API for the program and must be supplied by the +prover. Since neither `x` nor `y` is marked as public, the verifier does not supply any inputs, when +verifying the proof. + +The prover supplies the values for `x` and `y` in the _Prover.toml_ file. + +As for the program body, `assert` ensures the satisfaction of the condition (e.g. `x != y`) is +constrained by the proof of the execution of said program (i.e. if the condition was not met, the +verifier would reject the proof as an invalid proof). + +### Prover.toml + +The _Prover.toml_ file is a file which the prover uses to supply his witness values(both private and +public). + +In our hello world program the _Prover.toml_ file looks like this: + +```toml +x = "1" +y = "2" +``` + +When the command `nargo prove` is executed, two processes happen: + +1. Noir creates a proof that `x` which holds the value of `1` and `y` which holds the value of `2` + is not equal. This not equal constraint is due to the line `assert(x != y)`. + +2. Noir creates and stores the proof of this statement in the _proofs_ directory in a file called your-project.proof. So if your project is named "private_voting" (defined in the project Nargo.toml), the proof will be saved at `./proofs/private_voting.proof`. Opening this file will display the proof in hex format. + +#### Arrays of Structs + +The following code shows how to pass an array of structs to a Noir program to generate a proof. + +```rust +// main.nr +struct Foo { + bar: Field, + baz: Field, +} + +fn main(foos: [Foo; 3]) -> pub Field { + foos[2].bar + foos[2].baz +} +``` + +Prover.toml: + +```toml +[[foos]] # foos[0] +bar = 0 +baz = 0 + +[[foos]] # foos[1] +bar = 0 +baz = 0 + +[[foos]] # foos[2] +bar = 1 +baz = 2 +``` + +#### Custom toml files + +You can specify a `toml` file with a different name to use for proving by using the `--prover-name` or `-p` flags. + +This command looks for proof inputs in the default **Prover.toml** and generates the proof and saves it at `./proofs/.proof`: + +```bash +nargo prove +``` + +This command looks for proof inputs in the custom **OtherProver.toml** and generates proof and saves it at `./proofs/.proof`: + +```bash +nargo prove -p OtherProver +``` + +## Verifying a Proof + +When the command `nargo verify` is executed, two processes happen: + +1. 
Noir checks in the _proofs_ directory for a proof file with the project name (eg. test_project.proof) + +2. If that file is found, the proof's validity is checked + +> **Note:** The validity of the proof is linked to the current Noir program; if the program is +> changed and the verifier verifies the proof, it will fail because the proof is not valid for the +> _modified_ Noir program. + +In production, the prover and the verifier are usually two separate entities. A prover would +retrieve the necessary inputs, execute the Noir program, generate a proof and pass it to the +verifier. The verifier would then retrieve the public inputs from usually external sources and +verifies the validity of the proof against it. + +Take a private asset transfer as an example: + +A user on browser as the prover would retrieve private inputs (e.g. the user's private key) and +public inputs (e.g. the user's encrypted balance on-chain), compute the transfer, generate a proof +and submit it to the verifier smart contract. + +The verifier contract would then draw the user's encrypted balance directly from the blockchain and +verify the proof submitted against it. If the verification passes, additional functions in the +verifier contract could trigger (e.g. approve the asset transfer). + +Now that you understand the concepts, you'll probably want some editor feedback while you are writing more complex code. diff --git a/docs/versioned_docs/version-v0.19.0/index.md b/docs/versioned_docs/version-v0.19.0/index.md new file mode 100644 index 00000000000..380368db036 --- /dev/null +++ b/docs/versioned_docs/version-v0.19.0/index.md @@ -0,0 +1,100 @@ +--- +title: Introducing Noir +description: + Learn about the public alpha release of Noir, a domain specific language heavily influenced by + Rust that compiles to an intermediate language which can be compiled to an arithmetic circuit or a + rank-1 constraint system. +keywords: + [ + Noir, + Domain Specific Language, + Rust, + Intermediate Language, + Arithmetic Circuit, + Rank-1 Constraint System, + Ethereum Developers, + Protocol Developers, + Blockchain Developers, + Proving System, + Smart Contract Language, + ] +slug: / +--- + +## What is Noir? + +Noir is a Domain Specific Language for SNARK proving systems. It has been designed to use any ACIR compatible proving system. + +Its design choices are influenced heavily by Rust and focuses on a simple, familiar syntax. + +## Who is Noir for? + +Noir can be used for a variety of purposes. + +### Solidity Developers + +Noir currently includes a command to create a Solidity contract which verifies your Noir program. This will +be modularized in the future; however, as of the alpha, you can use the [`nargo codegen-verifier`](./nargo/commands#nargo-codegen-verifier) command to create +a verifier contract. + +### Protocol Developers + +As a protocol developer, you may not want to use the Aztec backend due to it not being a fit for +your stack, or maybe you simply want to use a different proving system. Since Noir does not compile +to a specific proof system, it is possible for protocol developers to replace the PLONK-based +proving system with a different proving system altogether. + +### Blockchain developers + +As a blockchain developer, you will be constrained by parameters set by your blockchain (for example, the +proving system and smart contract language has been pre-defined). In order for you to use Noir in +your blockchain, a proving system backend and a smart contract interface +must be implemented for it. 
+ +## What's new about Noir? + +Noir is simple and flexible in its design, as it does not compile immediately to a fixed +NP-complete language. Instead, Noir compiles to an intermediate language (ACIR), which itself can be compiled +to an arithmetic circuit (if choosing to target Aztec's barretenberg backend) or a rank-1 constraint system (if choosing to target an R1CS backend like Arkworks' Marlin backend, or others). + +This in itself brings up a few challenges within the design process, but allows one to decouple the programming language completely from the backend. This is similar in theory to LLVM. + +## Current Features + +Compiler: + +- Module System +- For expressions +- Arrays +- Bit Operations +- Binary operations (\<, \<=, >, >=, +, -, \*, /, %) [See documentation for an extensive list] +- Unsigned integers +- If statements +- Structures and Tuples +- Generics + +ACIR Supported OPCODES: + +- Sha256 +- Blake2s +- Schnorr signature verification +- MerkleMembership +- Pedersen Commitment +- Pedersen Hash +- HashToField + +## Libraries + +Noir does not currently have an official package manager. You can find a list of available Noir libraries in the [awesome-noir repo here](https://github.com/noir-lang/awesome-noir#libraries). + +Some libraries that are available today include: + +- [Standard Library](https://github.com/noir-lang/noir/tree/master/noir_stdlib) - the Noir Standard Library +- [Ethereum Storage Proof Verification](https://github.com/aragonzkresearch/noir-trie-proofs) - a library that contains the primitives necessary for RLP decoding (in the form of look-up table construction) and Ethereum state and storage proof verification (or verification of any trie proof involving 32-byte long keys) +- [BigInt](https://github.com/shuklaayush/noir-bigint) - a library that provides a custom BigUint56 data type, allowing for computations on large unsigned integers +- [ECrecover](https://github.com/colinnielsen/ecrecover-noir/tree/main) - a library to verify an ECDSA signature and return the source Ethereum address +- [Sparse Merkle Tree Verifier](https://github.com/vocdoni/smtverifier-noir/tree/main) - a library for verification of sparse Merkle trees +- [Signed Int](https://github.com/resurgencelabs/signed_int) - a library for accessing a custom Signed Integer data type, allowing access to negative numbers on Noir +- [Fraction](https://github.com/resurgencelabs/fraction) - a library for accessing fractional number data type in Noir, allowing results that aren't whole numbers + +See the section on [dependencies](./modules_packages_crates/dependencies) for more information. diff --git a/docs/versioned_docs/version-v0.19.0/language_concepts/01_functions.md b/docs/versioned_docs/version-v0.19.0/language_concepts/01_functions.md new file mode 100644 index 00000000000..5eb22170e54 --- /dev/null +++ b/docs/versioned_docs/version-v0.19.0/language_concepts/01_functions.md @@ -0,0 +1,225 @@ +--- +title: Functions +description: + Learn how to declare functions and methods in Noir, a programming language with Rust semantics. + This guide covers parameter declaration, return types, call expressions, and more. +keywords: [Noir, Rust, functions, methods, parameter declaration, return types, call expressions] +--- + +Functions in Noir follow the same semantics of Rust, though Noir does not support early returns. + +To declare a function the `fn` keyword is used. + +```rust +fn foo() {} +``` + +By default, functions are visible only within the package they are defined. 
To make them visible outside of that package (for example, as part of a [library](../modules_packages_crates/crates_and_packages.md#libraries)), you should mark them as `pub`: + +```rust +pub fn foo() {} +``` + +You can also restrict the visibility of the function to only the crate it was defined in, by specifying `pub(crate)`: + +```rust +pub(crate) fn foo() {} //foo can only be called within its crate +``` + +All parameters in a function must have a type and all types are known at compile time. The parameter +is pre-pended with a colon and the parameter type. Multiple parameters are separated using a comma. + +```rust +fn foo(x : Field, y : Field){} +``` + +The return type of a function can be stated by using the `->` arrow notation. The function below +states that the foo function must return a `Field`. If the function returns no value, then the arrow +is omitted. + +```rust +fn foo(x : Field, y : Field) -> Field { + x + y +} +``` + +Note that a `return` keyword is unneeded in this case - the last expression in a function's body is +returned. + +## Main function + +If you're writing a binary, the `main` function is the starting point of your program. You can pass all types of expressions to it, as long as they have a fixed size at compile time: + +```rust +fn main(x : Field) // this is fine: passing a Field +fn main(x : [Field; 2]) // this is also fine: passing a Field with known size at compile-time +fn main(x : (Field, bool)) // 👌: passing a (Field, bool) tuple means size 2 +fn main(x : str<5>) // this is fine, as long as you pass a string of size 5 + +fn main(x : Vec) // can't compile, has variable size +fn main(x : [Field]) // can't compile, has variable size +fn main(....// i think you got it by now +``` + +Keep in mind [tests](../nargo/02_testing.md) don't differentiate between `main` and any other function. The following snippet passes tests, but won't compile or prove: + +```rust +fn main(x : [Field]) { + assert(x[0] == 1); +} + +#[test] +fn test_one() { + main([1, 2]); +} +``` + +```bash +$ nargo test +[testing] Running 1 test functions +[testing] Testing test_one... ok +[testing] All tests passed + +$ nargo check +The application panicked (crashed). +Message: Cannot have variable sized arrays as a parameter to main +``` + +## Call Expressions + +Calling a function in Noir is executed by using the function name and passing in the necessary +arguments. + +Below we show how to call the `foo` function from the `main` function using a call expression: + +```rust +fn main(x : Field, y : Field) { + let z = foo(x); +} + +fn foo(x : Field) -> Field { + x + x +} +``` + +## Methods + +You can define methods in Noir on any struct type in scope. + +```rust +struct MyStruct { + foo: Field, + bar: Field, +} + +impl MyStruct { + fn new(foo: Field) -> MyStruct { + MyStruct { + foo, + bar: 2, + } + } + + fn sum(self) -> Field { + self.foo + self.bar + } +} + +fn main() { + let s = MyStruct::new(40); + assert(s.sum() == 42); +} +``` + +Methods are just syntactic sugar for functions, so if we wanted to we could also call `sum` as +follows: + +```rust +assert(MyStruct::sum(s) == 42); +``` + +It is also possible to specialize which method is chosen depending on the [generic](./06_generics.md) type that is used. 
In this example, the `foo` function returns different values depending on its type: + +```rust +struct Foo<T> {} + +impl Foo<u32> { + fn foo(self) -> Field { 1 } +} + +impl Foo<u64> { + fn foo(self) -> Field { 2 } +} + +fn main() { + let f1: Foo<u32> = Foo{}; + let f2: Foo<u64> = Foo{}; + assert(f1.foo() + f2.foo() == 3); +} +``` + +Also note that impls with the same method name defined in them cannot overlap. For example, if we already have `foo` defined for `Foo<u32>` and `Foo<u64>` like we do above, we cannot also define `foo` in an `impl<T> Foo<T>` since it would be ambiguous which version of `foo` to choose. + +```rust +// Including this impl in the same project as the above snippet would +// cause an overlapping impls error +impl<T> Foo<T> { + fn foo(self) -> Field { 3 } +} +``` + +## Lambdas + +Lambdas are anonymous functions. They follow the syntax of Rust - `|arg1, arg2, ..., argN| return_expression`. + +```rust +let add_50 = |val| val + 50; +assert(add_50(100) == 150); +``` + +See [Lambdas](./08_lambdas.md) for more details. + +## Attributes + +Attributes are metadata that can be applied to a function, using the following syntax: `#[attribute(value)]`. + +Supported attributes include: + +- **builtin**: the function is implemented by the compiler, for efficiency purposes. +- **deprecated**: mark the function as _deprecated_. Calling the function will generate a warning: `warning: use of deprecated function` +- **field**: Used to enable conditional compilation of code depending on the field size. See below for more details +- **oracle**: mark the function as _oracle_; meaning it is an external unconstrained function, implemented in noir_js. See [Unconstrained](./05_unconstrained.md) and [NoirJS](../noir_js/noir_js.md) for more details. +- **test**: mark the function as a unit test. See [Tests](../nargo/02_testing.md) for more details + +### Field Attribute + +The field attribute defines which field the function is compatible with. The function is conditionally compiled, under the condition that the field attribute matches the Noir native field. +The field can be defined implicitly, by using the name of the elliptic curve usually associated to it - for instance bn254, bls12_381 - or explicitly by using the field (prime) order, in decimal or hexadecimal form. +As a result, it is possible to define multiple versions of a function with each version specialized for a different field attribute. This can be useful when a function requires different parameters depending on the underlying elliptic curve. + +Example: we define the function `foo()` three times below. Once for the default Noir bn254 curve, once for the field $\mathbb F_{23}$, which will normally never be used by Noir, and once again for the bls12_381 curve. + +```rust +#[field(bn254)] +fn foo() -> u32 { + 1 +} + +#[field(23)] +fn foo() -> u32 { + 2 +} + +// This commented code would not compile as foo would be defined twice because it is the same field as bn254 +// #[field(21888242871839275222246405745257275088548364400416034343698204186575808495617)] +// fn foo() -> u32 { +// 2 +// } + +#[field(bls12_381)] +fn foo() -> u32 { + 3 +} +``` + +If the field name is not known to Noir, it will discard the function. Field names are case insensitive.
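As a brief illustration of the simpler attributes listed above, here is a small sketch (the function names are made up for the example) showing `deprecated` and `test` applied to ordinary functions:

```rust
// Any call to this function still compiles, but emits
// `warning: use of deprecated function` at the call site.
#[deprecated]
fn old_double(x: Field) -> Field {
    x + x
}

fn double(x: Field) -> Field {
    x * 2
}

// Run with `nargo test`; see the Tests page linked above for details.
#[test]
fn test_double() {
    assert(double(3) == 6);
    assert(old_double(3) == double(3));
}
```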
diff --git a/docs/docs/language_concepts/02_control_flow.md b/docs/versioned_docs/version-v0.19.0/language_concepts/02_control_flow.md similarity index 100% rename from docs/docs/language_concepts/02_control_flow.md rename to docs/versioned_docs/version-v0.19.0/language_concepts/02_control_flow.md diff --git a/docs/versioned_docs/version-v0.19.0/language_concepts/03_ops.md b/docs/versioned_docs/version-v0.19.0/language_concepts/03_ops.md new file mode 100644 index 00000000000..0e35ef5e584 --- /dev/null +++ b/docs/versioned_docs/version-v0.19.0/language_concepts/03_ops.md @@ -0,0 +1,97 @@ +--- +title: Logical Operations +description: + Learn about the supported arithmetic and logical operations in the Noir programming language. + Discover how to perform operations on private input types, integers, and booleans. +keywords: + [ + Noir programming language, + supported operations, + arithmetic operations, + logical operations, + predicate operators, + bitwise operations, + short-circuiting, + backend, + ] +--- + +# Operations + +## Table of Supported Operations + +| Operation | Description | Requirements | +| :-------- | :------------------------------------------------------------: | -------------------------------------: | +| + | Adds two private input types together | Types must be private input | +| - | Subtracts two private input types together | Types must be private input | +| \* | Multiplies two private input types together | Types must be private input | +| / | Divides two private input types together | Types must be private input | +| ^ | XOR two private input types together | Types must be integer | +| & | AND two private input types together | Types must be integer | +| \| | OR two private input types together | Types must be integer | +| \<\< | Left shift an integer by another integer amount | Types must be integer | +| >> | Right shift an integer by another integer amount | Types must be integer | +| ! | Bitwise not of a value | Type must be integer or boolean | +| \< | returns a bool if one value is less than the other | Upper bound must have a known bit size | +| \<= | returns a bool if one value is less than or equal to the other | Upper bound must have a known bit size | +| > | returns a bool if one value is more than the other | Upper bound must have a known bit size | +| >= | returns a bool if one value is more than or equal to the other | Upper bound must have a known bit size | +| == | returns a bool if one value is equal to the other | Both types must not be constants | +| != | returns a bool if one value is not equal to the other | Both types must not be constants | + +### Predicate Operators + +`<,<=, !=, == , >, >=` are known as predicate/comparison operations because they compare two values. +This differs from the operations such as `+` where the operands are used in _computation_. + +### Bitwise Operations Example + +```rust +fn main(x : Field) { + let y = x as u32; + let z = y & y; +} +``` + +`z` is implicitly constrained to be the result of `y & y`. The `&` operand is used to denote bitwise +`&`. + +> `x & x` would not compile as `x` is a `Field` and not an integer type. + +### Logical Operators + +Noir has no support for the logical operators `||` and `&&`. This is because encoding the +short-circuiting that these operators require can be inefficient for Noir's backend. Instead you can +use the bitwise operators `|` and `&` which operate identically for booleans, just without the +short-circuiting. 
+ +```rust +let my_val = 5; + +let mut flag = 1; +if (my_val > 6) | (my_val == 0) { + flag = 0; +} +assert(flag == 1); + +if (my_val != 10) & (my_val < 50) { + flag = 0; +} +assert(flag == 0); +``` + +### Shorthand operators + +Noir has shorthand operators for most of the above operators, namely `+=, -=, *=, /=, %=, &=, |=, ^=, <<=`, and `>>=`. These allow for more concise syntax. For example: + +```rust +let mut i = 0; +i = i + 1; +``` + +could be written as: + +```rust +let mut i = 0; +i += 1; +``` diff --git a/docs/docs/language_concepts/04_assert.md b/docs/versioned_docs/version-v0.19.0/language_concepts/04_assert.md similarity index 100% rename from docs/docs/language_concepts/04_assert.md rename to docs/versioned_docs/version-v0.19.0/language_concepts/04_assert.md diff --git a/docs/versioned_docs/version-v0.19.0/language_concepts/05_unconstrained.md b/docs/versioned_docs/version-v0.19.0/language_concepts/05_unconstrained.md new file mode 100644 index 00000000000..097d6ee9894 --- /dev/null +++ b/docs/versioned_docs/version-v0.19.0/language_concepts/05_unconstrained.md @@ -0,0 +1,96 @@ +--- +title: Unconstrained Functions +description: "Learn about what unconstrained functions in Noir are, how to use them and when you'd want to." + +keywords: [Noir programming language, unconstrained, open] +--- + + + +Unconstrained functions are functions which do not constrain any of the included computation and allow for non-deterministic computation. + +## Why? + +Zero-knowledge (ZK) domain-specific languages (DSLs) enable developers to generate ZK proofs from their programs by compiling code down to the constraints of an NP-complete language (such as R1CS or PLONKish languages). However, the hard bounds of a constraint system can be very limiting to the functionality of a ZK DSL. + +Enabling a circuit language to perform unconstrained execution is a powerful tool. Said another way, unconstrained execution lets developers generate witnesses from code that does not generate any constraints. Being able to execute logic outside of a circuit is critical for both circuit performance and constructing proofs on information that is external to a circuit. + +Fetching information from somewhere external to a circuit can also be used to enable developers to improve circuit efficiency. + +A ZK DSL does not just prove computation, but proves that some computation was handled correctly. Thus, it is necessary that when we switch from performing some operation directly inside of a circuit to inside of an unconstrained environment that the appropriate constraints are still laid down elsewhere in the circuit. + +## Example + +An in-depth example might help drive the point home. This example comes from the excellent [post](https://discord.com/channels/1113924620781883405/1124022445054111926/1128747641853972590) by Tom in the Noir Discord. + +Let's look at how we can optimize a function to turn a `u72` into an array of `u8`s. + +```rust +fn main(num: u72) -> pub [u8; 8] { + let mut out: [u8; 8] = [0; 8]; + for i in 0..8 { + out[i] = (num >> (56 - (i * 8)) as u72 & 0xff) as u8; + } + + out +} +``` + +``` +Total ACIR opcodes generated for language PLONKCSat { width: 3 }: 91 +Backend circuit size: 3619 +``` + +A lot of the operations in this function are optimized away by the compiler (all the bit-shifts turn into divisions by constants). However we can save a bunch of gates by casting to u8 a bit earlier. This automatically truncates the bit-shifted value to fit in a u8 which allows us to remove the AND against 0xff.
This saves us ~480 gates in total. + +```rust +fn main(num: u72) -> pub [u8; 8] { + let mut out: [u8; 8] = [0; 8]; + for i in 0..8 { + out[i] = (num >> (56 - (i * 8))) as u8; + } + + out +} +``` + +``` +Total ACIR opcodes generated for language PLONKCSat { width: 3 }: 75 +Backend circuit size: 3143 +``` + +Those are some nice savings already, but we can do better. This code is all constrained, so we're proving every step of calculating `out` using `num`, but we don't actually care about how we calculate this, just that it's correct. This is where brillig comes in. + +It turns out that truncating a u72 into a u8 is hard to do inside a snark: each time we do `as u8` we lay down 4 ACIR opcodes which get converted into multiple gates. It's actually much easier to calculate `num` from `out` than the other way around. All we need to do is multiply each element of `out` by a constant and add them all together, both relatively easy operations inside a snark. + +We can then run `u72_to_u8` as unconstrained brillig code in order to calculate `out`, then use that result in our constrained function and assert that if we were to do the reverse calculation we'd get back `num`. This looks a little like the below: + +```rust +fn main(num: u72) -> pub [u8; 8] { + let out = u72_to_u8(num); + + let mut reconstructed_num: u72 = 0; + for i in 0..8 { + reconstructed_num += (out[i] as u72 << (56 - (8 * i))); + } + assert(num == reconstructed_num); + out +} + +unconstrained fn u72_to_u8(num: u72) -> [u8; 8] { + let mut out: [u8; 8] = [0; 8]; + for i in 0..8 { + out[i] = (num >> (56 - (i * 8))) as u8; + } + out +} +``` + +``` +Total ACIR opcodes generated for language PLONKCSat { width: 3 }: 78 +Backend circuit size: 2902 +``` + +This ends up taking off another ~250 gates from our circuit! We've ended up with more ACIR opcodes than before, but they're easier for the backend to prove (resulting in fewer gates). + +Generally we want to use brillig whenever there's something that's easy to verify but hard to compute within the circuit. For example, if you wanted to calculate a square root of a number it'll be a much better idea to calculate this in brillig and then assert that if you square the result you get back your number. diff --git a/docs/versioned_docs/version-v0.19.0/language_concepts/06_generics.md b/docs/versioned_docs/version-v0.19.0/language_concepts/06_generics.md new file mode 100644 index 00000000000..b700bd5bc5b --- /dev/null +++ b/docs/versioned_docs/version-v0.19.0/language_concepts/06_generics.md @@ -0,0 +1,113 @@ +--- +title: Generics +description: Learn how to use Generics in Noir +keywords: [Noir, Rust, generics, functions, structs] +--- + +Generics allow you to use the same functions with multiple different concrete data types. You can +read more about the concept of generics in the Rust documentation +[here](https://doc.rust-lang.org/book/ch10-01-syntax.html). + +Here is a trivial example showing the identity function that supports any type. In Rust, it is +common to refer to the most general type as `T`. We follow the same convention in Noir. + +```rust +fn id<T>(x: T) -> T { + x +} +``` + +## In Structs + +Generics are useful for specifying types in structs. For example, we can specify that a field in a +struct will be of a certain generic type. In this case `value` is of type `T`.
+ +```rust +struct RepeatedValue<T> { + value: T, + count: Field, +} + +impl<T> RepeatedValue<T> { + fn new(value: T) -> Self { + Self { value, count: 1 } + } + + fn increment(mut repeated: Self) -> Self { + repeated.count += 1; + repeated + } + + fn print(self) { + for _i in 0 .. self.count { + dep::std::println(self.value); + } + } +} + +fn main() { + let mut repeated = RepeatedValue::new("Hello!"); + repeated = repeated.increment(); + repeated.print(); +} +``` + +The `print` function will print `Hello!` an arbitrary number of times, twice in this case. + +If we want to be generic over array lengths (which are type-level integers), we can use numeric +generics. Using these looks just like using regular generics, but these generics can resolve to +integers at compile-time, rather than resolving to types. Here's an example of a struct that is +generic over the size of the array it contains internally: + +```rust +struct BigInt<N> { + limbs: [u32; N], +} + +impl<N> BigInt<N> { + // `N` is in scope of all methods in the impl + fn first(first: BigInt<N>, second: BigInt<N>) -> Self { + assert(first.limbs != second.limbs); + first + } + + fn second(first: BigInt<N>, second: Self) -> Self { + assert(first.limbs != second.limbs); + second + } +} +``` + +## Calling functions on generic parameters + +Unlike Rust, Noir does not have traits, so how can one translate the equivalent of a trait bound in +Rust into Noir? That is, how can we write a function that is generic over some type `T`, while also +requiring there is a function like `eq: fn(T, T) -> bool` that works on the type? + +The answer is that we can translate this by passing in the function manually. Here's an example of +implementing array equality in Noir: + +```rust +fn array_eq<T, N>(array1: [T; N], array2: [T; N], elem_eq: fn(T, T) -> bool) -> bool { + if array1.len() != array2.len() { + false + } else { + let mut result = true; + for i in 0 .. array1.len() { + result &= elem_eq(array1[i], array2[i]); + } + result + } +} + +fn main() { + assert(array_eq([1, 2, 3], [1, 2, 3], |a, b| a == b)); + + // We can use array_eq even for arrays of structs, as long as we have + // an equality function for these structs we can pass in + let array = [MyStruct::new(), MyStruct::new()]; + assert(array_eq(array, array, MyStruct::eq)); +} +``` + +You can see an example of generics in the tests +[here](https://github.com/noir-lang/noir/blob/master/crates/nargo_cli/tests/test_data/generics/src/main.nr). diff --git a/docs/versioned_docs/version-v0.19.0/language_concepts/07_mutability.md b/docs/versioned_docs/version-v0.19.0/language_concepts/07_mutability.md new file mode 100644 index 00000000000..ed3fed820ec --- /dev/null +++ b/docs/versioned_docs/version-v0.19.0/language_concepts/07_mutability.md @@ -0,0 +1,92 @@ +--- +title: Mutability +description: + Learn about mutable variables, constants, and globals in Noir programming language. Discover how + to declare, modify, and use them in your programs. +keywords: [noir programming language, mutability in noir, mutable variables, constants, globals] +--- + +Variables in Noir can be declared mutable via the `mut` keyword. Mutable variables can be reassigned +to via an assignment expression. + +```rust +let x = 2; +x = 3; // error: x must be mutable to be assigned to + +let mut y = 3; +let y = 4; // OK +``` + +The `mut` modifier can also apply to patterns: + +```rust +let (a, mut b) = (1, 2); +a = 11; // error: a must be mutable to be assigned to +b = 12; // OK + +let mut (c, d) = (3, 4); +c = 13; // OK +d = 14; // OK + +// etc.
+let MyStruct { x: mut y } = MyStruct { x: a }; +// y is now in scope +``` + +Note that mutability in noir is local and everything is passed by value, so if a called function +mutates its parameters then the parent function will keep the old value of the parameters. + +```rust +fn main() -> pub Field { + let x = 3; + helper(x); + x // x is still 3 +} + +fn helper(mut x: i32) { + x = 4; +} +``` + +## Comptime Values + +:::warning + +The 'comptime' keyword was removed in version 0.10. The comptime keyword and syntax are currently still kept and parsed for backwards compatibility, but are now deprecated and will issue a warning when used. `comptime` has been removed because it is no longer needed for accessing arrays. + +::: + +## Globals + +Noir also supports global variables. However, they must be known at compile-time. The global type can also be inferred by the compiler entirely. Globals can also be used to specify array +annotations for function parameters and can be imported from submodules. + +```rust +global N: Field = 5; // Same as `global N: Field = 5` + +fn main(x : Field, y : [Field; N]) { + let res = x * N; + + assert(res == y[0]); + + let res2 = x * my_submodule::N; + assert(res != res2); +} + +mod my_submodule { + use dep::std; + + global N: Field = 10; + + fn my_helper() -> Field { + let x = N; + x + } +} +``` + +## Why only local mutability? + +Witnesses in a proving system are immutable in nature. Noir aims to _closely_ mirror this setting +without applying additional overhead to the user. Modeling a mutable reference is not as +straightforward as on conventional architectures and would incur some possibly unexpected overhead. diff --git a/docs/docs/language_concepts/08_lambdas.md b/docs/versioned_docs/version-v0.19.0/language_concepts/08_lambdas.md similarity index 100% rename from docs/docs/language_concepts/08_lambdas.md rename to docs/versioned_docs/version-v0.19.0/language_concepts/08_lambdas.md diff --git a/docs/docs/language_concepts/09_comments.md b/docs/versioned_docs/version-v0.19.0/language_concepts/09_comments.md similarity index 100% rename from docs/docs/language_concepts/09_comments.md rename to docs/versioned_docs/version-v0.19.0/language_concepts/09_comments.md diff --git a/docs/docs/language_concepts/10_distinct.md b/docs/versioned_docs/version-v0.19.0/language_concepts/10_distinct.md similarity index 100% rename from docs/docs/language_concepts/10_distinct.md rename to docs/versioned_docs/version-v0.19.0/language_concepts/10_distinct.md diff --git a/docs/docs/language_concepts/11_shadowing.md b/docs/versioned_docs/version-v0.19.0/language_concepts/11_shadowing.md similarity index 100% rename from docs/docs/language_concepts/11_shadowing.md rename to docs/versioned_docs/version-v0.19.0/language_concepts/11_shadowing.md diff --git a/docs/versioned_docs/version-v0.19.0/language_concepts/data_types.md b/docs/versioned_docs/version-v0.19.0/language_concepts/data_types.md new file mode 100644 index 00000000000..74f573f7d3f --- /dev/null +++ b/docs/versioned_docs/version-v0.19.0/language_concepts/data_types.md @@ -0,0 +1,96 @@ +--- +title: Data Types +description: + Get a clear understanding of the two categories of Noir data types - primitive types and compound + types. Learn about their characteristics, differences, and how to use them in your Noir + programming. 
+keywords: + [ + noir, + data types, + primitive types, + compound types, + private types, + public types, + ] +--- + +Every value in Noir has a type, which determines which operations are valid for it. + +All values in Noir are fundamentally composed of `Field` elements. For a more approachable +developing experience, abstractions are added on top to introduce different data types in Noir. + +Noir has two category of data types: primitive types (e.g. `Field`, integers, `bool`) and compound +types that group primitive types (e.g. arrays, tuples, structs). Each value can either be private or +public. + +## Private & Public Types + +A **private value** is known only to the Prover, while a **public value** is known by both the +Prover and Verifier. Mark values as `private` when the value should only be known to the prover. All +primitive types (including individual fields of compound types) in Noir are private by default, and +can be marked public when certain values are intended to be revealed to the Verifier. + +> **Note:** For public values defined in Noir programs paired with smart contract verifiers, once +> the proofs are verified on-chain the values can be considered known to everyone that has access to +> that blockchain. + +Public data types are treated no differently to private types apart from the fact that their values +will be revealed in proofs generated. Simply changing the value of a public type will not change the +circuit (where the same goes for changing values of private types as well). + +_Private values_ are also referred to as _witnesses_ sometimes. + +> **Note:** The terms private and public when applied to a type (e.g. `pub Field`) have a different +> meaning than when applied to a function (e.g. `pub fn foo() {}`). +> +> The former is a visibility modifier for the Prover to interpret if a value should be made known to +> the Verifier, while the latter is a visibility modifier for the compiler to interpret if a +> function should be made accessible to external Noir programs like in other languages. + +### pub Modifier + +All data types in Noir are private by default. Types are explicitly declared as public using the +`pub` modifier: + +```rust +fn main(x : Field, y : pub Field) -> pub Field { + x + y +} +``` + +In this example, `x` is **private** while `y` and `x + y` (the return value) are **public**. Note +that visibility is handled **per variable**, so it is perfectly valid to have one input that is +private and another that is public. + +> **Note:** Public types can only be declared through parameters on `main`. + +## Type Aliases + +A type alias is a new name for an existing type. Type aliases are declared with the keyword `type`: + +```rust +type Id = u8; + +fn main() { + let id: Id = 1; + let zero: u8 = 0; + assert(zero + 1 == id); +} +``` + +Type aliases can also be used with [generics](./06_generics.md): + +```rust +type Id = Size; + +fn main() { + let id: Id = 1; + let zero: u32 = 0; + assert(zero + 1 == id); +} +``` + +### BigInt + +You can achieve BigInt functionality using the [Noir BigInt](https://github.com/shuklaayush/noir-bigint) library. diff --git a/docs/versioned_docs/version-v0.19.0/language_concepts/data_types/00_fields.md b/docs/versioned_docs/version-v0.19.0/language_concepts/data_types/00_fields.md new file mode 100644 index 00000000000..78d3d2af166 --- /dev/null +++ b/docs/versioned_docs/version-v0.19.0/language_concepts/data_types/00_fields.md @@ -0,0 +1,165 @@ +--- +title: Fields +description: + Dive deep into the Field data type in Noir. 
Understand its methods, practical examples, and best practices to effectively use Fields in your Noir programs. +keywords: + [ + noir, + field type, + methods, + examples, + best practices, + ] +--- + +The field type corresponds to the native field type of the proving backend. + +The size of a Noir field depends on the elliptic curve's finite field for the proving backend +adopted. For example, a field would be a 254-bit integer when paired with the default backend that +spans the Grumpkin curve. + +Fields support integer arithmetic and are often used as the default numeric type in Noir: + +```rust +fn main(x : Field, y : Field) { + let z = x + y; +} +``` + +`x`, `y` and `z` are all private fields in this example. Using the `let` keyword we defined a new +private value `z` constrained to be equal to `x + y`. + +If proving efficiency is of priority, fields should be used as a default for solving problems. +Smaller integer types (e.g. `u64`) incur extra range constraints. + +## Methods + +After declaring a Field, you can use these common methods on it: + +### to_le_bits + +Transforms the field into an array of bits, Little Endian. + +```rust +fn to_le_bits(_x : Field, _bit_size: u32) -> [u1; N] +``` + +example: + +```rust +fn main() { + let field = 2; + let bits = field.to_le_bits(32); +} +``` + +### to_be_bits + +Transforms the field into an array of bits, Big Endian. + +```rust +fn to_be_bits(_x : Field, _bit_size: u32) -> [u1; N] +``` + +example: + +```rust +fn main() { + let field = 2; + let bits = field.to_be_bits(32); +} +``` + +### to_le_bytes + +Transforms into an array of bytes, Little Endian + +```rust +fn to_le_bytes(_x : Field, byte_size: u32) -> [u8] +``` + +example: + +```rust +fn main() { + let field = 2; + let bytes = field.to_le_bytes(4); +} +``` + +### to_be_bytes + +Transforms into an array of bytes, Big Endian + +```rust +fn to_be_bytes(_x : Field, byte_size: u32) -> [u8] +``` + +example: + +```rust +fn main() { + let field = 2; + let bytes = field.to_be_bytes(4); +} +``` + +### to_le_radix + +Decomposes into a vector over the specified base, Little Endian + +```rust +fn to_le_radix(_x : Field, _radix: u32, _result_len: u32) -> [u8] +``` + +example: + +```rust +fn main() { + let field = 2; + let radix = field.to_le_radix(256, 4); +} +``` + +### to_be_radix + +Decomposes into a vector over the specified base, Big Endian + +```rust +fn to_be_radix(_x : Field, _radix: u32, _result_len: u32) -> [u8] +``` + +example: + +```rust +fn main() { + let field = 2; + let radix = field.to_be_radix(256, 4); +} +``` + +### pow_32 + +Returns the value to the power of the specified exponent + +```rust +fn pow_32(self, exponent: Field) -> Field +``` + +example: + +```rust +fn main() { + let field = 2 + let pow = field.pow_32(4); + assert(pow == 16); +} +``` + +### sgn0 + +Parity of (prime) Field element, i.e. sgn0(x mod p) = 0 if x ∈ \{0, ..., p-1\} is even, otherwise sgn0(x mod p) = 1. 
+ +```rust +fn sgn0(self) -> u1 +``` diff --git a/docs/docs/language_concepts/data_types/01_integers.md b/docs/versioned_docs/version-v0.19.0/language_concepts/data_types/01_integers.md similarity index 100% rename from docs/docs/language_concepts/data_types/01_integers.md rename to docs/versioned_docs/version-v0.19.0/language_concepts/data_types/01_integers.md diff --git a/docs/docs/language_concepts/data_types/02_booleans.md b/docs/versioned_docs/version-v0.19.0/language_concepts/data_types/02_booleans.md similarity index 100% rename from docs/docs/language_concepts/data_types/02_booleans.md rename to docs/versioned_docs/version-v0.19.0/language_concepts/data_types/02_booleans.md diff --git a/docs/docs/language_concepts/data_types/03_strings.md b/docs/versioned_docs/version-v0.19.0/language_concepts/data_types/03_strings.md similarity index 100% rename from docs/docs/language_concepts/data_types/03_strings.md rename to docs/versioned_docs/version-v0.19.0/language_concepts/data_types/03_strings.md diff --git a/docs/docs/language_concepts/data_types/04_arrays.md b/docs/versioned_docs/version-v0.19.0/language_concepts/data_types/04_arrays.md similarity index 100% rename from docs/docs/language_concepts/data_types/04_arrays.md rename to docs/versioned_docs/version-v0.19.0/language_concepts/data_types/04_arrays.md diff --git a/docs/docs/language_concepts/data_types/05_slices.mdx b/docs/versioned_docs/version-v0.19.0/language_concepts/data_types/05_slices.mdx similarity index 100% rename from docs/docs/language_concepts/data_types/05_slices.mdx rename to docs/versioned_docs/version-v0.19.0/language_concepts/data_types/05_slices.mdx diff --git a/docs/docs/language_concepts/data_types/06_vectors.mdx b/docs/versioned_docs/version-v0.19.0/language_concepts/data_types/06_vectors.mdx similarity index 100% rename from docs/docs/language_concepts/data_types/06_vectors.mdx rename to docs/versioned_docs/version-v0.19.0/language_concepts/data_types/06_vectors.mdx diff --git a/docs/docs/language_concepts/data_types/07_tuples.md b/docs/versioned_docs/version-v0.19.0/language_concepts/data_types/07_tuples.md similarity index 100% rename from docs/docs/language_concepts/data_types/07_tuples.md rename to docs/versioned_docs/version-v0.19.0/language_concepts/data_types/07_tuples.md diff --git a/docs/versioned_docs/version-v0.10.5/language_concepts/data_types/08_structs.md b/docs/versioned_docs/version-v0.19.0/language_concepts/data_types/08_structs.md similarity index 100% rename from docs/versioned_docs/version-v0.10.5/language_concepts/data_types/08_structs.md rename to docs/versioned_docs/version-v0.19.0/language_concepts/data_types/08_structs.md diff --git a/docs/docs/language_concepts/data_types/09_references.md b/docs/versioned_docs/version-v0.19.0/language_concepts/data_types/09_references.md similarity index 100% rename from docs/docs/language_concepts/data_types/09_references.md rename to docs/versioned_docs/version-v0.19.0/language_concepts/data_types/09_references.md diff --git a/docs/docs/language_concepts/data_types/10_function_types.md b/docs/versioned_docs/version-v0.19.0/language_concepts/data_types/10_function_types.md similarity index 100% rename from docs/docs/language_concepts/data_types/10_function_types.md rename to docs/versioned_docs/version-v0.19.0/language_concepts/data_types/10_function_types.md diff --git a/docs/versioned_docs/version-v0.19.0/migration_notes.md b/docs/versioned_docs/version-v0.19.0/migration_notes.md new file mode 100644 index 00000000000..905bca3d30c --- /dev/null 
+++ b/docs/versioned_docs/version-v0.19.0/migration_notes.md @@ -0,0 +1,91 @@ +--- +title: Migration notes +description: Read about migration notes from previous versions, which could solve problems while updating +keywords: [Noir, notes, migration, updating, upgrading] +--- + +Noir is in full-speed development. Things break fast, wild, and often. This page attempts to leave some notes on errors you might encounter when upgrading and how to resolve them until proper patches are built. + +## ≥0.19 + +### Enforcing `compiler_version` + +From this version on, the compiler will check for the `compiler_version` field in `Nargo.toml`, and will error if it doesn't match the current Nargo version in use. + +To update, please make sure this field in `Nargo.toml` matches the output of `nargo --version`. + +## ≥0.14 + +The index of the [for loops](./language_concepts/02_control_flow.md#loops) is now of type `u64` instead of `Field`. An example refactor would be: + +```rust +for i in 0..10 { + let i = i as Field; +} +``` + +## ≥v0.11.0 and Nargo backend + +From this version onwards, Nargo starts managing backends through the `nargo backend` command. Upgrading to the versions per usual steps might lead to: + +### `backend encountered an error` + +This is likely due to the existing locally installed version of proving backend (e.g. barretenberg) is incompatible with the version of Nargo in use. + +To fix the issue: + +1. Uninstall the existing backend + +```bash +nargo backend uninstall acvm-backend-barretenberg +``` + +You may replace _acvm-backend-barretenberg_ with the name of your backend listed in `nargo backend ls` or in ~/.nargo/backends. + +2. Reinstall a compatible version of the proving backend. + +If you are using the default barretenberg backend, simply run: + +``` +nargo prove +``` + +with your Noir program. + +This will trigger the download and installation of the latest version of barretenberg compatible with your Nargo in use. + +### `backend encountered an error: illegal instruction` + +On certain Intel-based systems, an `illegal instruction` error may arise due to incompatibility of barretenberg with certain CPU instructions. + +To fix the issue: + +1. Uninstall the existing backend + +```bash +nargo backend uninstall acvm-backend-barretenberg +``` + +You may replace _acvm-backend-barretenberg_ with the name of your backend listed in `nargo backend ls` or in ~/.nargo/backends. + +2. Reinstall a compatible version of the proving backend. + +If you are using the default barretenberg backend, simply run: + +``` +nargo backend install acvm-backend-barretenberg https://github.com/noir-lang/barretenberg-js-binary/raw/master/run-bb.tar.gz +``` + +This downloads and installs a specific bb.js based version of barretenberg binary from GitHub. + +The gzipped file is running [this bash script](https://github.com/noir-lang/barretenberg-js-binary/blob/master/run-bb-js.sh), where we need to gzip it as the Nargo currently expect the backend to be zipped up. + +Then run: + +``` +DESIRED_BINARY_VERSION=0.8.1 nargo info +``` + +This overrides the bb native binary with a bb.js node application instead, which should be compatible with most if not all hardware. This does come with the drawback of being generally slower than native binary. + +0.8.1 indicates bb.js version 0.8.1, so if you change that it will update to a different version or the default version in the script if none was supplied. 
diff --git a/docs/versioned_docs/version-v0.19.0/modules_packages_crates/crates_and_packages.md b/docs/versioned_docs/version-v0.19.0/modules_packages_crates/crates_and_packages.md new file mode 100644 index 00000000000..744de72bb2c --- /dev/null +++ b/docs/versioned_docs/version-v0.19.0/modules_packages_crates/crates_and_packages.md @@ -0,0 +1,42 @@ +--- +title: Crates and Packages +description: Learn how to use Crates and Packages in your Noir project +keywords: [Nargo, dependencies, package management, crates, package] +--- + +## Crates + +A crate is the smallest amount of code that the Noir compiler considers at a time. +Crates can contain modules, and the modules may be defined in other files that get compiled with the crate, as we’ll see in the coming sections. + +### Crate Types + +A Noir crate can come in several forms: binaries, libraries or contracts. + +#### Binaries + +_Binary crates_ are programs which you can compile to an ACIR circuit which you can then create proofs against. Each must have a function called `main` that defines the ACIR circuit which is to be proved. + +#### Libraries + +_Library crates_ don't have a `main` function and they don't compile down to ACIR. Instead they define functionality intended to be shared with multiple projects, and eventually included in a binary crate. + +#### Contracts + +Contract crates are similar to binary crates in that they compile to ACIR which you can create proofs against. They are different in that they do not have a single `main` function, but are a collection of functions to be deployed to the [Aztec network](https://aztec.network). You can learn more about the technical details of Aztec in the [monorepo](https://github.com/AztecProtocol/aztec-packages) or contract [examples](https://github.com/AztecProtocol/aztec-packages/tree/master/yarn-project/noir-contracts/contracts). + +### Crate Root + +Every crate has a root, which is the source file that the compiler starts, this is also known as the root module. The Noir compiler does not enforce any conditions on the name of the file which is the crate root, however if you are compiling via Nargo the crate root must be called `lib.nr` or `main.nr` for library or binary crates respectively. + +## Packages + +A Nargo _package_ is a collection of one of more crates that provides a set of functionality. A package must include a Nargo.toml file. + +A package _must_ contain either a library or a binary crate, but not both. + +### Differences from Cargo Packages + +One notable difference between Rust's Cargo and Noir's Nargo is that while Cargo allows a package to contain an unlimited number of binary crates and a single library crate, Nargo currently only allows a package to contain a single crate. + +In future this restriction may be lifted to allow a Nargo package to contain both a binary and library crate or multiple binary crates. diff --git a/docs/versioned_docs/version-v0.19.0/modules_packages_crates/dependencies.md b/docs/versioned_docs/version-v0.19.0/modules_packages_crates/dependencies.md new file mode 100644 index 00000000000..e91e73a4c4f --- /dev/null +++ b/docs/versioned_docs/version-v0.19.0/modules_packages_crates/dependencies.md @@ -0,0 +1,123 @@ +--- +title: Dependencies +description: + Learn how to specify and manage dependencies in Nargo, allowing you to upload packages to GitHub + and use them easily in your project. 
+keywords: [Nargo, dependencies, GitHub, package management, versioning] +--- + +Nargo allows you to upload packages to GitHub and use them as dependencies. + +## Specifying a dependency + +Specifying a dependency requires a tag to a specific commit and the git url to the url containing +the package. + +Currently, there are no requirements on the tag contents. If requirements are added, it would follow +semver 2.0 guidelines. + +> Note: Without a `tag` , there would be no versioning and dependencies would change each time you +> compile your project. + +For example, to add the [ecrecover-noir library](https://github.com/colinnielsen/ecrecover-noir) to your project, add it to `Nargo.toml`: + +```toml +# Nargo.toml + +[dependencies] +ecrecover = {tag = "v0.8.0", git = "https://github.com/colinnielsen/ecrecover-noir"} +``` + +If the module is in a subdirectory, you can define a subdirectory in your git repository, for example: + +```toml +# Nargo.toml + +[dependencies] +easy_private_token_contract = {tag ="v0.1.0-alpha62", git = "https://github.com/AztecProtocol/aztec-packages", directory = "yarn-project/noir-contracts/contracts/easy_private_token_contract"} +``` + +## Specifying a local dependency + +You can also specify dependencies that are local to your machine. + +For example, this file structure has a library and binary crate + +```tree +├── binary_crate +│   ├── Nargo.toml +│   └── src +│   └── main.nr +└── lib_a + ├── Nargo.toml + └── src + └── lib.nr +``` + +Inside of the binary crate, you can specify: + +```toml +# Nargo.toml + +[dependencies] +lib_a = { path = "../lib_a" } +``` + +## Importing dependencies + +You can import a dependency to a Noir file using the following syntax. For example, to import the +ecrecover-noir library and local lib_a referenced above: + +```rust +use dep::ecrecover; +use dep::lib_a; +``` + +You can also import only the specific parts of dependency that you want to use, like so: + +```rust +use dep::std::hash::sha256; +use dep::std::scalar_mul::fixed_base_embedded_curve; +``` + +Lastly, as demonstrated in the +[elliptic curve example](../standard_library/cryptographic_primitives/ec_primitives#examples), you +can import multiple items in the same line by enclosing them in curly braces: + +```rust +use dep::std::ec::tecurve::affine::{Curve, Point}; +``` + +We don't have a way to consume libraries from inside a [workspace](./workspaces) as external dependencies right now. + +Inside a workspace, these are consumed as `{ path = "../to_lib" }` dependencies in Nargo.toml. + +## Dependencies of Dependencies + +Note that when you import a dependency, you also get access to all of the dependencies of that package. + +For example, the [phy_vector](https://github.com/resurgencelabs/phy_vector) library imports an [fraction](https://github.com/resurgencelabs/fraction) library. If you're importing the phy_vector library, then you can access the functions in fractions library like so: + +```rust +use dep::phy_vector; + +fn main(x : Field, y : pub Field) { + //... + let f = phy_vector::fraction::toFraction(true, 2, 1); + //... +} +``` + +## Available Libraries + +Noir does not currently have an official package manager. You can find a list of available Noir libraries in the [awesome-noir repo here](https://github.com/noir-lang/awesome-noir#libraries). 
+ +Some libraries that are available today include: + +- [Standard Library](https://github.com/noir-lang/noir/tree/master/noir_stdlib) - the Noir Standard Library +- [Ethereum Storage Proof Verification](https://github.com/aragonzkresearch/noir-trie-proofs) - a library that contains the primitives necessary for RLP decoding (in the form of look-up table construction) and Ethereum state and storage proof verification (or verification of any trie proof involving 32-byte long keys) +- [BigInt](https://github.com/shuklaayush/noir-bigint) - a library that provides a custom BigUint56 data type, allowing for computations on large unsigned integers +- [ECrecover](https://github.com/colinnielsen/ecrecover-noir/tree/main) - a library to verify an ECDSA signature and return the source Ethereum address +- [Sparse Merkle Tree Verifier](https://github.com/vocdoni/smtverifier-noir/tree/main) - a library for verification of sparse Merkle trees +- [Signed Int](https://github.com/resurgencelabs/signed_int) - a library for accessing a custom Signed Integer data type, allowing access to negative numbers on Noir +- [Fraction](https://github.com/resurgencelabs/fraction) - a library for accessing fractional number data type in Noir, allowing results that aren't whole numbers diff --git a/docs/versioned_docs/version-v0.19.0/modules_packages_crates/modules.md b/docs/versioned_docs/version-v0.19.0/modules_packages_crates/modules.md new file mode 100644 index 00000000000..11e60cbf35e --- /dev/null +++ b/docs/versioned_docs/version-v0.19.0/modules_packages_crates/modules.md @@ -0,0 +1,104 @@ +--- +title: Modules +description: + Learn how to organize your files using modules in Noir, following the same convention as Rust's + module system. Examples included. +keywords: [Noir, Rust, modules, organizing files, sub-modules] +--- + +Noir's module system follows the same convention as the _newer_ version of Rust's module system. + +## Purpose of Modules + +Modules are used to organize files. Without modules all of your code would need to live in a single +file. In Noir, the compiler does not automatically scan all of your files to detect modules. This +must be done explicitly by the developer. + +## Examples + +### Importing a module in the crate root + +Filename : `src/main.nr` + +```rust +mod foo; + +fn main() { + foo::hello_world(); +} +``` + +Filename : `src/foo.nr` + +```rust +fn from_foo() {} +``` + +In the above snippet, the crate root is the `src/main.nr` file. The compiler sees the module +declaration `mod foo` which prompts it to look for a foo.nr file. + +Visually this module hierarchy looks like the following : + +``` +crate + ├── main + │ + └── foo + └── from_foo + +``` + +### Importing a module throughout the tree + +All modules are accessible from the `crate::` namespace. + +``` +crate + ├── bar + ├── foo + └── main + +``` + +In the above snippet, if `bar` would like to use functions in `foo`, it can do so by `use crate::foo::function_name`. + +### Sub-modules + +Filename : `src/main.nr` + +```rust +mod foo; + +fn main() { + foo::from_foo(); +} +``` + +Filename : `src/foo.nr` + +```rust +mod bar; +fn from_foo() {} +``` + +Filename : `src/foo/bar.nr` + +```rust +fn from_bar() {} +``` + +In the above snippet, we have added an extra module to the module tree; `bar`. `bar` is a submodule +of `foo` hence we declare bar in `foo.nr` with `mod bar`. 
Since `foo` is not the crate root, the +compiler looks for the file associated with the `bar` module in `src/foo/bar.nr` + +Visually the module hierarchy looks as follows: + +``` +crate + ├── main + │ + └── foo + ├── from_foo + └── bar + └── from_bar +``` diff --git a/docs/versioned_docs/version-v0.19.0/modules_packages_crates/workspaces.md b/docs/versioned_docs/version-v0.19.0/modules_packages_crates/workspaces.md new file mode 100644 index 00000000000..d9ac92667c9 --- /dev/null +++ b/docs/versioned_docs/version-v0.19.0/modules_packages_crates/workspaces.md @@ -0,0 +1,39 @@ +--- +title: Workspaces +--- + +Workspaces are a feature of nargo that allow you to manage multiple related Noir packages in a single repository. A workspace is essentially a group of related projects that share common build output directories and configurations. + +Each Noir project (with it's own Nargo.toml file) can be thought of as a package. Each package is expected to contain exactly one "named circuit", being the "name" defined in Nargo.toml with the program logic defined in `./src/main.nr`. + +For a project with the following structure: + +```tree +├── crates +│   ├── a +│   │   ├── Nargo.toml +│   │   └── src +│   │   └── main.nr +│   └── b +│   ├── Nargo.toml +│   └── src +│   └── main.nr +├── Nargo.toml +└── Prover.toml +``` + +You can define a workspace in Nargo.toml like so: + +```toml +[workspace] +members = ["crates/a", "crates/b"] +default-member = "crates/a" +``` + +`members` indicates which packages are included in the workspace. As such, all member packages of a workspace will be processed when the `--workspace` flag is used with various commands or if a `default-member` is not specified. + +`default-member` indicates which package various commands process by default. + +Libraries can be defined in a workspace. Inside a workspace, these are consumed as `{ path = "../to_lib" }` dependencies in Nargo.toml. + +Inside a workspace, these are consumed as `{ path = "../to_lib" }` dependencies in Nargo.toml. diff --git a/docs/docs/nargo/01_commands.md b/docs/versioned_docs/version-v0.19.0/nargo/01_commands.md similarity index 100% rename from docs/docs/nargo/01_commands.md rename to docs/versioned_docs/version-v0.19.0/nargo/01_commands.md diff --git a/docs/versioned_docs/version-v0.19.0/nargo/02_testing.md b/docs/versioned_docs/version-v0.19.0/nargo/02_testing.md new file mode 100644 index 00000000000..5c57ef92705 --- /dev/null +++ b/docs/versioned_docs/version-v0.19.0/nargo/02_testing.md @@ -0,0 +1,61 @@ +--- +title: Testing in Noir +description: Learn how to use Nargo to test your Noir program in a quick and easy way +keywords: [Nargo, testing, Noir, compile, test] +--- + +You can test your Noir programs using Noir circuits. + +Nargo will automatically compile and run any functions which have the decorator `#[test]` on them if +you run `nargo test`. + +For example if you have a program like: + +```rust +fn add(x: u64, y: u64) -> u64 { + x + y +} +#[test] +fn test_add() { + assert(add(2,2) == 4); + assert(add(0,1) == 1); + assert(add(1,0) == 1); +} +``` + +Running `nargo test` will test that the `test_add` function can be executed while satisfying all +the constraints which allows you to test that add returns the expected values. Test functions can't +have any arguments currently. + +### Test fail + +You can write tests that are expected to fail by using the decorator `#[test(should_fail)]`. 
For example: + +```rust +fn add(x: u64, y: u64) -> u64 { + x + y +} +#[test(should_fail)] +fn test_add() { + assert(add(2,2) == 5); +} +``` + +You can be more specific and make it fail with a specific reason by using `should_fail_with = "`: + +```rust +fn main(african_swallow_avg_speed : Field) { + assert(african_swallow_avg_speed == 65, "What is the airspeed velocity of an unladen swallow"); +} + +#[test] +fn test_king_arthur() { + main(65); +} + +#[test(should_fail_with = "What is the airspeed velocity of an unladen swallow")] +fn test_bridgekeeper() { + main(32); +} + +``` diff --git a/docs/docs/nargo/03_solidity_verifier.md b/docs/versioned_docs/version-v0.19.0/nargo/03_solidity_verifier.md similarity index 100% rename from docs/docs/nargo/03_solidity_verifier.md rename to docs/versioned_docs/version-v0.19.0/nargo/03_solidity_verifier.md diff --git a/docs/docs/nargo/04_language_server.md b/docs/versioned_docs/version-v0.19.0/nargo/04_language_server.md similarity index 100% rename from docs/docs/nargo/04_language_server.md rename to docs/versioned_docs/version-v0.19.0/nargo/04_language_server.md diff --git a/docs/docs/noir_js/getting_started/01_tiny_noir_app.md b/docs/versioned_docs/version-v0.19.0/noir_js/getting_started/01_tiny_noir_app.md similarity index 100% rename from docs/docs/noir_js/getting_started/01_tiny_noir_app.md rename to docs/versioned_docs/version-v0.19.0/noir_js/getting_started/01_tiny_noir_app.md diff --git a/docs/versioned_docs/version-v0.19.0/noir_js/noir_js.md b/docs/versioned_docs/version-v0.19.0/noir_js/noir_js.md new file mode 100644 index 00000000000..23ea550e156 --- /dev/null +++ b/docs/versioned_docs/version-v0.19.0/noir_js/noir_js.md @@ -0,0 +1,36 @@ +--- +title: NoirJS +description: Interact with Noir in Typescript or Javascript +keywords: [Noir project, javascript, typescript, node.js, browser, react] +--- + +NoirJS is a TypeScript library that make it easy to use Noir on your dapp, webapp, Node.js server, website, etc. + +A typical workflow would be composed of two major elements: + +- NoirJS +- Proving backend of choice's JavaScript package + + + +To install NoirJS, install Node.js if you have not already and run this in your JavaScript project: + +```bash +npm i @noir-lang/noir_js +``` + +## Proving backend + +Since Noir is backend agnostic, you can instantiate NoirJS without any backend (i.e. to execute a function). But for proving, you would have to instantiate NoirJS with any of the supported backends through their own `js` interface. + +### Barretenberg + +Aztec Labs maintains the `barretenberg` proving backend, which you can instantiate and make use of alongside NoirJS. It is also the default proving backend installed and used with Nargo, the Noir CLI tool. + +To install its JavaScript library, run this in your project: + +```bash +npm i @noir-lang/backend_barretenberg +``` + +For more details on how to instantiate and use the libraries, refer to the [Full Noir App Guide](./getting_started/01_tiny_noir_app.md) and [Reference](./reference/01_noirjs.md) sections. 
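To tie the two packages together, here is a minimal sketch of the flow described above. It assumes a circuit artifact at `./target/circuit.json` produced by `nargo compile`, and the input names (`x`, `y`) are placeholders for whatever your circuit's `main` function expects:

```js
import { Noir } from '@noir-lang/noir_js';
import { BarretenbergBackend } from '@noir-lang/backend_barretenberg';
// Placeholder path: point this at your own compiled circuit artifact.
import circuit from './target/circuit.json';

const backend = new BarretenbergBackend(circuit);
const noir = new Noir(circuit, backend);

// Input names are defined by your circuit's `main` signature.
const proof = await noir.generateFinalProof({ x: 1, y: 2 });
const verified = await noir.verifyFinalProof(proof);
console.log(verified); // true if the proof checks out
```

The `Noir` and `BarretenbergBackend` classes, along with `generateFinalProof` and `verifyFinalProof`, are covered in the Reference section that follows.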
diff --git a/docs/versioned_docs/version-v0.19.0/noir_js/reference/01_noirjs.md b/docs/versioned_docs/version-v0.19.0/noir_js/reference/01_noirjs.md new file mode 100644 index 00000000000..2e90779ceab --- /dev/null +++ b/docs/versioned_docs/version-v0.19.0/noir_js/reference/01_noirjs.md @@ -0,0 +1,147 @@ +--- +title: Noir +description: Reference to noir_js library and the Noir class +keywords: [Noir project, javascript, typescript, node.js, browser, react, class, reference] +--- + +## Table of Contents + +- [constructor](#constructor) +- [init](#init) +- [generateFinalProof](#generatefinalproof) +- [verifyFinalProof](#verifyfinalproof) + +## `constructor` + +The `constructor` is a method used to create and initialize objects created within the `Noir` class. In the `Noir` class constructor, you need to pass two parameters: `circuit` and `backend`. + +### Syntax + +```js +constructor(circuit, backend); +``` + +### Parameters + +| Parameter | Type | Description | +| --------- | ------ | ------------------------------------------------------------------------------------------------------------------------------------------------------ | +| `circuit` | Object | A circuit represented in a `json` format, containing the ABI and bytecode. Typically obtained by running [`nargo compile`](../../nargo/01_commands.md) | +| `backend` | Object | A backend instance, before initialization. | + +### Usage + +```js +const noir = new Noir(circuit, backend); +``` + +## `init` + +This async method should be called after class instantiation. It will run processes on the ACVM, instantiate your backend, etc. + +### Syntax + +```js +async init() +``` + +### Parameters + +This method takes no parameters + +### Usage + +```js +await noirInstance.init(); +``` + +## `execute` + +This async method allows to execute a circuit to get its witness and return value. [`generateFinalProof`](#generatefinalproof) calls it for you, but you can call it directly (i.e. to feed directly to a backend, or to get the return value). + +You can optionally provide a foreignCallHandler, to handle functions that should run outside of the prover (e.g. `std::println`) + +### Syntax + +```js +async execute(inputs, foreignCallHandler) +``` + +### Parameters + +| Parameter | Type | Description | +| --------- | ------ | ------------------------------------------------ | +| `inputs` | Object | An object containing the inputs to your circuit. | +| `foreignCallHandler` (optional) | Function | A function handling the foreign call from your circuit | + +### Returns + +| Return value | Type | Description | +| ------------ | --------------------- | --------------------------------------------------- | +| `witness` | Promise \ | The witness | +| `returnValue` | Promise \ | The return value | + +### Usage + +```js +const { witness, returnValue } = await noir.execute(inputs) +const { witness, returnValue } = await noir.execute(inputs, (name, args) => console.log(`Received foreign call ${name} with arguments ${args}`)) +``` + +## `generateFinalProof` + +This async method generates a witness and a proof given an object as input. + +### Syntax + +```js +async generateFinalProof(input) +``` + +### Parameters + +| Parameter | Type | Description | +| --------- | ------ | ------------------------------------------------ | +| `input` | Object | An object containing the inputs to your circuit. 
| + +### Returns + +| Return value | Type | Description | +| ------------ | --------------------- | --------------------------------------------------- | +| `proof` | Promise | An array with the byte representation of the proof. | + +### Usage + +```js +// consider the Standard Noir Example given with nargo init +const input = { x: 1, y: 2 }; +await noirInstance.generateFinalProof(input); +``` + +## `verifyFinalProof` + +This async method instantiates the verification key and verifies your proof. + +### Syntax + +```js +async verifyFinalProof(proof) +``` + +### Parameters + +| Parameter | Type | Description | +| --------- | ---------- | --------------------------------------------------------------------------------------------- | +| `proof` | Uint8Array | The Uint8Array representation of your proof, usually obtained by calling `generateFinalProof` | + +### Returns + +| Return value | Type | Description | +| ------------ | ------------------ | -------------------------------------------- | +| `verified` | Promise | A boolean for whether the proof was verified | + +### Usage + +```js +const proof = await noirInstance.generateFinalProof(input); +await noirInstance.verifyFinalProof(proof); +``` diff --git a/docs/versioned_docs/version-v0.19.0/noir_js/reference/02_bb_backend.md b/docs/versioned_docs/version-v0.19.0/noir_js/reference/02_bb_backend.md new file mode 100644 index 00000000000..958cabd6289 --- /dev/null +++ b/docs/versioned_docs/version-v0.19.0/noir_js/reference/02_bb_backend.md @@ -0,0 +1,272 @@ +--- +title: BarretenbergBackend +description: Reference documentation for the barretenberg_backend library and the BarretenbergBackend class +keywords: + [ + BarretenbergBackend, + Barretenberg, + javascript, + typescript, + node.js, + browser, + class, + reference, + noir_js, + ] +--- + +## Table of Contents + +- [constructor](#constructor) +- [generateFinalProof](#generatefinalproof) +- [generateIntermediateProof](#generateintermediateproof) +- [generateProof](#generateproof) +- [generateIntermediateProofArtifacts](#generateintermediateproofartifacts) +- [verifyFinalProof](#verifyfinalproof) +- [verifyIntermediateProof](#verifyintermediateproof) +- [verifyProof](#verifyproof) +- [destroy](#destroy) + +## `constructor` + +The `constructor` is a method used to create and initialize objects created within the `BarretenbergBackend` class. In this class, you should pass at least one argument for the `circuit`. + +### Syntax + +```js +constructor(acirCircuit, numberOfThreads = 1); +``` + +### Parameters + +| Parameter | Type | Description | +| ----------------- | ----------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| `acirCircuit` | Object | A circuit represented in a `json` format, containing the ABI and bytecode typically obtained by running [`nargo compile`](../../nargo/01_commands.md). This is the same circuit expected to be passed to [the Noir class](01_noirjs.md) | +| `numberOfThreads` | Number (optional) | The number of threads to be used by the backend. Defaults to 1. | + +### Usage + +```js +const backend = new BarretenbergBackend(acirCircuit); +``` + +## `generateFinalProof` + +An async wrapper around the [generateProof](#generateproof) method that passes a `false` flag. Usually called by the Noir class.
+ +### Syntax + +```js +async generateFinalProof(decompressedWitness) +``` + +### Parameters + +| Parameter | Type | Description | +| --------------------- | ------ | -------------------------------------------------------- | +| `decompressedWitness` | Object | The decompressed witness for generating the final proof. | + +### Returns + +| Return value | Type | Description | +| ------------ | -------------------- | --------------------------------------------------------- | +| `proof` | Promise\ | An array with the byte representation of the final proof. | + +### Usage + +```js +const finalProof = await backend.generateFinalProof(decompressedWitness); +``` + +## `generateIntermediateProof` + +An async wrapper around the [generateProof](#generateproof) method that passes a `true` flag. It's not currently being used by the Noir class, but developers can call this method directly to use Noir's recursive features. + +### Syntax + +```js +async generateIntermediateProof(witness) +``` + +### Parameters + +| Parameter | Type | Description | +| --------- | ------ | -------------------------------------------------- | +| `witness` | Object | The witness for generating the intermediate proof. | + +### Returns + +| Return value | Type | Description | +| ------------ | -------------------- | --------------------------------------------------------------- | +| `proof` | Promise\ | An array with the byte representation of the intermediate proof | + +### Usage + +```js +const intermediateProof = await backend.generateIntermediateProof(witness); +``` + +## `generateProof` + +This async method generates a proof. Takes the witness generated by ACVM, and a boolean that evaluates to `true` when the proof _is_ meant to be verified in another circuit. Not currently used by the Noir class. + +### Syntax + +```js +async generateProof(decompressedWitness, makeEasyToVerifyInCircuit) +``` + +### Parameters + +| Parameter | Type | Description | +| --------------------------- | ------- | ---------------------------------------------------------------------------------------------- | +| `decompressedWitness` | Object | The decompressed witness for generating the proof. | +| `makeEasyToVerifyInCircuit` | Boolean | A flag indicating whether to generate proof components for easy verification within a circuit. | + +### Returns + +| Return value | Type | Description | +| ------------ | -------------------- | -------------------------------------------------- | +| `proof` | Promise\ | An array with the byte representation of the proof | + +### Usage + +```js +const proof = await backend.generateProof(decompressedWitness, makeEasyToVerifyInCircuit); +``` + +## `generateIntermediateProofArtifacts` + +This async method returns the artifacts needed to verify the intermediate proof in another circuit. It's not currently being used by the Noir class, but developers can call this method directly to use Noir's recursive features. + +### Syntax + +```js +async generateIntermediateProofArtifacts(proof, numOfPublicInputs = 0) +``` + +### Parameters + +| Parameter | Type | Description | +| ------------------- | ----------------- | ---------------------------------------------------------------- | +| `proof` | Object | The proof object. | +| `numOfPublicInputs` | Number (optional) | The number of public inputs in the inner proof, defaulting to 0. 
| + +### Returns + +| Return value | Type | Description | +| --------------- | -------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------- | +| `proofAsFields` | string[] | An array of strings with the hexadecimal representation of the [Fields](../../language_concepts/data_types/00_fields.md) that make up a proof | +| `vkAsFields` | string[] | An array of strings with the hexadecimal representation of the [Fields](../../language_concepts/data_types/00_fields.md) that make up the verification key | +| `vkHash` | string | A pedersen hash of the verification key | + +### Usage + +```js +const artifacts = await backend.generateIntermediateProofArtifacts(proof, numOfPublicInputs); +``` + +## `verifyFinalProof` + +An async wrapper around [verifyProof](#verifyproof) that sets the `false` flag. Usually called by the Noir class. + +### Syntax + +```js +async verifyFinalProof(proof) +``` + +### Parameters + +| Parameter | Type | Description | +| --------- | ------ | --------------------------- | +| `proof` | Object | The proof object to verify. | + +### Returns + +| Return value | Type | Description | +| ------------ | ------------------ | -------------------------------------------- | +| `verified` | Promise \ | A boolean for whether the proof was verified | + +### Usage + +```js +const isValidFinal = await backend.verifyFinalProof(proof); +``` + +## `verifyIntermediateProof` + +An async wrapper around [verifyProof](#verifyproof) that sets the `true` flag. It's not currently being used by the Noir class, but developers can call this method directly to use Noir's recursive features. + +### Syntax + +```js +async verifyIntermediateProof(proof) +``` + +### Parameters + +| Parameter | Type | Description | +| --------- | ------ | ---------------------------------------- | +| `proof` | Object | The intermediate proof object to verify. | + +### Returns + +| Return value | Type | Description | +| ------------ | ------------------ | -------------------------------------------- | +| `verified` | Promise \ | A boolean for whether the proof was verified | + +### Usage + +```js +const isValidIntermediate = await backend.verifyIntermediateProof(proof); +``` + +## `verifyProof` + +This async method verifies a proof. Takes the proof, and a boolean that evaluates to `true` when the proof is intermediate. + +### Syntax + +```js +async verifyProof(proof, makeEasyToVerifyInCircuit) +``` + +### Parameters + +| Parameter | Type | Description | +| --------------------------- | ------- | ------------------------------------------------------------ | +| `proof` | Object | The proof object to verify | +| `makeEasyToVerifyInCircuit` | Boolean | A flag indicating whether the proof is intermediate or final | + +### Returns + +| Parameter | Type | Description | +| ---------- | ------------------ | -------------------------------------------- | +| `verified` | Promise\ | A boolean for whether the proof was verified | + +### Usage + +```js +const isValid = await backend.verifyProof(proof, makeEasyToVerifyInCircuit); +``` + +## `destroy` + +This method destroys the resources allocated in the [instantiate](#instantiate) method. Noir doesn't currently call this method, but it's highly recommended that developers do so in order to save resources. + +### Syntax + +```js +async destroy() +``` + +### Parameters + +This method takes no parameters. 
+ +### Usage + +```js +await backend.destroy(); +``` diff --git a/docs/versioned_docs/version-v0.19.0/noir_js/reference/backend_barretenberg/.nojekyll b/docs/versioned_docs/version-v0.19.0/noir_js/reference/backend_barretenberg/.nojekyll new file mode 100644 index 00000000000..e2ac6616add --- /dev/null +++ b/docs/versioned_docs/version-v0.19.0/noir_js/reference/backend_barretenberg/.nojekyll @@ -0,0 +1 @@ +TypeDoc added this file to prevent GitHub Pages from using Jekyll. You can turn off this behavior by setting the `githubPages` option to false. \ No newline at end of file diff --git a/docs/versioned_docs/version-v0.19.0/noir_js/reference/backend_barretenberg/classes/BarretenbergBackend.md b/docs/versioned_docs/version-v0.19.0/noir_js/reference/backend_barretenberg/classes/BarretenbergBackend.md new file mode 100644 index 00000000000..5cbe9421b92 --- /dev/null +++ b/docs/versioned_docs/version-v0.19.0/noir_js/reference/backend_barretenberg/classes/BarretenbergBackend.md @@ -0,0 +1,185 @@ +# BarretenbergBackend + +## Implements + +- [`Backend`](../interfaces/Backend.md) + +## Constructors + +### new BarretenbergBackend(acirCircuit, options) + +```ts +new BarretenbergBackend(acirCircuit, options): BarretenbergBackend +``` + +#### Parameters + +| Parameter | Type | +| :------ | :------ | +| `acirCircuit` | [`CompiledCircuit`](../type-aliases/CompiledCircuit.md) | +| `options` | [`BackendOptions`](../type-aliases/BackendOptions.md) | + +#### Returns + +[`BarretenbergBackend`](BarretenbergBackend.md) + +## Methods + +### destroy() + +```ts +destroy(): Promise +``` + +#### Returns + +`Promise`\<`void`\> + +#### Implementation of + +[`Backend`](../interfaces/Backend.md).[`destroy`](../interfaces/Backend.md#destroy) + +#### Description + +Destroys the backend + +*** + +### generateFinalProof() + +```ts +generateFinalProof(decompressedWitness): Promise +``` + +#### Parameters + +| Parameter | Type | +| :------ | :------ | +| `decompressedWitness` | `Uint8Array` | + +#### Returns + +`Promise`\<[`ProofData`](../type-aliases/ProofData.md)\> + +#### Implementation of + +[`Backend`](../interfaces/Backend.md).[`generateFinalProof`](../interfaces/Backend.md#generatefinalproof) + +#### Description + +Generates a final proof (not meant to be verified in another circuit) + +*** + +### generateIntermediateProof() + +```ts +generateIntermediateProof(witness): Promise +``` + +#### Parameters + +| Parameter | Type | +| :------ | :------ | +| `witness` | `Uint8Array` | + +#### Returns + +`Promise`\<[`ProofData`](../type-aliases/ProofData.md)\> + +#### Implementation of + +[`Backend`](../interfaces/Backend.md).[`generateIntermediateProof`](../interfaces/Backend.md#generateintermediateproof) + +#### Example + +```typescript +const intermediateProof = await backend.generateIntermediateProof(witness); +``` + +*** + +### generateIntermediateProofArtifacts() + +```ts +generateIntermediateProofArtifacts(proofData, numOfPublicInputs): Promise +``` + +#### Parameters + +| Parameter | Type | Default value | +| :------ | :------ | :------ | +| `proofData` | [`ProofData`](../type-aliases/ProofData.md) | `undefined` | +| `numOfPublicInputs` | `number` | `0` | + +#### Returns + +`Promise`\<`object`\> + +#### Implementation of + +[`Backend`](../interfaces/Backend.md).[`generateIntermediateProofArtifacts`](../interfaces/Backend.md#generateintermediateproofartifacts) + +#### Example + +```typescript +const artifacts = await backend.generateIntermediateProofArtifacts(proof, numOfPublicInputs); +``` + +*** + +### 
verifyFinalProof() + +```ts +verifyFinalProof(proofData): Promise +``` + +#### Parameters + +| Parameter | Type | +| :------ | :------ | +| `proofData` | [`ProofData`](../type-aliases/ProofData.md) | + +#### Returns + +`Promise`\<`boolean`\> + +#### Implementation of + +[`Backend`](../interfaces/Backend.md).[`verifyFinalProof`](../interfaces/Backend.md#verifyfinalproof) + +#### Description + +Verifies a final proof + +*** + +### verifyIntermediateProof() + +```ts +verifyIntermediateProof(proofData): Promise +``` + +#### Parameters + +| Parameter | Type | +| :------ | :------ | +| `proofData` | [`ProofData`](../type-aliases/ProofData.md) | + +#### Returns + +`Promise`\<`boolean`\> + +#### Implementation of + +[`Backend`](../interfaces/Backend.md).[`verifyIntermediateProof`](../interfaces/Backend.md#verifyintermediateproof) + +#### Example + +```typescript +const isValidIntermediate = await backend.verifyIntermediateProof(proof); +``` + +*** + +Generated using [typedoc-plugin-markdown](https://www.npmjs.com/package/typedoc-plugin-markdown) and [TypeDoc](https://typedoc.org/) diff --git a/docs/versioned_docs/version-v0.19.0/noir_js/reference/backend_barretenberg/index.md b/docs/versioned_docs/version-v0.19.0/noir_js/reference/backend_barretenberg/index.md new file mode 100644 index 00000000000..3680ba3ca77 --- /dev/null +++ b/docs/versioned_docs/version-v0.19.0/noir_js/reference/backend_barretenberg/index.md @@ -0,0 +1,27 @@ +# Backend Barretenberg + +## Exports + +### Classes + +| Class | Description | +| :------ | :------ | +| [BarretenbergBackend](classes/BarretenbergBackend.md) | - | + +### Interfaces + +| Interface | Description | +| :------ | :------ | +| [Backend](interfaces/Backend.md) | - | + +### Type Aliases + +| Type alias | Description | +| :------ | :------ | +| [BackendOptions](type-aliases/BackendOptions.md) | - | +| [CompiledCircuit](type-aliases/CompiledCircuit.md) | - | +| [ProofData](type-aliases/ProofData.md) | - | + +*** + +Generated using [typedoc-plugin-markdown](https://www.npmjs.com/package/typedoc-plugin-markdown) and [TypeDoc](https://typedoc.org/) diff --git a/docs/versioned_docs/version-v0.19.0/noir_js/reference/backend_barretenberg/interfaces/Backend.md b/docs/versioned_docs/version-v0.19.0/noir_js/reference/backend_barretenberg/interfaces/Backend.md new file mode 100644 index 00000000000..3eb9645c8d2 --- /dev/null +++ b/docs/versioned_docs/version-v0.19.0/noir_js/reference/backend_barretenberg/interfaces/Backend.md @@ -0,0 +1,132 @@ +# Backend + +## Methods + +### destroy() + +```ts +destroy(): Promise +``` + +#### Returns + +`Promise`\<`void`\> + +#### Description + +Destroys the backend + +*** + +### generateFinalProof() + +```ts +generateFinalProof(decompressedWitness): Promise +``` + +#### Parameters + +| Parameter | Type | +| :------ | :------ | +| `decompressedWitness` | `Uint8Array` | + +#### Returns + +`Promise`\<[`ProofData`](../type-aliases/ProofData.md)\> + +#### Description + +Generates a final proof (not meant to be verified in another circuit) + +*** + +### generateIntermediateProof() + +```ts +generateIntermediateProof(decompressedWitness): Promise +``` + +#### Parameters + +| Parameter | Type | +| :------ | :------ | +| `decompressedWitness` | `Uint8Array` | + +#### Returns + +`Promise`\<[`ProofData`](../type-aliases/ProofData.md)\> + +#### Description + +Generates an intermediate proof (meant to be verified in another circuit) + +*** + +### generateIntermediateProofArtifacts() + +```ts +generateIntermediateProofArtifacts(proofData, 
numOfPublicInputs): Promise +``` + +#### Parameters + +| Parameter | Type | +| :------ | :------ | +| `proofData` | [`ProofData`](../type-aliases/ProofData.md) | +| `numOfPublicInputs` | `number` | + +#### Returns + +`Promise`\<`object`\> + +#### Description + +Retrieves the artifacts from a proof in the Field format + +*** + +### verifyFinalProof() + +```ts +verifyFinalProof(proofData): Promise +``` + +#### Parameters + +| Parameter | Type | +| :------ | :------ | +| `proofData` | [`ProofData`](../type-aliases/ProofData.md) | + +#### Returns + +`Promise`\<`boolean`\> + +#### Description + +Verifies a final proof + +*** + +### verifyIntermediateProof() + +```ts +verifyIntermediateProof(proofData): Promise +``` + +#### Parameters + +| Parameter | Type | +| :------ | :------ | +| `proofData` | [`ProofData`](../type-aliases/ProofData.md) | + +#### Returns + +`Promise`\<`boolean`\> + +#### Description + +Verifies an intermediate proof + +*** + +Generated using [typedoc-plugin-markdown](https://www.npmjs.com/package/typedoc-plugin-markdown) and [TypeDoc](https://typedoc.org/) diff --git a/docs/versioned_docs/version-v0.19.0/noir_js/reference/backend_barretenberg/type-aliases/BackendOptions.md b/docs/versioned_docs/version-v0.19.0/noir_js/reference/backend_barretenberg/type-aliases/BackendOptions.md new file mode 100644 index 00000000000..266ade75d17 --- /dev/null +++ b/docs/versioned_docs/version-v0.19.0/noir_js/reference/backend_barretenberg/type-aliases/BackendOptions.md @@ -0,0 +1,19 @@ +# BackendOptions + +```ts +type BackendOptions: object; +``` + +## Description + +An options object, currently only used to specify the number of threads to use. + +## Type declaration + +| Member | Type | Description | +| :------ | :------ | :------ | +| `threads` | `number` | **Description**

Number of threads | + +*** + +Generated using [typedoc-plugin-markdown](https://www.npmjs.com/package/typedoc-plugin-markdown) and [TypeDoc](https://typedoc.org/) diff --git a/docs/versioned_docs/version-v0.19.0/noir_js/reference/backend_barretenberg/type-aliases/CompiledCircuit.md b/docs/versioned_docs/version-v0.19.0/noir_js/reference/backend_barretenberg/type-aliases/CompiledCircuit.md new file mode 100644 index 00000000000..34e0dd04205 --- /dev/null +++ b/docs/versioned_docs/version-v0.19.0/noir_js/reference/backend_barretenberg/type-aliases/CompiledCircuit.md @@ -0,0 +1,20 @@ +# CompiledCircuit + +```ts +type CompiledCircuit: object; +``` + +## Description + +The representation of a compiled circuit + +## Type declaration + +| Member | Type | Description | +| :------ | :------ | :------ | +| `abi` | `Abi` | **Description**

ABI representation of the circuit | +| `bytecode` | `string` | **Description**

The bytecode of the circuit | + +*** + +Generated using [typedoc-plugin-markdown](https://www.npmjs.com/package/typedoc-plugin-markdown) and [TypeDoc](https://typedoc.org/) diff --git a/docs/versioned_docs/version-v0.19.0/noir_js/reference/backend_barretenberg/type-aliases/ProofData.md b/docs/versioned_docs/version-v0.19.0/noir_js/reference/backend_barretenberg/type-aliases/ProofData.md new file mode 100644 index 00000000000..4aeff73d3e4 --- /dev/null +++ b/docs/versioned_docs/version-v0.19.0/noir_js/reference/backend_barretenberg/type-aliases/ProofData.md @@ -0,0 +1,20 @@ +# ProofData + +```ts +type ProofData: object; +``` + +## Description + +The representation of a proof + +## Type declaration + +| Member | Type | Description | +| :------ | :------ | :------ | +| `proof` | `Uint8Array` | **Description**

A byte array representing the proof | +| `publicInputs` | `Uint8Array`[] | **Description**

Public inputs of a proof | + +*** + +Generated using [typedoc-plugin-markdown](https://www.npmjs.com/package/typedoc-plugin-markdown) and [TypeDoc](https://typedoc.org/) diff --git a/docs/versioned_docs/version-v0.19.0/noir_js/reference/backend_barretenberg/typedoc-sidebar.cjs b/docs/versioned_docs/version-v0.19.0/noir_js/reference/backend_barretenberg/typedoc-sidebar.cjs new file mode 100644 index 00000000000..04e662c845f --- /dev/null +++ b/docs/versioned_docs/version-v0.19.0/noir_js/reference/backend_barretenberg/typedoc-sidebar.cjs @@ -0,0 +1,4 @@ +// @ts-check +/** @type {import('@docusaurus/plugin-content-docs').SidebarsConfig} */ +const typedocSidebar = { items: [{"type":"category","label":"Classes","items":[{"type":"doc","id":"noir_js/reference/backend_barretenberg/classes/BarretenbergBackend","label":"BarretenbergBackend"}]},{"type":"category","label":"Interfaces","items":[{"type":"doc","id":"noir_js/reference/backend_barretenberg/interfaces/Backend","label":"Backend"}]},{"type":"category","label":"Type Aliases","items":[{"type":"doc","id":"noir_js/reference/backend_barretenberg/type-aliases/BackendOptions","label":"BackendOptions"},{"type":"doc","id":"noir_js/reference/backend_barretenberg/type-aliases/CompiledCircuit","label":"CompiledCircuit"},{"type":"doc","id":"noir_js/reference/backend_barretenberg/type-aliases/ProofData","label":"ProofData"}]}]}; +module.exports = typedocSidebar.items; \ No newline at end of file diff --git a/docs/versioned_docs/version-v0.19.0/noir_js/reference/noir_js/.nojekyll b/docs/versioned_docs/version-v0.19.0/noir_js/reference/noir_js/.nojekyll new file mode 100644 index 00000000000..e2ac6616add --- /dev/null +++ b/docs/versioned_docs/version-v0.19.0/noir_js/reference/noir_js/.nojekyll @@ -0,0 +1 @@ +TypeDoc added this file to prevent GitHub Pages from using Jekyll. You can turn off this behavior by setting the `githubPages` option to false. \ No newline at end of file diff --git a/docs/versioned_docs/version-v0.19.0/noir_js/reference/noir_js/classes/Noir.md b/docs/versioned_docs/version-v0.19.0/noir_js/reference/noir_js/classes/Noir.md new file mode 100644 index 00000000000..1d7b54a9dca --- /dev/null +++ b/docs/versioned_docs/version-v0.19.0/noir_js/reference/noir_js/classes/Noir.md @@ -0,0 +1,131 @@ +# Noir + +## Constructors + +### new Noir(circuit, backend) + +```ts +new Noir(circuit, backend?): Noir +``` + +#### Parameters + +| Parameter | Type | +| :------ | :------ | +| `circuit` | [`CompiledCircuit`](../type-aliases/CompiledCircuit.md) | +| `backend`? | `Backend` | + +#### Returns + +[`Noir`](Noir.md) + +## Methods + +### destroy() + +```ts +destroy(): Promise +``` + +#### Returns + +`Promise`\<`void`\> + +#### Description + +Destroys the underlying backend instance. + +#### Example + +```typescript +await noir.destroy(); +``` + +*** + +### execute() + +```ts +execute(inputs, foreignCallHandler?): Promise +``` + +#### Parameters + +| Parameter | Type | +| :------ | :------ | +| `inputs` | `InputMap` | +| `foreignCallHandler`? | [`ForeignCallHandler`](../type-aliases/ForeignCallHandler.md) | + +#### Returns + +`Promise`\<`object`\> + +#### Description + +Allows to execute a circuit to get its witness and return value. 
+ +#### Example + +```typescript +async execute(inputs) +``` + +*** + +### generateFinalProof() + +```ts +generateFinalProof(inputs): Promise +``` + +#### Parameters + +| Parameter | Type | +| :------ | :------ | +| `inputs` | `InputMap` | + +#### Returns + +`Promise`\<[`ProofData`](../type-aliases/ProofData.md)\> + +#### Description + +Generates a witness and a proof given an object as input. + +#### Example + +```typescript +async generateFinalProof(input) +``` + +*** + +### verifyFinalProof() + +```ts +verifyFinalProof(proofData): Promise +``` + +#### Parameters + +| Parameter | Type | +| :------ | :------ | +| `proofData` | [`ProofData`](../type-aliases/ProofData.md) | + +#### Returns + +`Promise`\<`boolean`\> + +#### Description + +Instantiates the verification key and verifies a proof. + +#### Example + +```typescript +async verifyFinalProof(proof) +``` + +*** + +Generated using [typedoc-plugin-markdown](https://www.npmjs.com/package/typedoc-plugin-markdown) and [TypeDoc](https://typedoc.org/) diff --git a/docs/versioned_docs/version-v0.19.0/noir_js/reference/noir_js/functions/and.md b/docs/versioned_docs/version-v0.19.0/noir_js/reference/noir_js/functions/and.md new file mode 100644 index 00000000000..c783283e396 --- /dev/null +++ b/docs/versioned_docs/version-v0.19.0/noir_js/reference/noir_js/functions/and.md @@ -0,0 +1,22 @@ +# and() + +```ts +and(lhs, rhs): string +``` + +Performs a bitwise AND operation between `lhs` and `rhs` + +## Parameters + +| Parameter | Type | Description | +| :------ | :------ | :------ | +| `lhs` | `string` | | +| `rhs` | `string` | | + +## Returns + +`string` + +*** + +Generated using [typedoc-plugin-markdown](https://www.npmjs.com/package/typedoc-plugin-markdown) and [TypeDoc](https://typedoc.org/) diff --git a/docs/versioned_docs/version-v0.19.0/noir_js/reference/noir_js/functions/blake2s256.md b/docs/versioned_docs/version-v0.19.0/noir_js/reference/noir_js/functions/blake2s256.md new file mode 100644 index 00000000000..7882d0da8d5 --- /dev/null +++ b/docs/versioned_docs/version-v0.19.0/noir_js/reference/noir_js/functions/blake2s256.md @@ -0,0 +1,21 @@ +# blake2s256() + +```ts +blake2s256(inputs): Uint8Array +``` + +Calculates the Blake2s256 hash of the input bytes + +## Parameters + +| Parameter | Type | Description | +| :------ | :------ | :------ | +| `inputs` | `Uint8Array` | | + +## Returns + +`Uint8Array` + +*** + +Generated using [typedoc-plugin-markdown](https://www.npmjs.com/package/typedoc-plugin-markdown) and [TypeDoc](https://typedoc.org/) diff --git a/docs/versioned_docs/version-v0.19.0/noir_js/reference/noir_js/functions/ecdsa_secp256k1_verify.md b/docs/versioned_docs/version-v0.19.0/noir_js/reference/noir_js/functions/ecdsa_secp256k1_verify.md new file mode 100644 index 00000000000..0ba5783f0d5 --- /dev/null +++ b/docs/versioned_docs/version-v0.19.0/noir_js/reference/noir_js/functions/ecdsa_secp256k1_verify.md @@ -0,0 +1,29 @@ +# ecdsa\_secp256k1\_verify() + +```ts +ecdsa_secp256k1_verify( + hashed_msg, + public_key_x_bytes, + public_key_y_bytes, + signature): boolean +``` + +Calculates the Blake2s256 hash of the input bytes and represents these as a single field element. +Verifies a ECDSA signature over the secp256k1 curve. 
+ +## Parameters + +| Parameter | Type | Description | +| :------ | :------ | :------ | +| `hashed_msg` | `Uint8Array` | | +| `public_key_x_bytes` | `Uint8Array` | | +| `public_key_y_bytes` | `Uint8Array` | | +| `signature` | `Uint8Array` | | + +## Returns + +`boolean` + +*** + +Generated using [typedoc-plugin-markdown](https://www.npmjs.com/package/typedoc-plugin-markdown) and [TypeDoc](https://typedoc.org/) diff --git a/docs/versioned_docs/version-v0.19.0/noir_js/reference/noir_js/functions/ecdsa_secp256r1_verify.md b/docs/versioned_docs/version-v0.19.0/noir_js/reference/noir_js/functions/ecdsa_secp256r1_verify.md new file mode 100644 index 00000000000..0b20ff68957 --- /dev/null +++ b/docs/versioned_docs/version-v0.19.0/noir_js/reference/noir_js/functions/ecdsa_secp256r1_verify.md @@ -0,0 +1,28 @@ +# ecdsa\_secp256r1\_verify() + +```ts +ecdsa_secp256r1_verify( + hashed_msg, + public_key_x_bytes, + public_key_y_bytes, + signature): boolean +``` + +Verifies a ECDSA signature over the secp256r1 curve. + +## Parameters + +| Parameter | Type | Description | +| :------ | :------ | :------ | +| `hashed_msg` | `Uint8Array` | | +| `public_key_x_bytes` | `Uint8Array` | | +| `public_key_y_bytes` | `Uint8Array` | | +| `signature` | `Uint8Array` | | + +## Returns + +`boolean` + +*** + +Generated using [typedoc-plugin-markdown](https://www.npmjs.com/package/typedoc-plugin-markdown) and [TypeDoc](https://typedoc.org/) diff --git a/docs/versioned_docs/version-v0.19.0/noir_js/reference/noir_js/functions/keccak256.md b/docs/versioned_docs/version-v0.19.0/noir_js/reference/noir_js/functions/keccak256.md new file mode 100644 index 00000000000..d10f155ce86 --- /dev/null +++ b/docs/versioned_docs/version-v0.19.0/noir_js/reference/noir_js/functions/keccak256.md @@ -0,0 +1,21 @@ +# keccak256() + +```ts +keccak256(inputs): Uint8Array +``` + +Calculates the Keccak256 hash of the input bytes + +## Parameters + +| Parameter | Type | Description | +| :------ | :------ | :------ | +| `inputs` | `Uint8Array` | | + +## Returns + +`Uint8Array` + +*** + +Generated using [typedoc-plugin-markdown](https://www.npmjs.com/package/typedoc-plugin-markdown) and [TypeDoc](https://typedoc.org/) diff --git a/docs/versioned_docs/version-v0.19.0/noir_js/reference/noir_js/functions/sha256.md b/docs/versioned_docs/version-v0.19.0/noir_js/reference/noir_js/functions/sha256.md new file mode 100644 index 00000000000..6ba4ecac022 --- /dev/null +++ b/docs/versioned_docs/version-v0.19.0/noir_js/reference/noir_js/functions/sha256.md @@ -0,0 +1,21 @@ +# sha256() + +```ts +sha256(inputs): Uint8Array +``` + +Calculates the SHA256 hash of the input bytes + +## Parameters + +| Parameter | Type | Description | +| :------ | :------ | :------ | +| `inputs` | `Uint8Array` | | + +## Returns + +`Uint8Array` + +*** + +Generated using [typedoc-plugin-markdown](https://www.npmjs.com/package/typedoc-plugin-markdown) and [TypeDoc](https://typedoc.org/) diff --git a/docs/versioned_docs/version-v0.19.0/noir_js/reference/noir_js/functions/xor.md b/docs/versioned_docs/version-v0.19.0/noir_js/reference/noir_js/functions/xor.md new file mode 100644 index 00000000000..8d762b895d3 --- /dev/null +++ b/docs/versioned_docs/version-v0.19.0/noir_js/reference/noir_js/functions/xor.md @@ -0,0 +1,22 @@ +# xor() + +```ts +xor(lhs, rhs): string +``` + +Performs a bitwise XOR operation between `lhs` and `rhs` + +## Parameters + +| Parameter | Type | Description | +| :------ | :------ | :------ | +| `lhs` | `string` | | +| `rhs` | `string` | | + +## Returns + +`string` + +*** + 
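These standalone helpers are re-exported by `@noir-lang/noir_js`, as the export index below shows. A rough usage sketch follows; it assumes the hash helpers take and return raw bytes as documented, and that the bitwise helpers operate on hex-encoded field values (an assumption, since the reference only states that they take strings):

```js
import { sha256, keccak256, xor } from '@noir-lang/noir_js';

// Hash helpers take a Uint8Array of input bytes and return a Uint8Array digest.
const message = new TextEncoder().encode('hello world');
const shaDigest = sha256(message);
const keccakDigest = keccak256(message);

// Bitwise helpers take and return strings; hex-encoded field values are assumed here.
const masked = xor('0x0f', '0x05');

console.log({ shaDigest, keccakDigest, masked });
```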
+Generated using [typedoc-plugin-markdown](https://www.npmjs.com/package/typedoc-plugin-markdown) and [TypeDoc](https://typedoc.org/) diff --git a/docs/versioned_docs/version-v0.19.0/noir_js/reference/noir_js/index.md b/docs/versioned_docs/version-v0.19.0/noir_js/reference/noir_js/index.md new file mode 100644 index 00000000000..58902c17b99 --- /dev/null +++ b/docs/versioned_docs/version-v0.19.0/noir_js/reference/noir_js/index.md @@ -0,0 +1,36 @@ +# Noir JS + +## Exports + +### Classes + +| Class | Description | +| :------ | :------ | +| [Noir](classes/Noir.md) | - | + +### Type Aliases + +| Type alias | Description | +| :------ | :------ | +| [CompiledCircuit](type-aliases/CompiledCircuit.md) | - | +| [ForeignCallHandler](type-aliases/ForeignCallHandler.md) | A callback which performs a foreign call and returns the response. | +| [ForeignCallInput](type-aliases/ForeignCallInput.md) | - | +| [ForeignCallOutput](type-aliases/ForeignCallOutput.md) | - | +| [ProofData](type-aliases/ProofData.md) | - | +| [WitnessMap](type-aliases/WitnessMap.md) | - | + +### Functions + +| Function | Description | +| :------ | :------ | +| [and](functions/and.md) | Performs a bitwise AND operation between `lhs` and `rhs` | +| [blake2s256](functions/blake2s256.md) | Calculates the Blake2s256 hash of the input bytes | +| [ecdsa\_secp256k1\_verify](functions/ecdsa_secp256k1_verify.md) | Verifies an ECDSA signature over the secp256k1 curve. | +| [ecdsa\_secp256r1\_verify](functions/ecdsa_secp256r1_verify.md) | Verifies an ECDSA signature over the secp256r1 curve. | +| [keccak256](functions/keccak256.md) | Calculates the Keccak256 hash of the input bytes | +| [sha256](functions/sha256.md) | Calculates the SHA256 hash of the input bytes | +| [xor](functions/xor.md) | Performs a bitwise XOR operation between `lhs` and `rhs` | + +*** + +Generated using [typedoc-plugin-markdown](https://www.npmjs.com/package/typedoc-plugin-markdown) and [TypeDoc](https://typedoc.org/) diff --git a/docs/versioned_docs/version-v0.19.0/noir_js/reference/noir_js/type-aliases/CompiledCircuit.md b/docs/versioned_docs/version-v0.19.0/noir_js/reference/noir_js/type-aliases/CompiledCircuit.md new file mode 100644 index 00000000000..34e0dd04205 --- /dev/null +++ b/docs/versioned_docs/version-v0.19.0/noir_js/reference/noir_js/type-aliases/CompiledCircuit.md @@ -0,0 +1,20 @@ +# CompiledCircuit + +```ts +type CompiledCircuit: object; +``` + +## Description + +The representation of a compiled circuit + +## Type declaration + +| Member | Type | Description | +| :------ | :------ | :------ | +| `abi` | `Abi` | **Description**

ABI representation of the circuit | +| `bytecode` | `string` | **Description**

The bytecode of the circuit | + +*** + +Generated using [typedoc-plugin-markdown](https://www.npmjs.com/package/typedoc-plugin-markdown) and [TypeDoc](https://typedoc.org/) diff --git a/docs/versioned_docs/version-v0.19.0/noir_js/reference/noir_js/type-aliases/ForeignCallHandler.md b/docs/versioned_docs/version-v0.19.0/noir_js/reference/noir_js/type-aliases/ForeignCallHandler.md new file mode 100644 index 00000000000..812b8b16481 --- /dev/null +++ b/docs/versioned_docs/version-v0.19.0/noir_js/reference/noir_js/type-aliases/ForeignCallHandler.md @@ -0,0 +1,24 @@ +# ForeignCallHandler + +```ts +type ForeignCallHandler: (name, inputs) => Promise; +``` + +A callback which performs an foreign call and returns the response. + +## Parameters + +| Parameter | Type | Description | +| :------ | :------ | :------ | +| `name` | `string` | The identifier for the type of foreign call being performed. | +| `inputs` | [`ForeignCallInput`](ForeignCallInput.md)[] | An array of hex encoded inputs to the foreign call. | + +## Returns + +`Promise`\<[`ForeignCallOutput`](ForeignCallOutput.md)[]\> + +outputs - An array of hex encoded outputs containing the results of the foreign call. + +*** + +Generated using [typedoc-plugin-markdown](https://www.npmjs.com/package/typedoc-plugin-markdown) and [TypeDoc](https://typedoc.org/) diff --git a/docs/versioned_docs/version-v0.19.0/noir_js/reference/noir_js/type-aliases/ForeignCallInput.md b/docs/versioned_docs/version-v0.19.0/noir_js/reference/noir_js/type-aliases/ForeignCallInput.md new file mode 100644 index 00000000000..dd95809186a --- /dev/null +++ b/docs/versioned_docs/version-v0.19.0/noir_js/reference/noir_js/type-aliases/ForeignCallInput.md @@ -0,0 +1,9 @@ +# ForeignCallInput + +```ts +type ForeignCallInput: string[]; +``` + +*** + +Generated using [typedoc-plugin-markdown](https://www.npmjs.com/package/typedoc-plugin-markdown) and [TypeDoc](https://typedoc.org/) diff --git a/docs/versioned_docs/version-v0.19.0/noir_js/reference/noir_js/type-aliases/ForeignCallOutput.md b/docs/versioned_docs/version-v0.19.0/noir_js/reference/noir_js/type-aliases/ForeignCallOutput.md new file mode 100644 index 00000000000..b71fb78a946 --- /dev/null +++ b/docs/versioned_docs/version-v0.19.0/noir_js/reference/noir_js/type-aliases/ForeignCallOutput.md @@ -0,0 +1,9 @@ +# ForeignCallOutput + +```ts +type ForeignCallOutput: string | string[]; +``` + +*** + +Generated using [typedoc-plugin-markdown](https://www.npmjs.com/package/typedoc-plugin-markdown) and [TypeDoc](https://typedoc.org/) diff --git a/docs/versioned_docs/version-v0.19.0/noir_js/reference/noir_js/type-aliases/ProofData.md b/docs/versioned_docs/version-v0.19.0/noir_js/reference/noir_js/type-aliases/ProofData.md new file mode 100644 index 00000000000..4aeff73d3e4 --- /dev/null +++ b/docs/versioned_docs/version-v0.19.0/noir_js/reference/noir_js/type-aliases/ProofData.md @@ -0,0 +1,20 @@ +# ProofData + +```ts +type ProofData: object; +``` + +## Description + +The representation of a proof + +## Type declaration + +| Member | Type | Description | +| :------ | :------ | :------ | +| `proof` | `Uint8Array` | **Description**

A byte array representing the proof | +| `publicInputs` | `Uint8Array`[] | **Description**

Public inputs of a proof | + +*** + +Generated using [typedoc-plugin-markdown](https://www.npmjs.com/package/typedoc-plugin-markdown) and [TypeDoc](https://typedoc.org/) diff --git a/docs/versioned_docs/version-v0.19.0/noir_js/reference/noir_js/type-aliases/WitnessMap.md b/docs/versioned_docs/version-v0.19.0/noir_js/reference/noir_js/type-aliases/WitnessMap.md new file mode 100644 index 00000000000..258c46f9d0c --- /dev/null +++ b/docs/versioned_docs/version-v0.19.0/noir_js/reference/noir_js/type-aliases/WitnessMap.md @@ -0,0 +1,9 @@ +# WitnessMap + +```ts +type WitnessMap: Map; +``` + +*** + +Generated using [typedoc-plugin-markdown](https://www.npmjs.com/package/typedoc-plugin-markdown) and [TypeDoc](https://typedoc.org/) diff --git a/docs/versioned_docs/version-v0.19.0/noir_js/reference/noir_js/typedoc-sidebar.cjs b/docs/versioned_docs/version-v0.19.0/noir_js/reference/noir_js/typedoc-sidebar.cjs new file mode 100644 index 00000000000..c18318850d0 --- /dev/null +++ b/docs/versioned_docs/version-v0.19.0/noir_js/reference/noir_js/typedoc-sidebar.cjs @@ -0,0 +1,4 @@ +// @ts-check +/** @type {import('@docusaurus/plugin-content-docs').SidebarsConfig} */ +const typedocSidebar = { items: [{"type":"category","label":"Classes","items":[{"type":"doc","id":"noir_js/reference/noir_js/classes/Noir","label":"Noir"}]},{"type":"category","label":"Type Aliases","items":[{"type":"doc","id":"noir_js/reference/noir_js/type-aliases/CompiledCircuit","label":"CompiledCircuit"},{"type":"doc","id":"noir_js/reference/noir_js/type-aliases/ForeignCallHandler","label":"ForeignCallHandler"},{"type":"doc","id":"noir_js/reference/noir_js/type-aliases/ForeignCallInput","label":"ForeignCallInput"},{"type":"doc","id":"noir_js/reference/noir_js/type-aliases/ForeignCallOutput","label":"ForeignCallOutput"},{"type":"doc","id":"noir_js/reference/noir_js/type-aliases/ProofData","label":"ProofData"},{"type":"doc","id":"noir_js/reference/noir_js/type-aliases/WitnessMap","label":"WitnessMap"}]},{"type":"category","label":"Functions","items":[{"type":"doc","id":"noir_js/reference/noir_js/functions/and","label":"and"},{"type":"doc","id":"noir_js/reference/noir_js/functions/blake2s256","label":"blake2s256"},{"type":"doc","id":"noir_js/reference/noir_js/functions/ecdsa_secp256k1_verify","label":"ecdsa_secp256k1_verify"},{"type":"doc","id":"noir_js/reference/noir_js/functions/ecdsa_secp256r1_verify","label":"ecdsa_secp256r1_verify"},{"type":"doc","id":"noir_js/reference/noir_js/functions/keccak256","label":"keccak256"},{"type":"doc","id":"noir_js/reference/noir_js/functions/sha256","label":"sha256"},{"type":"doc","id":"noir_js/reference/noir_js/functions/xor","label":"xor"}]}]}; +module.exports = typedocSidebar.items; \ No newline at end of file diff --git a/docs/versioned_docs/version-v0.19.0/standard_library/black_box_fns.md b/docs/versioned_docs/version-v0.19.0/standard_library/black_box_fns.md new file mode 100644 index 00000000000..e0c6d475c1f --- /dev/null +++ b/docs/versioned_docs/version-v0.19.0/standard_library/black_box_fns.md @@ -0,0 +1,46 @@ +--- +title: Black Box Functions +description: Black box functions are functions in Noir that rely on backends implementing support for specialized constraints. +keywords: [noir, black box functions] +--- + +Black box functions are functions in Noir that rely on backends implementing support for specialized constraints. This makes certain zk-snark unfriendly computations cheaper than if they were implemented in Noir. 
+ +:::warning + +It is likely that not all backends will support a particular black box function. + +::: + +Because it is not guaranteed that all backends will support black box functions, it is possible that certain Noir programs won't compile against a particular backend if they use an unsupported black box function. It is possible to fallback to less efficient implementations written in Noir/ACIR in some cases. + +Black box functions are specified with the `#[foreign(black_box_fn)]` attribute. For example, the SHA256 function in the Noir [source code](https://github.com/noir-lang/noir/blob/v0.5.1/noir_stdlib/src/hash.nr) looks like: + +```rust +#[foreign(sha256)] +fn sha256(_input : [u8; N]) -> [u8; 32] {} +``` + +## Function list + +Here is a list of the current black box functions that are supported by UltraPlonk: + +- AES +- [SHA256](./cryptographic_primitives/hashes#sha256) +- [Schnorr signature verification](./cryptographic_primitives/schnorr) +- [Blake2s](./cryptographic_primitives/hashes#blake2s) +- [Pedersen Hash](./cryptographic_primitives/hashes#pedersen_hash) +- [Pedersen Commitment](./cryptographic_primitives/hashes#pedersen_commitment) +- [HashToField128Security](./cryptographic_primitives/hashes#hash_to_field) +- [ECDSA signature verification](./cryptographic_primitives/ecdsa_sig_verification) +- [Fixed base scalar multiplication](./cryptographic_primitives/scalar) +- [Compute merkle root](./merkle_trees#compute_merkle_root) +- AND +- XOR +- RANGE +- [Keccak256](./cryptographic_primitives/hashes#keccak256) +- [Recursive proof verification](./recursion) + +Most black box functions are included as part of the Noir standard library, however `AND`, `XOR` and `RANGE` are used as part of the Noir language syntax. For instance, using the bitwise operator `&` will invoke the `AND` black box function. To ensure compatibility across backends, the ACVM has fallback implementations of `AND`, `XOR` and `RANGE` defined in its standard library which it can seamlessly fallback to if the backend doesn't support them. + +You can view the black box functions defined in the ACVM code [here](https://github.com/noir-lang/noir/blob/master/acvm-repo/acir/src/circuit/black_box_functions.rs). 
diff --git a/docs/docs/standard_library/cryptographic_primitives.md b/docs/versioned_docs/version-v0.19.0/standard_library/cryptographic_primitives.md similarity index 100% rename from docs/docs/standard_library/cryptographic_primitives.md rename to docs/versioned_docs/version-v0.19.0/standard_library/cryptographic_primitives.md diff --git a/docs/docs/standard_library/cryptographic_primitives/00_hashes.mdx b/docs/versioned_docs/version-v0.19.0/standard_library/cryptographic_primitives/00_hashes.mdx similarity index 100% rename from docs/docs/standard_library/cryptographic_primitives/00_hashes.mdx rename to docs/versioned_docs/version-v0.19.0/standard_library/cryptographic_primitives/00_hashes.mdx diff --git a/docs/docs/standard_library/cryptographic_primitives/01_scalar.mdx b/docs/versioned_docs/version-v0.19.0/standard_library/cryptographic_primitives/01_scalar.mdx similarity index 100% rename from docs/docs/standard_library/cryptographic_primitives/01_scalar.mdx rename to docs/versioned_docs/version-v0.19.0/standard_library/cryptographic_primitives/01_scalar.mdx diff --git a/docs/docs/standard_library/cryptographic_primitives/02_schnorr.mdx b/docs/versioned_docs/version-v0.19.0/standard_library/cryptographic_primitives/02_schnorr.mdx similarity index 100% rename from docs/docs/standard_library/cryptographic_primitives/02_schnorr.mdx rename to docs/versioned_docs/version-v0.19.0/standard_library/cryptographic_primitives/02_schnorr.mdx diff --git a/docs/docs/standard_library/cryptographic_primitives/03_ecdsa_sig_verification.mdx b/docs/versioned_docs/version-v0.19.0/standard_library/cryptographic_primitives/03_ecdsa_sig_verification.mdx similarity index 100% rename from docs/docs/standard_library/cryptographic_primitives/03_ecdsa_sig_verification.mdx rename to docs/versioned_docs/version-v0.19.0/standard_library/cryptographic_primitives/03_ecdsa_sig_verification.mdx diff --git a/docs/docs/standard_library/cryptographic_primitives/04_ec_primitives.md b/docs/versioned_docs/version-v0.19.0/standard_library/cryptographic_primitives/04_ec_primitives.md similarity index 100% rename from docs/docs/standard_library/cryptographic_primitives/04_ec_primitives.md rename to docs/versioned_docs/version-v0.19.0/standard_library/cryptographic_primitives/04_ec_primitives.md diff --git a/docs/docs/standard_library/cryptographic_primitives/05_eddsa.mdx b/docs/versioned_docs/version-v0.19.0/standard_library/cryptographic_primitives/05_eddsa.mdx similarity index 100% rename from docs/docs/standard_library/cryptographic_primitives/05_eddsa.mdx rename to docs/versioned_docs/version-v0.19.0/standard_library/cryptographic_primitives/05_eddsa.mdx diff --git a/docs/versioned_docs/version-v0.19.0/standard_library/logging.md b/docs/versioned_docs/version-v0.19.0/standard_library/logging.md new file mode 100644 index 00000000000..4ba0fe0e656 --- /dev/null +++ b/docs/versioned_docs/version-v0.19.0/standard_library/logging.md @@ -0,0 +1,62 @@ +--- +title: Logging +description: + Learn how to use the println statement for debugging in Noir with this tutorial. Understand the + basics of logging in Noir and how to implement it in your code. +keywords: + [ + noir logging, + println statement, + debugging in noir, + noir std library, + logging tutorial, + basic logging in noir, + noir logging implementation, + noir debugging techniques, + rust, + ] +--- + +The standard library provides a familiar `println` statement you can use. 
Despite being a limited +implementation of rust's `println!` macro, this construct can be useful for debugging. + +You can print the output of println statements in your Noir code by using the `nargo execute` command or the `--show-output` flag when using `nargo test` (provided there are println statements in your tests). + +It is recommended to use `nargo execute` if you want to debug failing constraints with `println` statements. This is due to every input in a test being a constant rather than a witness, so we issue an error during compilation while we only print during execution (which comes after compilation). `println` will not work for failed constraints caught at compile time. + +The `println` statement is unconstrained, so it works for outputting integers, fields, strings, and even structs or expressions. For example: + +```rust +use dep::std; + +struct Person { + age : Field, + height : Field, +} + +fn main(age : Field, height : Field) { + let person = Person { age : age, height : height }; + std::println(person); + std::println(age + height); + std::println("Hello world!"); +} + +``` + +You can print multiple different types in the same statement and string as well as a new "fmtstr" type. A `fmtstr` can be specified in the same way as a normal string it just should be prepended with an "f" character: + +```rust + let fmt_str = f"i: {i}, j: {j}"; + std::println(fmt_str); + + let s = myStruct { y: x, x: y }; + std::println(s); + + std::println(f"i: {i}, s: {s}"); + + std::println(x); + std::println([x, y]); + + let foo = fooStruct { my_struct: s, foo: 15 }; + std::println(f"s: {s}, foo: {foo}"); +``` diff --git a/docs/docs/standard_library/merkle_trees.md b/docs/versioned_docs/version-v0.19.0/standard_library/merkle_trees.md similarity index 100% rename from docs/docs/standard_library/merkle_trees.md rename to docs/versioned_docs/version-v0.19.0/standard_library/merkle_trees.md diff --git a/docs/docs/standard_library/options.md b/docs/versioned_docs/version-v0.19.0/standard_library/options.md similarity index 100% rename from docs/docs/standard_library/options.md rename to docs/versioned_docs/version-v0.19.0/standard_library/options.md diff --git a/docs/docs/standard_library/recursion.md b/docs/versioned_docs/version-v0.19.0/standard_library/recursion.md similarity index 100% rename from docs/docs/standard_library/recursion.md rename to docs/versioned_docs/version-v0.19.0/standard_library/recursion.md diff --git a/docs/versioned_docs/version-v0.10.5/standard_library/zeroed.md b/docs/versioned_docs/version-v0.19.0/standard_library/zeroed.md similarity index 100% rename from docs/versioned_docs/version-v0.10.5/standard_library/zeroed.md rename to docs/versioned_docs/version-v0.19.0/standard_library/zeroed.md diff --git a/docs/versioned_docs/version-v0.19.1/getting_started/00_nargo_installation.md b/docs/versioned_docs/version-v0.19.1/getting_started/00_nargo_installation.md index 725c5f4d373..7d3c88c7693 100644 --- a/docs/versioned_docs/version-v0.19.1/getting_started/00_nargo_installation.md +++ b/docs/versioned_docs/version-v0.19.1/getting_started/00_nargo_installation.md @@ -14,7 +14,7 @@ Alternatively, the interactions can also be performed in [NoirJS](../noir_js/noi ### UltraPlonk -Nargo versions <0.5.0 of `aztec_backend` and `aztec_wasm_backend` are based on the TurboPlonk +Nargo versions \<0.5.0 of `aztec_backend` and `aztec_wasm_backend` are based on the TurboPlonk version of Aztec Backend, which lacks efficient implementations of useful primitives (e.g. 
Keccak256 in 18k constraints, ECDSA verification in 36k constraints) that the UltraPlonk version offers. ## Installation @@ -140,9 +140,9 @@ Commands: ### Option 3: Compile from Source -Due to the large number of native dependencies, Noir projects uses [Nix](https://nixos.org/) and [direnv](https://direnv.net/) to streamline the development experience. It helps mitigating ssues commonly associated with dependency management, such as conflicts between required package versions for different projects (often referred to as "dependency hell"). +Due to the large number of native dependencies, Noir projects uses [Nix](https://nixos.org/) and [direnv](https://direnv.net/) to streamline the development experience. It helps mitigating issues commonly associated with dependency management, such as conflicts between required package versions for different projects (often referred to as "dependency hell"). -Combined with direnv, which automatically sets or unsets environment variables based on the directory, it further simplifies the development process by seamlessly integrating with the developer's shell, facilitating an efficient and reliable workflow for managing and deploying Noir projects with multiple dependencies. +Combined with direnv, which automatically sets or clears environment variables based on the directory, it further simplifies the development process by seamlessly integrating with the developer's shell, facilitating an efficient and reliable workflow for managing and deploying Noir projects with multiple dependencies. #### Setting up your environment @@ -220,7 +220,7 @@ Advanced: If you aren't using direnv nor launching your editor within the subshe ### Option 4: WSL (for Windows) -The default backend for Noir (Barretenberg) doesn't provide Windows binaries at this time. For that reason, Noir cannot be installed nativerly. However, it is available by using Windows Subsystem for Linux (WSL). +The default backend for Noir (Barretenberg) doesn't provide Windows binaries at this time. For that reason, Noir cannot be installed natively. However, it is available by using Windows Subsystem for Linux (WSL). Step 1: Follow the instructions [here](https://learn.microsoft.com/en-us/windows/wsl/install) to install and run WSL. diff --git a/docs/versioned_docs/version-v0.19.1/getting_started/02_breakdown.md b/docs/versioned_docs/version-v0.19.1/getting_started/02_breakdown.md index 9a17f5d6360..d28a54a1600 100644 --- a/docs/versioned_docs/version-v0.19.1/getting_started/02_breakdown.md +++ b/docs/versioned_docs/version-v0.19.1/getting_started/02_breakdown.md @@ -39,7 +39,7 @@ Example Nargo.toml: ```toml [package] -name = "noirstarter" +name = "noir_starter" type = "bin" authors = ["Alice"] compiler_version = "0.9.0" diff --git a/docs/versioned_docs/version-v0.19.1/index.md b/docs/versioned_docs/version-v0.19.1/index.md index 75e1abf2932..380368db036 100644 --- a/docs/versioned_docs/version-v0.19.1/index.md +++ b/docs/versioned_docs/version-v0.19.1/index.md @@ -25,7 +25,7 @@ slug: / Noir is a Domain Specific Language for SNARK proving systems. It has been designed to use any ACIR compatible proving system. -It's design choices are influenced heavily by Rust and focuses on a simple, familiar syntax. +Its design choices are influenced heavily by Rust and focuses on a simple, familiar syntax. ## Who is Noir for? @@ -34,7 +34,7 @@ Noir can be used for a variety of purposes. ### Solidity Developers Noir currently includes a command to create a Solidity contract which verifies your Noir program. 
This will -be modularised in the future; however, as of the alpha, you can use the [`nargo codegen-verifier`](./nargo/commands#nargo-codegen-verifier) command to create +be modularized in the future; however, as of the alpha, you can use the [`nargo codegen-verifier`](./nargo/commands#nargo-codegen-verifier) command to create a verifier contract. ### Protocol Developers @@ -55,7 +55,7 @@ must be implemented for it. Noir is simple and flexible in its design, as it does not compile immediately to a fixed NP-complete language. Instead, Noir compiles to an intermediate language (ACIR), which itself can be compiled -to an arithmetic circuit (if choosing to target Aztec's barretenberg backend) or a rank-1 constraint system (if choosing to target an R1CS backend like Arkwork's Marlin backend, or others). +to an arithmetic circuit (if choosing to target Aztec's barretenberg backend) or a rank-1 constraint system (if choosing to target an R1CS backend like Arkworks' Marlin backend, or others). This in itself brings up a few challenges within the design process, but allows one to decouple the programming language completely from the backend. This is similar in theory to LLVM. @@ -67,7 +67,7 @@ Compiler: - For expressions - Arrays - Bit Operations -- Binary operations (<, <=, >, >=, +, -, \*, /, %) [See documentation for an extensive list] +- Binary operations (\<, \<=, >, >=, +, -, \*, /, %) [See documentation for an extensive list] - Unsigned integers - If statements - Structures and Tuples diff --git a/docs/versioned_docs/version-v0.19.1/language_concepts/01_functions.md b/docs/versioned_docs/version-v0.19.1/language_concepts/01_functions.md index 47cdea0cf04..5eb22170e54 100644 --- a/docs/versioned_docs/version-v0.19.1/language_concepts/01_functions.md +++ b/docs/versioned_docs/version-v0.19.1/language_concepts/01_functions.md @@ -30,7 +30,7 @@ All parameters in a function must have a type and all types are known at compile is pre-pended with a colon and the parameter type. Multiple parameters are separated using a comma. ```rust -fn foo(x : Field, y : pub Field){} +fn foo(x : Field, y : Field){} ``` The return type of a function can be stated by using the `->` arrow notation. The function below @@ -38,7 +38,7 @@ states that the foo function must return a `Field`. If the function returns no v is omitted. ```rust -fn foo(x : Field, y : pub Field) -> Field { +fn foo(x : Field, y : Field) -> Field { x + y } ``` diff --git a/docs/versioned_docs/version-v0.19.1/language_concepts/03_ops.md b/docs/versioned_docs/version-v0.19.1/language_concepts/03_ops.md index da02b126059..0e35ef5e584 100644 --- a/docs/versioned_docs/version-v0.19.1/language_concepts/03_ops.md +++ b/docs/versioned_docs/version-v0.19.1/language_concepts/03_ops.md @@ -29,11 +29,11 @@ keywords: | ^ | XOR two private input types together | Types must be integer | | & | AND two private input types together | Types must be integer | | \| | OR two private input types together | Types must be integer | -| << | Left shift an integer by another integer amount | Types must be integer | +| \<\< | Left shift an integer by another integer amount | Types must be integer | | >> | Right shift an integer by another integer amount | Types must be integer | | ! 
| Bitwise not of a value | Type must be integer or boolean | -| < | returns a bool if one value is less than the other | Upper bound must have a known bit size | -| <= | returns a bool if one value is less than or equal to the other | Upper bound must have a known bit size | +| \< | returns a bool if one value is less than the other | Upper bound must have a known bit size | +| \<= | returns a bool if one value is less than or equal to the other | Upper bound must have a known bit size | | > | returns a bool if one value is more than the other | Upper bound must have a known bit size | | >= | returns a bool if one value is more than or equal to the other | Upper bound must have a known bit size | | == | returns a bool if one value is equal to the other | Both types must not be constants | @@ -62,7 +62,7 @@ fn main(x : Field) { Noir has no support for the logical operators `||` and `&&`. This is because encoding the short-circuiting that these operators require can be inefficient for Noir's backend. Instead you can -use the bitwise operators `|` and `&` which operate indentically for booleans, just without the +use the bitwise operators `|` and `&` which operate identically for booleans, just without the short-circuiting. ```rust diff --git a/docs/versioned_docs/version-v0.19.1/language_concepts/05_unconstrained.md b/docs/versioned_docs/version-v0.19.1/language_concepts/05_unconstrained.md index 6b621eda3eb..097d6ee9894 100644 --- a/docs/versioned_docs/version-v0.19.1/language_concepts/05_unconstrained.md +++ b/docs/versioned_docs/version-v0.19.1/language_concepts/05_unconstrained.md @@ -7,7 +7,7 @@ keywords: [Noir programming language, unconstrained, open] -Unconstrained functions are functions which do not constrain any of the included computation and allow for non-determinisitic computation. +Unconstrained functions are functions which do not constrain any of the included computation and allow for non-deterministic computation. ## Why? diff --git a/docs/versioned_docs/version-v0.19.1/language_concepts/07_mutability.md b/docs/versioned_docs/version-v0.19.1/language_concepts/07_mutability.md index 4641521b1d9..ed3fed820ec 100644 --- a/docs/versioned_docs/version-v0.19.1/language_concepts/07_mutability.md +++ b/docs/versioned_docs/version-v0.19.1/language_concepts/07_mutability.md @@ -37,7 +37,7 @@ Note that mutability in noir is local and everything is passed by value, so if a mutates its parameters then the parent function will keep the old value of the parameters. ```rust -fn main() -> Field { +fn main() -> pub Field { let x = 3; helper(x); x // x is still 3 @@ -69,11 +69,11 @@ fn main(x : Field, y : [Field; N]) { assert(res == y[0]); - let res2 = x * mysubmodule::N; + let res2 = x * my_submodule::N; assert(res != res2); } -mod mysubmodule { +mod my_submodule { use dep::std; global N: Field = 10; diff --git a/docs/versioned_docs/version-v0.19.1/language_concepts/data_types.md b/docs/versioned_docs/version-v0.19.1/language_concepts/data_types.md index d546cc463a8..74f573f7d3f 100644 --- a/docs/versioned_docs/version-v0.19.1/language_concepts/data_types.md +++ b/docs/versioned_docs/version-v0.19.1/language_concepts/data_types.md @@ -93,4 +93,4 @@ fn main() { ### BigInt -You can acheive BigInt functionality using the [Noir BigInt](https://github.com/shuklaayush/noir-bigint) library. +You can achieve BigInt functionality using the [Noir BigInt](https://github.com/shuklaayush/noir-bigint) library. 
diff --git a/docs/versioned_docs/version-v0.19.1/language_concepts/data_types/00_fields.md b/docs/versioned_docs/version-v0.19.1/language_concepts/data_types/00_fields.md index 658a0441ffb..78d3d2af166 100644 --- a/docs/versioned_docs/version-v0.19.1/language_concepts/data_types/00_fields.md +++ b/docs/versioned_docs/version-v0.19.1/language_concepts/data_types/00_fields.md @@ -158,7 +158,7 @@ fn main() { ### sgn0 -Parity of (prime) Field element, i.e. sgn0(x mod p) = 0 if x ∈ {0, ..., p-1} is even, otherwise sgn0(x mod p) = 1. +Parity of (prime) Field element, i.e. sgn0(x mod p) = 0 if x ∈ \{0, ..., p-1\} is even, otherwise sgn0(x mod p) = 1. ```rust fn sgn0(self) -> u1 diff --git a/docs/versioned_docs/version-v0.19.1/migration_notes.md b/docs/versioned_docs/version-v0.19.1/migration_notes.md index e87eb1feaba..905bca3d30c 100644 --- a/docs/versioned_docs/version-v0.19.1/migration_notes.md +++ b/docs/versioned_docs/version-v0.19.1/migration_notes.md @@ -50,7 +50,7 @@ If you are using the default barretenberg backend, simply run: nargo prove ``` -with you Noir program. +with your Noir program. This will trigger the download and installation of the latest version of barretenberg compatible with your Nargo in use. @@ -78,7 +78,7 @@ nargo backend install acvm-backend-barretenberg https://github.com/noir-lang/bar This downloads and installs a specific bb.js based version of barretenberg binary from GitHub. -The gzipped filed is running this bash script: , where we need to gzip it as the Nargo currently expect the backend to be zipped up. +The gzipped file is running [this bash script](https://github.com/noir-lang/barretenberg-js-binary/blob/master/run-bb-js.sh), where we need to gzip it as the Nargo currently expect the backend to be zipped up. Then run: diff --git a/docs/versioned_docs/version-v0.19.1/modules_packages_crates/crates_and_packages.md b/docs/versioned_docs/version-v0.19.1/modules_packages_crates/crates_and_packages.md index fb83a33d94e..744de72bb2c 100644 --- a/docs/versioned_docs/version-v0.19.1/modules_packages_crates/crates_and_packages.md +++ b/docs/versioned_docs/version-v0.19.1/modules_packages_crates/crates_and_packages.md @@ -23,7 +23,7 @@ _Library crates_ don't have a `main` function and they don't compile down to ACI #### Contracts -Contract crates are similar to binary crates in that they compile to ACIR which you can create proofs against. They are different in that they do not have a single `main` function, but are a collection of functions to be deployed to the [Aztec network](https://aztec.network). You can learn more about the technical details of Aztec in the [monorepo](https://github.com/AztecProtocol/aztec-packages) or contract [examples](https://github.com/AztecProtocol/aztec-packages/tree/master/yarn-project/noir-contracts/src/contracts). +Contract crates are similar to binary crates in that they compile to ACIR which you can create proofs against. They are different in that they do not have a single `main` function, but are a collection of functions to be deployed to the [Aztec network](https://aztec.network). You can learn more about the technical details of Aztec in the [monorepo](https://github.com/AztecProtocol/aztec-packages) or contract [examples](https://github.com/AztecProtocol/aztec-packages/tree/master/yarn-project/noir-contracts/contracts). 
### Crate Root diff --git a/docs/versioned_docs/version-v0.19.1/modules_packages_crates/dependencies.md b/docs/versioned_docs/version-v0.19.1/modules_packages_crates/dependencies.md index 75f95aaa305..e91e73a4c4f 100644 --- a/docs/versioned_docs/version-v0.19.1/modules_packages_crates/dependencies.md +++ b/docs/versioned_docs/version-v0.19.1/modules_packages_crates/dependencies.md @@ -34,7 +34,7 @@ If the module is in a subdirectory, you can define a subdirectory in your git re # Nargo.toml [dependencies] -easy_private_token_contract = {tag ="v0.1.0-alpha62", git = "https://github.com/AztecProtocol/aztec-packages", directory = "yarn-project/noir-contracts/src/contracts/easy_private_token_contract"} +easy_private_token_contract = {tag ="v0.1.0-alpha62", git = "https://github.com/AztecProtocol/aztec-packages", directory = "yarn-project/noir-contracts/contracts/easy_private_token_contract"} ``` ## Specifying a local dependency @@ -48,7 +48,7 @@ For example, this file structure has a library and binary crate │   ├── Nargo.toml │   └── src │   └── main.nr -└── liba +└── lib_a ├── Nargo.toml └── src └── lib.nr @@ -60,17 +60,17 @@ Inside of the binary crate, you can specify: # Nargo.toml [dependencies] -libA = { path = "../liba" } +lib_a = { path = "../lib_a" } ``` ## Importing dependencies You can import a dependency to a Noir file using the following syntax. For example, to import the -ecrecover-noir library and local liba referenced above: +ecrecover-noir library and local lib_a referenced above: ```rust use dep::ecrecover; -use dep::libA; +use dep::lib_a; ``` You can also import only the specific parts of dependency that you want to use, like so: diff --git a/docs/versioned_docs/version-v0.19.1/modules_packages_crates/modules.md b/docs/versioned_docs/version-v0.19.1/modules_packages_crates/modules.md index 147c9b284e8..11e60cbf35e 100644 --- a/docs/versioned_docs/version-v0.19.1/modules_packages_crates/modules.md +++ b/docs/versioned_docs/version-v0.19.1/modules_packages_crates/modules.md @@ -10,7 +10,7 @@ Noir's module system follows the same convention as the _newer_ version of Rust' ## Purpose of Modules -Modules are used to organise files. Without modules all of your code would need to live in a single +Modules are used to organize files. Without modules all of your code would need to live in a single file. In Noir, the compiler does not automatically scan all of your files to detect modules. This must be done explicitly by the developer. diff --git a/docs/versioned_docs/version-v0.19.1/nargo/02_testing.md b/docs/versioned_docs/version-v0.19.1/nargo/02_testing.md index da767274efd..5c57ef92705 100644 --- a/docs/versioned_docs/version-v0.19.1/nargo/02_testing.md +++ b/docs/versioned_docs/version-v0.19.1/nargo/02_testing.md @@ -23,8 +23,8 @@ fn test_add() { } ``` -Running `nargo test` will test that the `test_add` function can be executed while satisfying the all -the contraints which allows you to test that add returns the expected values. Test functions can't +Running `nargo test` will test that the `test_add` function can be executed while satisfying all +the constraints which allows you to test that add returns the expected values. Test functions can't have any arguments currently. 
### Test fail diff --git a/docs/versioned_docs/version-v0.19.1/noir_js/reference/noir_js/classes/Noir.md b/docs/versioned_docs/version-v0.19.1/noir_js/reference/noir_js/classes/Noir.md index a8a0bb451c1..1d7b54a9dca 100644 --- a/docs/versioned_docs/version-v0.19.1/noir_js/reference/noir_js/classes/Noir.md +++ b/docs/versioned_docs/version-v0.19.1/noir_js/reference/noir_js/classes/Noir.md @@ -95,7 +95,7 @@ Generates a witness and a proof given an object as input. #### Example ```typescript -async generateFinalproof(input) +async generateFinalProof(input) ``` *** diff --git a/docs/versioned_docs/version-v0.19.1/standard_library/black_box_fns.md b/docs/versioned_docs/version-v0.19.1/standard_library/black_box_fns.md index 1dfabfe8f22..e0c6d475c1f 100644 --- a/docs/versioned_docs/version-v0.19.1/standard_library/black_box_fns.md +++ b/docs/versioned_docs/version-v0.19.1/standard_library/black_box_fns.md @@ -43,4 +43,4 @@ Here is a list of the current black box functions that are supported by UltraPlo Most black box functions are included as part of the Noir standard library, however `AND`, `XOR` and `RANGE` are used as part of the Noir language syntax. For instance, using the bitwise operator `&` will invoke the `AND` black box function. To ensure compatibility across backends, the ACVM has fallback implementations of `AND`, `XOR` and `RANGE` defined in its standard library which it can seamlessly fallback to if the backend doesn't support them. -You can view the black box functions defined in the ACVM code [here](https://github.com/noir-lang/acvm/blob/acir-v0.12.0/acir/src/circuit/black_box_functions.rs). +You can view the black box functions defined in the ACVM code [here](https://github.com/noir-lang/noir/blob/master/acvm-repo/acir/src/circuit/black_box_functions.rs). diff --git a/docs/versioned_docs/version-v0.19.1/standard_library/logging.md b/docs/versioned_docs/version-v0.19.1/standard_library/logging.md index 7e2fd9b9aff..4ba0fe0e656 100644 --- a/docs/versioned_docs/version-v0.19.1/standard_library/logging.md +++ b/docs/versioned_docs/version-v0.19.1/standard_library/logging.md @@ -22,7 +22,7 @@ implementation of rust's `println!` macro, this construct can be useful for debu You can print the output of println statements in your Noir code by using the `nargo execute` command or the `--show-output` flag when using `nargo test` (provided there are println statements in your tests). -It is recommended to use `nargo execute` if you want to debug failing constrains with `println` statements. This is due to every input in a test being a constant rather than a witness, so we issue an error during compilation while we only print during execution (which comes after compilation). `println` will not work for failed constraints caught at compile time. +It is recommended to use `nargo execute` if you want to debug failing constraints with `println` statements. This is due to every input in a test being a constant rather than a witness, so we issue an error during compilation while we only print during execution (which comes after compilation). `println` will not work for failed constraints caught at compile time. The `println` statement is unconstrained, so it works for outputting integers, fields, strings, and even structs or expressions. 
For example: diff --git a/docs/versioned_docs/version-v0.19.2/getting_started/00_nargo_installation.md b/docs/versioned_docs/version-v0.19.2/getting_started/00_nargo_installation.md index 725c5f4d373..7d3c88c7693 100644 --- a/docs/versioned_docs/version-v0.19.2/getting_started/00_nargo_installation.md +++ b/docs/versioned_docs/version-v0.19.2/getting_started/00_nargo_installation.md @@ -14,7 +14,7 @@ Alternatively, the interactions can also be performed in [NoirJS](../noir_js/noi ### UltraPlonk -Nargo versions <0.5.0 of `aztec_backend` and `aztec_wasm_backend` are based on the TurboPlonk +Nargo versions \<0.5.0 of `aztec_backend` and `aztec_wasm_backend` are based on the TurboPlonk version of Aztec Backend, which lacks efficient implementations of useful primitives (e.g. Keccak256 in 18k constraints, ECDSA verification in 36k constraints) that the UltraPlonk version offers. ## Installation @@ -140,9 +140,9 @@ Commands: ### Option 3: Compile from Source -Due to the large number of native dependencies, Noir projects uses [Nix](https://nixos.org/) and [direnv](https://direnv.net/) to streamline the development experience. It helps mitigating ssues commonly associated with dependency management, such as conflicts between required package versions for different projects (often referred to as "dependency hell"). +Due to the large number of native dependencies, Noir projects use [Nix](https://nixos.org/) and [direnv](https://direnv.net/) to streamline the development experience. It helps mitigate issues commonly associated with dependency management, such as conflicts between required package versions for different projects (often referred to as "dependency hell"). -Combined with direnv, which automatically sets or unsets environment variables based on the directory, it further simplifies the development process by seamlessly integrating with the developer's shell, facilitating an efficient and reliable workflow for managing and deploying Noir projects with multiple dependencies. +Combined with direnv, which automatically sets or clears environment variables based on the directory, it further simplifies the development process by seamlessly integrating with the developer's shell, facilitating an efficient and reliable workflow for managing and deploying Noir projects with multiple dependencies. #### Setting up your environment @@ -220,7 +220,7 @@ Advanced: If you aren't using direnv nor launching your editor within the subshe ### Option 4: WSL (for Windows) -The default backend for Noir (Barretenberg) doesn't provide Windows binaries at this time. For that reason, Noir cannot be installed nativerly. However, it is available by using Windows Subsystem for Linux (WSL). +The default backend for Noir (Barretenberg) doesn't provide Windows binaries at this time. For that reason, Noir cannot be installed natively. However, it is available by using Windows Subsystem for Linux (WSL). Step 1: Follow the instructions [here](https://learn.microsoft.com/en-us/windows/wsl/install) to install and run WSL.
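To make the WSL route above concrete, the steps below simply reuse the noirup commands documented in Option 1 of the same guide, on the assumption that they work unchanged inside the WSL shell (a sketch, not an officially tested recipe):

```bash
# Run inside the WSL shell after completing Step 1
curl -L https://raw.githubusercontent.com/noir-lang/noirup/main/install | bash
# Close the terminal, open another one, then:
noirup
nargo --version
```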
diff --git a/docs/versioned_docs/version-v0.19.2/getting_started/02_breakdown.md b/docs/versioned_docs/version-v0.19.2/getting_started/02_breakdown.md index 9a17f5d6360..d28a54a1600 100644 --- a/docs/versioned_docs/version-v0.19.2/getting_started/02_breakdown.md +++ b/docs/versioned_docs/version-v0.19.2/getting_started/02_breakdown.md @@ -39,7 +39,7 @@ Example Nargo.toml: ```toml [package] -name = "noirstarter" +name = "noir_starter" type = "bin" authors = ["Alice"] compiler_version = "0.9.0" diff --git a/docs/versioned_docs/version-v0.19.2/index.md b/docs/versioned_docs/version-v0.19.2/index.md index 75e1abf2932..380368db036 100644 --- a/docs/versioned_docs/version-v0.19.2/index.md +++ b/docs/versioned_docs/version-v0.19.2/index.md @@ -25,7 +25,7 @@ slug: / Noir is a Domain Specific Language for SNARK proving systems. It has been designed to use any ACIR compatible proving system. -It's design choices are influenced heavily by Rust and focuses on a simple, familiar syntax. +Its design choices are influenced heavily by Rust and focuses on a simple, familiar syntax. ## Who is Noir for? @@ -34,7 +34,7 @@ Noir can be used for a variety of purposes. ### Solidity Developers Noir currently includes a command to create a Solidity contract which verifies your Noir program. This will -be modularised in the future; however, as of the alpha, you can use the [`nargo codegen-verifier`](./nargo/commands#nargo-codegen-verifier) command to create +be modularized in the future; however, as of the alpha, you can use the [`nargo codegen-verifier`](./nargo/commands#nargo-codegen-verifier) command to create a verifier contract. ### Protocol Developers @@ -55,7 +55,7 @@ must be implemented for it. Noir is simple and flexible in its design, as it does not compile immediately to a fixed NP-complete language. Instead, Noir compiles to an intermediate language (ACIR), which itself can be compiled -to an arithmetic circuit (if choosing to target Aztec's barretenberg backend) or a rank-1 constraint system (if choosing to target an R1CS backend like Arkwork's Marlin backend, or others). +to an arithmetic circuit (if choosing to target Aztec's barretenberg backend) or a rank-1 constraint system (if choosing to target an R1CS backend like Arkworks' Marlin backend, or others). This in itself brings up a few challenges within the design process, but allows one to decouple the programming language completely from the backend. This is similar in theory to LLVM. @@ -67,7 +67,7 @@ Compiler: - For expressions - Arrays - Bit Operations -- Binary operations (<, <=, >, >=, +, -, \*, /, %) [See documentation for an extensive list] +- Binary operations (\<, \<=, >, >=, +, -, \*, /, %) [See documentation for an extensive list] - Unsigned integers - If statements - Structures and Tuples diff --git a/docs/versioned_docs/version-v0.19.2/language_concepts/01_functions.md b/docs/versioned_docs/version-v0.19.2/language_concepts/01_functions.md index 47cdea0cf04..5eb22170e54 100644 --- a/docs/versioned_docs/version-v0.19.2/language_concepts/01_functions.md +++ b/docs/versioned_docs/version-v0.19.2/language_concepts/01_functions.md @@ -30,7 +30,7 @@ All parameters in a function must have a type and all types are known at compile is pre-pended with a colon and the parameter type. Multiple parameters are separated using a comma. ```rust -fn foo(x : Field, y : pub Field){} +fn foo(x : Field, y : Field){} ``` The return type of a function can be stated by using the `->` arrow notation. 
The function below @@ -38,7 +38,7 @@ states that the foo function must return a `Field`. If the function returns no v is omitted. ```rust -fn foo(x : Field, y : pub Field) -> Field { +fn foo(x : Field, y : Field) -> Field { x + y } ``` diff --git a/docs/versioned_docs/version-v0.19.2/language_concepts/03_ops.md b/docs/versioned_docs/version-v0.19.2/language_concepts/03_ops.md index da02b126059..0e35ef5e584 100644 --- a/docs/versioned_docs/version-v0.19.2/language_concepts/03_ops.md +++ b/docs/versioned_docs/version-v0.19.2/language_concepts/03_ops.md @@ -29,11 +29,11 @@ keywords: | ^ | XOR two private input types together | Types must be integer | | & | AND two private input types together | Types must be integer | | \| | OR two private input types together | Types must be integer | -| << | Left shift an integer by another integer amount | Types must be integer | +| \<\< | Left shift an integer by another integer amount | Types must be integer | | >> | Right shift an integer by another integer amount | Types must be integer | | ! | Bitwise not of a value | Type must be integer or boolean | -| < | returns a bool if one value is less than the other | Upper bound must have a known bit size | -| <= | returns a bool if one value is less than or equal to the other | Upper bound must have a known bit size | +| \< | returns a bool if one value is less than the other | Upper bound must have a known bit size | +| \<= | returns a bool if one value is less than or equal to the other | Upper bound must have a known bit size | | > | returns a bool if one value is more than the other | Upper bound must have a known bit size | | >= | returns a bool if one value is more than or equal to the other | Upper bound must have a known bit size | | == | returns a bool if one value is equal to the other | Both types must not be constants | @@ -62,7 +62,7 @@ fn main(x : Field) { Noir has no support for the logical operators `||` and `&&`. This is because encoding the short-circuiting that these operators require can be inefficient for Noir's backend. Instead you can -use the bitwise operators `|` and `&` which operate indentically for booleans, just without the +use the bitwise operators `|` and `&` which operate identically for booleans, just without the short-circuiting. ```rust diff --git a/docs/versioned_docs/version-v0.19.2/language_concepts/05_unconstrained.md b/docs/versioned_docs/version-v0.19.2/language_concepts/05_unconstrained.md index 6b621eda3eb..097d6ee9894 100644 --- a/docs/versioned_docs/version-v0.19.2/language_concepts/05_unconstrained.md +++ b/docs/versioned_docs/version-v0.19.2/language_concepts/05_unconstrained.md @@ -7,7 +7,7 @@ keywords: [Noir programming language, unconstrained, open] -Unconstrained functions are functions which do not constrain any of the included computation and allow for non-determinisitic computation. +Unconstrained functions are functions which do not constrain any of the included computation and allow for non-deterministic computation. ## Why? 
diff --git a/docs/versioned_docs/version-v0.19.2/language_concepts/07_mutability.md b/docs/versioned_docs/version-v0.19.2/language_concepts/07_mutability.md index 4641521b1d9..ed3fed820ec 100644 --- a/docs/versioned_docs/version-v0.19.2/language_concepts/07_mutability.md +++ b/docs/versioned_docs/version-v0.19.2/language_concepts/07_mutability.md @@ -37,7 +37,7 @@ Note that mutability in noir is local and everything is passed by value, so if a mutates its parameters then the parent function will keep the old value of the parameters. ```rust -fn main() -> Field { +fn main() -> pub Field { let x = 3; helper(x); x // x is still 3 @@ -69,11 +69,11 @@ fn main(x : Field, y : [Field; N]) { assert(res == y[0]); - let res2 = x * mysubmodule::N; + let res2 = x * my_submodule::N; assert(res != res2); } -mod mysubmodule { +mod my_submodule { use dep::std; global N: Field = 10; diff --git a/docs/versioned_docs/version-v0.19.2/language_concepts/data_types.md b/docs/versioned_docs/version-v0.19.2/language_concepts/data_types.md index d546cc463a8..74f573f7d3f 100644 --- a/docs/versioned_docs/version-v0.19.2/language_concepts/data_types.md +++ b/docs/versioned_docs/version-v0.19.2/language_concepts/data_types.md @@ -93,4 +93,4 @@ fn main() { ### BigInt -You can acheive BigInt functionality using the [Noir BigInt](https://github.com/shuklaayush/noir-bigint) library. +You can achieve BigInt functionality using the [Noir BigInt](https://github.com/shuklaayush/noir-bigint) library. diff --git a/docs/versioned_docs/version-v0.19.2/language_concepts/data_types/00_fields.md b/docs/versioned_docs/version-v0.19.2/language_concepts/data_types/00_fields.md index 658a0441ffb..78d3d2af166 100644 --- a/docs/versioned_docs/version-v0.19.2/language_concepts/data_types/00_fields.md +++ b/docs/versioned_docs/version-v0.19.2/language_concepts/data_types/00_fields.md @@ -158,7 +158,7 @@ fn main() { ### sgn0 -Parity of (prime) Field element, i.e. sgn0(x mod p) = 0 if x ∈ {0, ..., p-1} is even, otherwise sgn0(x mod p) = 1. +Parity of (prime) Field element, i.e. sgn0(x mod p) = 0 if x ∈ \{0, ..., p-1\} is even, otherwise sgn0(x mod p) = 1. ```rust fn sgn0(self) -> u1 diff --git a/docs/versioned_docs/version-v0.19.2/migration_notes.md b/docs/versioned_docs/version-v0.19.2/migration_notes.md index e87eb1feaba..905bca3d30c 100644 --- a/docs/versioned_docs/version-v0.19.2/migration_notes.md +++ b/docs/versioned_docs/version-v0.19.2/migration_notes.md @@ -50,7 +50,7 @@ If you are using the default barretenberg backend, simply run: nargo prove ``` -with you Noir program. +with your Noir program. This will trigger the download and installation of the latest version of barretenberg compatible with your Nargo in use. @@ -78,7 +78,7 @@ nargo backend install acvm-backend-barretenberg https://github.com/noir-lang/bar This downloads and installs a specific bb.js based version of barretenberg binary from GitHub. -The gzipped filed is running this bash script: , where we need to gzip it as the Nargo currently expect the backend to be zipped up. +The gzipped file is running [this bash script](https://github.com/noir-lang/barretenberg-js-binary/blob/master/run-bb-js.sh), where we need to gzip it as the Nargo currently expect the backend to be zipped up. 
Then run: diff --git a/docs/versioned_docs/version-v0.19.2/modules_packages_crates/crates_and_packages.md b/docs/versioned_docs/version-v0.19.2/modules_packages_crates/crates_and_packages.md index fb83a33d94e..744de72bb2c 100644 --- a/docs/versioned_docs/version-v0.19.2/modules_packages_crates/crates_and_packages.md +++ b/docs/versioned_docs/version-v0.19.2/modules_packages_crates/crates_and_packages.md @@ -23,7 +23,7 @@ _Library crates_ don't have a `main` function and they don't compile down to ACI #### Contracts -Contract crates are similar to binary crates in that they compile to ACIR which you can create proofs against. They are different in that they do not have a single `main` function, but are a collection of functions to be deployed to the [Aztec network](https://aztec.network). You can learn more about the technical details of Aztec in the [monorepo](https://github.com/AztecProtocol/aztec-packages) or contract [examples](https://github.com/AztecProtocol/aztec-packages/tree/master/yarn-project/noir-contracts/src/contracts). +Contract crates are similar to binary crates in that they compile to ACIR which you can create proofs against. They are different in that they do not have a single `main` function, but are a collection of functions to be deployed to the [Aztec network](https://aztec.network). You can learn more about the technical details of Aztec in the [monorepo](https://github.com/AztecProtocol/aztec-packages) or contract [examples](https://github.com/AztecProtocol/aztec-packages/tree/master/yarn-project/noir-contracts/contracts). ### Crate Root diff --git a/docs/versioned_docs/version-v0.19.2/modules_packages_crates/dependencies.md b/docs/versioned_docs/version-v0.19.2/modules_packages_crates/dependencies.md index 75f95aaa305..e91e73a4c4f 100644 --- a/docs/versioned_docs/version-v0.19.2/modules_packages_crates/dependencies.md +++ b/docs/versioned_docs/version-v0.19.2/modules_packages_crates/dependencies.md @@ -34,7 +34,7 @@ If the module is in a subdirectory, you can define a subdirectory in your git re # Nargo.toml [dependencies] -easy_private_token_contract = {tag ="v0.1.0-alpha62", git = "https://github.com/AztecProtocol/aztec-packages", directory = "yarn-project/noir-contracts/src/contracts/easy_private_token_contract"} +easy_private_token_contract = {tag ="v0.1.0-alpha62", git = "https://github.com/AztecProtocol/aztec-packages", directory = "yarn-project/noir-contracts/contracts/easy_private_token_contract"} ``` ## Specifying a local dependency @@ -48,7 +48,7 @@ For example, this file structure has a library and binary crate │   ├── Nargo.toml │   └── src │   └── main.nr -└── liba +└── lib_a ├── Nargo.toml └── src └── lib.nr @@ -60,17 +60,17 @@ Inside of the binary crate, you can specify: # Nargo.toml [dependencies] -libA = { path = "../liba" } +lib_a = { path = "../lib_a" } ``` ## Importing dependencies You can import a dependency to a Noir file using the following syntax. 
For example, to import the -ecrecover-noir library and local liba referenced above: +ecrecover-noir library and local lib_a referenced above: ```rust use dep::ecrecover; -use dep::libA; +use dep::lib_a; ``` You can also import only the specific parts of dependency that you want to use, like so: diff --git a/docs/versioned_docs/version-v0.19.2/modules_packages_crates/modules.md b/docs/versioned_docs/version-v0.19.2/modules_packages_crates/modules.md index 147c9b284e8..11e60cbf35e 100644 --- a/docs/versioned_docs/version-v0.19.2/modules_packages_crates/modules.md +++ b/docs/versioned_docs/version-v0.19.2/modules_packages_crates/modules.md @@ -10,7 +10,7 @@ Noir's module system follows the same convention as the _newer_ version of Rust' ## Purpose of Modules -Modules are used to organise files. Without modules all of your code would need to live in a single +Modules are used to organize files. Without modules all of your code would need to live in a single file. In Noir, the compiler does not automatically scan all of your files to detect modules. This must be done explicitly by the developer. diff --git a/docs/versioned_docs/version-v0.19.2/nargo/02_testing.md b/docs/versioned_docs/version-v0.19.2/nargo/02_testing.md index da767274efd..5c57ef92705 100644 --- a/docs/versioned_docs/version-v0.19.2/nargo/02_testing.md +++ b/docs/versioned_docs/version-v0.19.2/nargo/02_testing.md @@ -23,8 +23,8 @@ fn test_add() { } ``` -Running `nargo test` will test that the `test_add` function can be executed while satisfying the all -the contraints which allows you to test that add returns the expected values. Test functions can't +Running `nargo test` will test that the `test_add` function can be executed while satisfying all +the constraints which allows you to test that add returns the expected values. Test functions can't have any arguments currently. ### Test fail diff --git a/docs/versioned_docs/version-v0.19.2/noir_js/reference/noir_js/classes/Noir.md b/docs/versioned_docs/version-v0.19.2/noir_js/reference/noir_js/classes/Noir.md index a8a0bb451c1..1d7b54a9dca 100644 --- a/docs/versioned_docs/version-v0.19.2/noir_js/reference/noir_js/classes/Noir.md +++ b/docs/versioned_docs/version-v0.19.2/noir_js/reference/noir_js/classes/Noir.md @@ -95,7 +95,7 @@ Generates a witness and a proof given an object as input. #### Example ```typescript -async generateFinalproof(input) +async generateFinalProof(input) ``` *** diff --git a/docs/versioned_docs/version-v0.19.2/standard_library/black_box_fns.md b/docs/versioned_docs/version-v0.19.2/standard_library/black_box_fns.md index 1dfabfe8f22..e0c6d475c1f 100644 --- a/docs/versioned_docs/version-v0.19.2/standard_library/black_box_fns.md +++ b/docs/versioned_docs/version-v0.19.2/standard_library/black_box_fns.md @@ -43,4 +43,4 @@ Here is a list of the current black box functions that are supported by UltraPlo Most black box functions are included as part of the Noir standard library, however `AND`, `XOR` and `RANGE` are used as part of the Noir language syntax. For instance, using the bitwise operator `&` will invoke the `AND` black box function. To ensure compatibility across backends, the ACVM has fallback implementations of `AND`, `XOR` and `RANGE` defined in its standard library which it can seamlessly fallback to if the backend doesn't support them. -You can view the black box functions defined in the ACVM code [here](https://github.com/noir-lang/acvm/blob/acir-v0.12.0/acir/src/circuit/black_box_functions.rs). 
+You can view the black box functions defined in the ACVM code [here](https://github.com/noir-lang/noir/blob/master/acvm-repo/acir/src/circuit/black_box_functions.rs). diff --git a/docs/versioned_docs/version-v0.19.2/standard_library/logging.md b/docs/versioned_docs/version-v0.19.2/standard_library/logging.md index 7e2fd9b9aff..4ba0fe0e656 100644 --- a/docs/versioned_docs/version-v0.19.2/standard_library/logging.md +++ b/docs/versioned_docs/version-v0.19.2/standard_library/logging.md @@ -22,7 +22,7 @@ implementation of rust's `println!` macro, this construct can be useful for debu You can print the output of println statements in your Noir code by using the `nargo execute` command or the `--show-output` flag when using `nargo test` (provided there are println statements in your tests). -It is recommended to use `nargo execute` if you want to debug failing constrains with `println` statements. This is due to every input in a test being a constant rather than a witness, so we issue an error during compilation while we only print during execution (which comes after compilation). `println` will not work for failed constraints caught at compile time. +It is recommended to use `nargo execute` if you want to debug failing constraints with `println` statements. This is due to every input in a test being a constant rather than a witness, so we issue an error during compilation while we only print during execution (which comes after compilation). `println` will not work for failed constraints caught at compile time. The `println` statement is unconstrained, so it works for outputting integers, fields, strings, and even structs or expressions. For example: diff --git a/docs/versioned_docs/version-v0.19.3/examples/merkle-proof.mdx b/docs/versioned_docs/version-v0.19.3/examples/merkle-proof.mdx new file mode 100644 index 00000000000..832fb4bb55e --- /dev/null +++ b/docs/versioned_docs/version-v0.19.3/examples/merkle-proof.mdx @@ -0,0 +1,48 @@ +--- +title: Merkle Proof Membership +description: + Learn how to use merkle membership proof in Noir to prove that a given leaf is a member of a + merkle tree with a specified root, at a given index. +keywords: + [merkle proof, merkle membership proof, Noir, rust, hash function, Pedersen, sha256, merkle tree] +--- + +Let's walk through an example of a merkle membership proof in Noir that proves that a given leaf is +in a merkle tree. + +```rust +use dep::std; + +fn main(message : [Field; 62], index : Field, hashpath : [Field; 40], root : Field) { + let leaf = std::hash::hash_to_field(message); + let merkle_root = std::merkle::compute_merkle_root(leaf, index, hashpath); + assert(merkle_root == root); +} + +``` + +The message is hashed using `hash_to_field`. The specific hash function that is being used is chosen +by the backend. The only requirement is that this hash function can heuristically be used as a +random oracle. If only collision resistance is needed, then one can call `std::hash::pedersen_hash` +instead. + +```rust +let leaf = std::hash::hash_to_field(message); +``` + +The leaf is then passed to a compute_merkle_root function with the root, index and hashpath. The returned root can then be asserted to be the same as the provided root. + +```rust +let merkle_root = std::merkle::compute_merkle_root(leaf, index, hashpath); +assert (merkle_root == root); +``` + +> **Note:** It is possible to re-implement the merkle tree implementation without standard library. +> However, for most usecases, it is enough. 
In general, the standard library will always opt to be +> as conservative as possible, while striking a balance with efficiency. + +An example, the merkle membership proof, only requires a hash function that has collision +resistance, hence a hash function like Pedersen is allowed, which in most cases is more efficient +than the even more conservative sha256. + +[View an example on the starter repo](https://github.com/noir-lang/noir-examples/blob/3ea09545cabfa464124ec2f3ea8e60c608abe6df/stealthdrop/circuits/src/main.nr#L20) diff --git a/docs/versioned_docs/version-v0.19.3/getting_started/00_nargo_installation.md b/docs/versioned_docs/version-v0.19.3/getting_started/00_nargo_installation.md new file mode 100644 index 00000000000..7d3c88c7693 --- /dev/null +++ b/docs/versioned_docs/version-v0.19.3/getting_started/00_nargo_installation.md @@ -0,0 +1,249 @@ +--- +title: Nargo Installation +description: + nargo is a command line tool for interacting with Noir programs (e.g. compiling, proving, + verifying and more). Learn how to install and use Nargo for your projects with this comprehensive + guide. +keywords: [Nargo, command line tool, Noir programs, installation guide, how to use Nargo] +--- + +`nargo` is a command line tool for interacting with Noir programs (e.g. compiling, proving, +verifying and more). + +Alternatively, the interactions can also be performed in [NoirJS](../noir_js/noir_js.md). + +### UltraPlonk + +Nargo versions \<0.5.0 of `aztec_backend` and `aztec_wasm_backend` are based on the TurboPlonk +version of Aztec Backend, which lacks efficient implementations of useful primitives (e.g. Keccak256 in 18k constraints, ECDSA verification in 36k constraints) that the UltraPlonk version offers. + +## Installation + +There are four approaches for installing Nargo: + +- [Option 1: Noirup](#option-1-noirup) +- [Option 2: Binaries](#option-2-binaries) +- [Option 3: Compile from Source](#option-3-compile-from-source) +- [Option 4: WSL for Windows](#option-4-wsl-for-windows) + +Optionally you can also install [Noir VS Code extension] for syntax highlighting. + +### Option 1: Noirup + +If you're on OSX or Linux, the easiest way to start using Noir and Nargo is via noirup. Just open a +terminal and run: + +```bash +curl -L https://raw.githubusercontent.com/noir-lang/noirup/main/install | bash +``` + +Close the terminal, open another one, and run + +```bash +noirup +``` + +Done, you should have the latest version working. You can check with `nargo --version`. + +You can also install nightlies, specific versions +or branches, check out the [noirup repository](https://github.com/noir-lang/noirup) for more +information. + +#### GitHub Actions + +You can use `noirup` with GitHub Actions for CI/CD and automated testing. It is as simple as +installing `noirup` and running tests in your GitHub Action `yml` file. + +See the +[config file](https://github.com/TomAFrench/noir-hashes/blob/master/.github/workflows/noir.yml) in +this repo containing hash functions in Noir for an example. + +#### Nightly versions + +To install the nightly version of Noir (updated daily) run: + +```bash +noirup -n +``` + +### Option 2: Binaries + +See [GitHub Releases](https://github.com/noir-lang/noir/releases) for the latest and previous +platform specific binaries. + +#### Step 1 + +Paste and run the following in the terminal to extract and install the binary: + +> **macOS / Linux:** If you are prompted with `Permission denied` when running commands, prepend +> `sudo` and re-run it. 
+ +##### macOS (Apple Silicon) + +```bash +mkdir -p $HOME/.nargo/bin && \ +curl -o $HOME/.nargo/bin/nargo-aarch64-apple-darwin.tar.gz -L https://github.com/noir-lang/noir/releases/download/v0.6.0/nargo-aarch64-apple-darwin.tar.gz && \ +tar -xvf $HOME/.nargo/bin/nargo-aarch64-apple-darwin.tar.gz -C $HOME/.nargo/bin/ && \ +echo '\nexport PATH=$PATH:$HOME/.nargo/bin' >> ~/.zshrc && \ +source ~/.zshrc +``` + +##### macOS (Intel) + +```bash +mkdir -p $HOME/.nargo/bin && \ +curl -o $HOME/.nargo/bin/nargo-x86_64-apple-darwin.tar.gz -L https://github.com/noir-lang/noir/releases/download/v0.6.0/nargo-x86_64-apple-darwin.tar.gz && \ +tar -xvf $HOME/.nargo/bin/nargo-x86_64-apple-darwin.tar.gz -C $HOME/.nargo/bin/ && \ +echo '\nexport PATH=$PATH:$HOME/.nargo/bin' >> ~/.zshrc && \ +source ~/.zshrc +``` + +##### Linux (Bash) + +```bash +mkdir -p $HOME/.nargo/bin && \ +curl -o $HOME/.nargo/bin/nargo-x86_64-unknown-linux-gnu.tar.gz -L https://github.com/noir-lang/noir/releases/download/v0.6.0/nargo-x86_64-unknown-linux-gnu.tar.gz && \ +tar -xvf $HOME/.nargo/bin/nargo-x86_64-unknown-linux-gnu.tar.gz -C $HOME/.nargo/bin/ && \ +echo -e '\nexport PATH=$PATH:$HOME/.nargo/bin' >> ~/.bashrc && \ +source ~/.bashrc +``` + +#### Step 2 + +Check if the installation was successful by running `nargo --help`. + +> **macOS:** If you are prompted with an OS alert, right-click and open the _nargo_ executable from +> Finder. Close the new terminal popped up and `nargo` should now be accessible. + +For a successful installation, you should see something similar to the following after running the +command: + +```sh +$ nargo --help + +Noir's package manager + +Usage: nargo + +Commands: + check Checks the constraint system for errors + codegen-verifier Generates a Solidity verifier smart contract for the program + compile Compile the program and its secret execution trace into ACIR format + new Create a new binary project + execute Executes a circuit to calculate its return value + prove Create proof for this program. The proof is returned as a hex encoded string + verify Given a proof and a program, verify whether the proof is valid + test Run the tests for this program + gates Counts the occurrences of different gates in circuit + help Print this message or the help of the given subcommand(s) +``` + +### Option 3: Compile from Source + +Due to the large number of native dependencies, Noir projects uses [Nix](https://nixos.org/) and [direnv](https://direnv.net/) to streamline the development experience. It helps mitigating issues commonly associated with dependency management, such as conflicts between required package versions for different projects (often referred to as "dependency hell"). + +Combined with direnv, which automatically sets or clears environment variables based on the directory, it further simplifies the development process by seamlessly integrating with the developer's shell, facilitating an efficient and reliable workflow for managing and deploying Noir projects with multiple dependencies. + +#### Setting up your environment + +For the best experience, please follow these instructions to setup your environment: + +1. Install Nix following [their guide](https://nixos.org/download.html) for your operating system. +2. Create the file `~/.config/nix/nix.conf` with the contents: + +```ini +experimental-features = nix-command +extra-experimental-features = flakes +``` + +3. Install direnv into your Nix profile by running: + +```sh +nix profile install nixpkgs#direnv +``` + +4. 
Add direnv to your shell following [their guide](https://direnv.net/docs/hook.html). + 1. For bash or zshell, add `eval "$(direnv hook bash)"` or `eval "$(direnv hook zsh)"` to your ~/.bashrc or ~/.zshrc file, respectively. +5. Restart your shell. + +#### Shell & editor experience + +Now that your environment is set up, you can get to work on the project. + +1. Clone the repository, such as: + +```sh +git clone git@github.com:noir-lang/noir +``` + +> Replacing `noir` with whichever repository you want to work on. + +2. Navigate to the directory: + +```sh +cd noir +``` + +> Replacing `noir` with whichever repository you cloned. + +3. You should see a **direnv error** because projects aren't allowed by default. Make sure you've reviewed and trust our `.envrc` file, then you need to run: + +```sh +direnv allow +``` + +4. Now, wait awhile for all the native dependencies to be built. This will take some time and direnv will warn you that it is taking a long time, but we just need to let it run. + +5. Once you are presented with your prompt again, you can start your editor within the project directory (we recommend [VSCode](https://code.visualstudio.com/)): + +```sh +code . +``` + +6. (Recommended) When launching VSCode for the first time, you should be prompted to install our recommended plugins. We highly recommend installing these for the best development experience. + +#### Building and testing + +Assuming you are using `direnv` to populate your environment, building and testing the project can be done +with the typical `cargo build`, `cargo test`, and `cargo clippy` commands. You'll notice that the `cargo` version matches the version we specify in `rust-toolchain.toml`, which is 1.71.1 at the time of this writing. + +If you want to build the entire project in an isolated sandbox, you can use Nix commands: + +1. `nix build .` (or `nix build . -L` for verbose output) to build the project in a Nix sandbox. +2. `nix flake check` (or `nix flake check -L` for verbose output) to run clippy and tests in a Nix sandbox. + +#### Without `direnv` + +If you have hesitations with using direnv, you can launch a subshell with `nix develop` and then launch your editor from within the subshell. However, if VSCode was already launched in the project directory, the environment won't be updated. + +Advanced: If you aren't using direnv nor launching your editor within the subshell, you can try to install Barretenberg and other global dependencies the package needs. This is an advanced workflow and likely won't receive support! + +### Option 4: WSL (for Windows) + +The default backend for Noir (Barretenberg) doesn't provide Windows binaries at this time. For that reason, Noir cannot be installed natively. However, it is available by using Windows Subsystem for Linux (WSL). + +Step 1: Follow the instructions [here](https://learn.microsoft.com/en-us/windows/wsl/install) to install and run WSL. + +step 2: Follow the [Noirup instructions](#option-1-noirup). + +## Uninstalling Nargo + +### Noirup + +If you installed Noir with `noirup`, you can uninstall Noir by removing the files in `~/.nargo`, `~/nargo` and `~/noir_cache`. + +```bash +rm -r ~/.nargo +rm -r ~/nargo +rm -r ~/noir_cache +``` + +### Nix + +If you installed Noir with Nix or from source, you can remove the binary located at `~/.nix-profile/bin/nargo`. 
+ +```bash +rm ~/.nix-profile/bin/nargo +``` + +[noir vs code extension]: https://marketplace.visualstudio.com/items?itemName=noir-lang.vscode-noir diff --git a/docs/versioned_docs/version-v0.10.5/getting_started/01_hello_world.md b/docs/versioned_docs/version-v0.19.3/getting_started/01_hello_world.md similarity index 100% rename from docs/versioned_docs/version-v0.10.5/getting_started/01_hello_world.md rename to docs/versioned_docs/version-v0.19.3/getting_started/01_hello_world.md diff --git a/docs/versioned_docs/version-v0.19.3/getting_started/02_breakdown.md b/docs/versioned_docs/version-v0.19.3/getting_started/02_breakdown.md new file mode 100644 index 00000000000..7a7fb876c35 --- /dev/null +++ b/docs/versioned_docs/version-v0.19.3/getting_started/02_breakdown.md @@ -0,0 +1,198 @@ +--- +title: Project Breakdown +description: + Learn about the anatomy of a Nargo project, including the purpose of the Prover and Verifier TOML + files, and how to prove and verify your program. +keywords: + [Nargo, Nargo project, Prover.toml, Verifier.toml, proof verification, private asset transfer] +--- + +This section breaks down our hello world program in section _1.2_. We elaborate on the project +structure and what the `prove` and `verify` commands did in the previous section. + +## Anatomy of a Nargo Project + +Upon creating a new project with `nargo new` and building the in/output files with `nargo check` +commands, you would get a minimal Nargo project of the following structure: + + - src + - Prover.toml + - Verifier.toml + - Nargo.toml + +The source directory _src_ holds the source code for your Noir program. By default only a _main.nr_ +file will be generated within it. + +### Prover.toml + +_Prover.toml_ is used for specifying the input values for executing and proving the program. You can specify `toml` files with different names by using the `--prover-name` or `-p` flags, see the [Prover](#provertoml) section below. Optionally you may specify expected output values for prove-time checking as well. + +### Verifier.toml + +_Verifier.toml_ contains public in/output values computed when executing the Noir program. + +### Nargo.toml + +_Nargo.toml_ contains the environmental options of your project. It contains a "package" section and a "dependencies" section. + +Example Nargo.toml: + +```toml +[package] +name = "noir_starter'" +type = "bin" +authors = ["Alice"] +compiler_version = "0.9.0" +description = "Getting started with Noir" +entry = "circuit/main.nr" +license = "MIT" + +[dependencies] +ecrecover = {tag = "v0.9.0", git = "https://github.com/colinnielsen/ecrecover-noir.git"} +``` + +Nargo.toml for a [workspace](../modules_packages_crates/workspaces) will look a bit different. For example: + +```toml +[workspace] +members = ["crates/a", "crates/b"] +default-member = "crates/a" +``` + +#### Package section + +The package section requires a number of fields including: + +- `name` (**required**) - the name of the package +- `type` (**required**) - can be "bin", "lib", or "contract" to specify whether its a binary, library or Aztec contract +- `authors` (optional) - authors of the project +- `compiler_version` - specifies the version of the compiler to use. This is enforced by the compiler and follow's [Rust's versioning](https://doc.rust-lang.org/cargo/reference/manifest.html#the-version-field), so a `compiler_version = 0.18.0` will enforce Nargo version 0.18.0, `compiler_version = ^0.18.0` will enforce anything above 0.18.0 but below 0.19.0, etc. 
For more information, see how [Rust handles these operators](https://docs.rs/semver/latest/semver/enum.Op.html) +- `description` (optional) +- `entry` (optional) - a relative filepath to use as the entry point into your package (overrides the default of `src/lib.nr` or `src/main.nr`) +- `backend` (optional) +- `license` (optional) + +#### Dependencies section + +This is where you will specify any dependencies for your project. See the [Dependencies page](../modules_packages_crates/dependencies) for more info. + +`./proofs/` and `./contract/` directories will not be immediately visible until you create a proof or +verifier contract respectively. + +### main.nr + +The _main.nr_ file contains a `main` method, this method is the entry point into your Noir program. + +In our sample program, _main.nr_ looks like this: + +```rust +fn main(x : Field, y : Field) { + assert(x != y); +} +``` + +The parameters `x` and `y` can be seen as the API for the program and must be supplied by the +prover. Since neither `x` nor `y` is marked as public, the verifier does not supply any inputs, when +verifying the proof. + +The prover supplies the values for `x` and `y` in the _Prover.toml_ file. + +As for the program body, `assert` ensures the satisfaction of the condition (e.g. `x != y`) is +constrained by the proof of the execution of said program (i.e. if the condition was not met, the +verifier would reject the proof as an invalid proof). + +### Prover.toml + +The _Prover.toml_ file is a file which the prover uses to supply his witness values(both private and +public). + +In our hello world program the _Prover.toml_ file looks like this: + +```toml +x = "1" +y = "2" +``` + +When the command `nargo prove` is executed, two processes happen: + +1. Noir creates a proof that `x` which holds the value of `1` and `y` which holds the value of `2` + is not equal. This not equal constraint is due to the line `assert(x != y)`. + +2. Noir creates and stores the proof of this statement in the _proofs_ directory in a file called your-project.proof. So if your project is named "private_voting" (defined in the project Nargo.toml), the proof will be saved at `./proofs/private_voting.proof`. Opening this file will display the proof in hex format. + +#### Arrays of Structs + +The following code shows how to pass an array of structs to a Noir program to generate a proof. + +```rust +// main.nr +struct Foo { + bar: Field, + baz: Field, +} + +fn main(foos: [Foo; 3]) -> pub Field { + foos[2].bar + foos[2].baz +} +``` + +Prover.toml: + +```toml +[[foos]] # foos[0] +bar = 0 +baz = 0 + +[[foos]] # foos[1] +bar = 0 +baz = 0 + +[[foos]] # foos[2] +bar = 1 +baz = 2 +``` + +#### Custom toml files + +You can specify a `toml` file with a different name to use for proving by using the `--prover-name` or `-p` flags. + +This command looks for proof inputs in the default **Prover.toml** and generates the proof and saves it at `./proofs/.proof`: + +```bash +nargo prove +``` + +This command looks for proof inputs in the custom **OtherProver.toml** and generates proof and saves it at `./proofs/.proof`: + +```bash +nargo prove -p OtherProver +``` + +## Verifying a Proof + +When the command `nargo verify` is executed, two processes happen: + +1. Noir checks in the _proofs_ directory for a proof file with the project name (eg. test_project.proof) + +2. 
If that file is found, the proof's validity is checked. + +> **Note:** The validity of the proof is linked to the current Noir program; if the program is +> changed and the verifier verifies the proof, it will fail because the proof is not valid for the +> _modified_ Noir program. + +In production, the prover and the verifier are usually two separate entities. A prover would +retrieve the necessary inputs, execute the Noir program, generate a proof and pass it to the +verifier. The verifier would then retrieve the public inputs, usually from external sources, and +verify the validity of the proof against them. + +Take a private asset transfer as an example: + +A user in a browser, acting as the prover, would retrieve private inputs (e.g. the user's private key) and +public inputs (e.g. the user's encrypted balance on-chain), compute the transfer, generate a proof +and submit it to the verifier smart contract. + +The verifier contract would then draw the user's encrypted balance directly from the blockchain and +verify the proof submitted against it. If the verification passes, additional functions in the +verifier contract could be triggered (e.g. approve the asset transfer). + +Now that you understand the concepts, you'll probably want some editor feedback while you are writing more complex code. diff --git a/docs/versioned_docs/version-v0.19.3/index.md b/docs/versioned_docs/version-v0.19.3/index.md new file mode 100644 index 00000000000..380368db036 --- /dev/null +++ b/docs/versioned_docs/version-v0.19.3/index.md @@ -0,0 +1,100 @@ +--- +title: Introducing Noir +description: + Learn about the public alpha release of Noir, a domain specific language heavily influenced by + Rust that compiles to an intermediate language which can be compiled to an arithmetic circuit or a + rank-1 constraint system. +keywords: + [ + Noir, + Domain Specific Language, + Rust, + Intermediate Language, + Arithmetic Circuit, + Rank-1 Constraint System, + Ethereum Developers, + Protocol Developers, + Blockchain Developers, + Proving System, + Smart Contract Language, + ] +slug: / +--- + +## What is Noir? + +Noir is a Domain Specific Language for SNARK proving systems. It has been designed to use any ACIR compatible proving system. + +Its design choices are influenced heavily by Rust and focus on a simple, familiar syntax. + +## Who is Noir for? + +Noir can be used for a variety of purposes. + +### Solidity Developers + +Noir currently includes a command to create a Solidity contract which verifies your Noir program. This will +be modularized in the future; however, as of the alpha, you can use the [`nargo codegen-verifier`](./nargo/commands#nargo-codegen-verifier) command to create +a verifier contract. + +### Protocol Developers + +As a protocol developer, you may not want to use the Aztec backend due to it not being a fit for +your stack, or maybe you simply want to use a different proving system. Since Noir does not compile +to a specific proof system, it is possible for protocol developers to replace the PLONK-based +proving system with a different proving system altogether. + +### Blockchain developers + +As a blockchain developer, you will be constrained by parameters set by your blockchain (for example, the +proving system and smart contract language have been pre-defined). In order for you to use Noir in +your blockchain, a proving system backend and a smart contract interface +must be implemented for it. + +## What's new about Noir?
+ +Noir is simple and flexible in its design, as it does not compile immediately to a fixed +NP-complete language. Instead, Noir compiles to an intermediate language (ACIR), which itself can be compiled +to an arithmetic circuit (if choosing to target Aztec's barretenberg backend) or a rank-1 constraint system (if choosing to target an R1CS backend like Arkworks' Marlin backend, or others). + +This in itself brings up a few challenges within the design process, but allows one to decouple the programming language completely from the backend. This is similar in theory to LLVM. + +## Current Features + +Compiler: + +- Module System +- For expressions +- Arrays +- Bit Operations +- Binary operations (\<, \<=, >, >=, +, -, \*, /, %) [See documentation for an extensive list] +- Unsigned integers +- If statements +- Structures and Tuples +- Generics + +ACIR Supported OPCODES: + +- Sha256 +- Blake2s +- Schnorr signature verification +- MerkleMembership +- Pedersen Commitment +- Pedersen Hash +- HashToField + +## Libraries + +Noir does not currently have an official package manager. You can find a list of available Noir libraries in the [awesome-noir repo here](https://github.com/noir-lang/awesome-noir#libraries). + +Some libraries that are available today include: + +- [Standard Library](https://github.com/noir-lang/noir/tree/master/noir_stdlib) - the Noir Standard Library +- [Ethereum Storage Proof Verification](https://github.com/aragonzkresearch/noir-trie-proofs) - a library that contains the primitives necessary for RLP decoding (in the form of look-up table construction) and Ethereum state and storage proof verification (or verification of any trie proof involving 32-byte long keys) +- [BigInt](https://github.com/shuklaayush/noir-bigint) - a library that provides a custom BigUint56 data type, allowing for computations on large unsigned integers +- [ECrecover](https://github.com/colinnielsen/ecrecover-noir/tree/main) - a library to verify an ECDSA signature and return the source Ethereum address +- [Sparse Merkle Tree Verifier](https://github.com/vocdoni/smtverifier-noir/tree/main) - a library for verification of sparse Merkle trees +- [Signed Int](https://github.com/resurgencelabs/signed_int) - a library for accessing a custom Signed Integer data type, allowing access to negative numbers on Noir +- [Fraction](https://github.com/resurgencelabs/fraction) - a library for accessing fractional number data type in Noir, allowing results that aren't whole numbers + +See the section on [dependencies](./modules_packages_crates/dependencies) for more information. diff --git a/docs/versioned_docs/version-v0.19.3/language_concepts/01_functions.md b/docs/versioned_docs/version-v0.19.3/language_concepts/01_functions.md new file mode 100644 index 00000000000..5eb22170e54 --- /dev/null +++ b/docs/versioned_docs/version-v0.19.3/language_concepts/01_functions.md @@ -0,0 +1,225 @@ +--- +title: Functions +description: + Learn how to declare functions and methods in Noir, a programming language with Rust semantics. + This guide covers parameter declaration, return types, call expressions, and more. +keywords: [Noir, Rust, functions, methods, parameter declaration, return types, call expressions] +--- + +Functions in Noir follow the same semantics of Rust, though Noir does not support early returns. + +To declare a function the `fn` keyword is used. + +```rust +fn foo() {} +``` + +By default, functions are visible only within the package they are defined. 
To make them visible outside of that package (for example, as part of a [library](../modules_packages_crates/crates_and_packages.md#libraries)), you should mark them as `pub`: + +```rust +pub fn foo() {} +``` + +You can also restrict the visibility of the function to only the crate it was defined in, by specifying `pub(crate)`: + +```rust +pub(crate) fn foo() {} //foo can only be called within its crate +``` + +All parameters in a function must have a type and all types are known at compile time. The parameter +is pre-pended with a colon and the parameter type. Multiple parameters are separated using a comma. + +```rust +fn foo(x : Field, y : Field){} +``` + +The return type of a function can be stated by using the `->` arrow notation. The function below +states that the foo function must return a `Field`. If the function returns no value, then the arrow +is omitted. + +```rust +fn foo(x : Field, y : Field) -> Field { + x + y +} +``` + +Note that a `return` keyword is unneeded in this case - the last expression in a function's body is +returned. + +## Main function + +If you're writing a binary, the `main` function is the starting point of your program. You can pass all types of expressions to it, as long as they have a fixed size at compile time: + +```rust +fn main(x : Field) // this is fine: passing a Field +fn main(x : [Field; 2]) // this is also fine: passing a Field with known size at compile-time +fn main(x : (Field, bool)) // 👌: passing a (Field, bool) tuple means size 2 +fn main(x : str<5>) // this is fine, as long as you pass a string of size 5 + +fn main(x : Vec) // can't compile, has variable size +fn main(x : [Field]) // can't compile, has variable size +fn main(....// i think you got it by now +``` + +Keep in mind [tests](../nargo/02_testing.md) don't differentiate between `main` and any other function. The following snippet passes tests, but won't compile or prove: + +```rust +fn main(x : [Field]) { + assert(x[0] == 1); +} + +#[test] +fn test_one() { + main([1, 2]); +} +``` + +```bash +$ nargo test +[testing] Running 1 test functions +[testing] Testing test_one... ok +[testing] All tests passed + +$ nargo check +The application panicked (crashed). +Message: Cannot have variable sized arrays as a parameter to main +``` + +## Call Expressions + +Calling a function in Noir is executed by using the function name and passing in the necessary +arguments. + +Below we show how to call the `foo` function from the `main` function using a call expression: + +```rust +fn main(x : Field, y : Field) { + let z = foo(x); +} + +fn foo(x : Field) -> Field { + x + x +} +``` + +## Methods + +You can define methods in Noir on any struct type in scope. + +```rust +struct MyStruct { + foo: Field, + bar: Field, +} + +impl MyStruct { + fn new(foo: Field) -> MyStruct { + MyStruct { + foo, + bar: 2, + } + } + + fn sum(self) -> Field { + self.foo + self.bar + } +} + +fn main() { + let s = MyStruct::new(40); + assert(s.sum() == 42); +} +``` + +Methods are just syntactic sugar for functions, so if we wanted to we could also call `sum` as +follows: + +```rust +assert(MyStruct::sum(s) == 42); +``` + +It is also possible to specialize which method is chosen depending on the [generic](./06_generics.md) type that is used. 
+In this example, the `foo` method returns a different value depending on the generic type of `Foo`:
+
+```rust
+struct Foo<T> {}
+
+impl Foo<Field> {
+    fn foo(self) -> Field { 1 }
+}
+
+impl Foo<u32> {
+    fn foo(self) -> Field { 2 }
+}
+
+fn main() {
+    let f1: Foo<Field> = Foo{};
+    let f2: Foo<u32> = Foo{};
+    assert(f1.foo() + f2.foo() == 3);
+}
+```
+
+Also note that impls defining the same method name cannot overlap. For example, since `foo` is already defined for `Foo<Field>` and `Foo<u32>` above, we cannot also define `foo` in an `impl<T> Foo<T>`, as it would be ambiguous which version of `foo` to choose.
+
+```rust
+// Including this impl in the same project as the above snippet would
+// cause an overlapping impls error
+impl<T> Foo<T> {
+    fn foo(self) -> Field { 3 }
+}
+```
+
+## Lambdas
+
+Lambdas are anonymous functions. They follow Rust's syntax: `|arg1, arg2, ..., argN| return_expression`.
+
+```rust
+let add_50 = |val| val + 50;
+assert(add_50(100) == 150);
+```
+
+See [Lambdas](./08_lambdas.md) for more details.
+
+## Attributes
+
+Attributes are metadata that can be applied to a function, using the following syntax: `#[attribute(value)]`.
+
+Supported attributes include:
+
+- **builtin**: the function is implemented by the compiler, for efficiency purposes.
+- **deprecated**: mark the function as _deprecated_. Calling the function will generate a warning: `warning: use of deprecated function`.
+- **field**: used to enable conditional compilation of code depending on the field size. See below for more details.
+- **oracle**: mark the function as _oracle_, meaning it is an external unconstrained function implemented in noir_js. See [Unconstrained](./05_unconstrained.md) and [NoirJS](../noir_js/noir_js.md) for more details.
+- **test**: mark the function as a unit test. See [Tests](../nargo/02_testing.md) for more details.
+
+### Field Attribute
+
+The field attribute defines which field the function is compatible with. The function is conditionally compiled: it is only included when the field attribute matches the Noir native field.
+The field can be specified implicitly, by the name of the elliptic curve usually associated with it (for instance bn254 or bls12_381), or explicitly, by the field (prime) order in decimal or hexadecimal form.
+As a result, it is possible to define multiple versions of a function, each specialized for a different field attribute. This can be useful when a function requires different parameters depending on the underlying elliptic curve.
+
+Example: we define the function `foo()` three times below: once for the default Noir bn254 curve, once for the field $\mathbb F_{23}$ (which will normally never be used by Noir), and once again for the bls12_381 curve.
+
+```rust
+#[field(bn254)]
+fn foo() -> u32 {
+    1
+}
+
+#[field(23)]
+fn foo() -> u32 {
+    2
+}
+
+// This commented code would not compile as foo would be defined twice because it is the same field as bn254
+// #[field(21888242871839275222246405745257275088548364400416034343698204186575808495617)]
+// fn foo() -> u32 {
+//     2
+// }
+
+#[field(bls12_381)]
+fn foo() -> u32 {
+    3
+}
+```
+
+If the field name is not known to Noir, it will discard the function. Field names are case insensitive.
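+As a small, hedged illustration of that case insensitivity, the attribute below is treated identically to `#[field(bn254)]`, so compiling it alongside the bn254 version of `foo` above would again define `foo` twice for the same field:
+
+```rust
+// Commented out for the same reason as the decimal-order example above:
+// BN254 resolves to the same field as bn254, so this would be a duplicate definition.
+// #[field(BN254)]
+// fn foo() -> u32 {
+//     1
+// }
+```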
diff --git a/docs/versioned_docs/version-v0.19.3/language_concepts/02_control_flow.md b/docs/versioned_docs/version-v0.19.3/language_concepts/02_control_flow.md new file mode 100644 index 00000000000..a7f85360197 --- /dev/null +++ b/docs/versioned_docs/version-v0.19.3/language_concepts/02_control_flow.md @@ -0,0 +1,44 @@ +--- +title: Control Flow +description: + Learn how to use loops and if expressions in the Noir programming language. Discover the syntax + and examples for for loops and if-else statements. +keywords: [Noir programming language, loops, for loop, if-else statements, Rust syntax] +--- + +## Loops + +Noir has one kind of loop: the `for` loop. `for` loops allow you to repeat a block of code multiple +times. + +The following block of code between the braces is run 10 times. + +```rust +for i in 0..10 { + // do something +}; +``` + +The index for loops is of type `u64`. + +## If Expressions + +Noir supports `if-else` statements. The syntax is most similar to Rust's where it is not required +for the statement's conditional to be surrounded by parentheses. + +```rust +let a = 0; +let mut x: u32 = 0; + +if a == 0 { + if a != 0 { + x = 6; + } else { + x = 2; + } +} else { + x = 5; + assert(x == 5); +} +assert(x == 2); +``` diff --git a/docs/versioned_docs/version-v0.19.3/language_concepts/03_ops.md b/docs/versioned_docs/version-v0.19.3/language_concepts/03_ops.md new file mode 100644 index 00000000000..0e35ef5e584 --- /dev/null +++ b/docs/versioned_docs/version-v0.19.3/language_concepts/03_ops.md @@ -0,0 +1,97 @@ +--- +title: Logical Operations +description: + Learn about the supported arithmetic and logical operations in the Noir programming language. + Discover how to perform operations on private input types, integers, and booleans. +keywords: + [ + Noir programming language, + supported operations, + arithmetic operations, + logical operations, + predicate operators, + bitwise operations, + short-circuiting, + backend, + ] +--- + +# Operations + +## Table of Supported Operations + +| Operation | Description | Requirements | +| :-------- | :------------------------------------------------------------: | -------------------------------------: | +| + | Adds two private input types together | Types must be private input | +| - | Subtracts two private input types together | Types must be private input | +| \* | Multiplies two private input types together | Types must be private input | +| / | Divides two private input types together | Types must be private input | +| ^ | XOR two private input types together | Types must be integer | +| & | AND two private input types together | Types must be integer | +| \| | OR two private input types together | Types must be integer | +| \<\< | Left shift an integer by another integer amount | Types must be integer | +| >> | Right shift an integer by another integer amount | Types must be integer | +| ! 
| Bitwise not of a value | Type must be integer or boolean | +| \< | returns a bool if one value is less than the other | Upper bound must have a known bit size | +| \<= | returns a bool if one value is less than or equal to the other | Upper bound must have a known bit size | +| > | returns a bool if one value is more than the other | Upper bound must have a known bit size | +| >= | returns a bool if one value is more than or equal to the other | Upper bound must have a known bit size | +| == | returns a bool if one value is equal to the other | Both types must not be constants | +| != | returns a bool if one value is not equal to the other | Both types must not be constants | + +### Predicate Operators + +`<,<=, !=, == , >, >=` are known as predicate/comparison operations because they compare two values. +This differs from the operations such as `+` where the operands are used in _computation_. + +### Bitwise Operations Example + +```rust +fn main(x : Field) { + let y = x as u32; + let z = y & y; +} +``` + +`z` is implicitly constrained to be the result of `y & y`. The `&` operand is used to denote bitwise +`&`. + +> `x & x` would not compile as `x` is a `Field` and not an integer type. + +### Logical Operators + +Noir has no support for the logical operators `||` and `&&`. This is because encoding the +short-circuiting that these operators require can be inefficient for Noir's backend. Instead you can +use the bitwise operators `|` and `&` which operate identically for booleans, just without the +short-circuiting. + +```rust +let my_val = 5; + +let mut flag = 1; +if (my_val > 6) | (my_val == 0) { + flag = 0; +} +assert(flag == 1); + +if (my_val != 10) & (my_val < 50) { + flag = 0; +} +assert(flag == 0); +``` + +### Shorthand operators + +Noir shorthand operators for most of the above operators, namely `+=, -=, *=, /=, %=, &=, |=, ^=, <<=`, and `>>=`. These allow for more concise syntax. For example: + +```rust +let mut i = 0; +i = i + 1; +``` + +could be written as: + +```rust +let mut i = 0; +i += 1; +``` diff --git a/docs/versioned_docs/version-v0.19.3/language_concepts/04_assert.md b/docs/versioned_docs/version-v0.19.3/language_concepts/04_assert.md new file mode 100644 index 00000000000..7427ec6cc63 --- /dev/null +++ b/docs/versioned_docs/version-v0.19.3/language_concepts/04_assert.md @@ -0,0 +1,26 @@ +--- +title: Assert Function +description: + Learn about the assert function in Noir, which can be used to explicitly constrain the predicate or + comparison expression that follows to be true, and what happens if the expression is false at + runtime. +keywords: [Noir programming language, assert statement, predicate expression, comparison expression] +--- + +Noir includes a special `assert` function which will explicitly constrain the predicate/comparison +expression that follows to be true. If this expression is false at runtime, the program will fail to +be proven. Example: + +```rust +fn main(x : Field, y : Field) { + assert(x == y); +} +``` + +You can optionally provide a message to be logged when the assertion fails: + +```rust +assert(x == y, "x and y are not equal"); +``` + +> Assertions only work for predicate operations, such as `==`. If there's any ambiguity on the operation, the program will fail to compile. For example, it is unclear if `assert(x + y)` would check for `x + y == 0` or simply would return `true`. 
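+To sidestep that ambiguity, a minimal sketch of spelling out the intended predicate explicitly (the zero-sum check here is just an illustrative choice):
+
+```rust
+fn main(x : Field, y : Field) {
+    // State the comparison explicitly so the constraint is unambiguous
+    assert(x + y == 0, "x and y do not sum to zero");
+}
+```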
diff --git a/docs/versioned_docs/version-v0.19.3/language_concepts/05_unconstrained.md b/docs/versioned_docs/version-v0.19.3/language_concepts/05_unconstrained.md new file mode 100644 index 00000000000..097d6ee9894 --- /dev/null +++ b/docs/versioned_docs/version-v0.19.3/language_concepts/05_unconstrained.md @@ -0,0 +1,96 @@ +--- +title: Unconstrained Functions +description: "Learn about what unconstrained functions in Noir are, how to use them and when you'd want to." + +keywords: [Noir programming language, unconstrained, open] +--- + + + +Unconstrained functions are functions which do not constrain any of the included computation and allow for non-deterministic computation. + +## Why? + +Zero-knowledge (ZK) domain-specific languages (DSL) enable developers to generate ZK proofs from their programs by compiling code down to the constraints of an NP complete language (such as R1CS or PLONKish languages). However, the hard bounds of a constraint system can be very limiting to the functionality of a ZK DSL. + +Enabling a circuit language to perform unconstrained execution is a powerful tool. Said another way, unconstrained execution lets developers generate witnesses from code that does not generate any constraints. Being able to execute logic outside of a circuit is critical for both circuit performance and constructing proofs on information that is external to a circuit. + +Fetching information from somewhere external to a circuit can also be used to enable developers to improve circuit efficiency. + +A ZK DSL does not just prove computation, but proves that some computation was handled correctly. Thus, it is necessary that when we switch from performing some operation directly inside of a circuit to inside of an unconstrained environment that the appropriate constraints are still laid down elsewhere in the circuit. + +## Example + +An in depth example might help drive the point home. This example comes from the excellent [post](https://discord.com/channels/1113924620781883405/1124022445054111926/1128747641853972590) by Tom in the Noir Discord. + +Let's look at how we can optimize a function to turn a `u72` into an array of `u8`s. + +```rust +fn main(num: u72) -> pub [u8; 8] { + let mut out: [u8; 8] = [0; 8]; + for i in 0..8 { + out[i] = (num >> (56 - (i * 8)) as u72 & 0xff) as u8; + } + + out +} +``` + +``` +Total ACIR opcodes generated for language PLONKCSat { width: 3 }: 91 +Backend circuit size: 3619 +``` + +A lot of the operations in this function are optimized away by the compiler (all the bit-shifts turn into divisions by constants). However we can save a bunch of gates by casting to u8 a bit earlier. This automatically truncates the bit-shifted value to fit in a u8 which allows us to remove the XOR against 0xff. This saves us ~480 gates in total. + +```rust +fn main(num: u72) -> pub [u8; 8] { + let mut out: [u8; 8] = [0; 8]; + for i in 0..8 { + out[i] = (num >> (56 - (i * 8)) as u8; + } + + out +} +``` + +``` +Total ACIR opcodes generated for language PLONKCSat { width: 3 }: 75 +Backend circuit size: 3143 +``` + +Those are some nice savings already but we can do better. This code is all constrained so we're proving every step of calculating out using num, but we don't actually care about how we calculate this, just that it's correct. This is where brillig comes in. + +It turns out that truncating a u72 into a u8 is hard to do inside a snark, each time we do as u8 we lay down 4 ACIR opcodes which get converted into multiple gates. 
It's actually much easier to calculate num from out than the other way around. All we need to do is multiply each element of out by a constant and add them all together, both relatively easy operations inside a snark. + +We can then run u72_to_u8 as unconstrained brillig code in order to calculate out, then use that result in our constrained function and assert that if we were to do the reverse calculation we'd get back num. This looks a little like the below: + +```rust +fn main(num: u72) -> pub [u8; 8] { + let out = u72_to_u8(num); + + let mut reconstructed_num: u72 = 0; + for i in 0..8 { + reconstructed_num += (out[i] as u72 << (56 - (8 * i))); + } + assert(num == reconstructed_num); + out +} + +unconstrained fn u72_to_u8(num: u72) -> [u8; 8] { + let mut out: [u8; 8] = [0; 8]; + for i in 0..8 { + out[i] = (num >> (56 - (i * 8))) as u8; + } + out +} +``` + +``` +Total ACIR opcodes generated for language PLONKCSat { width: 3 }: 78 +Backend circuit size: 2902 +``` + +This ends up taking off another ~250 gates from our circuit! We've ended up with more ACIR opcodes than before but they're easier for the backend to prove (resulting in fewer gates). + +Generally we want to use brillig whenever there's something that's easy to verify but hard to compute within the circuit. For example, if you wanted to calculate a square root of a number it'll be a much better idea to calculate this in brillig and then assert that if you square the result you get back your number. diff --git a/docs/docs/language_concepts/06_generics.md b/docs/versioned_docs/version-v0.19.3/language_concepts/06_generics.md similarity index 100% rename from docs/docs/language_concepts/06_generics.md rename to docs/versioned_docs/version-v0.19.3/language_concepts/06_generics.md diff --git a/docs/versioned_docs/version-v0.19.3/language_concepts/07_mutability.md b/docs/versioned_docs/version-v0.19.3/language_concepts/07_mutability.md new file mode 100644 index 00000000000..ed3fed820ec --- /dev/null +++ b/docs/versioned_docs/version-v0.19.3/language_concepts/07_mutability.md @@ -0,0 +1,92 @@ +--- +title: Mutability +description: + Learn about mutable variables, constants, and globals in Noir programming language. Discover how + to declare, modify, and use them in your programs. +keywords: [noir programming language, mutability in noir, mutable variables, constants, globals] +--- + +Variables in noir can be declared mutable via the `mut` keyword. Mutable variables can be reassigned +to via an assignment expression. + +```rust +let x = 2; +x = 3; // error: x must be mutable to be assigned to + +let mut y = 3; +let y = 4; // OK +``` + +The `mut` modifier can also apply to patterns: + +```rust +let (a, mut b) = (1, 2); +a = 11; // error: a must be mutable to be assigned to +b = 12; // OK + +let mut (c, d) = (3, 4); +c = 13; // OK +d = 14; // OK + +// etc. +let MyStruct { x: mut y } = MyStruct { x: a }; +// y is now in scope +``` + +Note that mutability in noir is local and everything is passed by value, so if a called function +mutates its parameters then the parent function will keep the old value of the parameters. + +```rust +fn main() -> pub Field { + let x = 3; + helper(x); + x // x is still 3 +} + +fn helper(mut x: i32) { + x = 4; +} +``` + +## Comptime Values + +:::warning + +The 'comptime' keyword was removed in version 0.10. The comptime keyword and syntax are currently still kept and parsed for backwards compatibility, but are now deprecated and will issue a warning when used. 
`comptime` has been removed because it is no longer needed for accessing arrays. + +::: + +## Globals + +Noir also supports global variables. However, they must be known at compile-time. The global type can also be inferred by the compiler entirely. Globals can also be used to specify array +annotations for function parameters and can be imported from submodules. + +```rust +global N: Field = 5; // Same as `global N: Field = 5` + +fn main(x : Field, y : [Field; N]) { + let res = x * N; + + assert(res == y[0]); + + let res2 = x * my_submodule::N; + assert(res != res2); +} + +mod my_submodule { + use dep::std; + + global N: Field = 10; + + fn my_helper() -> Field { + let x = N; + x + } +} +``` + +## Why only local mutability? + +Witnesses in a proving system are immutable in nature. Noir aims to _closely_ mirror this setting +without applying additional overhead to the user. Modeling a mutable reference is not as +straightforward as on conventional architectures and would incur some possibly unexpected overhead. diff --git a/docs/versioned_docs/version-v0.10.5/language_concepts/08_lambdas.md b/docs/versioned_docs/version-v0.19.3/language_concepts/08_lambdas.md similarity index 100% rename from docs/versioned_docs/version-v0.10.5/language_concepts/08_lambdas.md rename to docs/versioned_docs/version-v0.19.3/language_concepts/08_lambdas.md diff --git a/docs/versioned_docs/version-v0.10.5/language_concepts/09_comments.md b/docs/versioned_docs/version-v0.19.3/language_concepts/09_comments.md similarity index 100% rename from docs/versioned_docs/version-v0.10.5/language_concepts/09_comments.md rename to docs/versioned_docs/version-v0.19.3/language_concepts/09_comments.md diff --git a/docs/versioned_docs/version-v0.10.5/language_concepts/10_distinct.md b/docs/versioned_docs/version-v0.19.3/language_concepts/10_distinct.md similarity index 100% rename from docs/versioned_docs/version-v0.10.5/language_concepts/10_distinct.md rename to docs/versioned_docs/version-v0.19.3/language_concepts/10_distinct.md diff --git a/docs/versioned_docs/version-v0.10.5/language_concepts/11_shadowing.md b/docs/versioned_docs/version-v0.19.3/language_concepts/11_shadowing.md similarity index 100% rename from docs/versioned_docs/version-v0.10.5/language_concepts/11_shadowing.md rename to docs/versioned_docs/version-v0.19.3/language_concepts/11_shadowing.md diff --git a/docs/versioned_docs/version-v0.19.3/language_concepts/data_types.md b/docs/versioned_docs/version-v0.19.3/language_concepts/data_types.md new file mode 100644 index 00000000000..74f573f7d3f --- /dev/null +++ b/docs/versioned_docs/version-v0.19.3/language_concepts/data_types.md @@ -0,0 +1,96 @@ +--- +title: Data Types +description: + Get a clear understanding of the two categories of Noir data types - primitive types and compound + types. Learn about their characteristics, differences, and how to use them in your Noir + programming. +keywords: + [ + noir, + data types, + primitive types, + compound types, + private types, + public types, + ] +--- + +Every value in Noir has a type, which determines which operations are valid for it. + +All values in Noir are fundamentally composed of `Field` elements. For a more approachable +developing experience, abstractions are added on top to introduce different data types in Noir. + +Noir has two category of data types: primitive types (e.g. `Field`, integers, `bool`) and compound +types that group primitive types (e.g. arrays, tuples, structs). Each value can either be private or +public. 
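+As a brief, hedged sketch of how these categories combine (using only types introduced in the following pages), primitive and compound values can be mixed freely in one program:
+
+```rust
+fn main(x : Field, flag : bool) {
+    // A tuple is a compound type grouping two primitive values
+    let pair : (Field, bool) = (x, flag);
+    assert(pair.1 == flag);
+}
+```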
+ +## Private & Public Types + +A **private value** is known only to the Prover, while a **public value** is known by both the +Prover and Verifier. Mark values as `private` when the value should only be known to the prover. All +primitive types (including individual fields of compound types) in Noir are private by default, and +can be marked public when certain values are intended to be revealed to the Verifier. + +> **Note:** For public values defined in Noir programs paired with smart contract verifiers, once +> the proofs are verified on-chain the values can be considered known to everyone that has access to +> that blockchain. + +Public data types are treated no differently to private types apart from the fact that their values +will be revealed in proofs generated. Simply changing the value of a public type will not change the +circuit (where the same goes for changing values of private types as well). + +_Private values_ are also referred to as _witnesses_ sometimes. + +> **Note:** The terms private and public when applied to a type (e.g. `pub Field`) have a different +> meaning than when applied to a function (e.g. `pub fn foo() {}`). +> +> The former is a visibility modifier for the Prover to interpret if a value should be made known to +> the Verifier, while the latter is a visibility modifier for the compiler to interpret if a +> function should be made accessible to external Noir programs like in other languages. + +### pub Modifier + +All data types in Noir are private by default. Types are explicitly declared as public using the +`pub` modifier: + +```rust +fn main(x : Field, y : pub Field) -> pub Field { + x + y +} +``` + +In this example, `x` is **private** while `y` and `x + y` (the return value) are **public**. Note +that visibility is handled **per variable**, so it is perfectly valid to have one input that is +private and another that is public. + +> **Note:** Public types can only be declared through parameters on `main`. + +## Type Aliases + +A type alias is a new name for an existing type. Type aliases are declared with the keyword `type`: + +```rust +type Id = u8; + +fn main() { + let id: Id = 1; + let zero: u8 = 0; + assert(zero + 1 == id); +} +``` + +Type aliases can also be used with [generics](./06_generics.md): + +```rust +type Id = Size; + +fn main() { + let id: Id = 1; + let zero: u32 = 0; + assert(zero + 1 == id); +} +``` + +### BigInt + +You can achieve BigInt functionality using the [Noir BigInt](https://github.com/shuklaayush/noir-bigint) library. diff --git a/docs/versioned_docs/version-v0.19.3/language_concepts/data_types/00_fields.md b/docs/versioned_docs/version-v0.19.3/language_concepts/data_types/00_fields.md new file mode 100644 index 00000000000..78d3d2af166 --- /dev/null +++ b/docs/versioned_docs/version-v0.19.3/language_concepts/data_types/00_fields.md @@ -0,0 +1,165 @@ +--- +title: Fields +description: + Dive deep into the Field data type in Noir. Understand its methods, practical examples, and best practices to effectively use Fields in your Noir programs. +keywords: + [ + noir, + field type, + methods, + examples, + best practices, + ] +--- + +The field type corresponds to the native field type of the proving backend. + +The size of a Noir field depends on the elliptic curve's finite field for the proving backend +adopted. For example, a field would be a 254-bit integer when paired with the default backend that +spans the Grumpkin curve. 
+ +Fields support integer arithmetic and are often used as the default numeric type in Noir: + +```rust +fn main(x : Field, y : Field) { + let z = x + y; +} +``` + +`x`, `y` and `z` are all private fields in this example. Using the `let` keyword we defined a new +private value `z` constrained to be equal to `x + y`. + +If proving efficiency is of priority, fields should be used as a default for solving problems. +Smaller integer types (e.g. `u64`) incur extra range constraints. + +## Methods + +After declaring a Field, you can use these common methods on it: + +### to_le_bits + +Transforms the field into an array of bits, Little Endian. + +```rust +fn to_le_bits(_x : Field, _bit_size: u32) -> [u1; N] +``` + +example: + +```rust +fn main() { + let field = 2; + let bits = field.to_le_bits(32); +} +``` + +### to_be_bits + +Transforms the field into an array of bits, Big Endian. + +```rust +fn to_be_bits(_x : Field, _bit_size: u32) -> [u1; N] +``` + +example: + +```rust +fn main() { + let field = 2; + let bits = field.to_be_bits(32); +} +``` + +### to_le_bytes + +Transforms into an array of bytes, Little Endian + +```rust +fn to_le_bytes(_x : Field, byte_size: u32) -> [u8] +``` + +example: + +```rust +fn main() { + let field = 2; + let bytes = field.to_le_bytes(4); +} +``` + +### to_be_bytes + +Transforms into an array of bytes, Big Endian + +```rust +fn to_be_bytes(_x : Field, byte_size: u32) -> [u8] +``` + +example: + +```rust +fn main() { + let field = 2; + let bytes = field.to_be_bytes(4); +} +``` + +### to_le_radix + +Decomposes into a vector over the specified base, Little Endian + +```rust +fn to_le_radix(_x : Field, _radix: u32, _result_len: u32) -> [u8] +``` + +example: + +```rust +fn main() { + let field = 2; + let radix = field.to_le_radix(256, 4); +} +``` + +### to_be_radix + +Decomposes into a vector over the specified base, Big Endian + +```rust +fn to_be_radix(_x : Field, _radix: u32, _result_len: u32) -> [u8] +``` + +example: + +```rust +fn main() { + let field = 2; + let radix = field.to_be_radix(256, 4); +} +``` + +### pow_32 + +Returns the value to the power of the specified exponent + +```rust +fn pow_32(self, exponent: Field) -> Field +``` + +example: + +```rust +fn main() { + let field = 2 + let pow = field.pow_32(4); + assert(pow == 16); +} +``` + +### sgn0 + +Parity of (prime) Field element, i.e. sgn0(x mod p) = 0 if x ∈ \{0, ..., p-1\} is even, otherwise sgn0(x mod p) = 1. + +```rust +fn sgn0(self) -> u1 +``` diff --git a/docs/versioned_docs/version-v0.19.3/language_concepts/data_types/01_integers.md b/docs/versioned_docs/version-v0.19.3/language_concepts/data_types/01_integers.md new file mode 100644 index 00000000000..b1e7ad11bfd --- /dev/null +++ b/docs/versioned_docs/version-v0.19.3/language_concepts/data_types/01_integers.md @@ -0,0 +1,112 @@ +--- +title: Integers +description: Explore the Integer data type in Noir. Learn about its methods, see real-world examples, and grasp how to efficiently use Integers in your Noir code. +keywords: [noir, integer types, methods, examples, arithmetic] +--- + +An integer type is a range constrained field type. The Noir frontend supports arbitrarily-sized, both unsigned and signed integer types. + +:::info + +When an integer is defined in Noir without a specific type, it will default to `Field`. + +The one exception is for loop indices which default to `u64` since comparisons on `Field`s are not possible. 
+ +::: + +## Unsigned Integers + +An unsigned integer type is specified first with the letter `u` (indicating its unsigned nature) followed by its bit size (e.g. `8`): + +```rust +fn main() { + let x: u8 = 1; + let y: u8 = 1; + let z = x + y; + assert (z == 2); +} +``` + +The bit size determines the maximum value the integer type can store. For example, a `u8` variable can store a value in the range of 0 to 255 (i.e. $\\2^{8}-1\\$). + +## Signed Integers + +A signed integer type is specified first with the letter `i` (which stands for integer) followed by its bit size (e.g. `8`): + +```rust +fn main() { + let x: i8 = -1; + let y: i8 = -1; + let z = x + y; + assert (z == -2); +} +``` + +The bit size determines the maximum and minimum range of value the integer type can store. For example, an `i8` variable can store a value in the range of -128 to 127 (i.e. $\\-2^{7}\\$ to $\\2^{7}-1\\$). + +:::tip + +If you are using the default proving backend with Noir, both even (e.g. _u2_, _i2_) and odd (e.g. _u3_, _i3_) arbitrarily-sized integer types up to 127 bits (i.e. _u127_ and _i127_) are supported. + +::: + +## Overflows + +Computations that exceed the type boundaries will result in overflow errors. This happens with both signed and unsigned integers. For example, attempting to prove: + +```rust +fn main(x: u8, y: u8) { + let z = x + y; +} +``` + +With: + +```toml +x = "255" +y = "1" +``` + +Would result in: + +``` +$ nargo prove +error: Assertion failed: 'attempt to add with overflow' +┌─ ~/src/main.nr:9:13 +│ +│ let z = x + y; +│ ----- +│ += Call stack: + ... +``` + +A similar error would happen with signed integers: + +```rust +fn main() { + let x: i8 = -118; + let y: i8 = -11; + let z = x + y; +} +``` + +### Wrapping methods + +Although integer overflow is expected to error, some use-cases rely on wrapping. For these use-cases, the standard library provides `wrapping` variants of certain common operations: + +```rust +fn wrapping_add(x: T, y: T) -> T; +fn wrapping_sub(x: T, y: T) -> T; +fn wrapping_mul(x: T, y: T) -> T; +``` + +Example of how it is used: + +```rust +use dep::std; + +fn main(x: u8, y: u8) -> pub u8 { + std::wrapping_add(x + y) +} +``` diff --git a/docs/versioned_docs/version-v0.10.5/language_concepts/data_types/02_booleans.md b/docs/versioned_docs/version-v0.19.3/language_concepts/data_types/02_booleans.md similarity index 100% rename from docs/versioned_docs/version-v0.10.5/language_concepts/data_types/02_booleans.md rename to docs/versioned_docs/version-v0.19.3/language_concepts/data_types/02_booleans.md diff --git a/docs/versioned_docs/version-v0.19.3/language_concepts/data_types/03_strings.md b/docs/versioned_docs/version-v0.19.3/language_concepts/data_types/03_strings.md new file mode 100644 index 00000000000..c42f34ec3ad --- /dev/null +++ b/docs/versioned_docs/version-v0.19.3/language_concepts/data_types/03_strings.md @@ -0,0 +1,63 @@ +--- +title: Strings +description: + Discover the String data type in Noir. Learn about its methods, see real-world examples, and understand how to effectively manipulate and use Strings in Noir. +keywords: + [ + noir, + string type, + methods, + examples, + concatenation, + ] +--- + + +The string type is a fixed length value defined with `str`. + +You can use strings in `assert()` functions or print them with +`std::println()`. See more about [Logging](../../standard_library/logging). 
+ +```rust +use dep::std; + +fn main(message : pub str<11>, hex_as_string : str<4>) { + std::println(message); + assert(message == "hello world"); + assert(hex_as_string == "0x41"); +} +``` + +You can convert a `str` to a byte array by calling `as_bytes()` +or a vector by calling `as_bytes_vec()`. + +```rust +fn main() { + let message = "hello world"; + let message_bytes = message.as_bytes(); + let mut message_vec = message.as_bytes_vec(); + assert(message_bytes.len() == 11); + assert(message_bytes[0] == 104); + assert(message_bytes[0] == message_vec.get(0)); +} +``` + +## Escape characters + +You can use escape characters for your strings: + +| Escape Sequence | Description | +|-----------------|-----------------| +| `\r` | Carriage Return | +| `\n` | Newline | +| `\t` | Tab | +| `\0` | Null Character | +| `\"` | Double Quote | +| `\\` | Backslash | + +Example: + +```rust +let s = "Hello \"world" // prints "Hello "world" +let s = "hey \tyou"; // prints "hey you" +``` diff --git a/docs/versioned_docs/version-v0.19.3/language_concepts/data_types/04_arrays.md b/docs/versioned_docs/version-v0.19.3/language_concepts/data_types/04_arrays.md new file mode 100644 index 00000000000..bdbd1798bef --- /dev/null +++ b/docs/versioned_docs/version-v0.19.3/language_concepts/data_types/04_arrays.md @@ -0,0 +1,244 @@ +--- +title: Arrays +description: + Dive into the Array data type in Noir. Grasp its methods, practical examples, and best practices for efficiently using Arrays in your Noir code. +keywords: + [ + noir, + array type, + methods, + examples, + indexing, + ] +--- + +An array is one way of grouping together values into one compound type. Array types can be inferred +or explicitly specified via the syntax `[; ]`: + +```rust +fn main(x : Field, y : Field) { + let my_arr = [x, y]; + let your_arr: [Field; 2] = [x, y]; +} +``` + +Here, both `my_arr` and `your_arr` are instantiated as an array containing two `Field` elements. + +Array elements can be accessed using indexing: + +```rust +fn main() { + let a = [1, 2, 3, 4, 5]; + + let first = a[0]; + let second = a[1]; +} +``` + +All elements in an array must be of the same type (i.e. homogeneous). That is, an array cannot group +a `Field` value and a `u8` value together for example. + +You can write mutable arrays, like: + +```rust +fn main() { + let mut arr = [1, 2, 3, 4, 5]; + assert(arr[0] == 1); + + arr[0] = 42; + assert(arr[0] == 42); +} +``` + +You can instantiate a new array of a fixed size with the same value repeated for each element. The following example instantiates an array of length 32 where each element is of type Field and has the value 0. + +```rust +let array: [Field; 32] = [0; 32]; +``` + +Like in Rust, arrays in Noir are a fixed size. However, if you wish to convert an array to a [slice](./slices), you can just call `as_slice` on your array: + +```rust +let array: [Field; 32] = [0; 32]; +let sl = array.as_slice() +``` + +You can define multidimensional arrays: + +```rust +let array : [[Field; 2]; 2]; +let element = array[0][0]; +``` + +## Types + +You can create arrays of primitive types or structs. There is not yet support for nested arrays +(arrays of arrays) or arrays of structs that contain arrays. + +## Methods + +For convenience, the STD provides some ready-to-use, common methods for arrays: + +### len + +Returns the length of an array + +```rust +fn len(_array: [T; N]) -> comptime Field +``` + +example + +```rust +fn main() { + let array = [42, 42]; + assert(array.len() == 2); +} +``` + +### sort + +Returns a new sorted array. 
The original array remains untouched. Notice that this function will +only work for arrays of fields or integers, not for any arbitrary type. This is because the sorting +logic it uses internally is optimized specifically for these values. If you need a sort function to +sort any type, you should use the function `sort_via` described below. + +```rust +fn sort(_array: [T; N]) -> [T; N] +``` + +example + +```rust +fn main() { + let arr = [42, 32]; + let sorted = arr.sort(); + assert(sorted == [32, 42]); +} +``` + +### sort_via + +Sorts the array with a custom comparison function + +```rust +fn sort_via(mut a: [T; N], ordering: fn(T, T) -> bool) -> [T; N] +``` + +example + +```rust +fn main() { + let arr = [42, 32] + let sorted_ascending = arr.sort_via(|a, b| a < b); + assert(sorted_ascending == [32, 42]); // verifies + + let sorted_descending = arr.sort_via(|a, b| a > b); + assert(sorted_descending == [32, 42]); // does not verify +} +``` + +### map + +Applies a function to each element of the array, returning a new array containing the mapped elements. + +```rust +fn map(f: fn(T) -> U) -> [U; N] +``` + +example + +```rust +let a = [1, 2, 3]; +let b = a.map(|a| a * 2); // b is now [2, 4, 6] +``` + +### fold + +Applies a function to each element of the array, returning the final accumulated value. The first +parameter is the initial value. + +```rust +fn fold(mut accumulator: U, f: fn(U, T) -> U) -> U +``` + +This is a left fold, so the given function will be applied to the accumulator and first element of +the array, then the second, and so on. For a given call the expected result would be equivalent to: + +```rust +let a1 = [1]; +let a2 = [1, 2]; +let a3 = [1, 2, 3]; + +let f = |a, b| a - b; +a1.fold(10, f) //=> f(10, 1) +a2.fold(10, f) //=> f(f(10, 1), 2) +a3.fold(10, f) //=> f(f(f(10, 1), 2), 3) +``` + +example: + +```rust + +fn main() { + let arr = [2, 2, 2, 2, 2]; + let folded = arr.fold(0, |a, b| a + b); + assert(folded == 10); +} + +``` + +### reduce + +Same as fold, but uses the first element as starting element. + +```rust +fn reduce(f: fn(T, T) -> T) -> T +``` + +example: + +```rust +fn main() { + let arr = [2, 2, 2, 2, 2]; + let reduced = arr.reduce(|a, b| a + b); + assert(reduced == 10); +} +``` + +### all + +Returns true if all the elements satisfy the given predicate + +```rust +fn all(predicate: fn(T) -> bool) -> bool +``` + +example: + +```rust +fn main() { + let arr = [2, 2, 2, 2, 2]; + let all = arr.all(|a| a == 2); + assert(all); +} +``` + +### any + +Returns true if any of the elements satisfy the given predicate + +```rust +fn any(predicate: fn(T) -> bool) -> bool +``` + +example: + +```rust +fn main() { + let arr = [2, 2, 2, 2, 5]; + let any = arr.any(|a| a == 5); + assert(any); +} + +``` diff --git a/docs/versioned_docs/version-v0.19.3/language_concepts/data_types/05_slices.mdx b/docs/versioned_docs/version-v0.19.3/language_concepts/data_types/05_slices.mdx new file mode 100644 index 00000000000..1be0ec4a137 --- /dev/null +++ b/docs/versioned_docs/version-v0.19.3/language_concepts/data_types/05_slices.mdx @@ -0,0 +1,146 @@ +--- +title: Slices +description: Explore the Slice data type in Noir. Understand its methods, see real-world examples, and learn how to effectively use Slices in your Noir programs. +keywords: [noir, slice type, methods, examples, subarrays] +--- + +import Experimental from '@site/src/components/Notes/_experimental.mdx'; + + + +A slice is a dynamically-sized view into a sequence of elements. 
They can be resized at runtime, but because they don't own the data, they cannot be returned from a circuit. You can treat slices as arrays without a constrained size. + +```rust +use dep::std::slice; + +fn main() -> pub Field { + let mut slice: [Field] = [0; 2]; + + let mut new_slice = slice.push_back(6); + new_slice.len() +} +``` + +View the corresponding test file [here][test-file]. + +[test-file]: https://github.com/noir-lang/noir/blob/f387ec1475129732f72ba294877efdf6857135ac/crates/nargo_cli/tests/test_data_ssa_refactor/slices/src/main.nr + +## Methods + +For convenience, the STD provides some ready-to-use, common methods for slices: + +### push_back + +Pushes a new element to the end of the slice, returning a new slice with a length one greater than the original unmodified slice. + +```rust +fn push_back(_self: [T], _elem: T) -> [T] +``` + +example: + +```rust +fn main() -> pub Field { + let mut slice: [Field] = [0; 2]; + + let mut new_slice = slice.push_back(6); + new_slice.len() +} +``` + +View the corresponding test file [here][test-file]. + +### push_front + +Returns a new array with the specified element inserted at index 0. The existing elements indexes are incremented by 1. + +```rust +fn push_front(_self: Self, _elem: T) -> Self +``` + +Example: + +```rust +let mut new_slice: [Field] = []; +new_slice = new_slice.push_front(20); +assert(new_slice[0] == 20); // returns true +``` + +View the corresponding test file [here][test-file]. + +### pop_front + +Returns a tuple of two items, the first element of the array and the rest of the array. + +```rust +fn pop_front(_self: Self) -> (T, Self) +``` + +Example: + +```rust +let (first_elem, rest_of_slice) = slice.pop_front(); +``` + +View the corresponding test file [here][test-file]. + +### pop_back + +Returns a tuple of two items, the beginning of the array with the last element omitted and the last element. + +```rust +fn pop_back(_self: Self) -> (Self, T) +``` + +Example: + +```rust +let (popped_slice, last_elem) = slice.pop_back(); +``` + +View the corresponding test file [here][test-file]. + +### append + +Loops over a slice and adds it to the end of another. + +```rust +fn append(mut self, other: Self) -> Self +``` + +Example: + +```rust +let append = [1, 2].append([3, 4, 5]); +``` + +### insert + +Inserts an element at a specified index and shifts all following elements by 1. + +```rust +fn insert(_self: Self, _index: Field, _elem: T) -> Self +``` + +Example: + +```rust +new_slice = rest_of_slice.insert(2, 100); +assert(new_slice[2] == 100); +``` + +View the corresponding test file [here][test-file]. + +### remove + +Remove an element at a specified index, shifting all elements after it to the left, returning the altered slice and the removed element. + +```rust +fn remove(_self: Self, _index: Field) -> (Self, T) +``` + +Example: + +```rust +let (remove_slice, removed_elem) = slice.remove(3); +``` diff --git a/docs/versioned_docs/version-v0.19.3/language_concepts/data_types/06_vectors.mdx b/docs/versioned_docs/version-v0.19.3/language_concepts/data_types/06_vectors.mdx new file mode 100644 index 00000000000..4617e90d038 --- /dev/null +++ b/docs/versioned_docs/version-v0.19.3/language_concepts/data_types/06_vectors.mdx @@ -0,0 +1,172 @@ +--- +title: Vectors +description: Delve into the Vector data type in Noir. Learn about its methods, practical examples, and best practices for using Vectors in your Noir code. 
+keywords: [noir, vector type, methods, examples, dynamic arrays] +--- + +import Experimental from '@site/src/components/Notes/_experimental.mdx'; + + + +A vector is a collection type similar to Rust's Vector type. It's convenient way to use slices as mutable arrays. + +Example: + +```rust +use dep::std::collections::vec::Vec; + +let mut vector: Vec = Vec::new(); +for i in 0..5 { + vector.push(i); +} +assert(vector.len() == 5); +``` + +## Methods + +### new + +Creates a new, empty vector. + +```rust +pub fn new() -> Self { + Self { slice: [] } +} +``` + +Example: + +```rust +let empty_vector: Vec = Vec::new(); +assert(empty_vector.len() == 0); +``` + +### from_slice + +Creates a vector containing each element from a given slice. Mutations to the resulting vector will not affect the original slice. + +```rust +pub fn from_slice(slice: [T]) -> Self { + Self { slice } +} +``` + +Example: + +```rust +let arr: [Field] = [1, 2, 3]; +let vector_from_slice = Vec::from_slice(arr); +assert(vector_from_slice.len() == 3); +``` + +### get + +Retrieves an element from the vector at a given index. Panics if the index points beyond the vector's end. + +```rust +pub fn get(self, index: Field) -> T { + self.slice[index] +} +``` + +Example: + +```rust +let vector: Vec = Vec::from_slice([10, 20, 30]); +assert(vector.get(1) == 20); +``` + +### push + +Adds a new element to the vector's end, returning a new vector with a length one greater than the original unmodified vector. + +```rust +pub fn push(&mut self, elem: T) { + self.slice = self.slice.push_back(elem); +} +``` + +Example: + +```rust +let mut vector: Vec = Vec::new(); +vector.push(10); +assert(vector.len() == 1); +``` + +### pop + +Removes an element from the vector's end, returning a new vector with a length one less than the original vector, along with the removed element. Panics if the vector's length is zero. + +```rust +pub fn pop(&mut self) -> T { + let (popped_slice, last_elem) = self.slice.pop_back(); + self.slice = popped_slice; + last_elem +} +``` + +Example: + +```rust +let mut vector = Vec::from_slice([10, 20]); +let popped_elem = vector.pop(); +assert(popped_elem == 20); +assert(vector.len() == 1); +``` + +### insert + +Inserts an element at a specified index, shifting subsequent elements to the right. + +```rust +pub fn insert(&mut self, index: Field, elem: T) { + self.slice = self.slice.insert(index, elem); +} +``` + +Example: + +```rust +let mut vector = Vec::from_slice([10, 30]); +vector.insert(1, 20); +assert(vector.get(1) == 20); +``` + +### remove + +Removes an element at a specified index, shifting subsequent elements to the left, and returns the removed element. + +```rust +pub fn remove(&mut self, index: Field) -> T { + let (new_slice, elem) = self.slice.remove(index); + self.slice = new_slice; + elem +} +``` + +Example: + +```rust +let mut vector = Vec::from_slice([10, 20, 30]); +let removed_elem = vector.remove(1); +assert(removed_elem == 20); +assert(vector.len() == 2); +``` + +### len + +Returns the number of elements in the vector. 
+ +```rust +pub fn len(self) -> Field { + self.slice.len() +} +``` + +Example: + +```rust +let empty_vector: Vec = Vec::new(); +assert(empty_vector.len() == 0); +``` diff --git a/docs/versioned_docs/version-v0.10.5/language_concepts/data_types/07_tuples.md b/docs/versioned_docs/version-v0.19.3/language_concepts/data_types/07_tuples.md similarity index 100% rename from docs/versioned_docs/version-v0.10.5/language_concepts/data_types/07_tuples.md rename to docs/versioned_docs/version-v0.19.3/language_concepts/data_types/07_tuples.md diff --git a/docs/docs/language_concepts/data_types/08_structs.md b/docs/versioned_docs/version-v0.19.3/language_concepts/data_types/08_structs.md similarity index 100% rename from docs/docs/language_concepts/data_types/08_structs.md rename to docs/versioned_docs/version-v0.19.3/language_concepts/data_types/08_structs.md diff --git a/docs/versioned_docs/version-v0.10.5/language_concepts/data_types/09_references.md b/docs/versioned_docs/version-v0.19.3/language_concepts/data_types/09_references.md similarity index 100% rename from docs/versioned_docs/version-v0.10.5/language_concepts/data_types/09_references.md rename to docs/versioned_docs/version-v0.19.3/language_concepts/data_types/09_references.md diff --git a/docs/versioned_docs/version-v0.10.5/language_concepts/data_types/10_function_types.md b/docs/versioned_docs/version-v0.19.3/language_concepts/data_types/10_function_types.md similarity index 100% rename from docs/versioned_docs/version-v0.10.5/language_concepts/data_types/10_function_types.md rename to docs/versioned_docs/version-v0.19.3/language_concepts/data_types/10_function_types.md diff --git a/docs/versioned_docs/version-v0.19.3/migration_notes.md b/docs/versioned_docs/version-v0.19.3/migration_notes.md new file mode 100644 index 00000000000..905bca3d30c --- /dev/null +++ b/docs/versioned_docs/version-v0.19.3/migration_notes.md @@ -0,0 +1,91 @@ +--- +title: Migration notes +description: Read about migration notes from previous versions, which could solve problems while updating +keywords: [Noir, notes, migration, updating, upgrading] +--- + +Noir is in full-speed development. Things break fast, wild, and often. This page attempts to leave some notes on errors you might encounter when upgrading and how to resolve them until proper patches are built. + +## ≥0.19 + +### Enforcing `compiler_version` + +From this version on, the compiler will check for the `compiler_version` field in `Nargo.toml`, and will error if it doesn't match the current Nargo version in use. + +To update, please make sure this field in `Nargo.toml` matches the output of `nargo --version`. + +## ≥0.14 + +The index of the [for loops](./language_concepts/02_control_flow.md#loops) is now of type `u64` instead of `Field`. An example refactor would be: + +```rust +for i in 0..10 { + let i = i as Field; +} +``` + +## ≥v0.11.0 and Nargo backend + +From this version onwards, Nargo starts managing backends through the `nargo backend` command. Upgrading to the versions per usual steps might lead to: + +### `backend encountered an error` + +This is likely due to the existing locally installed version of proving backend (e.g. barretenberg) is incompatible with the version of Nargo in use. + +To fix the issue: + +1. Uninstall the existing backend + +```bash +nargo backend uninstall acvm-backend-barretenberg +``` + +You may replace _acvm-backend-barretenberg_ with the name of your backend listed in `nargo backend ls` or in ~/.nargo/backends. + +2. 
Reinstall a compatible version of the proving backend. + +If you are using the default barretenberg backend, simply run: + +``` +nargo prove +``` + +with your Noir program. + +This will trigger the download and installation of the latest version of barretenberg compatible with your Nargo in use. + +### `backend encountered an error: illegal instruction` + +On certain Intel-based systems, an `illegal instruction` error may arise due to incompatibility of barretenberg with certain CPU instructions. + +To fix the issue: + +1. Uninstall the existing backend + +```bash +nargo backend uninstall acvm-backend-barretenberg +``` + +You may replace _acvm-backend-barretenberg_ with the name of your backend listed in `nargo backend ls` or in ~/.nargo/backends. + +2. Reinstall a compatible version of the proving backend. + +If you are using the default barretenberg backend, simply run: + +``` +nargo backend install acvm-backend-barretenberg https://github.com/noir-lang/barretenberg-js-binary/raw/master/run-bb.tar.gz +``` + +This downloads and installs a specific bb.js based version of barretenberg binary from GitHub. + +The gzipped file is running [this bash script](https://github.com/noir-lang/barretenberg-js-binary/blob/master/run-bb-js.sh), where we need to gzip it as the Nargo currently expect the backend to be zipped up. + +Then run: + +``` +DESIRED_BINARY_VERSION=0.8.1 nargo info +``` + +This overrides the bb native binary with a bb.js node application instead, which should be compatible with most if not all hardware. This does come with the drawback of being generally slower than native binary. + +0.8.1 indicates bb.js version 0.8.1, so if you change that it will update to a different version or the default version in the script if none was supplied. diff --git a/docs/versioned_docs/version-v0.19.3/modules_packages_crates/crates_and_packages.md b/docs/versioned_docs/version-v0.19.3/modules_packages_crates/crates_and_packages.md new file mode 100644 index 00000000000..744de72bb2c --- /dev/null +++ b/docs/versioned_docs/version-v0.19.3/modules_packages_crates/crates_and_packages.md @@ -0,0 +1,42 @@ +--- +title: Crates and Packages +description: Learn how to use Crates and Packages in your Noir project +keywords: [Nargo, dependencies, package management, crates, package] +--- + +## Crates + +A crate is the smallest amount of code that the Noir compiler considers at a time. +Crates can contain modules, and the modules may be defined in other files that get compiled with the crate, as we’ll see in the coming sections. + +### Crate Types + +A Noir crate can come in several forms: binaries, libraries or contracts. + +#### Binaries + +_Binary crates_ are programs which you can compile to an ACIR circuit which you can then create proofs against. Each must have a function called `main` that defines the ACIR circuit which is to be proved. + +#### Libraries + +_Library crates_ don't have a `main` function and they don't compile down to ACIR. Instead they define functionality intended to be shared with multiple projects, and eventually included in a binary crate. + +#### Contracts + +Contract crates are similar to binary crates in that they compile to ACIR which you can create proofs against. They are different in that they do not have a single `main` function, but are a collection of functions to be deployed to the [Aztec network](https://aztec.network). 
You can learn more about the technical details of Aztec in the [monorepo](https://github.com/AztecProtocol/aztec-packages) or contract [examples](https://github.com/AztecProtocol/aztec-packages/tree/master/yarn-project/noir-contracts/contracts). + +### Crate Root + +Every crate has a root, which is the source file that the compiler starts, this is also known as the root module. The Noir compiler does not enforce any conditions on the name of the file which is the crate root, however if you are compiling via Nargo the crate root must be called `lib.nr` or `main.nr` for library or binary crates respectively. + +## Packages + +A Nargo _package_ is a collection of one of more crates that provides a set of functionality. A package must include a Nargo.toml file. + +A package _must_ contain either a library or a binary crate, but not both. + +### Differences from Cargo Packages + +One notable difference between Rust's Cargo and Noir's Nargo is that while Cargo allows a package to contain an unlimited number of binary crates and a single library crate, Nargo currently only allows a package to contain a single crate. + +In future this restriction may be lifted to allow a Nargo package to contain both a binary and library crate or multiple binary crates. diff --git a/docs/versioned_docs/version-v0.19.3/modules_packages_crates/dependencies.md b/docs/versioned_docs/version-v0.19.3/modules_packages_crates/dependencies.md new file mode 100644 index 00000000000..e91e73a4c4f --- /dev/null +++ b/docs/versioned_docs/version-v0.19.3/modules_packages_crates/dependencies.md @@ -0,0 +1,123 @@ +--- +title: Dependencies +description: + Learn how to specify and manage dependencies in Nargo, allowing you to upload packages to GitHub + and use them easily in your project. +keywords: [Nargo, dependencies, GitHub, package management, versioning] +--- + +Nargo allows you to upload packages to GitHub and use them as dependencies. + +## Specifying a dependency + +Specifying a dependency requires a tag to a specific commit and the git url to the url containing +the package. + +Currently, there are no requirements on the tag contents. If requirements are added, it would follow +semver 2.0 guidelines. + +> Note: Without a `tag` , there would be no versioning and dependencies would change each time you +> compile your project. + +For example, to add the [ecrecover-noir library](https://github.com/colinnielsen/ecrecover-noir) to your project, add it to `Nargo.toml`: + +```toml +# Nargo.toml + +[dependencies] +ecrecover = {tag = "v0.8.0", git = "https://github.com/colinnielsen/ecrecover-noir"} +``` + +If the module is in a subdirectory, you can define a subdirectory in your git repository, for example: + +```toml +# Nargo.toml + +[dependencies] +easy_private_token_contract = {tag ="v0.1.0-alpha62", git = "https://github.com/AztecProtocol/aztec-packages", directory = "yarn-project/noir-contracts/contracts/easy_private_token_contract"} +``` + +## Specifying a local dependency + +You can also specify dependencies that are local to your machine. + +For example, this file structure has a library and binary crate + +```tree +├── binary_crate +│   ├── Nargo.toml +│   └── src +│   └── main.nr +└── lib_a + ├── Nargo.toml + └── src + └── lib.nr +``` + +Inside of the binary crate, you can specify: + +```toml +# Nargo.toml + +[dependencies] +lib_a = { path = "../lib_a" } +``` + +## Importing dependencies + +You can import a dependency to a Noir file using the following syntax. 
For example, to import the +ecrecover-noir library and local lib_a referenced above: + +```rust +use dep::ecrecover; +use dep::lib_a; +``` + +You can also import only the specific parts of dependency that you want to use, like so: + +```rust +use dep::std::hash::sha256; +use dep::std::scalar_mul::fixed_base_embedded_curve; +``` + +Lastly, as demonstrated in the +[elliptic curve example](../standard_library/cryptographic_primitives/ec_primitives#examples), you +can import multiple items in the same line by enclosing them in curly braces: + +```rust +use dep::std::ec::tecurve::affine::{Curve, Point}; +``` + +We don't have a way to consume libraries from inside a [workspace](./workspaces) as external dependencies right now. + +Inside a workspace, these are consumed as `{ path = "../to_lib" }` dependencies in Nargo.toml. + +## Dependencies of Dependencies + +Note that when you import a dependency, you also get access to all of the dependencies of that package. + +For example, the [phy_vector](https://github.com/resurgencelabs/phy_vector) library imports an [fraction](https://github.com/resurgencelabs/fraction) library. If you're importing the phy_vector library, then you can access the functions in fractions library like so: + +```rust +use dep::phy_vector; + +fn main(x : Field, y : pub Field) { + //... + let f = phy_vector::fraction::toFraction(true, 2, 1); + //... +} +``` + +## Available Libraries + +Noir does not currently have an official package manager. You can find a list of available Noir libraries in the [awesome-noir repo here](https://github.com/noir-lang/awesome-noir#libraries). + +Some libraries that are available today include: + +- [Standard Library](https://github.com/noir-lang/noir/tree/master/noir_stdlib) - the Noir Standard Library +- [Ethereum Storage Proof Verification](https://github.com/aragonzkresearch/noir-trie-proofs) - a library that contains the primitives necessary for RLP decoding (in the form of look-up table construction) and Ethereum state and storage proof verification (or verification of any trie proof involving 32-byte long keys) +- [BigInt](https://github.com/shuklaayush/noir-bigint) - a library that provides a custom BigUint56 data type, allowing for computations on large unsigned integers +- [ECrecover](https://github.com/colinnielsen/ecrecover-noir/tree/main) - a library to verify an ECDSA signature and return the source Ethereum address +- [Sparse Merkle Tree Verifier](https://github.com/vocdoni/smtverifier-noir/tree/main) - a library for verification of sparse Merkle trees +- [Signed Int](https://github.com/resurgencelabs/signed_int) - a library for accessing a custom Signed Integer data type, allowing access to negative numbers on Noir +- [Fraction](https://github.com/resurgencelabs/fraction) - a library for accessing fractional number data type in Noir, allowing results that aren't whole numbers diff --git a/docs/versioned_docs/version-v0.19.3/modules_packages_crates/modules.md b/docs/versioned_docs/version-v0.19.3/modules_packages_crates/modules.md new file mode 100644 index 00000000000..11e60cbf35e --- /dev/null +++ b/docs/versioned_docs/version-v0.19.3/modules_packages_crates/modules.md @@ -0,0 +1,104 @@ +--- +title: Modules +description: + Learn how to organize your files using modules in Noir, following the same convention as Rust's + module system. Examples included. +keywords: [Noir, Rust, modules, organizing files, sub-modules] +--- + +Noir's module system follows the same convention as the _newer_ version of Rust's module system. 
+ +## Purpose of Modules + +Modules are used to organize files. Without modules all of your code would need to live in a single +file. In Noir, the compiler does not automatically scan all of your files to detect modules. This +must be done explicitly by the developer. + +## Examples + +### Importing a module in the crate root + +Filename : `src/main.nr` + +```rust +mod foo; + +fn main() { + foo::hello_world(); +} +``` + +Filename : `src/foo.nr` + +```rust +fn from_foo() {} +``` + +In the above snippet, the crate root is the `src/main.nr` file. The compiler sees the module +declaration `mod foo` which prompts it to look for a foo.nr file. + +Visually this module hierarchy looks like the following : + +``` +crate + ├── main + │ + └── foo + └── from_foo + +``` + +### Importing a module throughout the tree + +All modules are accessible from the `crate::` namespace. + +``` +crate + ├── bar + ├── foo + └── main + +``` + +In the above snippet, if `bar` would like to use functions in `foo`, it can do so by `use crate::foo::function_name`. + +### Sub-modules + +Filename : `src/main.nr` + +```rust +mod foo; + +fn main() { + foo::from_foo(); +} +``` + +Filename : `src/foo.nr` + +```rust +mod bar; +fn from_foo() {} +``` + +Filename : `src/foo/bar.nr` + +```rust +fn from_bar() {} +``` + +In the above snippet, we have added an extra module to the module tree; `bar`. `bar` is a submodule +of `foo` hence we declare bar in `foo.nr` with `mod bar`. Since `foo` is not the crate root, the +compiler looks for the file associated with the `bar` module in `src/foo/bar.nr` + +Visually the module hierarchy looks as follows: + +``` +crate + ├── main + │ + └── foo + ├── from_foo + └── bar + └── from_bar +``` diff --git a/docs/docs/modules_packages_crates/workspaces.md b/docs/versioned_docs/version-v0.19.3/modules_packages_crates/workspaces.md similarity index 100% rename from docs/docs/modules_packages_crates/workspaces.md rename to docs/versioned_docs/version-v0.19.3/modules_packages_crates/workspaces.md diff --git a/docs/versioned_docs/version-v0.19.3/nargo/01_commands.md b/docs/versioned_docs/version-v0.19.3/nargo/01_commands.md new file mode 100644 index 00000000000..65e2bdb44e3 --- /dev/null +++ b/docs/versioned_docs/version-v0.19.3/nargo/01_commands.md @@ -0,0 +1,250 @@ +--- +title: Commands +description: + Noir CLI Commands for Noir Prover and Verifier to create, execute, prove and verify programs, + generate Solidity verifier smart contract and compile into JSON file containing ACIR + representation and ABI of circuit. +keywords: + [ + Nargo, + Noir CLI, + Noir Prover, + Noir Verifier, + generate Solidity verifier, + compile JSON file, + ACIR representation, + ABI of circuit, + TypeScript, + ] +--- + +## General options + +| Option | Description | +| -------------------- | -------------------------------------------------- | +| `--show-ssa` | Emit debug information for the intermediate SSA IR | +| `--deny-warnings` | Quit execution when warnings are emitted | +| `--silence-warnings` | Suppress warnings | +| `-h, --help` | Print help | + +## `nargo help [subcommand]` + +Prints the list of available commands or specific information of a subcommand. + +_Arguments_ + +| Argument | Description | +| -------------- | -------------------------------------------- | +| `` | The subcommand whose help message to display | + +## `nargo backend` + +Installs and selects custom backends used to generate and verify proofs. 
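+
+As a quick illustration, a typical flow with the subcommands listed below might look like this (the backend name is a placeholder, not a real distribution):
+
+```bash
+# See which backends are installed and which one is currently active
+nargo backend ls
+nargo backend current
+
+# Switch to one of the installed backends (the name here is illustrative)
+nargo backend use my_backend
+```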
+ +### Commands + +| Command | Description | +| ----------- | --------------------------------------------------------- | +| `current` | Prints the name of the currently active backend | +| `ls` | Prints the list of currently installed backends | +| `use` | Select the backend to use | +| `install` | Install a new backend from a URL | +| `uninstall` | Uninstalls a backend | +| `help` | Print this message or the help of the given subcommand(s) | + +### Options + +| Option | Description | +| ------------ | ----------- | +| `-h, --help` | Print help | + +## `nargo check` + +Generate the `Prover.toml` and `Verifier.toml` files for specifying prover and verifier in/output +values of the Noir program respectively. + +### Options + +| Option | Description | +| --------------------- | ------------------------------------- | +| `--package ` | The name of the package to check | +| `--workspace` | Check all packages in the workspace | +| `--print-acir` | Display the ACIR for compiled circuit | +| `--deny-warnings` | Treat all warnings as errors | +| `--silence-warnings` | Suppress warnings | +| `-h, --help` | Print help | + +### `nargo codegen-verifier` + +Generate a Solidity verifier smart contract for the program. + +### Options + +| Option | Description | +| --------------------- | ------------------------------------- | +| `--package ` | The name of the package to codegen | +| `--workspace` | Codegen all packages in the workspace | +| `--print-acir` | Display the ACIR for compiled circuit | +| `--deny-warnings` | Treat all warnings as errors | +| `--silence-warnings` | Suppress warnings | +| `-h, --help` | Print help | + +## `nargo compile` + +Compile the program into a JSON build artifact file containing the ACIR representation and the ABI +of the circuit. This build artifact can then be used to generate and verify proofs. + +You can also use "build" as an alias for compile (e.g. `nargo build`). + +### Options + +| Option | Description | +| --------------------- | ------------------------------------------------------------ | +| `--include-keys` | Include Proving and Verification keys in the build artifacts | +| `--package ` | The name of the package to compile | +| `--workspace` | Compile all packages in the workspace | +| `--print-acir` | Display the ACIR for compiled circuit | +| `--deny-warnings` | Treat all warnings as errors | +| `--silence-warnings` | Suppress warnings | +| `-h, --help` | Print help | + +## `nargo new ` + +Creates a new Noir project in a new folder. + +**Arguments** + +| Argument | Description | +| -------- | -------------------------------- | +| `` | The path to save the new project | + +### Options + +| Option | Description | +| --------------- | ----------------------------------------------------- | +| `--name ` | Name of the package [default: package directory name] | +| `--lib` | Use a library template | +| `--bin` | Use a binary template [default] | +| `--contract` | Use a contract template | +| `-h, --help` | Print help | + +## `nargo init` + +Creates a new Noir project in the current directory. + +### Options + +| Option | Description | +| --------------- | ----------------------------------------------------- | +| `--name ` | Name of the package [default: current directory name] | +| `--lib` | Use a library template | +| `--bin` | Use a binary template [default] | +| `--contract` | Use a contract template | +| `-h, --help` | Print help | + +## `nargo execute [WITNESS_NAME]` + +Runs the Noir program and prints its return value. 
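+
+For instance, a minimal invocation might look like this (`foo` is an arbitrary witness name):
+
+```bash
+# Run the program with the inputs from Prover.toml and write the witness to ./target/foo.tr
+nargo execute foo
+```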
+ +**Arguments** + +| Argument | Description | +| ---------------- | ----------------------------------------- | +| `[WITNESS_NAME]` | Write the execution witness to named file | + +### Options + +| Option | Description | +| --------------------------------- | ------------------------------------------------------------------------------------ | +| `-p, --prover-name ` | The name of the toml file which contains the inputs for the prover [default: Prover] | +| `--package ` | The name of the package to execute | +| `--workspace` | Execute all packages in the workspace | +| `--print-acir` | Display the ACIR for compiled circuit | +| `--deny-warnings` | Treat all warnings as errors | +| `--silence-warnings` | Suppress warnings | +| `-h, --help` | Print help | + +_Usage_ + +The inputs to the circuit are read from the `Prover.toml` file generated by `nargo check`, which +must be filled in. + +To save the witness to file, run the command with a value for the `WITNESS_NAME` argument. A +`.tr` file will then be saved in the `./target` folder. + +## `nargo prove` + +Creates a proof for the program. + +### Options + +| Option | Description | +| ------------------------------------- | ---------------------------------------------------------------------------------------- | +| `-p, --prover-name ` | The name of the toml file which contains the inputs for the prover [default: Prover] | +| `-v, --verifier-name ` | The name of the toml file which contains the inputs for the verifier [default: Verifier] | +| `--verify` | Verify proof after proving | +| `--package ` | The name of the package to prove | +| `--workspace` | Prove all packages in the workspace | +| `--print-acir` | Display the ACIR for compiled circuit | +| `--deny-warnings` | Treat all warnings as errors | +| `--silence-warnings` | Suppress warnings | +| `-h, --help` | Print help | + +## `nargo verify` + +Given a proof and a program, verify whether the proof is valid. + +### Options + +| Option | Description | +| ------------------------------------- | ---------------------------------------------------------------------------------------- | +| `-v, --verifier-name ` | The name of the toml file which contains the inputs for the verifier [default: Verifier] | +| `--package ` | The name of the package to verify | +| `--workspace` | Verify all packages in the workspace | +| `--print-acir` | Display the ACIR for compiled circuit | +| `--deny-warnings` | Treat all warnings as errors | +| `--silence-warnings` | Suppress warnings | +| `-h, --help` | Print help | + +## `nargo test [TEST_NAME]` + +Nargo will automatically compile and run any functions which have the decorator `#[test]` on them if +you run `nargo test`. To print `println` statements in tests, use the `--show-output` flag. + +Takes an optional `--exact` flag which allows you to select tests based on an exact name. + +See an example on the [testing page](./testing). + +### Options + +| Option | Description | +| --------------------- | -------------------------------------- | +| `--show-output` | Display output of `println` statements | +| `--exact` | Only run tests that match exactly | +| `--package ` | The name of the package to test | +| `--workspace` | Test all packages in the workspace | +| `--print-acir` | Display the ACIR for compiled circuit | +| `--deny-warnings` | Treat all warnings as errors | +| `--silence-warnings` | Suppress warnings | +| `-h, --help` | Print help | + +## `nargo info` + +Prints a table containing the information of the package. 
+ +Currently the table provide + +1. The number of ACIR opcodes +2. The final number gates in the circuit used by a backend + +If the file contains a contract the table will provide the +above information about each function of the contract. + +## `nargo lsp` + +Start a long-running Language Server process that communicates over stdin/stdout. +Usually this command is not run by a user, but instead will be run by a Language Client, such as [vscode-noir](https://github.com/noir-lang/vscode-noir). + +## `nargo fmt` + +Automatically formats your Noir source code based on the default formatting settings. diff --git a/docs/versioned_docs/version-v0.19.3/nargo/02_testing.md b/docs/versioned_docs/version-v0.19.3/nargo/02_testing.md new file mode 100644 index 00000000000..5c57ef92705 --- /dev/null +++ b/docs/versioned_docs/version-v0.19.3/nargo/02_testing.md @@ -0,0 +1,61 @@ +--- +title: Testing in Noir +description: Learn how to use Nargo to test your Noir program in a quick and easy way +keywords: [Nargo, testing, Noir, compile, test] +--- + +You can test your Noir programs using Noir circuits. + +Nargo will automatically compile and run any functions which have the decorator `#[test]` on them if +you run `nargo test`. + +For example if you have a program like: + +```rust +fn add(x: u64, y: u64) -> u64 { + x + y +} +#[test] +fn test_add() { + assert(add(2,2) == 4); + assert(add(0,1) == 1); + assert(add(1,0) == 1); +} +``` + +Running `nargo test` will test that the `test_add` function can be executed while satisfying all +the constraints which allows you to test that add returns the expected values. Test functions can't +have any arguments currently. + +### Test fail + +You can write tests that are expected to fail by using the decorator `#[test(should_fail)]`. For example: + +```rust +fn add(x: u64, y: u64) -> u64 { + x + y +} +#[test(should_fail)] +fn test_add() { + assert(add(2,2) == 5); +} +``` + +You can be more specific and make it fail with a specific reason by using `should_fail_with = "`: + +```rust +fn main(african_swallow_avg_speed : Field) { + assert(african_swallow_avg_speed == 65, "What is the airspeed velocity of an unladen swallow"); +} + +#[test] +fn test_king_arthur() { + main(65); +} + +#[test(should_fail_with = "What is the airspeed velocity of an unladen swallow")] +fn test_bridgekeeper() { + main(32); +} + +``` diff --git a/docs/versioned_docs/version-v0.10.5/nargo/03_solidity_verifier.md b/docs/versioned_docs/version-v0.19.3/nargo/03_solidity_verifier.md similarity index 100% rename from docs/versioned_docs/version-v0.10.5/nargo/03_solidity_verifier.md rename to docs/versioned_docs/version-v0.19.3/nargo/03_solidity_verifier.md diff --git a/docs/versioned_docs/version-v0.19.3/nargo/04_language_server.md b/docs/versioned_docs/version-v0.19.3/nargo/04_language_server.md new file mode 100644 index 00000000000..48c01465f6e --- /dev/null +++ b/docs/versioned_docs/version-v0.19.3/nargo/04_language_server.md @@ -0,0 +1,42 @@ +--- +title: Language Server +description: Learn about the Noir Language Server, how to install the components, and configuration that may be required. +keywords: [Nargo, Language Server, LSP, VSCode, Visual Studio Code] +--- + +This section helps you install and configure the Noir Language Server. + +The Language Server Protocol (LSP) has two components, the [Server](#language-server) and the [Client](#language-client). Below we describe each in the context of Noir. 
+ +## Language Server + +The Server component is provided by the Nargo command line tool that you installed at the beginning of this guide. +As long as Nargo is installed and you've used it to run other commands in this guide, it should be good to go! + +If you'd like to verify that the `nargo lsp` command is available, you can run `nargo --help` and look for `lsp` in the list of commands. If you see it, you're using a version of Noir with LSP support. + +## Language Client + +The Client component is usually an editor plugin that launches the Server. It communicates LSP messages between the editor and the Server. For example, when you save a file, the Client will alert the Server, so it can try to compile the project and report any errors. + +Currently, Noir provides a Language Client for Visual Studio Code via the [vscode-noir](https://github.com/noir-lang/vscode-noir) extension. You can install it via the [Visual Studio Marketplace](https://marketplace.visualstudio.com/items?itemName=noir-lang.vscode-noir). + +> **Note:** Noir's Language Server Protocol support currently assumes users' VSCode workspace root to be the same as users' Noir project root (i.e. where Nargo.toml lies). +> +> If LSP features seem to be missing / malfunctioning, make sure you are opening your Noir project directly (instead of as a sub-folder) in your VSCode instance. + +When your language server is running correctly and the VSCode plugin is installed, you should see handy codelens buttons for compilation, measuring circuit size, execution, and tests: + +![Compile and Execute](@site/static/img/codelens_compile_execute.png) +![Run test](@site/static/img/codelens_run_test.png) + +You should also see your tests in the `testing` panel: + +![Testing panel](@site/static/img/codelens_testing_panel.png) + +### Configuration + +- **Noir: Enable LSP** - If checked, the extension will launch the Language Server via `nargo lsp` and communicate with it. +- **Noir: Nargo Flags** - Additional flags may be specified if you require them to be added when the extension calls `nargo lsp`. +- **Noir: Nargo Path** - An absolute path to a Nargo binary with the `lsp` command. This may be useful if Nargo is not within the `PATH` of your editor. +- **Noir > Trace: Server** - Setting this to `"messages"` or `"verbose"` will log LSP messages between the Client and Server. Useful for debugging. diff --git a/docs/versioned_docs/version-v0.19.3/noir_js/getting_started/01_tiny_noir_app.md b/docs/versioned_docs/version-v0.19.3/noir_js/getting_started/01_tiny_noir_app.md new file mode 100644 index 00000000000..c51ed61de52 --- /dev/null +++ b/docs/versioned_docs/version-v0.19.3/noir_js/getting_started/01_tiny_noir_app.md @@ -0,0 +1,260 @@ +--- +title: End-to-end +description: Learn how to setup a new app that uses Noir to generate and verify zero-knowledge SNARK proofs in a typescript or javascript environment +keywords: [how to, guide, javascript, typescript, noir, barretenberg, zero-knowledge, proofs] +--- + +NoirJS works both on the browser and on the server, and works for both ESM and CJS module systems. In this page, we will learn how can we write a simple test and a simple web app to verify the standard Noir example. + +You can find the complete app code for this guide [here](https://github.com/noir-lang/tiny-noirjs-app). + +## Before we start + +:::note + +Feel free to use whatever versions, just keep in mind that Nargo and the NoirJS packages are meant to be in sync. For example, Nargo 0.18.x matches `noir_js@0.18.x`, etc. 
+ +In this guide, we will be pinned to 0.17.0. + +::: + +Make sure you have Node installed on your machine by opening a terminal and executing `node --version`. If you don't see a version, you should install [node](https://github.com/nvm-sh/nvm). You can also use `yarn` if you prefer that package manager over npm (which comes with node). + +First of all, follow the the [Nargo guide](../../getting_started/00_nargo_installation.md) to install nargo version 0.17.0 and create a new project with `nargo new circuit`. Once there, `cd` into the `circuit` folder. You should then be able to compile your circuit into `json` format and see it inside the `target` folder: + +```bash +nargo compile +``` + +Your folder structure should look like: + +```tree +. +└── circuit + ├── Nargo.toml + ├── src + │ └── main.nr + └── target + └── circuit.json +``` + +## Starting a new project + +Go back to the previous folder and start a new project by running run `npm init`. You can configure your project or just leave the defaults, and see a `package.json` appear in your root folder. + +## Installing dependencies + +We'll need two `npm` packages. These packages will provide us the methods we need to run and verify proofs: + +```bash +npm i @noir-lang/backend_barretenberg@^0.17.0 @noir-lang/noir_js@^0.17.0 +``` + +To serve our page, we can use a build tool such as `vite`. Because we're gonna use some `wasm` files, we need to install a plugin as well. Run: + +```bash +npm i --save-dev vite rollup-plugin-copy +``` + +Since we're on the dependency world, we may as well define a nice starting script. Vite makes it easy. Just open `package.json`, find the block "scripts" and add this just below the line with `"test" : "echo......."`: + +```json + "start": "vite --open" +``` + +If you want do build a static website, you can also add some build and preview scripts: + +```json + "build": "vite build", + "preview": "vite preview" +``` + +## Vite plugins + +Vite is great, but support from `wasm` doesn't work out-of-the-box. We're gonna write a quick plugin and use another one. Just copy and paste this into a file named `vite.config.js`. You don't need to understand it, just trust me bro. + +```js +import { defineConfig } from 'vite'; +import copy from 'rollup-plugin-copy'; +import fs from 'fs'; +import path from 'path'; + +const wasmContentTypePlugin = { + name: 'wasm-content-type-plugin', + configureServer(server) { + server.middlewares.use(async (req, res, next) => { + if (req.url.endsWith('.wasm')) { + res.setHeader('Content-Type', 'application/wasm'); + const newPath = req.url.replace('deps', 'dist'); + const targetPath = path.join(__dirname, newPath); + const wasmContent = fs.readFileSync(targetPath); + return res.end(wasmContent); + } + next(); + }); + }, +}; + +export default defineConfig(({ command }) => { + if (command === 'serve') { + return { + plugins: [ + copy({ + targets: [{ src: 'node_modules/**/*.wasm', dest: 'node_modules/.vite/dist' }], + copySync: true, + hook: 'buildStart', + }), + command === 'serve' ? wasmContentTypePlugin : [], + ], + }; + } + + return {}; +}); +``` + +## HTML + +Here's the simplest HTML with some terrible UI. Create a file called `index.html` and paste this: + +```html + + + + + + +

+<!DOCTYPE html>
+<html lang="en">
+  <head>
+    <meta charset="UTF-8" />
+    <!-- Load the app script created in the next step as an ES module -->
+    <script type="module" src="/app.js"></script>
+  </head>
+  <body>
+    <h1>Very basic Noir app</h1>
+    <div id="logs"><h2>Logs</h2></div>
+    <div id="results"><h2>Proof</h2></div>
+  </body>
+</html>
+ + +``` + +## Some good old vanilla Javascript + +Create a new file `app.js`, which is where our javascript code will live. Let's start with this code inside: + +```js +document.addEventListener('DOMContentLoaded', async () => { + // here's where the magic happens +}); + +function display(container, msg) { + const c = document.getElementById(container); + const p = document.createElement('p'); + p.textContent = msg; + c.appendChild(p); +} +``` + +We can manipulate our website with this little function, so we can see our website working. + +## Adding Noir + +If you come from the previous page, your folder structure should look like this: + +```tree +├── app.js +├── circuit +│ ├── Nargo.toml +│ ├── src +│ │ └── main.nr +│ └── target +│ └── circuit.json +├── index.html +├── package.json +└── vite.config.js +``` + +You'll see other files and folders showing up (like `package-lock.json`, `yarn.lock`, `node_modules`) but you shouldn't have to care about those. + +## Importing our dependencies + +We're starting with the good stuff now. At the top of the new javascript file, import the packages: + +```ts +import { BarretenbergBackend } from '@noir-lang/backend_barretenberg'; +import { Noir } from '@noir-lang/noir_js'; +``` + +We also need to import the `circuit` JSON file we created. If you have the suggested folder structure, you can add this line: + +```ts +import circuit from './circuit/target/circuit.json'; +``` + +## Write code + +:::note + +We're gonna be adding code inside the `document.addEventListener...etc` block: + +```js +// forget stuff here +document.addEventListener('DOMContentLoaded', async () => { + // here's where the magic happens +}); +// forget stuff here +``` + +::: + +Our dependencies exported two classes: `BarretenbergBackend` and `Noir`. Let's `init` them and add some logs, just to flex: + +```ts +const backend = new BarretenbergBackend(circuit); +const noir = new Noir(circuit, backend); +``` + +## Proving + +Now we're ready to prove stuff! Let's feed some inputs to our circuit and calculate the proof: + +```js +const input = { x: 1, y: 2 }; +display('logs', 'Generating proof... ⌛'); +const proof = await noir.generateFinalProof(input); +display('logs', 'Generating proof... ✅'); +display('results', proof.proof); +``` + +You're probably eager to see stuff happening, so go and run your app now! + +From your terminal, run `npm start` (or `yarn start`). If it doesn't open a browser for you, just visit `localhost:5173`. On a modern laptop, proof will generate in less than 100ms, and you'll see this: + +![Getting Started 0](@site/static/img/noir_getting_started_1.png) + +If you're human, you shouldn't be able to understand anything on the "proof" box. That's OK. We like you, human. + +In any case, this means your proof was generated! But you shouldn't trust me just yet. Add these lines to see it being verified: + +```js +display('logs', 'Verifying proof... ⌛'); +const verification = await noir.verifyFinalProof(proof); +if (verification) display('logs', 'Verifying proof... ✅'); +``` + +By saving, your app will refresh and here's our complete Tiny Noir App! + +You can find the complete app code for this guide [here](https://github.com/noir-lang/tiny-noirjs-app). + +## Further Reading + +You can see how noirjs is used in a full stack Next.js hardhat application in the [noir-starter repo here](https://github.com/noir-lang/noir-starter/tree/main/next-hardhat). 
The example shows how to calculate a proof in the browser and verify it with a deployed Solidity verifier contract from noirjs. + +You should also check out the more advanced examples in the [noir-examples repo](https://github.com/noir-lang/noir-examples), where you'll find reference usage for some cool apps. diff --git a/docs/docs/noir_js/noir_js.md b/docs/versioned_docs/version-v0.19.3/noir_js/noir_js.md similarity index 100% rename from docs/docs/noir_js/noir_js.md rename to docs/versioned_docs/version-v0.19.3/noir_js/noir_js.md diff --git a/docs/versioned_docs/version-v0.19.3/noir_js/reference/backend_barretenberg/.nojekyll b/docs/versioned_docs/version-v0.19.3/noir_js/reference/backend_barretenberg/.nojekyll new file mode 100644 index 00000000000..e2ac6616add --- /dev/null +++ b/docs/versioned_docs/version-v0.19.3/noir_js/reference/backend_barretenberg/.nojekyll @@ -0,0 +1 @@ +TypeDoc added this file to prevent GitHub Pages from using Jekyll. You can turn off this behavior by setting the `githubPages` option to false. \ No newline at end of file diff --git a/docs/versioned_docs/version-v0.19.3/noir_js/reference/backend_barretenberg/classes/BarretenbergBackend.md b/docs/versioned_docs/version-v0.19.3/noir_js/reference/backend_barretenberg/classes/BarretenbergBackend.md new file mode 100644 index 00000000000..5cbe9421b92 --- /dev/null +++ b/docs/versioned_docs/version-v0.19.3/noir_js/reference/backend_barretenberg/classes/BarretenbergBackend.md @@ -0,0 +1,185 @@ +# BarretenbergBackend + +## Implements + +- [`Backend`](../interfaces/Backend.md) + +## Constructors + +### new BarretenbergBackend(acirCircuit, options) + +```ts +new BarretenbergBackend(acirCircuit, options): BarretenbergBackend +``` + +#### Parameters + +| Parameter | Type | +| :------ | :------ | +| `acirCircuit` | [`CompiledCircuit`](../type-aliases/CompiledCircuit.md) | +| `options` | [`BackendOptions`](../type-aliases/BackendOptions.md) | + +#### Returns + +[`BarretenbergBackend`](BarretenbergBackend.md) + +## Methods + +### destroy() + +```ts +destroy(): Promise +``` + +#### Returns + +`Promise`\<`void`\> + +#### Implementation of + +[`Backend`](../interfaces/Backend.md).[`destroy`](../interfaces/Backend.md#destroy) + +#### Description + +Destroys the backend + +*** + +### generateFinalProof() + +```ts +generateFinalProof(decompressedWitness): Promise +``` + +#### Parameters + +| Parameter | Type | +| :------ | :------ | +| `decompressedWitness` | `Uint8Array` | + +#### Returns + +`Promise`\<[`ProofData`](../type-aliases/ProofData.md)\> + +#### Implementation of + +[`Backend`](../interfaces/Backend.md).[`generateFinalProof`](../interfaces/Backend.md#generatefinalproof) + +#### Description + +Generates a final proof (not meant to be verified in another circuit) + +*** + +### generateIntermediateProof() + +```ts +generateIntermediateProof(witness): Promise +``` + +#### Parameters + +| Parameter | Type | +| :------ | :------ | +| `witness` | `Uint8Array` | + +#### Returns + +`Promise`\<[`ProofData`](../type-aliases/ProofData.md)\> + +#### Implementation of + +[`Backend`](../interfaces/Backend.md).[`generateIntermediateProof`](../interfaces/Backend.md#generateintermediateproof) + +#### Example + +```typescript +const intermediateProof = await backend.generateIntermediateProof(witness); +``` + +*** + +### generateIntermediateProofArtifacts() + +```ts +generateIntermediateProofArtifacts(proofData, numOfPublicInputs): Promise +``` + +#### Parameters + +| Parameter | Type | Default value | +| :------ | :------ | :------ | +| 
`proofData` | [`ProofData`](../type-aliases/ProofData.md) | `undefined` | +| `numOfPublicInputs` | `number` | `0` | + +#### Returns + +`Promise`\<`object`\> + +#### Implementation of + +[`Backend`](../interfaces/Backend.md).[`generateIntermediateProofArtifacts`](../interfaces/Backend.md#generateintermediateproofartifacts) + +#### Example + +```typescript +const artifacts = await backend.generateIntermediateProofArtifacts(proof, numOfPublicInputs); +``` + +*** + +### verifyFinalProof() + +```ts +verifyFinalProof(proofData): Promise +``` + +#### Parameters + +| Parameter | Type | +| :------ | :------ | +| `proofData` | [`ProofData`](../type-aliases/ProofData.md) | + +#### Returns + +`Promise`\<`boolean`\> + +#### Implementation of + +[`Backend`](../interfaces/Backend.md).[`verifyFinalProof`](../interfaces/Backend.md#verifyfinalproof) + +#### Description + +Verifies a final proof + +*** + +### verifyIntermediateProof() + +```ts +verifyIntermediateProof(proofData): Promise +``` + +#### Parameters + +| Parameter | Type | +| :------ | :------ | +| `proofData` | [`ProofData`](../type-aliases/ProofData.md) | + +#### Returns + +`Promise`\<`boolean`\> + +#### Implementation of + +[`Backend`](../interfaces/Backend.md).[`verifyIntermediateProof`](../interfaces/Backend.md#verifyintermediateproof) + +#### Example + +```typescript +const isValidIntermediate = await backend.verifyIntermediateProof(proof); +``` + +*** + +Generated using [typedoc-plugin-markdown](https://www.npmjs.com/package/typedoc-plugin-markdown) and [TypeDoc](https://typedoc.org/) diff --git a/docs/versioned_docs/version-v0.19.3/noir_js/reference/backend_barretenberg/index.md b/docs/versioned_docs/version-v0.19.3/noir_js/reference/backend_barretenberg/index.md new file mode 100644 index 00000000000..3680ba3ca77 --- /dev/null +++ b/docs/versioned_docs/version-v0.19.3/noir_js/reference/backend_barretenberg/index.md @@ -0,0 +1,27 @@ +# Backend Barretenberg + +## Exports + +### Classes + +| Class | Description | +| :------ | :------ | +| [BarretenbergBackend](classes/BarretenbergBackend.md) | - | + +### Interfaces + +| Interface | Description | +| :------ | :------ | +| [Backend](interfaces/Backend.md) | - | + +### Type Aliases + +| Type alias | Description | +| :------ | :------ | +| [BackendOptions](type-aliases/BackendOptions.md) | - | +| [CompiledCircuit](type-aliases/CompiledCircuit.md) | - | +| [ProofData](type-aliases/ProofData.md) | - | + +*** + +Generated using [typedoc-plugin-markdown](https://www.npmjs.com/package/typedoc-plugin-markdown) and [TypeDoc](https://typedoc.org/) diff --git a/docs/versioned_docs/version-v0.19.3/noir_js/reference/backend_barretenberg/interfaces/Backend.md b/docs/versioned_docs/version-v0.19.3/noir_js/reference/backend_barretenberg/interfaces/Backend.md new file mode 100644 index 00000000000..3eb9645c8d2 --- /dev/null +++ b/docs/versioned_docs/version-v0.19.3/noir_js/reference/backend_barretenberg/interfaces/Backend.md @@ -0,0 +1,132 @@ +# Backend + +## Methods + +### destroy() + +```ts +destroy(): Promise +``` + +#### Returns + +`Promise`\<`void`\> + +#### Description + +Destroys the backend + +*** + +### generateFinalProof() + +```ts +generateFinalProof(decompressedWitness): Promise +``` + +#### Parameters + +| Parameter | Type | +| :------ | :------ | +| `decompressedWitness` | `Uint8Array` | + +#### Returns + +`Promise`\<[`ProofData`](../type-aliases/ProofData.md)\> + +#### Description + +Generates a final proof (not meant to be verified in another circuit) + +*** + +### generateIntermediateProof() 
+ +```ts +generateIntermediateProof(decompressedWitness): Promise +``` + +#### Parameters + +| Parameter | Type | +| :------ | :------ | +| `decompressedWitness` | `Uint8Array` | + +#### Returns + +`Promise`\<[`ProofData`](../type-aliases/ProofData.md)\> + +#### Description + +Generates an intermediate proof (meant to be verified in another circuit) + +*** + +### generateIntermediateProofArtifacts() + +```ts +generateIntermediateProofArtifacts(proofData, numOfPublicInputs): Promise +``` + +#### Parameters + +| Parameter | Type | +| :------ | :------ | +| `proofData` | [`ProofData`](../type-aliases/ProofData.md) | +| `numOfPublicInputs` | `number` | + +#### Returns + +`Promise`\<`object`\> + +#### Description + +Retrieves the artifacts from a proof in the Field format + +*** + +### verifyFinalProof() + +```ts +verifyFinalProof(proofData): Promise +``` + +#### Parameters + +| Parameter | Type | +| :------ | :------ | +| `proofData` | [`ProofData`](../type-aliases/ProofData.md) | + +#### Returns + +`Promise`\<`boolean`\> + +#### Description + +Verifies a final proof + +*** + +### verifyIntermediateProof() + +```ts +verifyIntermediateProof(proofData): Promise +``` + +#### Parameters + +| Parameter | Type | +| :------ | :------ | +| `proofData` | [`ProofData`](../type-aliases/ProofData.md) | + +#### Returns + +`Promise`\<`boolean`\> + +#### Description + +Verifies an intermediate proof + +*** + +Generated using [typedoc-plugin-markdown](https://www.npmjs.com/package/typedoc-plugin-markdown) and [TypeDoc](https://typedoc.org/) diff --git a/docs/versioned_docs/version-v0.19.3/noir_js/reference/backend_barretenberg/type-aliases/BackendOptions.md b/docs/versioned_docs/version-v0.19.3/noir_js/reference/backend_barretenberg/type-aliases/BackendOptions.md new file mode 100644 index 00000000000..266ade75d17 --- /dev/null +++ b/docs/versioned_docs/version-v0.19.3/noir_js/reference/backend_barretenberg/type-aliases/BackendOptions.md @@ -0,0 +1,19 @@ +# BackendOptions + +```ts +type BackendOptions: object; +``` + +## Description + +An options object, currently only used to specify the number of threads to use. + +## Type declaration + +| Member | Type | Description | +| :------ | :------ | :------ | +| `threads` | `number` | **Description**

Number of threads | + +*** + +Generated using [typedoc-plugin-markdown](https://www.npmjs.com/package/typedoc-plugin-markdown) and [TypeDoc](https://typedoc.org/) diff --git a/docs/versioned_docs/version-v0.19.3/noir_js/reference/backend_barretenberg/type-aliases/CompiledCircuit.md b/docs/versioned_docs/version-v0.19.3/noir_js/reference/backend_barretenberg/type-aliases/CompiledCircuit.md new file mode 100644 index 00000000000..34e0dd04205 --- /dev/null +++ b/docs/versioned_docs/version-v0.19.3/noir_js/reference/backend_barretenberg/type-aliases/CompiledCircuit.md @@ -0,0 +1,20 @@ +# CompiledCircuit + +```ts +type CompiledCircuit: object; +``` + +## Description + +The representation of a compiled circuit + +## Type declaration + +| Member | Type | Description | +| :------ | :------ | :------ | +| `abi` | `Abi` | **Description**

ABI representation of the circuit | +| `bytecode` | `string` | **Description**

The bytecode of the circuit | + +*** + +Generated using [typedoc-plugin-markdown](https://www.npmjs.com/package/typedoc-plugin-markdown) and [TypeDoc](https://typedoc.org/) diff --git a/docs/versioned_docs/version-v0.19.3/noir_js/reference/backend_barretenberg/type-aliases/ProofData.md b/docs/versioned_docs/version-v0.19.3/noir_js/reference/backend_barretenberg/type-aliases/ProofData.md new file mode 100644 index 00000000000..4aeff73d3e4 --- /dev/null +++ b/docs/versioned_docs/version-v0.19.3/noir_js/reference/backend_barretenberg/type-aliases/ProofData.md @@ -0,0 +1,20 @@ +# ProofData + +```ts +type ProofData: object; +``` + +## Description + +The representation of a proof + +## Type declaration + +| Member | Type | Description | +| :------ | :------ | :------ | +| `proof` | `Uint8Array` | **Description**

A byte array representing the proof | +| `publicInputs` | `Uint8Array`[] | **Description**

Public inputs of a proof | + +*** + +Generated using [typedoc-plugin-markdown](https://www.npmjs.com/package/typedoc-plugin-markdown) and [TypeDoc](https://typedoc.org/) diff --git a/docs/versioned_docs/version-v0.19.3/noir_js/reference/backend_barretenberg/typedoc-sidebar.cjs b/docs/versioned_docs/version-v0.19.3/noir_js/reference/backend_barretenberg/typedoc-sidebar.cjs new file mode 100644 index 00000000000..04e662c845f --- /dev/null +++ b/docs/versioned_docs/version-v0.19.3/noir_js/reference/backend_barretenberg/typedoc-sidebar.cjs @@ -0,0 +1,4 @@ +// @ts-check +/** @type {import('@docusaurus/plugin-content-docs').SidebarsConfig} */ +const typedocSidebar = { items: [{"type":"category","label":"Classes","items":[{"type":"doc","id":"noir_js/reference/backend_barretenberg/classes/BarretenbergBackend","label":"BarretenbergBackend"}]},{"type":"category","label":"Interfaces","items":[{"type":"doc","id":"noir_js/reference/backend_barretenberg/interfaces/Backend","label":"Backend"}]},{"type":"category","label":"Type Aliases","items":[{"type":"doc","id":"noir_js/reference/backend_barretenberg/type-aliases/BackendOptions","label":"BackendOptions"},{"type":"doc","id":"noir_js/reference/backend_barretenberg/type-aliases/CompiledCircuit","label":"CompiledCircuit"},{"type":"doc","id":"noir_js/reference/backend_barretenberg/type-aliases/ProofData","label":"ProofData"}]}]}; +module.exports = typedocSidebar.items; \ No newline at end of file diff --git a/docs/versioned_docs/version-v0.19.3/noir_js/reference/noir_js/.nojekyll b/docs/versioned_docs/version-v0.19.3/noir_js/reference/noir_js/.nojekyll new file mode 100644 index 00000000000..e2ac6616add --- /dev/null +++ b/docs/versioned_docs/version-v0.19.3/noir_js/reference/noir_js/.nojekyll @@ -0,0 +1 @@ +TypeDoc added this file to prevent GitHub Pages from using Jekyll. You can turn off this behavior by setting the `githubPages` option to false. \ No newline at end of file diff --git a/docs/versioned_docs/version-v0.19.3/noir_js/reference/noir_js/classes/Noir.md b/docs/versioned_docs/version-v0.19.3/noir_js/reference/noir_js/classes/Noir.md new file mode 100644 index 00000000000..1d7b54a9dca --- /dev/null +++ b/docs/versioned_docs/version-v0.19.3/noir_js/reference/noir_js/classes/Noir.md @@ -0,0 +1,131 @@ +# Noir + +## Constructors + +### new Noir(circuit, backend) + +```ts +new Noir(circuit, backend?): Noir +``` + +#### Parameters + +| Parameter | Type | +| :------ | :------ | +| `circuit` | [`CompiledCircuit`](../type-aliases/CompiledCircuit.md) | +| `backend`? | `Backend` | + +#### Returns + +[`Noir`](Noir.md) + +## Methods + +### destroy() + +```ts +destroy(): Promise +``` + +#### Returns + +`Promise`\<`void`\> + +#### Description + +Destroys the underlying backend instance. + +#### Example + +```typescript +await noir.destroy(); +``` + +*** + +### execute() + +```ts +execute(inputs, foreignCallHandler?): Promise +``` + +#### Parameters + +| Parameter | Type | +| :------ | :------ | +| `inputs` | `InputMap` | +| `foreignCallHandler`? | [`ForeignCallHandler`](../type-aliases/ForeignCallHandler.md) | + +#### Returns + +`Promise`\<`object`\> + +#### Description + +Allows to execute a circuit to get its witness and return value. 
+ +#### Example + +```typescript +async execute(inputs) +``` + +*** + +### generateFinalProof() + +```ts +generateFinalProof(inputs): Promise +``` + +#### Parameters + +| Parameter | Type | +| :------ | :------ | +| `inputs` | `InputMap` | + +#### Returns + +`Promise`\<[`ProofData`](../type-aliases/ProofData.md)\> + +#### Description + +Generates a witness and a proof given an object as input. + +#### Example + +```typescript +async generateFinalProof(input) +``` + +*** + +### verifyFinalProof() + +```ts +verifyFinalProof(proofData): Promise +``` + +#### Parameters + +| Parameter | Type | +| :------ | :------ | +| `proofData` | [`ProofData`](../type-aliases/ProofData.md) | + +#### Returns + +`Promise`\<`boolean`\> + +#### Description + +Instantiates the verification key and verifies a proof. + +#### Example + +```typescript +async verifyFinalProof(proof) +``` + +*** + +Generated using [typedoc-plugin-markdown](https://www.npmjs.com/package/typedoc-plugin-markdown) and [TypeDoc](https://typedoc.org/) diff --git a/docs/versioned_docs/version-v0.19.3/noir_js/reference/noir_js/functions/and.md b/docs/versioned_docs/version-v0.19.3/noir_js/reference/noir_js/functions/and.md new file mode 100644 index 00000000000..c783283e396 --- /dev/null +++ b/docs/versioned_docs/version-v0.19.3/noir_js/reference/noir_js/functions/and.md @@ -0,0 +1,22 @@ +# and() + +```ts +and(lhs, rhs): string +``` + +Performs a bitwise AND operation between `lhs` and `rhs` + +## Parameters + +| Parameter | Type | Description | +| :------ | :------ | :------ | +| `lhs` | `string` | | +| `rhs` | `string` | | + +## Returns + +`string` + +*** + +Generated using [typedoc-plugin-markdown](https://www.npmjs.com/package/typedoc-plugin-markdown) and [TypeDoc](https://typedoc.org/) diff --git a/docs/versioned_docs/version-v0.19.3/noir_js/reference/noir_js/functions/blake2s256.md b/docs/versioned_docs/version-v0.19.3/noir_js/reference/noir_js/functions/blake2s256.md new file mode 100644 index 00000000000..7882d0da8d5 --- /dev/null +++ b/docs/versioned_docs/version-v0.19.3/noir_js/reference/noir_js/functions/blake2s256.md @@ -0,0 +1,21 @@ +# blake2s256() + +```ts +blake2s256(inputs): Uint8Array +``` + +Calculates the Blake2s256 hash of the input bytes + +## Parameters + +| Parameter | Type | Description | +| :------ | :------ | :------ | +| `inputs` | `Uint8Array` | | + +## Returns + +`Uint8Array` + +*** + +Generated using [typedoc-plugin-markdown](https://www.npmjs.com/package/typedoc-plugin-markdown) and [TypeDoc](https://typedoc.org/) diff --git a/docs/versioned_docs/version-v0.19.3/noir_js/reference/noir_js/functions/ecdsa_secp256k1_verify.md b/docs/versioned_docs/version-v0.19.3/noir_js/reference/noir_js/functions/ecdsa_secp256k1_verify.md new file mode 100644 index 00000000000..0ba5783f0d5 --- /dev/null +++ b/docs/versioned_docs/version-v0.19.3/noir_js/reference/noir_js/functions/ecdsa_secp256k1_verify.md @@ -0,0 +1,29 @@ +# ecdsa\_secp256k1\_verify() + +```ts +ecdsa_secp256k1_verify( + hashed_msg, + public_key_x_bytes, + public_key_y_bytes, + signature): boolean +``` + +Calculates the Blake2s256 hash of the input bytes and represents these as a single field element. +Verifies a ECDSA signature over the secp256k1 curve. 
+ +## Parameters + +| Parameter | Type | Description | +| :------ | :------ | :------ | +| `hashed_msg` | `Uint8Array` | | +| `public_key_x_bytes` | `Uint8Array` | | +| `public_key_y_bytes` | `Uint8Array` | | +| `signature` | `Uint8Array` | | + +## Returns + +`boolean` + +*** + +Generated using [typedoc-plugin-markdown](https://www.npmjs.com/package/typedoc-plugin-markdown) and [TypeDoc](https://typedoc.org/) diff --git a/docs/versioned_docs/version-v0.19.3/noir_js/reference/noir_js/functions/ecdsa_secp256r1_verify.md b/docs/versioned_docs/version-v0.19.3/noir_js/reference/noir_js/functions/ecdsa_secp256r1_verify.md new file mode 100644 index 00000000000..0b20ff68957 --- /dev/null +++ b/docs/versioned_docs/version-v0.19.3/noir_js/reference/noir_js/functions/ecdsa_secp256r1_verify.md @@ -0,0 +1,28 @@ +# ecdsa\_secp256r1\_verify() + +```ts +ecdsa_secp256r1_verify( + hashed_msg, + public_key_x_bytes, + public_key_y_bytes, + signature): boolean +``` + +Verifies a ECDSA signature over the secp256r1 curve. + +## Parameters + +| Parameter | Type | Description | +| :------ | :------ | :------ | +| `hashed_msg` | `Uint8Array` | | +| `public_key_x_bytes` | `Uint8Array` | | +| `public_key_y_bytes` | `Uint8Array` | | +| `signature` | `Uint8Array` | | + +## Returns + +`boolean` + +*** + +Generated using [typedoc-plugin-markdown](https://www.npmjs.com/package/typedoc-plugin-markdown) and [TypeDoc](https://typedoc.org/) diff --git a/docs/versioned_docs/version-v0.19.3/noir_js/reference/noir_js/functions/keccak256.md b/docs/versioned_docs/version-v0.19.3/noir_js/reference/noir_js/functions/keccak256.md new file mode 100644 index 00000000000..d10f155ce86 --- /dev/null +++ b/docs/versioned_docs/version-v0.19.3/noir_js/reference/noir_js/functions/keccak256.md @@ -0,0 +1,21 @@ +# keccak256() + +```ts +keccak256(inputs): Uint8Array +``` + +Calculates the Keccak256 hash of the input bytes + +## Parameters + +| Parameter | Type | Description | +| :------ | :------ | :------ | +| `inputs` | `Uint8Array` | | + +## Returns + +`Uint8Array` + +*** + +Generated using [typedoc-plugin-markdown](https://www.npmjs.com/package/typedoc-plugin-markdown) and [TypeDoc](https://typedoc.org/) diff --git a/docs/versioned_docs/version-v0.19.3/noir_js/reference/noir_js/functions/sha256.md b/docs/versioned_docs/version-v0.19.3/noir_js/reference/noir_js/functions/sha256.md new file mode 100644 index 00000000000..6ba4ecac022 --- /dev/null +++ b/docs/versioned_docs/version-v0.19.3/noir_js/reference/noir_js/functions/sha256.md @@ -0,0 +1,21 @@ +# sha256() + +```ts +sha256(inputs): Uint8Array +``` + +Calculates the SHA256 hash of the input bytes + +## Parameters + +| Parameter | Type | Description | +| :------ | :------ | :------ | +| `inputs` | `Uint8Array` | | + +## Returns + +`Uint8Array` + +*** + +Generated using [typedoc-plugin-markdown](https://www.npmjs.com/package/typedoc-plugin-markdown) and [TypeDoc](https://typedoc.org/) diff --git a/docs/versioned_docs/version-v0.19.3/noir_js/reference/noir_js/functions/xor.md b/docs/versioned_docs/version-v0.19.3/noir_js/reference/noir_js/functions/xor.md new file mode 100644 index 00000000000..8d762b895d3 --- /dev/null +++ b/docs/versioned_docs/version-v0.19.3/noir_js/reference/noir_js/functions/xor.md @@ -0,0 +1,22 @@ +# xor() + +```ts +xor(lhs, rhs): string +``` + +Performs a bitwise XOR operation between `lhs` and `rhs` + +## Parameters + +| Parameter | Type | Description | +| :------ | :------ | :------ | +| `lhs` | `string` | | +| `rhs` | `string` | | + +## Returns + +`string` + +*** + 
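+
+A minimal usage sketch (assuming, as the `string` types above suggest, that the operands and the return value are hex-encoded field elements; the exact padding of the returned string may differ):
+
+```typescript
+import { xor } from '@noir-lang/noir_js';
+
+// 0x05 XOR 0x03 should encode the value 6
+const result = xor('0x05', '0x03');
+console.log(result);
+```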
+Generated using [typedoc-plugin-markdown](https://www.npmjs.com/package/typedoc-plugin-markdown) and [TypeDoc](https://typedoc.org/) diff --git a/docs/versioned_docs/version-v0.19.3/noir_js/reference/noir_js/index.md b/docs/versioned_docs/version-v0.19.3/noir_js/reference/noir_js/index.md new file mode 100644 index 00000000000..58902c17b99 --- /dev/null +++ b/docs/versioned_docs/version-v0.19.3/noir_js/reference/noir_js/index.md @@ -0,0 +1,36 @@ +# Noir JS + +## Exports + +### Classes + +| Class | Description | +| :------ | :------ | +| [Noir](classes/Noir.md) | - | + +### Type Aliases + +| Type alias | Description | +| :------ | :------ | +| [CompiledCircuit](type-aliases/CompiledCircuit.md) | - | +| [ForeignCallHandler](type-aliases/ForeignCallHandler.md) | A callback which performs an foreign call and returns the response. | +| [ForeignCallInput](type-aliases/ForeignCallInput.md) | - | +| [ForeignCallOutput](type-aliases/ForeignCallOutput.md) | - | +| [ProofData](type-aliases/ProofData.md) | - | +| [WitnessMap](type-aliases/WitnessMap.md) | - | + +### Functions + +| Function | Description | +| :------ | :------ | +| [and](functions/and.md) | Performs a bitwise AND operation between `lhs` and `rhs` | +| [blake2s256](functions/blake2s256.md) | Calculates the Blake2s256 hash of the input bytes | +| [ecdsa\_secp256k1\_verify](functions/ecdsa_secp256k1_verify.md) | Calculates the Blake2s256 hash of the input bytes and represents these as a single field element. | +| [ecdsa\_secp256r1\_verify](functions/ecdsa_secp256r1_verify.md) | Verifies a ECDSA signature over the secp256r1 curve. | +| [keccak256](functions/keccak256.md) | Calculates the Keccak256 hash of the input bytes | +| [sha256](functions/sha256.md) | Calculates the SHA256 hash of the input bytes | +| [xor](functions/xor.md) | Performs a bitwise XOR operation between `lhs` and `rhs` | + +*** + +Generated using [typedoc-plugin-markdown](https://www.npmjs.com/package/typedoc-plugin-markdown) and [TypeDoc](https://typedoc.org/) diff --git a/docs/versioned_docs/version-v0.19.3/noir_js/reference/noir_js/type-aliases/CompiledCircuit.md b/docs/versioned_docs/version-v0.19.3/noir_js/reference/noir_js/type-aliases/CompiledCircuit.md new file mode 100644 index 00000000000..34e0dd04205 --- /dev/null +++ b/docs/versioned_docs/version-v0.19.3/noir_js/reference/noir_js/type-aliases/CompiledCircuit.md @@ -0,0 +1,20 @@ +# CompiledCircuit + +```ts +type CompiledCircuit: object; +``` + +## Description + +The representation of a compiled circuit + +## Type declaration + +| Member | Type | Description | +| :------ | :------ | :------ | +| `abi` | `Abi` | **Description**

ABI representation of the circuit | +| `bytecode` | `string` | **Description**

The bytecode of the circuit | + +*** + +Generated using [typedoc-plugin-markdown](https://www.npmjs.com/package/typedoc-plugin-markdown) and [TypeDoc](https://typedoc.org/) diff --git a/docs/versioned_docs/version-v0.19.3/noir_js/reference/noir_js/type-aliases/ForeignCallHandler.md b/docs/versioned_docs/version-v0.19.3/noir_js/reference/noir_js/type-aliases/ForeignCallHandler.md new file mode 100644 index 00000000000..812b8b16481 --- /dev/null +++ b/docs/versioned_docs/version-v0.19.3/noir_js/reference/noir_js/type-aliases/ForeignCallHandler.md @@ -0,0 +1,24 @@ +# ForeignCallHandler + +```ts +type ForeignCallHandler: (name, inputs) => Promise; +``` + +A callback which performs an foreign call and returns the response. + +## Parameters + +| Parameter | Type | Description | +| :------ | :------ | :------ | +| `name` | `string` | The identifier for the type of foreign call being performed. | +| `inputs` | [`ForeignCallInput`](ForeignCallInput.md)[] | An array of hex encoded inputs to the foreign call. | + +## Returns + +`Promise`\<[`ForeignCallOutput`](ForeignCallOutput.md)[]\> + +outputs - An array of hex encoded outputs containing the results of the foreign call. + +*** + +Generated using [typedoc-plugin-markdown](https://www.npmjs.com/package/typedoc-plugin-markdown) and [TypeDoc](https://typedoc.org/) diff --git a/docs/versioned_docs/version-v0.19.3/noir_js/reference/noir_js/type-aliases/ForeignCallInput.md b/docs/versioned_docs/version-v0.19.3/noir_js/reference/noir_js/type-aliases/ForeignCallInput.md new file mode 100644 index 00000000000..dd95809186a --- /dev/null +++ b/docs/versioned_docs/version-v0.19.3/noir_js/reference/noir_js/type-aliases/ForeignCallInput.md @@ -0,0 +1,9 @@ +# ForeignCallInput + +```ts +type ForeignCallInput: string[]; +``` + +*** + +Generated using [typedoc-plugin-markdown](https://www.npmjs.com/package/typedoc-plugin-markdown) and [TypeDoc](https://typedoc.org/) diff --git a/docs/versioned_docs/version-v0.19.3/noir_js/reference/noir_js/type-aliases/ForeignCallOutput.md b/docs/versioned_docs/version-v0.19.3/noir_js/reference/noir_js/type-aliases/ForeignCallOutput.md new file mode 100644 index 00000000000..b71fb78a946 --- /dev/null +++ b/docs/versioned_docs/version-v0.19.3/noir_js/reference/noir_js/type-aliases/ForeignCallOutput.md @@ -0,0 +1,9 @@ +# ForeignCallOutput + +```ts +type ForeignCallOutput: string | string[]; +``` + +*** + +Generated using [typedoc-plugin-markdown](https://www.npmjs.com/package/typedoc-plugin-markdown) and [TypeDoc](https://typedoc.org/) diff --git a/docs/versioned_docs/version-v0.19.3/noir_js/reference/noir_js/type-aliases/ProofData.md b/docs/versioned_docs/version-v0.19.3/noir_js/reference/noir_js/type-aliases/ProofData.md new file mode 100644 index 00000000000..4aeff73d3e4 --- /dev/null +++ b/docs/versioned_docs/version-v0.19.3/noir_js/reference/noir_js/type-aliases/ProofData.md @@ -0,0 +1,20 @@ +# ProofData + +```ts +type ProofData: object; +``` + +## Description + +The representation of a proof + +## Type declaration + +| Member | Type | Description | +| :------ | :------ | :------ | +| `proof` | `Uint8Array` | **Description**

A byte array representing the proof | +| `publicInputs` | `Uint8Array`[] | **Description**

Public inputs of a proof | + +*** + +Generated using [typedoc-plugin-markdown](https://www.npmjs.com/package/typedoc-plugin-markdown) and [TypeDoc](https://typedoc.org/) diff --git a/docs/versioned_docs/version-v0.19.3/noir_js/reference/noir_js/type-aliases/WitnessMap.md b/docs/versioned_docs/version-v0.19.3/noir_js/reference/noir_js/type-aliases/WitnessMap.md new file mode 100644 index 00000000000..258c46f9d0c --- /dev/null +++ b/docs/versioned_docs/version-v0.19.3/noir_js/reference/noir_js/type-aliases/WitnessMap.md @@ -0,0 +1,9 @@ +# WitnessMap + +```ts +type WitnessMap: Map; +``` + +*** + +Generated using [typedoc-plugin-markdown](https://www.npmjs.com/package/typedoc-plugin-markdown) and [TypeDoc](https://typedoc.org/) diff --git a/docs/versioned_docs/version-v0.19.3/noir_js/reference/noir_js/typedoc-sidebar.cjs b/docs/versioned_docs/version-v0.19.3/noir_js/reference/noir_js/typedoc-sidebar.cjs new file mode 100644 index 00000000000..c18318850d0 --- /dev/null +++ b/docs/versioned_docs/version-v0.19.3/noir_js/reference/noir_js/typedoc-sidebar.cjs @@ -0,0 +1,4 @@ +// @ts-check +/** @type {import('@docusaurus/plugin-content-docs').SidebarsConfig} */ +const typedocSidebar = { items: [{"type":"category","label":"Classes","items":[{"type":"doc","id":"noir_js/reference/noir_js/classes/Noir","label":"Noir"}]},{"type":"category","label":"Type Aliases","items":[{"type":"doc","id":"noir_js/reference/noir_js/type-aliases/CompiledCircuit","label":"CompiledCircuit"},{"type":"doc","id":"noir_js/reference/noir_js/type-aliases/ForeignCallHandler","label":"ForeignCallHandler"},{"type":"doc","id":"noir_js/reference/noir_js/type-aliases/ForeignCallInput","label":"ForeignCallInput"},{"type":"doc","id":"noir_js/reference/noir_js/type-aliases/ForeignCallOutput","label":"ForeignCallOutput"},{"type":"doc","id":"noir_js/reference/noir_js/type-aliases/ProofData","label":"ProofData"},{"type":"doc","id":"noir_js/reference/noir_js/type-aliases/WitnessMap","label":"WitnessMap"}]},{"type":"category","label":"Functions","items":[{"type":"doc","id":"noir_js/reference/noir_js/functions/and","label":"and"},{"type":"doc","id":"noir_js/reference/noir_js/functions/blake2s256","label":"blake2s256"},{"type":"doc","id":"noir_js/reference/noir_js/functions/ecdsa_secp256k1_verify","label":"ecdsa_secp256k1_verify"},{"type":"doc","id":"noir_js/reference/noir_js/functions/ecdsa_secp256r1_verify","label":"ecdsa_secp256r1_verify"},{"type":"doc","id":"noir_js/reference/noir_js/functions/keccak256","label":"keccak256"},{"type":"doc","id":"noir_js/reference/noir_js/functions/sha256","label":"sha256"},{"type":"doc","id":"noir_js/reference/noir_js/functions/xor","label":"xor"}]}]}; +module.exports = typedocSidebar.items; \ No newline at end of file diff --git a/docs/versioned_docs/version-v0.19.3/standard_library/black_box_fns.md b/docs/versioned_docs/version-v0.19.3/standard_library/black_box_fns.md new file mode 100644 index 00000000000..e0c6d475c1f --- /dev/null +++ b/docs/versioned_docs/version-v0.19.3/standard_library/black_box_fns.md @@ -0,0 +1,46 @@ +--- +title: Black Box Functions +description: Black box functions are functions in Noir that rely on backends implementing support for specialized constraints. +keywords: [noir, black box functions] +--- + +Black box functions are functions in Noir that rely on backends implementing support for specialized constraints. This makes certain zk-snark unfriendly computations cheaper than if they were implemented in Noir. 
+ +:::warning + +It is likely that not all backends will support a particular black box function. + +::: + +Because it is not guaranteed that all backends will support black box functions, it is possible that certain Noir programs won't compile against a particular backend if they use an unsupported black box function. It is possible to fallback to less efficient implementations written in Noir/ACIR in some cases. + +Black box functions are specified with the `#[foreign(black_box_fn)]` attribute. For example, the SHA256 function in the Noir [source code](https://github.com/noir-lang/noir/blob/v0.5.1/noir_stdlib/src/hash.nr) looks like: + +```rust +#[foreign(sha256)] +fn sha256(_input : [u8; N]) -> [u8; 32] {} +``` + +## Function list + +Here is a list of the current black box functions that are supported by UltraPlonk: + +- AES +- [SHA256](./cryptographic_primitives/hashes#sha256) +- [Schnorr signature verification](./cryptographic_primitives/schnorr) +- [Blake2s](./cryptographic_primitives/hashes#blake2s) +- [Pedersen Hash](./cryptographic_primitives/hashes#pedersen_hash) +- [Pedersen Commitment](./cryptographic_primitives/hashes#pedersen_commitment) +- [HashToField128Security](./cryptographic_primitives/hashes#hash_to_field) +- [ECDSA signature verification](./cryptographic_primitives/ecdsa_sig_verification) +- [Fixed base scalar multiplication](./cryptographic_primitives/scalar) +- [Compute merkle root](./merkle_trees#compute_merkle_root) +- AND +- XOR +- RANGE +- [Keccak256](./cryptographic_primitives/hashes#keccak256) +- [Recursive proof verification](./recursion) + +Most black box functions are included as part of the Noir standard library, however `AND`, `XOR` and `RANGE` are used as part of the Noir language syntax. For instance, using the bitwise operator `&` will invoke the `AND` black box function. To ensure compatibility across backends, the ACVM has fallback implementations of `AND`, `XOR` and `RANGE` defined in its standard library which it can seamlessly fallback to if the backend doesn't support them. + +You can view the black box functions defined in the ACVM code [here](https://github.com/noir-lang/noir/blob/master/acvm-repo/acir/src/circuit/black_box_functions.rs). diff --git a/docs/versioned_docs/version-v0.10.5/standard_library/cryptographic_primitives.md b/docs/versioned_docs/version-v0.19.3/standard_library/cryptographic_primitives.md similarity index 100% rename from docs/versioned_docs/version-v0.10.5/standard_library/cryptographic_primitives.md rename to docs/versioned_docs/version-v0.19.3/standard_library/cryptographic_primitives.md diff --git a/docs/versioned_docs/version-v0.19.3/standard_library/cryptographic_primitives/00_hashes.mdx b/docs/versioned_docs/version-v0.19.3/standard_library/cryptographic_primitives/00_hashes.mdx new file mode 100644 index 00000000000..76745196681 --- /dev/null +++ b/docs/versioned_docs/version-v0.19.3/standard_library/cryptographic_primitives/00_hashes.mdx @@ -0,0 +1,167 @@ +--- +title: Hash methods +description: + Learn about the cryptographic primitives ready to use for any Noir project, including sha256, + blake2s, pedersen, mimc_bn254 and mimc +keywords: + [cryptographic primitives, Noir project, sha256, blake2s, pedersen, mimc_bn254, mimc, hash] +--- + +import BlackBoxInfo from '@site/src/components/Notes/_blackbox.mdx'; + +## sha256 + +Given an array of bytes, returns the resulting sha256 hash. 
+ +```rust +fn sha256(_input : [u8]) -> [u8; 32] +``` + +example: + +```rust +fn main() { + let x = [163, 117, 178, 149]; // some random bytes + let hash = std::hash::sha256(x); +} +``` + + + +## blake2s + +Given an array of bytes, returns an array with the Blake2 hash + +```rust +fn blake2s(_input : [u8]) -> [u8; 32] +``` + +example: + +```rust +fn main() { + let x = [163, 117, 178, 149]; // some random bytes + let hash = std::hash::blake2s(x); +} +``` + + + +## pedersen_hash + +Given an array of Fields, returns the Pedersen hash. + +```rust +fn pedersen_hash(_input : [Field]) -> Field +``` + +example: + +```rust +fn main() { + let x = [163, 117, 178, 149]; // some random bytes + let hash = std::hash::pedersen_hash(x); +} +``` + + + + + +## pedersen_commitment + +Given an array of Fields, returns the Pedersen commitment. + +```rust +fn pedersen_commitment(_input : [Field]) -> [Field; 2] +``` + +example: + +```rust +fn main() { + let x = [163, 117, 178, 149]; // some random bytes + let commitment = std::hash::pedersen_commitment(x); +} +``` + + + +## keccak256 + +Given an array of bytes (`u8`), returns the resulting keccak hash as an array of 32 bytes +(`[u8; 32]`). Specify a message_size to hash only the first `message_size` bytes +of the input. + +```rust +fn keccak256(_input : [u8; N], _message_size: u32) -> [u8; 32] +``` + +example: + +```rust +fn main() { + let x = [163, 117, 178, 149]; // some random bytes + let message_size = 4; + let hash = std::hash::keccak256(x, message_size); +} +``` + + + +## poseidon + +Given an array of Fields, returns a new Field with the Poseidon Hash. Mind that you need to specify +how many inputs are there to your Poseidon function. + +```rust +// example for hash_1, hash_2 accepts an array of length 2, etc +fn hash_1(input: [Field; 1]) -> Field +``` + +example: + +```rust +fn main() +{ + let hash1 = std::hash::poseidon::bn254::hash_2([1, 2]); + assert(hash1 == 0x115cc0f5e7d690413df64c6b9662e9cf2a3617f2743245519e19607a4417189a); +} +``` + +## mimc_bn254 and mimc + +`mimc_bn254` is `mimc`, but with hardcoded parameters for the BN254 curve. You can use it by +providing an array of Fields, and it returns a Field with the hash. You can use the `mimc` method if +you're willing to input your own constants: + +```rust +fn mimc(x: Field, k: Field, constants: [Field; N], exp : Field) -> Field +``` + +otherwise, use the `mimc_bn254` method: + +```rust +fn mimc_bn254(array: [Field; N]) -> Field +``` + +example: + +```rust + +fn main() { + let x = [163, 117, 178, 149]; // some random bytes + let hash = std::hash::mimc::mimc_bn254(x); +} +``` + +## hash_to_field + +```rust +fn hash_to_field(_input : [Field; N]) -> Field {} +``` + +Calculates the `blake2s` hash of the inputs and returns the hash modulo the field modulus to return +a value which can be represented as a `Field`. 
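+
+A minimal usage sketch, following the same pattern as the sections above (the input values here are arbitrary):
+
+```rust
+fn main() {
+    let x = [163, 117, 178, 149]; // some arbitrary field elements
+    let hash = std::hash::hash_to_field(x);
+}
+```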
+ + diff --git a/docs/versioned_docs/version-v0.19.3/standard_library/cryptographic_primitives/01_scalar.mdx b/docs/versioned_docs/version-v0.19.3/standard_library/cryptographic_primitives/01_scalar.mdx new file mode 100644 index 00000000000..c7eed820a80 --- /dev/null +++ b/docs/versioned_docs/version-v0.19.3/standard_library/cryptographic_primitives/01_scalar.mdx @@ -0,0 +1,27 @@ +--- +title: Scalar multiplication +description: See how you can perform scalar multiplications over a fixed base in Noir +keywords: [cryptographic primitives, Noir project, scalar multiplication] +--- + +import BlackBoxInfo from '@site/src/components/Notes/_blackbox.mdx'; + +## scalar_mul::fixed_base_embedded_curve + +Performs scalar multiplication over the embedded curve whose coordinates are defined by the +configured noir field. For the BN254 scalar field, this is BabyJubJub or Grumpkin. + +```rust +fn fixed_base_embedded_curve(_input : Field) -> [Field; 2] +``` + +example + +```rust +fn main(x : Field) { + let scal = std::scalar_mul::fixed_base_embedded_curve(x); + std::println(scal); +} +``` + + diff --git a/docs/versioned_docs/version-v0.19.3/standard_library/cryptographic_primitives/02_schnorr.mdx b/docs/versioned_docs/version-v0.19.3/standard_library/cryptographic_primitives/02_schnorr.mdx new file mode 100644 index 00000000000..c184ce28120 --- /dev/null +++ b/docs/versioned_docs/version-v0.19.3/standard_library/cryptographic_primitives/02_schnorr.mdx @@ -0,0 +1,37 @@ +--- +title: Schnorr Signatures +description: Learn how you can verify Schnorr signatures using Noir +keywords: [cryptographic primitives, Noir project, schnorr, signatures] +--- + +import BlackBoxInfo from '@site/src/components/Notes/_blackbox.mdx'; + +## schnorr::verify_signature + +Verifier for Schnorr signatures over the embedded curve (for BN254 it is Grumpkin). + +```rust +fn verify_signature(_public_key_x: Field, _public_key_y: Field, _signature: [u8; 64], _message: [u8]) -> bool +``` + +where `_signature` can be generated like so using the npm package +[@noir-lang/barretenberg](https://www.npmjs.com/package/@noir-lang/barretenberg) + +```js +const { BarretenbergWasm } = require('@noir-lang/barretenberg/dest/wasm'); +const { Schnorr } = require('@noir-lang/barretenberg/dest/crypto/schnorr'); + +... + +const barretenberg = await BarretenbergWasm.new(); +const schnorr = new Schnorr(barretenberg); +const pubKey = schnorr.computePublicKey(privateKey); +const message = ... +const signature = Array.from( + schnorr.constructSignature(hash, privateKey).toBuffer() +); + +... +``` + + diff --git a/docs/versioned_docs/version-v0.19.3/standard_library/cryptographic_primitives/03_ecdsa_sig_verification.mdx b/docs/versioned_docs/version-v0.19.3/standard_library/cryptographic_primitives/03_ecdsa_sig_verification.mdx new file mode 100644 index 00000000000..72bce984821 --- /dev/null +++ b/docs/versioned_docs/version-v0.19.3/standard_library/cryptographic_primitives/03_ecdsa_sig_verification.mdx @@ -0,0 +1,45 @@ +--- +title: ECDSA Signature Verification +description: Learn about the cryptographic primitives regarding ECDSA over the secp256k1 and secp256r1 curves +keywords: [cryptographic primitives, Noir project, ecdsa, secp256k1, secp256r1, signatures] +--- + +import BlackBoxInfo from '@site/src/components/Notes/_blackbox.mdx'; + +Noir supports ECDSA signatures verification over the secp256k1 and secp256r1 curves. 
+ +## ecdsa_secp256k1::verify_signature + +Verifier for ECDSA Secp256k1 signatures + +```rust +fn verify_signature(_public_key_x : [u8; 32], _public_key_y : [u8; 32], _signature: [u8; 64], _message: [u8]) -> bool +``` + +example: + +```rust +fn main(hashed_message : [u8;32], pub_key_x : [u8;32], pub_key_y : [u8;32], signature : [u8;64]) { + let valid_signature = std::ecdsa_secp256k1::verify_signature(pub_key_x, pub_key_y, signature, hashed_message); + assert(valid_signature); +} +``` + +## ecdsa_secp256r1::verify_signature + +Verifier for ECDSA Secp256r1 signatures + +```rust +fn verify_signature(_public_key_x : [u8; 32], _public_key_y : [u8; 32], _signature: [u8; 64], _message: [u8]) -> bool +``` + +example: + +```rust +fn main(hashed_message : [u8;32], pub_key_x : [u8;32], pub_key_y : [u8;32], signature : [u8;64]) { + let valid_signature = std::ecdsa_secp256r1::verify_signature(pub_key_x, pub_key_y, signature, hashed_message); + assert(valid_signature); +} +``` + + diff --git a/docs/versioned_docs/version-v0.10.5/standard_library/cryptographic_primitives/04_ec_primitives.md b/docs/versioned_docs/version-v0.19.3/standard_library/cryptographic_primitives/04_ec_primitives.md similarity index 100% rename from docs/versioned_docs/version-v0.10.5/standard_library/cryptographic_primitives/04_ec_primitives.md rename to docs/versioned_docs/version-v0.19.3/standard_library/cryptographic_primitives/04_ec_primitives.md diff --git a/docs/versioned_docs/version-v0.19.3/standard_library/cryptographic_primitives/05_eddsa.mdx b/docs/versioned_docs/version-v0.19.3/standard_library/cryptographic_primitives/05_eddsa.mdx new file mode 100644 index 00000000000..9a5beb55ee9 --- /dev/null +++ b/docs/versioned_docs/version-v0.19.3/standard_library/cryptographic_primitives/05_eddsa.mdx @@ -0,0 +1,17 @@ +--- +title: EdDSA Verification +description: Learn about the cryptographic primitives regarding EdDSA +keywords: [cryptographic primitives, Noir project, eddsa, signatures] +--- + +import BlackBoxInfo from '@site/src/components/Notes/_blackbox.mdx'; + +## eddsa::eddsa_poseidon_verify + +Verifier for EdDSA signatures + +```rust +fn eddsa_poseidon_verify(public_key_x : Field, public_key_y : Field, signature_s: Field, signature_r8_x: Field, signature_r8_y: Field, message: Field) -> bool +``` + + diff --git a/docs/versioned_docs/version-v0.19.3/standard_library/logging.md b/docs/versioned_docs/version-v0.19.3/standard_library/logging.md new file mode 100644 index 00000000000..4ba0fe0e656 --- /dev/null +++ b/docs/versioned_docs/version-v0.19.3/standard_library/logging.md @@ -0,0 +1,62 @@ +--- +title: Logging +description: + Learn how to use the println statement for debugging in Noir with this tutorial. Understand the + basics of logging in Noir and how to implement it in your code. +keywords: + [ + noir logging, + println statement, + debugging in noir, + noir std library, + logging tutorial, + basic logging in noir, + noir logging implementation, + noir debugging techniques, + rust, + ] +--- + +The standard library provides a familiar `println` statement you can use. Despite being a limited +implementation of rust's `println!` macro, this construct can be useful for debugging. + +You can print the output of println statements in your Noir code by using the `nargo execute` command or the `--show-output` flag when using `nargo test` (provided there are println statements in your tests). + +It is recommended to use `nargo execute` if you want to debug failing constraints with `println` statements. 
This is because every input in a test is a constant rather than a witness, so a failing constraint is reported as an error during compilation, whereas `println` output is only produced during execution (which comes after compilation). As a result, `println` will not work for failed constraints caught at compile time. + +The `println` statement is unconstrained, so it works for outputting integers, fields, strings, and even structs or expressions. For example: + +```rust +use dep::std; + +struct Person { + age : Field, + height : Field, +} + +fn main(age : Field, height : Field) { + let person = Person { age : age, height : height }; + std::println(person); + std::println(age + height); + std::println("Hello world!"); +} + +``` + +You can print multiple different types in the same statement and even within the same string, using the new "fmtstr" type. A `fmtstr` is specified in the same way as a normal string; it just needs to be prepended with an "f" character: + +```rust + let fmt_str = f"i: {i}, j: {j}"; + std::println(fmt_str); + + let s = myStruct { y: x, x: y }; + std::println(s); + + std::println(f"i: {i}, s: {s}"); + + std::println(x); + std::println([x, y]); + + let foo = fooStruct { my_struct: s, foo: 15 }; + std::println(f"s: {s}, foo: {foo}"); +``` diff --git a/docs/versioned_docs/version-v0.19.3/standard_library/merkle_trees.md b/docs/versioned_docs/version-v0.19.3/standard_library/merkle_trees.md new file mode 100644 index 00000000000..dc383a1426b --- /dev/null +++ b/docs/versioned_docs/version-v0.19.3/standard_library/merkle_trees.md @@ -0,0 +1,58 @@ +--- +title: Merkle Trees +description: Learn about Merkle Trees in Noir with this tutorial. Explore the basics of computing a merkle root using a proof, with examples. +keywords: + [ + Merkle trees in Noir, + Noir programming language, + check membership, + computing root from leaf, + Noir Merkle tree implementation, + Merkle tree tutorial, + Merkle tree code examples, + Noir libraries, + pedersen hash., + ] +--- + +## compute_merkle_root + +Returns the root of the tree from the provided leaf and its hash path, using a [Pedersen hash](cryptographic_primitives/00_hashes.mdx#pedersen_hash). + +```rust +fn compute_merkle_root(leaf : Field, index : Field, hash_path: [Field]) -> Field +``` + +example: + +```rust +/** + // these values are for this example only + index = "0" + priv_key = "0x000000000000000000000000000000000000000000000000000000616c696365" + secret = "0x1929ea3ab8d9106a899386883d9428f8256cfedb3c4f6b66bf4aa4d28a79988f" + note_hash_path = [ + "0x1e61bdae0f027b1b2159e1f9d3f8d00fa668a952dddd822fda80dc745d6f65cc", + "0x0e4223f3925f98934393c74975142bd73079ab0621f4ee133cee050a3c194f1a", + "0x2fd7bb412155bf8693a3bd2a3e7581a679c95c68a052f835dddca85fa1569a40" + ] + */ +fn main(index: Field, priv_key: Field, secret: Field, note_hash_path: [Field; 3]) { + + let pubkey = std::scalar_mul::fixed_base_embedded_curve(priv_key); + let pubkey_x = pubkey[0]; + let pubkey_y = pubkey[1]; + let note_commitment = std::hash::pedersen([pubkey_x, pubkey_y, secret]); + + let root = std::merkle::compute_merkle_root(note_commitment[0], index, note_hash_path); + std::println(root); +} +``` + +To check merkle tree membership: + +1. Include a merkle root as a program input. +2. Compute the merkle root of a given leaf, index and hash path. +3. Assert the merkle roots are equal. + +For more info about merkle trees, see the Wikipedia [page](https://en.wikipedia.org/wiki/Merkle_tree). 
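+
+Putting the three membership-check steps above together, a minimal sketch could look like the following (assuming a depth-3 tree; the parameter names are illustrative):
+
+```rust
+fn main(leaf: Field, index: Field, hash_path: [Field; 3], root: pub Field) {
+    // recompute the root from the private leaf, index and hash path
+    let computed_root = std::merkle::compute_merkle_root(leaf, index, hash_path);
+    // constrain it to equal the publicly supplied root
+    assert(computed_root == root);
+}
+```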
diff --git a/docs/versioned_docs/version-v0.10.5/standard_library/options.md b/docs/versioned_docs/version-v0.19.3/standard_library/options.md similarity index 100% rename from docs/versioned_docs/version-v0.10.5/standard_library/options.md rename to docs/versioned_docs/version-v0.19.3/standard_library/options.md diff --git a/docs/versioned_docs/version-v0.10.5/standard_library/recursion.md b/docs/versioned_docs/version-v0.19.3/standard_library/recursion.md similarity index 100% rename from docs/versioned_docs/version-v0.10.5/standard_library/recursion.md rename to docs/versioned_docs/version-v0.19.3/standard_library/recursion.md diff --git a/docs/versioned_docs/version-v0.6.0/standard_library/zeroed.md b/docs/versioned_docs/version-v0.19.3/standard_library/zeroed.md similarity index 100% rename from docs/versioned_docs/version-v0.6.0/standard_library/zeroed.md rename to docs/versioned_docs/version-v0.19.3/standard_library/zeroed.md diff --git a/docs/versioned_docs/version-v0.19.4/examples/merkle-proof.mdx b/docs/versioned_docs/version-v0.19.4/examples/merkle-proof.mdx new file mode 100644 index 00000000000..832fb4bb55e --- /dev/null +++ b/docs/versioned_docs/version-v0.19.4/examples/merkle-proof.mdx @@ -0,0 +1,48 @@ +--- +title: Merkle Proof Membership +description: + Learn how to use merkle membership proof in Noir to prove that a given leaf is a member of a + merkle tree with a specified root, at a given index. +keywords: + [merkle proof, merkle membership proof, Noir, rust, hash function, Pedersen, sha256, merkle tree] +--- + +Let's walk through an example of a merkle membership proof in Noir that proves that a given leaf is +in a merkle tree. + +```rust +use dep::std; + +fn main(message : [Field; 62], index : Field, hashpath : [Field; 40], root : Field) { + let leaf = std::hash::hash_to_field(message); + let merkle_root = std::merkle::compute_merkle_root(leaf, index, hashpath); + assert(merkle_root == root); +} + +``` + +The message is hashed using `hash_to_field`. The specific hash function that is being used is chosen +by the backend. The only requirement is that this hash function can heuristically be used as a +random oracle. If only collision resistance is needed, then one can call `std::hash::pedersen_hash` +instead. + +```rust +let leaf = std::hash::hash_to_field(message); +``` + +The leaf is then passed to the `compute_merkle_root` function along with its index and hash path. The returned root can then be asserted to be the same as the provided root. + +```rust +let merkle_root = std::merkle::compute_merkle_root(leaf, index, hashpath); +assert (merkle_root == root); +``` + +> **Note:** It is possible to re-implement the merkle tree without the standard library. +> However, for most use cases, the standard library implementation is enough. In general, the standard library will always opt to be +> as conservative as possible, while striking a balance with efficiency. + +As an example, the merkle membership proof only requires a hash function that has collision +resistance, hence a hash function like Pedersen is allowed, which in most cases is more efficient +than the even more conservative sha256. 
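+
+If collision resistance alone is enough for your use case, a sketch of the same circuit with `pedersen_hash` swapped in for `hash_to_field` (an illustration, not part of the original example) could look like:
+
+```rust
+use dep::std;
+
+fn main(message : [Field; 62], index : Field, hashpath : [Field; 40], root : Field) {
+    // Pedersen is collision resistant, which is all this membership proof needs
+    let leaf = std::hash::pedersen_hash(message);
+    let merkle_root = std::merkle::compute_merkle_root(leaf, index, hashpath);
+    assert(merkle_root == root);
+}
+```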
+ +[View an example on the starter repo](https://github.com/noir-lang/noir-examples/blob/3ea09545cabfa464124ec2f3ea8e60c608abe6df/stealthdrop/circuits/src/main.nr#L20) diff --git a/docs/versioned_docs/version-v0.19.4/getting_started/00_nargo_installation.md b/docs/versioned_docs/version-v0.19.4/getting_started/00_nargo_installation.md new file mode 100644 index 00000000000..7d3c88c7693 --- /dev/null +++ b/docs/versioned_docs/version-v0.19.4/getting_started/00_nargo_installation.md @@ -0,0 +1,249 @@ +--- +title: Nargo Installation +description: + nargo is a command line tool for interacting with Noir programs (e.g. compiling, proving, + verifying and more). Learn how to install and use Nargo for your projects with this comprehensive + guide. +keywords: [Nargo, command line tool, Noir programs, installation guide, how to use Nargo] +--- + +`nargo` is a command line tool for interacting with Noir programs (e.g. compiling, proving, +verifying and more). + +Alternatively, the interactions can also be performed in [NoirJS](../noir_js/noir_js.md). + +### UltraPlonk + +Nargo versions \<0.5.0 of `aztec_backend` and `aztec_wasm_backend` are based on the TurboPlonk +version of Aztec Backend, which lacks efficient implementations of useful primitives (e.g. Keccak256 in 18k constraints, ECDSA verification in 36k constraints) that the UltraPlonk version offers. + +## Installation + +There are four approaches for installing Nargo: + +- [Option 1: Noirup](#option-1-noirup) +- [Option 2: Binaries](#option-2-binaries) +- [Option 3: Compile from Source](#option-3-compile-from-source) +- [Option 4: WSL for Windows](#option-4-wsl-for-windows) + +Optionally you can also install [Noir VS Code extension] for syntax highlighting. + +### Option 1: Noirup + +If you're on OSX or Linux, the easiest way to start using Noir and Nargo is via noirup. Just open a +terminal and run: + +```bash +curl -L https://raw.githubusercontent.com/noir-lang/noirup/main/install | bash +``` + +Close the terminal, open another one, and run + +```bash +noirup +``` + +Done, you should have the latest version working. You can check with `nargo --version`. + +You can also install nightlies, specific versions +or branches, check out the [noirup repository](https://github.com/noir-lang/noirup) for more +information. + +#### GitHub Actions + +You can use `noirup` with GitHub Actions for CI/CD and automated testing. It is as simple as +installing `noirup` and running tests in your GitHub Action `yml` file. + +See the +[config file](https://github.com/TomAFrench/noir-hashes/blob/master/.github/workflows/noir.yml) in +this repo containing hash functions in Noir for an example. + +#### Nightly versions + +To install the nightly version of Noir (updated daily) run: + +```bash +noirup -n +``` + +### Option 2: Binaries + +See [GitHub Releases](https://github.com/noir-lang/noir/releases) for the latest and previous +platform specific binaries. + +#### Step 1 + +Paste and run the following in the terminal to extract and install the binary: + +> **macOS / Linux:** If you are prompted with `Permission denied` when running commands, prepend +> `sudo` and re-run it. 
+ +##### macOS (Apple Silicon) + +```bash +mkdir -p $HOME/.nargo/bin && \ +curl -o $HOME/.nargo/bin/nargo-aarch64-apple-darwin.tar.gz -L https://github.com/noir-lang/noir/releases/download/v0.6.0/nargo-aarch64-apple-darwin.tar.gz && \ +tar -xvf $HOME/.nargo/bin/nargo-aarch64-apple-darwin.tar.gz -C $HOME/.nargo/bin/ && \ +echo '\nexport PATH=$PATH:$HOME/.nargo/bin' >> ~/.zshrc && \ +source ~/.zshrc +``` + +##### macOS (Intel) + +```bash +mkdir -p $HOME/.nargo/bin && \ +curl -o $HOME/.nargo/bin/nargo-x86_64-apple-darwin.tar.gz -L https://github.com/noir-lang/noir/releases/download/v0.6.0/nargo-x86_64-apple-darwin.tar.gz && \ +tar -xvf $HOME/.nargo/bin/nargo-x86_64-apple-darwin.tar.gz -C $HOME/.nargo/bin/ && \ +echo '\nexport PATH=$PATH:$HOME/.nargo/bin' >> ~/.zshrc && \ +source ~/.zshrc +``` + +##### Linux (Bash) + +```bash +mkdir -p $HOME/.nargo/bin && \ +curl -o $HOME/.nargo/bin/nargo-x86_64-unknown-linux-gnu.tar.gz -L https://github.com/noir-lang/noir/releases/download/v0.6.0/nargo-x86_64-unknown-linux-gnu.tar.gz && \ +tar -xvf $HOME/.nargo/bin/nargo-x86_64-unknown-linux-gnu.tar.gz -C $HOME/.nargo/bin/ && \ +echo -e '\nexport PATH=$PATH:$HOME/.nargo/bin' >> ~/.bashrc && \ +source ~/.bashrc +``` + +#### Step 2 + +Check if the installation was successful by running `nargo --help`. + +> **macOS:** If you are prompted with an OS alert, right-click and open the _nargo_ executable from +> Finder. Close the new terminal popped up and `nargo` should now be accessible. + +For a successful installation, you should see something similar to the following after running the +command: + +```sh +$ nargo --help + +Noir's package manager + +Usage: nargo + +Commands: + check Checks the constraint system for errors + codegen-verifier Generates a Solidity verifier smart contract for the program + compile Compile the program and its secret execution trace into ACIR format + new Create a new binary project + execute Executes a circuit to calculate its return value + prove Create proof for this program. The proof is returned as a hex encoded string + verify Given a proof and a program, verify whether the proof is valid + test Run the tests for this program + gates Counts the occurrences of different gates in circuit + help Print this message or the help of the given subcommand(s) +``` + +### Option 3: Compile from Source + +Due to the large number of native dependencies, Noir projects uses [Nix](https://nixos.org/) and [direnv](https://direnv.net/) to streamline the development experience. It helps mitigating issues commonly associated with dependency management, such as conflicts between required package versions for different projects (often referred to as "dependency hell"). + +Combined with direnv, which automatically sets or clears environment variables based on the directory, it further simplifies the development process by seamlessly integrating with the developer's shell, facilitating an efficient and reliable workflow for managing and deploying Noir projects with multiple dependencies. + +#### Setting up your environment + +For the best experience, please follow these instructions to setup your environment: + +1. Install Nix following [their guide](https://nixos.org/download.html) for your operating system. +2. Create the file `~/.config/nix/nix.conf` with the contents: + +```ini +experimental-features = nix-command +extra-experimental-features = flakes +``` + +3. Install direnv into your Nix profile by running: + +```sh +nix profile install nixpkgs#direnv +``` + +4. 
Add direnv to your shell following [their guide](https://direnv.net/docs/hook.html). + 1. For bash or zshell, add `eval "$(direnv hook bash)"` or `eval "$(direnv hook zsh)"` to your ~/.bashrc or ~/.zshrc file, respectively. +5. Restart your shell. + +#### Shell & editor experience + +Now that your environment is set up, you can get to work on the project. + +1. Clone the repository, such as: + +```sh +git clone git@github.com:noir-lang/noir +``` + +> Replacing `noir` with whichever repository you want to work on. + +2. Navigate to the directory: + +```sh +cd noir +``` + +> Replacing `noir` with whichever repository you cloned. + +3. You should see a **direnv error** because projects aren't allowed by default. Make sure you've reviewed and trust our `.envrc` file, then you need to run: + +```sh +direnv allow +``` + +4. Now, wait awhile for all the native dependencies to be built. This will take some time and direnv will warn you that it is taking a long time, but we just need to let it run. + +5. Once you are presented with your prompt again, you can start your editor within the project directory (we recommend [VSCode](https://code.visualstudio.com/)): + +```sh +code . +``` + +6. (Recommended) When launching VSCode for the first time, you should be prompted to install our recommended plugins. We highly recommend installing these for the best development experience. + +#### Building and testing + +Assuming you are using `direnv` to populate your environment, building and testing the project can be done +with the typical `cargo build`, `cargo test`, and `cargo clippy` commands. You'll notice that the `cargo` version matches the version we specify in `rust-toolchain.toml`, which is 1.71.1 at the time of this writing. + +If you want to build the entire project in an isolated sandbox, you can use Nix commands: + +1. `nix build .` (or `nix build . -L` for verbose output) to build the project in a Nix sandbox. +2. `nix flake check` (or `nix flake check -L` for verbose output) to run clippy and tests in a Nix sandbox. + +#### Without `direnv` + +If you have hesitations with using direnv, you can launch a subshell with `nix develop` and then launch your editor from within the subshell. However, if VSCode was already launched in the project directory, the environment won't be updated. + +Advanced: If you aren't using direnv nor launching your editor within the subshell, you can try to install Barretenberg and other global dependencies the package needs. This is an advanced workflow and likely won't receive support! + +### Option 4: WSL (for Windows) + +The default backend for Noir (Barretenberg) doesn't provide Windows binaries at this time. For that reason, Noir cannot be installed natively. However, it is available by using Windows Subsystem for Linux (WSL). + +Step 1: Follow the instructions [here](https://learn.microsoft.com/en-us/windows/wsl/install) to install and run WSL. + +step 2: Follow the [Noirup instructions](#option-1-noirup). + +## Uninstalling Nargo + +### Noirup + +If you installed Noir with `noirup`, you can uninstall Noir by removing the files in `~/.nargo`, `~/nargo` and `~/noir_cache`. + +```bash +rm -r ~/.nargo +rm -r ~/nargo +rm -r ~/noir_cache +``` + +### Nix + +If you installed Noir with Nix or from source, you can remove the binary located at `~/.nix-profile/bin/nargo`. 
+ +```bash +rm ~/.nix-profile/bin/nargo +``` + +[noir vs code extension]: https://marketplace.visualstudio.com/items?itemName=noir-lang.vscode-noir diff --git a/docs/versioned_docs/version-v0.19.4/getting_started/01_hello_world.md b/docs/versioned_docs/version-v0.19.4/getting_started/01_hello_world.md new file mode 100644 index 00000000000..8b4416beba1 --- /dev/null +++ b/docs/versioned_docs/version-v0.19.4/getting_started/01_hello_world.md @@ -0,0 +1,147 @@ +--- +title: Create A Project +description: + Learn how to create and verify your first Noir program using Nargo, a programming language for + zero-knowledge proofs. +keywords: + [ + Nargo, + Noir, + zero-knowledge proofs, + programming language, + create Noir program, + verify Noir program, + step-by-step guide, + ] +--- + +Now that we have installed Nargo, it is time to make our first hello world program! + +## Create a Project Directory + +Noir code can live anywhere on your computer. Let us create a _projects_ folder in the home +directory to house our Noir programs. + +For Linux, macOS, and Windows PowerShell, create the directory and change directory into it by +running: + +```sh +mkdir ~/projects +cd ~/projects +``` + +For Windows CMD, run: + +```sh +> mkdir "%USERPROFILE%\projects" +> cd /d "%USERPROFILE%\projects" +``` + +## Create Our First Nargo Project + +Now that we are in the projects directory, create a new Nargo project by running: + +```sh +nargo new hello_world +``` + +> **Note:** `hello_world` can be any arbitrary project name, we are simply using `hello_world` for +> demonstration. +> +> In production, the common practice is to name the project folder as `circuits` for better +> identifiability when sitting alongside other folders in the codebase (e.g. `contracts`, `scripts`, +> `test`). + +A `hello_world` folder would be created. Similar to Rust, the folder houses _src/main.nr_ and +_Nargo.toml_ that contains the source code and environmental options of your Noir program +respectively. + +### Intro to Noir Syntax + +Let us take a closer look at _main.nr_. The default _main.nr_ generated should look like this: + +```rust +fn main(x : Field, y : pub Field) { + assert(x != y); +} +``` + +The first line of the program specifies the program's inputs: + +```rust +x : Field, y : pub Field +``` + +Program inputs in Noir are private by default (e.g. `x`), but can be labeled public using the +keyword `pub` (e.g. `y`). To learn more about private and public values, check the +[Data Types](../language_concepts/data_types) section. + +The next line of the program specifies its body: + +```rust +assert(x != y); +``` + +The Noir syntax `assert` can be interpreted as something similar to constraints in other zk-contract languages. + +For more Noir syntax, check the [Language Concepts](../language_concepts/comments) chapter. + +## Build In/Output Files + +Change directory into _hello_world_ and build in/output files for your Noir program by running: + +```sh +cd hello_world +nargo check +``` + +Two additional files would be generated in your project directory: + +_Prover.toml_ houses input values, and _Verifier.toml_ houses public values. + +## Prove Our Noir Program + +Now that the project is set up, we can create a proof of correct execution on our Noir program. + +Fill in input values for execution in the _Prover.toml_ file. 
For example: + +```toml +x = "1" +y = "2" +``` + +Prove the valid execution of your Noir program: + +```sh +nargo prove +``` + +A new folder _proofs_ would then be generated in your project directory, containing the proof file +`.proof`, where the project name is defined in Nargo.toml. + +The _Verifier.toml_ file would also be updated with the public values computed from program +execution (in this case the value of `y`): + +```toml +y = "0x0000000000000000000000000000000000000000000000000000000000000002" +``` + +> **Note:** Values in _Verifier.toml_ are computed as 32-byte hex values. + +## Verify Our Noir Program + +Once a proof is generated, we can verify correct execution of our Noir program by verifying the +proof file. + +Verify your proof by running: + +```sh +nargo verify +``` + +The verification will complete in silence if it is successful. If it fails, it will log the +corresponding error instead. + +Congratulations, you have now created and verified a proof for your very first Noir program! + +In the [next section](breakdown), we will go into more detail on each step performed. diff --git a/docs/versioned_docs/version-v0.19.4/getting_started/02_breakdown.md b/docs/versioned_docs/version-v0.19.4/getting_started/02_breakdown.md new file mode 100644 index 00000000000..d28a54a1600 --- /dev/null +++ b/docs/versioned_docs/version-v0.19.4/getting_started/02_breakdown.md @@ -0,0 +1,198 @@ +--- +title: Project Breakdown +description: + Learn about the anatomy of a Nargo project, including the purpose of the Prover and Verifier TOML + files, and how to prove and verify your program. +keywords: + [Nargo, Nargo project, Prover.toml, Verifier.toml, proof verification, private asset transfer] +--- + +This section breaks down our hello world program in section _1.2_. We elaborate on the project +structure and what the `prove` and `verify` commands did in the previous section. + +## Anatomy of a Nargo Project + +Upon creating a new project with `nargo new` and building the in/output files with `nargo check` +commands, you would get a minimal Nargo project of the following structure: + + - src + - Prover.toml + - Verifier.toml + - Nargo.toml + +The source directory _src_ holds the source code for your Noir program. By default only a _main.nr_ +file will be generated within it. + +### Prover.toml + +_Prover.toml_ is used for specifying the input values for executing and proving the program. You can specify `toml` files with different names by using the `--prover-name` or `-p` flags, see the [Prover](#provertoml) section below. Optionally you may specify expected output values for prove-time checking as well. + +### Verifier.toml + +_Verifier.toml_ contains public in/output values computed when executing the Noir program. + +### Nargo.toml + +_Nargo.toml_ contains the environmental options of your project. It contains a "package" section and a "dependencies" section. + +Example Nargo.toml: + +```toml +[package] +name = "noir_starter" +type = "bin" +authors = ["Alice"] +compiler_version = "0.9.0" +description = "Getting started with Noir" +entry = "circuit/main.nr" +license = "MIT" + +[dependencies] +ecrecover = {tag = "v0.9.0", git = "https://github.com/colinnielsen/ecrecover-noir.git"} +``` + +Nargo.toml for a [workspace](../modules_packages_crates/workspaces) will look a bit different. 
For example: + +```toml +[workspace] +members = ["crates/a", "crates/b"] +default-member = "crates/a" +``` + +#### Package section + +The package section requires a number of fields including: + +- `name` (**required**) - the name of the package +- `type` (**required**) - can be "bin", "lib", or "contract" to specify whether it's a binary, library or Aztec contract +- `authors` (optional) - authors of the project +- `compiler_version` - specifies the version of the compiler to use. This is enforced by the compiler and follows [Rust's versioning](https://doc.rust-lang.org/cargo/reference/manifest.html#the-version-field), so a `compiler_version = 0.18.0` will enforce Nargo version 0.18.0, `compiler_version = ^0.18.0` will enforce anything above 0.18.0 but below 0.19.0, etc. For more information, see how [Rust handles these operators](https://docs.rs/semver/latest/semver/enum.Op.html) +- `description` (optional) +- `entry` (optional) - a relative filepath to use as the entry point into your package (overrides the default of `src/lib.nr` or `src/main.nr`) +- `backend` (optional) +- `license` (optional) + +#### Dependencies section + +This is where you will specify any dependencies for your project. See the [Dependencies page](../modules_packages_crates/dependencies) for more info. + +`./proofs/` and `./contract/` directories will not be immediately visible until you create a proof or +verifier contract respectively. + +### main.nr + +The _main.nr_ file contains a `main` method; this method is the entry point into your Noir program. + +In our sample program, _main.nr_ looks like this: + +```rust +fn main(x : Field, y : Field) { + assert(x != y); +} +``` + +The parameters `x` and `y` can be seen as the API for the program and must be supplied by the +prover. Since neither `x` nor `y` is marked as public, the verifier does not supply any inputs when +verifying the proof. + +The prover supplies the values for `x` and `y` in the _Prover.toml_ file. + +As for the program body, `assert` ensures the satisfaction of the condition (e.g. `x != y`) is +constrained by the proof of the execution of said program (i.e. if the condition was not met, the +verifier would reject the proof as an invalid proof). + +### Prover.toml + +The _Prover.toml_ file is a file which the prover uses to supply their witness values (both private and +public). + +In our hello world program the _Prover.toml_ file looks like this: + +```toml +x = "1" +y = "2" +``` + +When the command `nargo prove` is executed, two processes happen: + +1. Noir creates a proof that `x`, which holds the value of `1`, and `y`, which holds the value of `2`, + are not equal. This not-equal constraint is due to the line `assert(x != y)`. + +2. Noir creates and stores the proof of this statement in the _proofs_ directory in a file called your-project.proof. So if your project is named "private_voting" (defined in the project Nargo.toml), the proof will be saved at `./proofs/private_voting.proof`. Opening this file will display the proof in hex format. + +#### Arrays of Structs + +The following code shows how to pass an array of structs to a Noir program to generate a proof. 
+ +```rust +// main.nr +struct Foo { + bar: Field, + baz: Field, +} + +fn main(foos: [Foo; 3]) -> pub Field { + foos[2].bar + foos[2].baz +} +``` + +Prover.toml: + +```toml +[[foos]] # foos[0] +bar = 0 +baz = 0 + +[[foos]] # foos[1] +bar = 0 +baz = 0 + +[[foos]] # foos[2] +bar = 1 +baz = 2 +``` + +#### Custom toml files + +You can specify a `toml` file with a different name to use for proving by using the `--prover-name` or `-p` flags. + +This command looks for proof inputs in the default **Prover.toml** and generates the proof and saves it at `./proofs/.proof`: + +```bash +nargo prove +``` + +This command looks for proof inputs in the custom **OtherProver.toml** and generates proof and saves it at `./proofs/.proof`: + +```bash +nargo prove -p OtherProver +``` + +## Verifying a Proof + +When the command `nargo verify` is executed, two processes happen: + +1. Noir checks in the _proofs_ directory for a proof file with the project name (eg. test_project.proof) + +2. If that file is found, the proof's validity is checked + +> **Note:** The validity of the proof is linked to the current Noir program; if the program is +> changed and the verifier verifies the proof, it will fail because the proof is not valid for the +> _modified_ Noir program. + +In production, the prover and the verifier are usually two separate entities. A prover would +retrieve the necessary inputs, execute the Noir program, generate a proof and pass it to the +verifier. The verifier would then retrieve the public inputs from usually external sources and +verifies the validity of the proof against it. + +Take a private asset transfer as an example: + +A user on browser as the prover would retrieve private inputs (e.g. the user's private key) and +public inputs (e.g. the user's encrypted balance on-chain), compute the transfer, generate a proof +and submit it to the verifier smart contract. + +The verifier contract would then draw the user's encrypted balance directly from the blockchain and +verify the proof submitted against it. If the verification passes, additional functions in the +verifier contract could trigger (e.g. approve the asset transfer). + +Now that you understand the concepts, you'll probably want some editor feedback while you are writing more complex code. diff --git a/docs/versioned_docs/version-v0.19.4/index.md b/docs/versioned_docs/version-v0.19.4/index.md new file mode 100644 index 00000000000..380368db036 --- /dev/null +++ b/docs/versioned_docs/version-v0.19.4/index.md @@ -0,0 +1,100 @@ +--- +title: Introducing Noir +description: + Learn about the public alpha release of Noir, a domain specific language heavily influenced by + Rust that compiles to an intermediate language which can be compiled to an arithmetic circuit or a + rank-1 constraint system. +keywords: + [ + Noir, + Domain Specific Language, + Rust, + Intermediate Language, + Arithmetic Circuit, + Rank-1 Constraint System, + Ethereum Developers, + Protocol Developers, + Blockchain Developers, + Proving System, + Smart Contract Language, + ] +slug: / +--- + +## What is Noir? + +Noir is a Domain Specific Language for SNARK proving systems. It has been designed to use any ACIR compatible proving system. + +Its design choices are influenced heavily by Rust and focuses on a simple, familiar syntax. + +## Who is Noir for? + +Noir can be used for a variety of purposes. + +### Solidity Developers + +Noir currently includes a command to create a Solidity contract which verifies your Noir program. 
This will +be modularized in the future; however, as of the alpha, you can use the [`nargo codegen-verifier`](./nargo/commands#nargo-codegen-verifier) command to create +a verifier contract. + +### Protocol Developers + +As a protocol developer, you may not want to use the Aztec backend due to it not being a fit for +your stack, or maybe you simply want to use a different proving system. Since Noir does not compile +to a specific proof system, it is possible for protocol developers to replace the PLONK-based +proving system with a different proving system altogether. + +### Blockchain developers + +As a blockchain developer, you will be constrained by parameters set by your blockchain (for example, the +proving system and smart contract language has been pre-defined). In order for you to use Noir in +your blockchain, a proving system backend and a smart contract interface +must be implemented for it. + +## What's new about Noir? + +Noir is simple and flexible in its design, as it does not compile immediately to a fixed +NP-complete language. Instead, Noir compiles to an intermediate language (ACIR), which itself can be compiled +to an arithmetic circuit (if choosing to target Aztec's barretenberg backend) or a rank-1 constraint system (if choosing to target an R1CS backend like Arkworks' Marlin backend, or others). + +This in itself brings up a few challenges within the design process, but allows one to decouple the programming language completely from the backend. This is similar in theory to LLVM. + +## Current Features + +Compiler: + +- Module System +- For expressions +- Arrays +- Bit Operations +- Binary operations (\<, \<=, >, >=, +, -, \*, /, %) [See documentation for an extensive list] +- Unsigned integers +- If statements +- Structures and Tuples +- Generics + +ACIR Supported OPCODES: + +- Sha256 +- Blake2s +- Schnorr signature verification +- MerkleMembership +- Pedersen Commitment +- Pedersen Hash +- HashToField + +## Libraries + +Noir does not currently have an official package manager. You can find a list of available Noir libraries in the [awesome-noir repo here](https://github.com/noir-lang/awesome-noir#libraries). + +Some libraries that are available today include: + +- [Standard Library](https://github.com/noir-lang/noir/tree/master/noir_stdlib) - the Noir Standard Library +- [Ethereum Storage Proof Verification](https://github.com/aragonzkresearch/noir-trie-proofs) - a library that contains the primitives necessary for RLP decoding (in the form of look-up table construction) and Ethereum state and storage proof verification (or verification of any trie proof involving 32-byte long keys) +- [BigInt](https://github.com/shuklaayush/noir-bigint) - a library that provides a custom BigUint56 data type, allowing for computations on large unsigned integers +- [ECrecover](https://github.com/colinnielsen/ecrecover-noir/tree/main) - a library to verify an ECDSA signature and return the source Ethereum address +- [Sparse Merkle Tree Verifier](https://github.com/vocdoni/smtverifier-noir/tree/main) - a library for verification of sparse Merkle trees +- [Signed Int](https://github.com/resurgencelabs/signed_int) - a library for accessing a custom Signed Integer data type, allowing access to negative numbers on Noir +- [Fraction](https://github.com/resurgencelabs/fraction) - a library for accessing fractional number data type in Noir, allowing results that aren't whole numbers + +See the section on [dependencies](./modules_packages_crates/dependencies) for more information. 
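+
+As a quick illustration of pulling one of these libraries in, a dependency is declared in Nargo.toml with the same git/tag syntax shown in the Project Breakdown section (the tag below is just an example):
+
+```toml
+[dependencies]
+ecrecover = {tag = "v0.9.0", git = "https://github.com/colinnielsen/ecrecover-noir.git"}
+```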
diff --git a/docs/versioned_docs/version-v0.19.4/language_concepts/01_functions.md b/docs/versioned_docs/version-v0.19.4/language_concepts/01_functions.md new file mode 100644 index 00000000000..5eb22170e54 --- /dev/null +++ b/docs/versioned_docs/version-v0.19.4/language_concepts/01_functions.md @@ -0,0 +1,225 @@ +--- +title: Functions +description: + Learn how to declare functions and methods in Noir, a programming language with Rust semantics. + This guide covers parameter declaration, return types, call expressions, and more. +keywords: [Noir, Rust, functions, methods, parameter declaration, return types, call expressions] +--- + +Functions in Noir follow the same semantics of Rust, though Noir does not support early returns. + +To declare a function the `fn` keyword is used. + +```rust +fn foo() {} +``` + +By default, functions are visible only within the package they are defined. To make them visible outside of that package (for example, as part of a [library](../modules_packages_crates/crates_and_packages.md#libraries)), you should mark them as `pub`: + +```rust +pub fn foo() {} +``` + +You can also restrict the visibility of the function to only the crate it was defined in, by specifying `pub(crate)`: + +```rust +pub(crate) fn foo() {} //foo can only be called within its crate +``` + +All parameters in a function must have a type and all types are known at compile time. The parameter +is pre-pended with a colon and the parameter type. Multiple parameters are separated using a comma. + +```rust +fn foo(x : Field, y : Field){} +``` + +The return type of a function can be stated by using the `->` arrow notation. The function below +states that the foo function must return a `Field`. If the function returns no value, then the arrow +is omitted. + +```rust +fn foo(x : Field, y : Field) -> Field { + x + y +} +``` + +Note that a `return` keyword is unneeded in this case - the last expression in a function's body is +returned. + +## Main function + +If you're writing a binary, the `main` function is the starting point of your program. You can pass all types of expressions to it, as long as they have a fixed size at compile time: + +```rust +fn main(x : Field) // this is fine: passing a Field +fn main(x : [Field; 2]) // this is also fine: passing a Field with known size at compile-time +fn main(x : (Field, bool)) // 👌: passing a (Field, bool) tuple means size 2 +fn main(x : str<5>) // this is fine, as long as you pass a string of size 5 + +fn main(x : Vec) // can't compile, has variable size +fn main(x : [Field]) // can't compile, has variable size +fn main(....// i think you got it by now +``` + +Keep in mind [tests](../nargo/02_testing.md) don't differentiate between `main` and any other function. The following snippet passes tests, but won't compile or prove: + +```rust +fn main(x : [Field]) { + assert(x[0] == 1); +} + +#[test] +fn test_one() { + main([1, 2]); +} +``` + +```bash +$ nargo test +[testing] Running 1 test functions +[testing] Testing test_one... ok +[testing] All tests passed + +$ nargo check +The application panicked (crashed). +Message: Cannot have variable sized arrays as a parameter to main +``` + +## Call Expressions + +Calling a function in Noir is executed by using the function name and passing in the necessary +arguments. 
+ +Below we show how to call the `foo` function from the `main` function using a call expression: + +```rust +fn main(x : Field, y : Field) { + let z = foo(x); +} + +fn foo(x : Field) -> Field { + x + x +} +``` + +## Methods + +You can define methods in Noir on any struct type in scope. + +```rust +struct MyStruct { + foo: Field, + bar: Field, +} + +impl MyStruct { + fn new(foo: Field) -> MyStruct { + MyStruct { + foo, + bar: 2, + } + } + + fn sum(self) -> Field { + self.foo + self.bar + } +} + +fn main() { + let s = MyStruct::new(40); + assert(s.sum() == 42); +} +``` + +Methods are just syntactic sugar for functions, so if we wanted to we could also call `sum` as +follows: + +```rust +assert(MyStruct::sum(s) == 42); +``` + +It is also possible to specialize which method is chosen depending on the [generic](./06_generics.md) type that is used. In this example, the `foo` function returns different values depending on its type: + +```rust +struct Foo {} + +impl Foo { + fn foo(self) -> Field { 1 } +} + +impl Foo { + fn foo(self) -> Field { 2 } +} + +fn main() { + let f1: Foo = Foo{}; + let f2: Foo = Foo{}; + assert(f1.foo() + f2.foo() == 3); +} +``` + +Also note that impls with the same method name defined in them cannot overlap. For example, if we already have `foo` defined for `Foo` and `Foo` like we do above, we cannot also define `foo` in an `impl Foo` since it would be ambiguous which version of `foo` to choose. + +```rust +// Including this impl in the same project as the above snippet would +// cause an overlapping impls error +impl Foo { + fn foo(self) -> Field { 3 } +} +``` + +## Lambdas + +Lambdas are anonymous functions. They follow the syntax of Rust - `|arg1, arg2, ..., argN| return_expression`. + +```rust +let add_50 = |val| val + 50; +assert(add_50(100) == 150); +``` + +See [Lambdas](./08_lambdas.md) for more details. + +## Attributes + +Attributes are metadata that can be applied to a function, using the following syntax: `#[attribute(value)]`. + +Supported attributes include: + +- **builtin**: the function is implemented by the compiler, for efficiency purposes. +- **deprecated**: mark the function as _deprecated_. Calling the function will generate a warning: `warning: use of deprecated function` +- **field**: Used to enable conditional compilation of code depending on the field size. See below for more details +- **oracle**: mark the function as _oracle_; meaning it is an external unconstrained function, implemented in noir_js. See [Unconstrained](./05_unconstrained.md) and [NoirJS](../noir_js/noir_js.md) for more details. +- **test**: mark the function as unit tests. See [Tests](../nargo/02_testing.md) for more details + +### Field Attribute + +The field attribute defines which field the function is compatible for. The function is conditionally compiled, under the condition that the field attribute matches the Noir native field. +The field can be defined implicitly, by using the name of the elliptic curve usually associated to it - for instance bn254, bls12_381 - or explicitly by using the field (prime) order, in decimal or hexadecimal form. +As a result, it is possible to define multiple versions of a function with each version specialized for a different field attribute. This can be useful when a function requires different parameters depending on the underlying elliptic curve. + +Example: we define the function `foo()` three times below. 
Once for the default Noir bn254 curve, once for the field $\mathbb F_{23}$, which will normally never be used by Noir, and once again for the bls12_381 curve. + +```rust +#[field(bn254)] +fn foo() -> u32 { + 1 +} + +#[field(23)] +fn foo() -> u32 { + 2 +} + +// This commented code would not compile as foo would be defined twice because it is the same field as bn254 +// #[field(21888242871839275222246405745257275088548364400416034343698204186575808495617)] +// fn foo() -> u32 { +// 2 +// } + +#[field(bls12_381)] +fn foo() -> u32 { + 3 +} +``` + +If the field name is not known to Noir, it will discard the function. Field names are case insensitive. diff --git a/docs/versioned_docs/version-v0.19.4/language_concepts/02_control_flow.md b/docs/versioned_docs/version-v0.19.4/language_concepts/02_control_flow.md new file mode 100644 index 00000000000..a7f85360197 --- /dev/null +++ b/docs/versioned_docs/version-v0.19.4/language_concepts/02_control_flow.md @@ -0,0 +1,44 @@ +--- +title: Control Flow +description: + Learn how to use loops and if expressions in the Noir programming language. Discover the syntax + and examples for for loops and if-else statements. +keywords: [Noir programming language, loops, for loop, if-else statements, Rust syntax] +--- + +## Loops + +Noir has one kind of loop: the `for` loop. `for` loops allow you to repeat a block of code multiple +times. + +The following block of code between the braces is run 10 times. + +```rust +for i in 0..10 { + // do something +}; +``` + +The index for loops is of type `u64`. + +## If Expressions + +Noir supports `if-else` statements. The syntax is most similar to Rust's where it is not required +for the statement's conditional to be surrounded by parentheses. + +```rust +let a = 0; +let mut x: u32 = 0; + +if a == 0 { + if a != 0 { + x = 6; + } else { + x = 2; + } +} else { + x = 5; + assert(x == 5); +} +assert(x == 2); +``` diff --git a/docs/versioned_docs/version-v0.19.4/language_concepts/03_ops.md b/docs/versioned_docs/version-v0.19.4/language_concepts/03_ops.md new file mode 100644 index 00000000000..0e35ef5e584 --- /dev/null +++ b/docs/versioned_docs/version-v0.19.4/language_concepts/03_ops.md @@ -0,0 +1,97 @@ +--- +title: Logical Operations +description: + Learn about the supported arithmetic and logical operations in the Noir programming language. + Discover how to perform operations on private input types, integers, and booleans. +keywords: + [ + Noir programming language, + supported operations, + arithmetic operations, + logical operations, + predicate operators, + bitwise operations, + short-circuiting, + backend, + ] +--- + +# Operations + +## Table of Supported Operations + +| Operation | Description | Requirements | +| :-------- | :------------------------------------------------------------: | -------------------------------------: | +| + | Adds two private input types together | Types must be private input | +| - | Subtracts two private input types together | Types must be private input | +| \* | Multiplies two private input types together | Types must be private input | +| / | Divides two private input types together | Types must be private input | +| ^ | XOR two private input types together | Types must be integer | +| & | AND two private input types together | Types must be integer | +| \| | OR two private input types together | Types must be integer | +| \<\< | Left shift an integer by another integer amount | Types must be integer | +| >> | Right shift an integer by another integer amount | Types must be integer | +| ! 
| Bitwise not of a value | Type must be integer or boolean | +| \< | returns a bool if one value is less than the other | Upper bound must have a known bit size | +| \<= | returns a bool if one value is less than or equal to the other | Upper bound must have a known bit size | +| > | returns a bool if one value is more than the other | Upper bound must have a known bit size | +| >= | returns a bool if one value is more than or equal to the other | Upper bound must have a known bit size | +| == | returns a bool if one value is equal to the other | Both types must not be constants | +| != | returns a bool if one value is not equal to the other | Both types must not be constants | + +### Predicate Operators + +`<,<=, !=, == , >, >=` are known as predicate/comparison operations because they compare two values. +This differs from the operations such as `+` where the operands are used in _computation_. + +### Bitwise Operations Example + +```rust +fn main(x : Field) { + let y = x as u32; + let z = y & y; +} +``` + +`z` is implicitly constrained to be the result of `y & y`. The `&` operand is used to denote bitwise +`&`. + +> `x & x` would not compile as `x` is a `Field` and not an integer type. + +### Logical Operators + +Noir has no support for the logical operators `||` and `&&`. This is because encoding the +short-circuiting that these operators require can be inefficient for Noir's backend. Instead you can +use the bitwise operators `|` and `&` which operate identically for booleans, just without the +short-circuiting. + +```rust +let my_val = 5; + +let mut flag = 1; +if (my_val > 6) | (my_val == 0) { + flag = 0; +} +assert(flag == 1); + +if (my_val != 10) & (my_val < 50) { + flag = 0; +} +assert(flag == 0); +``` + +### Shorthand operators + +Noir shorthand operators for most of the above operators, namely `+=, -=, *=, /=, %=, &=, |=, ^=, <<=`, and `>>=`. These allow for more concise syntax. For example: + +```rust +let mut i = 0; +i = i + 1; +``` + +could be written as: + +```rust +let mut i = 0; +i += 1; +``` diff --git a/docs/versioned_docs/version-v0.19.4/language_concepts/04_assert.md b/docs/versioned_docs/version-v0.19.4/language_concepts/04_assert.md new file mode 100644 index 00000000000..7427ec6cc63 --- /dev/null +++ b/docs/versioned_docs/version-v0.19.4/language_concepts/04_assert.md @@ -0,0 +1,26 @@ +--- +title: Assert Function +description: + Learn about the assert function in Noir, which can be used to explicitly constrain the predicate or + comparison expression that follows to be true, and what happens if the expression is false at + runtime. +keywords: [Noir programming language, assert statement, predicate expression, comparison expression] +--- + +Noir includes a special `assert` function which will explicitly constrain the predicate/comparison +expression that follows to be true. If this expression is false at runtime, the program will fail to +be proven. Example: + +```rust +fn main(x : Field, y : Field) { + assert(x == y); +} +``` + +You can optionally provide a message to be logged when the assertion fails: + +```rust +assert(x == y, "x and y are not equal"); +``` + +> Assertions only work for predicate operations, such as `==`. If there's any ambiguity on the operation, the program will fail to compile. For example, it is unclear if `assert(x + y)` would check for `x + y == 0` or simply would return `true`. 
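+
+A short sketch of resolving that ambiguity by writing the predicate explicitly (assuming the intent is to constrain the sum to zero):
+
+```rust
+fn main(x : Field, y : Field) {
+    // assert(x + y);    // ambiguous: will not compile
+    assert(x + y == 0);  // explicit predicate: constrains the sum to be zero
+}
+```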
diff --git a/docs/versioned_docs/version-v0.19.4/language_concepts/05_unconstrained.md b/docs/versioned_docs/version-v0.19.4/language_concepts/05_unconstrained.md
new file mode 100644
index 00000000000..097d6ee9894
--- /dev/null
+++ b/docs/versioned_docs/version-v0.19.4/language_concepts/05_unconstrained.md
@@ -0,0 +1,96 @@
+---
+title: Unconstrained Functions
+description: "Learn about what unconstrained functions in Noir are, how to use them and when you'd want to."
+
+keywords: [Noir programming language, unconstrained, open]
+---
+
+
+Unconstrained functions are functions which do not constrain any of the included computation and allow for non-deterministic computation.
+
+## Why?
+
+Zero-knowledge (ZK) domain-specific languages (DSL) enable developers to generate ZK proofs from their programs by compiling code down to the constraints of an NP complete language (such as R1CS or PLONKish languages). However, the hard bounds of a constraint system can be very limiting to the functionality of a ZK DSL.
+
+Enabling a circuit language to perform unconstrained execution is a powerful tool. Said another way, unconstrained execution lets developers generate witnesses from code that does not generate any constraints. Being able to execute logic outside of a circuit is critical for both circuit performance and constructing proofs on information that is external to a circuit.
+
+Fetching information from somewhere external to a circuit can also be used to enable developers to improve circuit efficiency.
+
+A ZK DSL does not just prove computation, but proves that some computation was handled correctly. Thus, it is necessary that when we switch from performing some operation directly inside of a circuit to inside of an unconstrained environment that the appropriate constraints are still laid down elsewhere in the circuit.
+
+## Example
+
+An in-depth example might help drive the point home. This example comes from the excellent [post](https://discord.com/channels/1113924620781883405/1124022445054111926/1128747641853972590) by Tom in the Noir Discord.
+
+Let's look at how we can optimize a function to turn a `u72` into an array of `u8`s.
+
+```rust
+fn main(num: u72) -> pub [u8; 8] {
+    let mut out: [u8; 8] = [0; 8];
+    for i in 0..8 {
+        out[i] = (num >> (56 - (i * 8)) as u72 & 0xff) as u8;
+    }
+
+    out
+}
+```
+
+```
+Total ACIR opcodes generated for language PLONKCSat { width: 3 }: 91
+Backend circuit size: 3619
+```
+
+A lot of the operations in this function are optimized away by the compiler (all the bit-shifts turn into divisions by constants). However, we can save a bunch of gates by casting to u8 a bit earlier. This automatically truncates the bit-shifted value to fit in a u8, which allows us to remove the AND against 0xff. This saves us ~480 gates in total.
+
+```rust
+fn main(num: u72) -> pub [u8; 8] {
+    let mut out: [u8; 8] = [0; 8];
+    for i in 0..8 {
+        out[i] = (num >> (56 - (i * 8))) as u8;
+    }
+
+    out
+}
+```
+
+```
+Total ACIR opcodes generated for language PLONKCSat { width: 3 }: 75
+Backend circuit size: 3143
+```
+
+Those are some nice savings already but we can do better. This code is all constrained so we're proving every step of calculating out using num, but we don't actually care about how we calculate this, just that it's correct. This is where brillig comes in.
+
+It turns out that truncating a u72 into a u8 is hard to do inside a snark: each time we do `as u8` we lay down 4 ACIR opcodes which get converted into multiple gates.
It's actually much easier to calculate num from out than the other way around. All we need to do is multiply each element of out by a constant and add them all together, both relatively easy operations inside a snark. + +We can then run u72_to_u8 as unconstrained brillig code in order to calculate out, then use that result in our constrained function and assert that if we were to do the reverse calculation we'd get back num. This looks a little like the below: + +```rust +fn main(num: u72) -> pub [u8; 8] { + let out = u72_to_u8(num); + + let mut reconstructed_num: u72 = 0; + for i in 0..8 { + reconstructed_num += (out[i] as u72 << (56 - (8 * i))); + } + assert(num == reconstructed_num); + out +} + +unconstrained fn u72_to_u8(num: u72) -> [u8; 8] { + let mut out: [u8; 8] = [0; 8]; + for i in 0..8 { + out[i] = (num >> (56 - (i * 8))) as u8; + } + out +} +``` + +``` +Total ACIR opcodes generated for language PLONKCSat { width: 3 }: 78 +Backend circuit size: 2902 +``` + +This ends up taking off another ~250 gates from our circuit! We've ended up with more ACIR opcodes than before but they're easier for the backend to prove (resulting in fewer gates). + +Generally we want to use brillig whenever there's something that's easy to verify but hard to compute within the circuit. For example, if you wanted to calculate a square root of a number it'll be a much better idea to calculate this in brillig and then assert that if you square the result you get back your number. diff --git a/docs/versioned_docs/version-v0.10.5/language_concepts/06_generics.md b/docs/versioned_docs/version-v0.19.4/language_concepts/06_generics.md similarity index 100% rename from docs/versioned_docs/version-v0.10.5/language_concepts/06_generics.md rename to docs/versioned_docs/version-v0.19.4/language_concepts/06_generics.md diff --git a/docs/versioned_docs/version-v0.19.4/language_concepts/07_mutability.md b/docs/versioned_docs/version-v0.19.4/language_concepts/07_mutability.md new file mode 100644 index 00000000000..ed3fed820ec --- /dev/null +++ b/docs/versioned_docs/version-v0.19.4/language_concepts/07_mutability.md @@ -0,0 +1,92 @@ +--- +title: Mutability +description: + Learn about mutable variables, constants, and globals in Noir programming language. Discover how + to declare, modify, and use them in your programs. +keywords: [noir programming language, mutability in noir, mutable variables, constants, globals] +--- + +Variables in noir can be declared mutable via the `mut` keyword. Mutable variables can be reassigned +to via an assignment expression. + +```rust +let x = 2; +x = 3; // error: x must be mutable to be assigned to + +let mut y = 3; +let y = 4; // OK +``` + +The `mut` modifier can also apply to patterns: + +```rust +let (a, mut b) = (1, 2); +a = 11; // error: a must be mutable to be assigned to +b = 12; // OK + +let mut (c, d) = (3, 4); +c = 13; // OK +d = 14; // OK + +// etc. +let MyStruct { x: mut y } = MyStruct { x: a }; +// y is now in scope +``` + +Note that mutability in noir is local and everything is passed by value, so if a called function +mutates its parameters then the parent function will keep the old value of the parameters. + +```rust +fn main() -> pub Field { + let x = 3; + helper(x); + x // x is still 3 +} + +fn helper(mut x: i32) { + x = 4; +} +``` + +## Comptime Values + +:::warning + +The 'comptime' keyword was removed in version 0.10. 
The comptime keyword and syntax are currently still kept and parsed for backwards compatibility, but are now deprecated and will issue a warning when used. `comptime` has been removed because it is no longer needed for accessing arrays. + +::: + +## Globals + +Noir also supports global variables. However, they must be known at compile-time. The global type can also be inferred by the compiler entirely. Globals can also be used to specify array +annotations for function parameters and can be imported from submodules. + +```rust +global N: Field = 5; // Same as `global N: Field = 5` + +fn main(x : Field, y : [Field; N]) { + let res = x * N; + + assert(res == y[0]); + + let res2 = x * my_submodule::N; + assert(res != res2); +} + +mod my_submodule { + use dep::std; + + global N: Field = 10; + + fn my_helper() -> Field { + let x = N; + x + } +} +``` + +## Why only local mutability? + +Witnesses in a proving system are immutable in nature. Noir aims to _closely_ mirror this setting +without applying additional overhead to the user. Modeling a mutable reference is not as +straightforward as on conventional architectures and would incur some possibly unexpected overhead. diff --git a/docs/versioned_docs/version-v0.19.4/language_concepts/08_lambdas.md b/docs/versioned_docs/version-v0.19.4/language_concepts/08_lambdas.md new file mode 100644 index 00000000000..ae1e6aecab1 --- /dev/null +++ b/docs/versioned_docs/version-v0.19.4/language_concepts/08_lambdas.md @@ -0,0 +1,80 @@ +--- +title: Lambdas +description: Learn how to use anonymous functions in Noir programming language. +keywords: [Noir programming language, lambda, closure, function, anonymous function] +--- + +## Introduction + +Lambdas are anonymous functions. The syntax is `|arg1, arg2, ..., argN| return_expression`. + +```rust +let add_50 = |val| val + 50; +assert(add_50(100) == 150); +``` + +A block can be used as the body of a lambda, allowing you to declare local variables inside it: + +```rust +let cool = || { + let x = 100; + let y = 100; + x + y +} + +assert(cool() == 200); +``` + +## Closures + +Inside the body of a lambda, you can use variables defined in the enclosing function. Such lambdas are called **closures**. In this example `x` is defined inside `main` and is accessed from within the lambda: + +```rust +fn main() { + let x = 100; + let closure = || x + 150; + assert(closure() == 250); +} +``` + +## Passing closures to higher-order functions + +It may catch you by surprise that the following code fails to compile: + +```rust +fn foo(f: fn () -> Field) -> Field { + f() +} + +fn main() { + let (x, y) = (50, 50); + assert(foo(|| x + y) == 100); // error :( +} +``` + +The reason is that the closure's capture environment affects its type - we have a closure that captures two Fields and `foo` +expects a regular function as an argument - those are incompatible. +:::note + +Variables contained within the `||` are the closure's parameters, and the expression that follows it is the closure's body. The capture environment is comprised of any variables used in the closure's body that are not parameters. + +E.g. in |x| x + y, y would be a captured variable, but x would not be, since it is a parameter of the closure. + +::: +The syntax for the type of a closure is `fn[env](args) -> ret_type`, where `env` is the capture environment of the closure - +in this example that's `(Field, Field)`. 
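+
+Based on the syntax above, one possible (but rigid) fix is to spell that concrete environment type out in the parameter; the sketch below is illustrative, and the generic approach described next is usually preferable:
+
+```rust
+// Only accepts closures whose capture environment is exactly two Fields.
+fn foo_concrete(f: fn[(Field, Field)]() -> Field) -> Field {
+    f()
+}
+```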
+ +The best solution in our case is to make `foo` generic over the environment type of its parameter, so that it can be called +with closures with any environment, as well as with regular functions: + +```rust +fn foo(f: fn[Env]() -> Field) -> Field { + f() +} + +fn main() { + let (x, y) = (50, 50); + assert(foo(|| x + y) == 100); // compiles fine + assert(foo(|| 60) == 60); // compiles fine +} +``` diff --git a/docs/versioned_docs/version-v0.9.0/language_concepts/08_comments.md b/docs/versioned_docs/version-v0.19.4/language_concepts/09_comments.md similarity index 100% rename from docs/versioned_docs/version-v0.9.0/language_concepts/08_comments.md rename to docs/versioned_docs/version-v0.19.4/language_concepts/09_comments.md diff --git a/docs/versioned_docs/version-v0.9.0/language_concepts/09_distinct.md b/docs/versioned_docs/version-v0.19.4/language_concepts/10_distinct.md similarity index 100% rename from docs/versioned_docs/version-v0.9.0/language_concepts/09_distinct.md rename to docs/versioned_docs/version-v0.19.4/language_concepts/10_distinct.md diff --git a/docs/versioned_docs/version-v0.9.0/language_concepts/10_shadowing.md b/docs/versioned_docs/version-v0.19.4/language_concepts/11_shadowing.md similarity index 100% rename from docs/versioned_docs/version-v0.9.0/language_concepts/10_shadowing.md rename to docs/versioned_docs/version-v0.19.4/language_concepts/11_shadowing.md diff --git a/docs/versioned_docs/version-v0.19.4/language_concepts/data_types.md b/docs/versioned_docs/version-v0.19.4/language_concepts/data_types.md new file mode 100644 index 00000000000..74f573f7d3f --- /dev/null +++ b/docs/versioned_docs/version-v0.19.4/language_concepts/data_types.md @@ -0,0 +1,96 @@ +--- +title: Data Types +description: + Get a clear understanding of the two categories of Noir data types - primitive types and compound + types. Learn about their characteristics, differences, and how to use them in your Noir + programming. +keywords: + [ + noir, + data types, + primitive types, + compound types, + private types, + public types, + ] +--- + +Every value in Noir has a type, which determines which operations are valid for it. + +All values in Noir are fundamentally composed of `Field` elements. For a more approachable +developing experience, abstractions are added on top to introduce different data types in Noir. + +Noir has two category of data types: primitive types (e.g. `Field`, integers, `bool`) and compound +types that group primitive types (e.g. arrays, tuples, structs). Each value can either be private or +public. + +## Private & Public Types + +A **private value** is known only to the Prover, while a **public value** is known by both the +Prover and Verifier. Mark values as `private` when the value should only be known to the prover. All +primitive types (including individual fields of compound types) in Noir are private by default, and +can be marked public when certain values are intended to be revealed to the Verifier. + +> **Note:** For public values defined in Noir programs paired with smart contract verifiers, once +> the proofs are verified on-chain the values can be considered known to everyone that has access to +> that blockchain. + +Public data types are treated no differently to private types apart from the fact that their values +will be revealed in proofs generated. Simply changing the value of a public type will not change the +circuit (where the same goes for changing values of private types as well). + +_Private values_ are also referred to as _witnesses_ sometimes. 
+
+> **Note:** The terms private and public when applied to a type (e.g. `pub Field`) have a different
+> meaning than when applied to a function (e.g. `pub fn foo() {}`).
+>
+> The former is a visibility modifier for the Prover to interpret if a value should be made known to
+> the Verifier, while the latter is a visibility modifier for the compiler to interpret if a
+> function should be made accessible to external Noir programs like in other languages.
+
+### pub Modifier
+
+All data types in Noir are private by default. Types are explicitly declared as public using the
+`pub` modifier:
+
+```rust
+fn main(x : Field, y : pub Field) -> pub Field {
+    x + y
+}
+```
+
+In this example, `x` is **private** while `y` and `x + y` (the return value) are **public**. Note
+that visibility is handled **per variable**, so it is perfectly valid to have one input that is
+private and another that is public.
+
+> **Note:** Public types can only be declared through parameters on `main`.
+
+## Type Aliases
+
+A type alias is a new name for an existing type. Type aliases are declared with the keyword `type`:
+
+```rust
+type Id = u8;
+
+fn main() {
+    let id: Id = 1;
+    let zero: u8 = 0;
+    assert(zero + 1 == id);
+}
+```
+
+Type aliases can also be used with [generics](./06_generics.md):
+
+```rust
+type Id<Size> = Size;
+
+fn main() {
+    let id: Id<u32> = 1;
+    let zero: u32 = 0;
+    assert(zero + 1 == id);
+}
+```
+
+### BigInt
+
+You can achieve BigInt functionality using the [Noir BigInt](https://github.com/shuklaayush/noir-bigint) library.
diff --git a/docs/versioned_docs/version-v0.19.4/language_concepts/data_types/00_fields.md b/docs/versioned_docs/version-v0.19.4/language_concepts/data_types/00_fields.md
new file mode 100644
index 00000000000..78d3d2af166
--- /dev/null
+++ b/docs/versioned_docs/version-v0.19.4/language_concepts/data_types/00_fields.md
@@ -0,0 +1,165 @@
+---
+title: Fields
+description:
+  Dive deep into the Field data type in Noir. Understand its methods, practical examples, and best practices to effectively use Fields in your Noir programs.
+keywords:
+  [
+    noir,
+    field type,
+    methods,
+    examples,
+    best practices,
+  ]
+---
+
+The field type corresponds to the native field type of the proving backend.
+
+The size of a Noir field depends on the elliptic curve's finite field for the proving backend
+adopted. For example, a field would be a 254-bit integer when paired with the default backend that
+spans the Grumpkin curve.
+
+Fields support integer arithmetic and are often used as the default numeric type in Noir:
+
+```rust
+fn main(x : Field, y : Field) {
+    let z = x + y;
+}
+```
+
+`x`, `y` and `z` are all private fields in this example. Using the `let` keyword we defined a new
+private value `z` constrained to be equal to `x + y`.
+
+If proving efficiency is a priority, fields should be used as a default for solving problems.
+Smaller integer types (e.g. `u64`) incur extra range constraints.
+
+## Methods
+
+After declaring a Field, you can use these common methods on it:
+
+### to_le_bits
+
+Transforms the field into an array of bits, Little Endian.
+
+```rust
+fn to_le_bits(_x : Field, _bit_size: u32) -> [u1; N]
+```
+
+example:
+
+```rust
+fn main() {
+    let field = 2;
+    let bits = field.to_le_bits(32);
+}
+```
+
+### to_be_bits
+
+Transforms the field into an array of bits, Big Endian.
+ +```rust +fn to_be_bits(_x : Field, _bit_size: u32) -> [u1; N] +``` + +example: + +```rust +fn main() { + let field = 2; + let bits = field.to_be_bits(32); +} +``` + +### to_le_bytes + +Transforms into an array of bytes, Little Endian + +```rust +fn to_le_bytes(_x : Field, byte_size: u32) -> [u8] +``` + +example: + +```rust +fn main() { + let field = 2; + let bytes = field.to_le_bytes(4); +} +``` + +### to_be_bytes + +Transforms into an array of bytes, Big Endian + +```rust +fn to_be_bytes(_x : Field, byte_size: u32) -> [u8] +``` + +example: + +```rust +fn main() { + let field = 2; + let bytes = field.to_be_bytes(4); +} +``` + +### to_le_radix + +Decomposes into a vector over the specified base, Little Endian + +```rust +fn to_le_radix(_x : Field, _radix: u32, _result_len: u32) -> [u8] +``` + +example: + +```rust +fn main() { + let field = 2; + let radix = field.to_le_radix(256, 4); +} +``` + +### to_be_radix + +Decomposes into a vector over the specified base, Big Endian + +```rust +fn to_be_radix(_x : Field, _radix: u32, _result_len: u32) -> [u8] +``` + +example: + +```rust +fn main() { + let field = 2; + let radix = field.to_be_radix(256, 4); +} +``` + +### pow_32 + +Returns the value to the power of the specified exponent + +```rust +fn pow_32(self, exponent: Field) -> Field +``` + +example: + +```rust +fn main() { + let field = 2 + let pow = field.pow_32(4); + assert(pow == 16); +} +``` + +### sgn0 + +Parity of (prime) Field element, i.e. sgn0(x mod p) = 0 if x ∈ \{0, ..., p-1\} is even, otherwise sgn0(x mod p) = 1. + +```rust +fn sgn0(self) -> u1 +``` diff --git a/docs/versioned_docs/version-v0.19.4/language_concepts/data_types/01_integers.md b/docs/versioned_docs/version-v0.19.4/language_concepts/data_types/01_integers.md new file mode 100644 index 00000000000..1814365800a --- /dev/null +++ b/docs/versioned_docs/version-v0.19.4/language_concepts/data_types/01_integers.md @@ -0,0 +1,112 @@ +--- +title: Integers +description: Explore the Integer data type in Noir. Learn about its methods, see real-world examples, and grasp how to efficiently use Integers in your Noir code. +keywords: [noir, integer types, methods, examples, arithmetic] +--- + +An integer type is a range constrained field type. The Noir frontend supports arbitrarily-sized, both unsigned and signed integer types. + +:::info + +When an integer is defined in Noir without a specific type, it will default to `Field`. + +The one exception is for loop indices which default to `u64` since comparisons on `Field`s are not possible. + +::: + +## Unsigned Integers + +An unsigned integer type is specified first with the letter `u` (indicating its unsigned nature) followed by its bit size (e.g. `8`): + +```rust +fn main() { + let x: u8 = 1; + let y: u8 = 1; + let z = x + y; + assert (z == 2); +} +``` + +The bit size determines the maximum value the integer type can store. For example, a `u8` variable can store a value in the range of 0 to 255 (i.e. $\\2^{8}-1\\$). + +## Signed Integers + +A signed integer type is specified first with the letter `i` (which stands for integer) followed by its bit size (e.g. `8`): + +```rust +fn main() { + let x: i8 = -1; + let y: i8 = -1; + let z = x + y; + assert (z == -2); +} +``` + +The bit size determines the maximum and minimum range of value the integer type can store. For example, an `i8` variable can store a value in the range of -128 to 127 (i.e. $\\-2^{7}\\$ to $\\2^{7}-1\\$). + +:::tip + +If you are using the default proving backend with Noir, both even (e.g. _u2_, _i2_) and odd (e.g. 
_u3_, _i3_) arbitrarily-sized integer types up to 127 bits (i.e. _u127_ and _i127_) are supported. + +::: + +## Overflows + +Computations that exceed the type boundaries will result in overflow errors. This happens with both signed and unsigned integers. For example, attempting to prove: + +```rust +fn main(x: u8, y: u8) { + let z = x + y; +} +``` + +With: + +```toml +x = "255" +y = "1" +``` + +Would result in: + +``` +$ nargo prove +error: Assertion failed: 'attempt to add with overflow' +┌─ ~/src/main.nr:9:13 +│ +│ let z = x + y; +│ ----- +│ += Call stack: + ... +``` + +A similar error would happen with signed integers: + +```rust +fn main() { + let x: i8 = -118; + let y: i8 = -11; + let z = x + y; +} +``` + +### Wrapping methods + +Although integer overflow is expected to error, some use-cases rely on wrapping. For these use-cases, the standard library provides `wrapping` variants of certain common operations: + +```rust +fn wrapping_add(x: T, y: T) -> T; +fn wrapping_sub(x: T, y: T) -> T; +fn wrapping_mul(x: T, y: T) -> T; +``` + +Example of how it is used: + +```rust +use dep::std; + +fn main(x: u8, y: u8) -> pub u8 { + std::wrapping_add(x, y) +} +``` diff --git a/docs/versioned_docs/version-v0.19.4/language_concepts/data_types/02_booleans.md b/docs/versioned_docs/version-v0.19.4/language_concepts/data_types/02_booleans.md new file mode 100644 index 00000000000..885db167d83 --- /dev/null +++ b/docs/versioned_docs/version-v0.19.4/language_concepts/data_types/02_booleans.md @@ -0,0 +1,30 @@ +--- +title: Booleans +description: + Delve into the Boolean data type in Noir. Understand its methods, practical examples, and best practices for using Booleans in your Noir programs. +keywords: + [ + noir, + boolean type, + methods, + examples, + logical operations, + ] +--- + + +The `bool` type in Noir has two possible values: `true` and `false`: + +```rust +fn main() { + let t = true; + let f: bool = false; +} +``` + +> **Note:** When returning a boolean value, it will show up as a value of 1 for `true` and 0 for +> `false` in _Verifier.toml_. + +The boolean type is most commonly used in conditionals like `if` expressions and `assert` +statements. More about conditionals is covered in the [Control Flow](../control_flow) and +[Assert Function](../assert) sections. diff --git a/docs/versioned_docs/version-v0.19.4/language_concepts/data_types/03_strings.md b/docs/versioned_docs/version-v0.19.4/language_concepts/data_types/03_strings.md new file mode 100644 index 00000000000..b4c75942bb8 --- /dev/null +++ b/docs/versioned_docs/version-v0.19.4/language_concepts/data_types/03_strings.md @@ -0,0 +1,88 @@ +--- +title: Strings +description: + Discover the String data type in Noir. Learn about its methods, see real-world examples, and understand how to effectively manipulate and use Strings in Noir. +keywords: + [ + noir, + string type, + methods, + examples, + concatenation, + ] +--- + + +The string type is a fixed length value defined with `str`. + +You can use strings in `assert()` functions or print them with `std::println()`. See more about [Logging](../../standard_library/logging). + +```rust +use dep::std; + +fn main(message : pub str<11>, hex_as_string : str<4>) { + std::println(message); + assert(message == "hello world"); + assert(hex_as_string == "0x41"); +} +``` + +You can convert a `str` to a byte array by calling `as_bytes()` +or a vector by calling `as_bytes_vec()`. 
+
+```rust
+fn main() {
+    let message = "hello world";
+    let message_bytes = message.as_bytes();
+    let mut message_vec = message.as_bytes_vec();
+    assert(message_bytes.len() == 11);
+    assert(message_bytes[0] == 104);
+    assert(message_bytes[0] == message_vec.get(0));
+}
+```
+
+## Escape Characters
+
+You can use escape characters for your strings:
+
+| Escape Sequence | Description |
+|-----------------|-----------------|
+| `\r` | Carriage Return |
+| `\n` | Newline |
+| `\t` | Tab |
+| `\0` | Null Character |
+| `\"` | Double Quote |
+| `\\` | Backslash |
+
+Example:
+
+```rust
+let s = "Hello \"world"; // prints "Hello "world"
+let s = "hey \tyou"; // prints "hey you"
+```
+
+## Formatted Strings
+
+You can prepend a string with the singular `f` token to create a formatted string. This is useful when logging, as it allows injection of local variables:
+
+```rust
+let var = 15;
+std::println(f"var {var}"); // prints "var 0x0F"
+
+let var = -1 as u8;
+std::println(f"var {var}"); // prints "var 255"
+
+let var : i8 = -1;
+std::println(f"var {var}"); // prints "var -1"
+
+// prints "Hello
+//world"
+std::println(f"Hello
+world");
+
+std::println(f"hey \tyou"); // prints "hey \tyou"
+```
+
+A type can be specified to print numbers either as hex via `Field`, as unsigned via `u*` types, or as signed via `i*` types.
+
+Note that escaped characters in formatted strings `fmtstr` will be outputted as defined, i.e. "\n" will be printed `\n`, not as a new line. You can add a newline or other whitespace by creating a multiline string as in the example above.
diff --git a/docs/versioned_docs/version-v0.19.4/language_concepts/data_types/04_arrays.md b/docs/versioned_docs/version-v0.19.4/language_concepts/data_types/04_arrays.md
new file mode 100644
index 00000000000..bdbd1798bef
--- /dev/null
+++ b/docs/versioned_docs/version-v0.19.4/language_concepts/data_types/04_arrays.md
@@ -0,0 +1,244 @@
+---
+title: Arrays
+description:
+  Dive into the Array data type in Noir. Grasp its methods, practical examples, and best practices for efficiently using Arrays in your Noir code.
+keywords:
+  [
+    noir,
+    array type,
+    methods,
+    examples,
+    indexing,
+  ]
+---
+
+An array is one way of grouping together values into one compound type. Array types can be inferred
+or explicitly specified via the syntax `[<Type>; <Size>]`:
+
+```rust
+fn main(x : Field, y : Field) {
+    let my_arr = [x, y];
+    let your_arr: [Field; 2] = [x, y];
+}
+```
+
+Here, both `my_arr` and `your_arr` are instantiated as an array containing two `Field` elements.
+
+Array elements can be accessed using indexing:
+
+```rust
+fn main() {
+    let a = [1, 2, 3, 4, 5];
+
+    let first = a[0];
+    let second = a[1];
+}
+```
+
+All elements in an array must be of the same type (i.e. homogeneous). That is, an array cannot group
+a `Field` value and a `u8` value together for example.
+
+You can write mutable arrays, like:
+
+```rust
+fn main() {
+    let mut arr = [1, 2, 3, 4, 5];
+    assert(arr[0] == 1);
+
+    arr[0] = 42;
+    assert(arr[0] == 42);
+}
+```
+
+You can instantiate a new array of a fixed size with the same value repeated for each element. The following example instantiates an array of length 32 where each element is of type Field and has the value 0.
+
+```rust
+let array: [Field; 32] = [0; 32];
+```
+
+Like in Rust, arrays in Noir are a fixed size.
However, if you wish to convert an array to a [slice](./slices), you can just call `as_slice` on your array: + +```rust +let array: [Field; 32] = [0; 32]; +let sl = array.as_slice() +``` + +You can define multidimensional arrays: + +```rust +let array : [[Field; 2]; 2]; +let element = array[0][0]; +``` + +## Types + +You can create arrays of primitive types or structs. There is not yet support for nested arrays +(arrays of arrays) or arrays of structs that contain arrays. + +## Methods + +For convenience, the STD provides some ready-to-use, common methods for arrays: + +### len + +Returns the length of an array + +```rust +fn len(_array: [T; N]) -> comptime Field +``` + +example + +```rust +fn main() { + let array = [42, 42]; + assert(array.len() == 2); +} +``` + +### sort + +Returns a new sorted array. The original array remains untouched. Notice that this function will +only work for arrays of fields or integers, not for any arbitrary type. This is because the sorting +logic it uses internally is optimized specifically for these values. If you need a sort function to +sort any type, you should use the function `sort_via` described below. + +```rust +fn sort(_array: [T; N]) -> [T; N] +``` + +example + +```rust +fn main() { + let arr = [42, 32]; + let sorted = arr.sort(); + assert(sorted == [32, 42]); +} +``` + +### sort_via + +Sorts the array with a custom comparison function + +```rust +fn sort_via(mut a: [T; N], ordering: fn(T, T) -> bool) -> [T; N] +``` + +example + +```rust +fn main() { + let arr = [42, 32] + let sorted_ascending = arr.sort_via(|a, b| a < b); + assert(sorted_ascending == [32, 42]); // verifies + + let sorted_descending = arr.sort_via(|a, b| a > b); + assert(sorted_descending == [32, 42]); // does not verify +} +``` + +### map + +Applies a function to each element of the array, returning a new array containing the mapped elements. + +```rust +fn map(f: fn(T) -> U) -> [U; N] +``` + +example + +```rust +let a = [1, 2, 3]; +let b = a.map(|a| a * 2); // b is now [2, 4, 6] +``` + +### fold + +Applies a function to each element of the array, returning the final accumulated value. The first +parameter is the initial value. + +```rust +fn fold(mut accumulator: U, f: fn(U, T) -> U) -> U +``` + +This is a left fold, so the given function will be applied to the accumulator and first element of +the array, then the second, and so on. For a given call the expected result would be equivalent to: + +```rust +let a1 = [1]; +let a2 = [1, 2]; +let a3 = [1, 2, 3]; + +let f = |a, b| a - b; +a1.fold(10, f) //=> f(10, 1) +a2.fold(10, f) //=> f(f(10, 1), 2) +a3.fold(10, f) //=> f(f(f(10, 1), 2), 3) +``` + +example: + +```rust + +fn main() { + let arr = [2, 2, 2, 2, 2]; + let folded = arr.fold(0, |a, b| a + b); + assert(folded == 10); +} + +``` + +### reduce + +Same as fold, but uses the first element as starting element. 
+ +```rust +fn reduce(f: fn(T, T) -> T) -> T +``` + +example: + +```rust +fn main() { + let arr = [2, 2, 2, 2, 2]; + let reduced = arr.reduce(|a, b| a + b); + assert(reduced == 10); +} +``` + +### all + +Returns true if all the elements satisfy the given predicate + +```rust +fn all(predicate: fn(T) -> bool) -> bool +``` + +example: + +```rust +fn main() { + let arr = [2, 2, 2, 2, 2]; + let all = arr.all(|a| a == 2); + assert(all); +} +``` + +### any + +Returns true if any of the elements satisfy the given predicate + +```rust +fn any(predicate: fn(T) -> bool) -> bool +``` + +example: + +```rust +fn main() { + let arr = [2, 2, 2, 2, 5]; + let any = arr.any(|a| a == 5); + assert(any); +} + +``` diff --git a/docs/versioned_docs/version-v0.19.4/language_concepts/data_types/05_slices.mdx b/docs/versioned_docs/version-v0.19.4/language_concepts/data_types/05_slices.mdx new file mode 100644 index 00000000000..1be0ec4a137 --- /dev/null +++ b/docs/versioned_docs/version-v0.19.4/language_concepts/data_types/05_slices.mdx @@ -0,0 +1,146 @@ +--- +title: Slices +description: Explore the Slice data type in Noir. Understand its methods, see real-world examples, and learn how to effectively use Slices in your Noir programs. +keywords: [noir, slice type, methods, examples, subarrays] +--- + +import Experimental from '@site/src/components/Notes/_experimental.mdx'; + + + +A slice is a dynamically-sized view into a sequence of elements. They can be resized at runtime, but because they don't own the data, they cannot be returned from a circuit. You can treat slices as arrays without a constrained size. + +```rust +use dep::std::slice; + +fn main() -> pub Field { + let mut slice: [Field] = [0; 2]; + + let mut new_slice = slice.push_back(6); + new_slice.len() +} +``` + +View the corresponding test file [here][test-file]. + +[test-file]: https://github.com/noir-lang/noir/blob/f387ec1475129732f72ba294877efdf6857135ac/crates/nargo_cli/tests/test_data_ssa_refactor/slices/src/main.nr + +## Methods + +For convenience, the STD provides some ready-to-use, common methods for slices: + +### push_back + +Pushes a new element to the end of the slice, returning a new slice with a length one greater than the original unmodified slice. + +```rust +fn push_back(_self: [T], _elem: T) -> [T] +``` + +example: + +```rust +fn main() -> pub Field { + let mut slice: [Field] = [0; 2]; + + let mut new_slice = slice.push_back(6); + new_slice.len() +} +``` + +View the corresponding test file [here][test-file]. + +### push_front + +Returns a new array with the specified element inserted at index 0. The existing elements indexes are incremented by 1. + +```rust +fn push_front(_self: Self, _elem: T) -> Self +``` + +Example: + +```rust +let mut new_slice: [Field] = []; +new_slice = new_slice.push_front(20); +assert(new_slice[0] == 20); // returns true +``` + +View the corresponding test file [here][test-file]. + +### pop_front + +Returns a tuple of two items, the first element of the array and the rest of the array. + +```rust +fn pop_front(_self: Self) -> (T, Self) +``` + +Example: + +```rust +let (first_elem, rest_of_slice) = slice.pop_front(); +``` + +View the corresponding test file [here][test-file]. + +### pop_back + +Returns a tuple of two items, the beginning of the array with the last element omitted and the last element. + +```rust +fn pop_back(_self: Self) -> (Self, T) +``` + +Example: + +```rust +let (popped_slice, last_elem) = slice.pop_back(); +``` + +View the corresponding test file [here][test-file]. 
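+
+As a rough end-to-end sketch (the values are only illustrative), the two popping methods compose naturally:
+
+```rust
+fn main() {
+    let mut slice: [Field] = [];
+    slice = slice.push_back(1);
+    slice = slice.push_back(2);
+    slice = slice.push_back(3);
+
+    // Split off the first element, then the last element of what remains.
+    let (first, rest) = slice.pop_front();
+    let (middle, last) = rest.pop_back();
+
+    assert(first == 1);
+    assert(last == 3);
+    assert(middle.len() == 1);
+}
+```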
+
+### append
+
+Appends the contents of another slice to the end of this one, returning the combined slice.
+
+```rust
+fn append(mut self, other: Self) -> Self
+```
+
+Example:
+
+```rust
+let append = [1, 2].append([3, 4, 5]);
+```
+
+### insert
+
+Inserts an element at a specified index and shifts all following elements by 1.
+
+```rust
+fn insert(_self: Self, _index: Field, _elem: T) -> Self
+```
+
+Example:
+
+```rust
+new_slice = rest_of_slice.insert(2, 100);
+assert(new_slice[2] == 100);
+```
+
+View the corresponding test file [here][test-file].
+
+### remove
+
+Remove an element at a specified index, shifting all elements after it to the left, returning the altered slice and the removed element.
+
+```rust
+fn remove(_self: Self, _index: Field) -> (Self, T)
+```
+
+Example:
+
+```rust
+let (remove_slice, removed_elem) = slice.remove(3);
+```
diff --git a/docs/versioned_docs/version-v0.19.4/language_concepts/data_types/06_vectors.mdx b/docs/versioned_docs/version-v0.19.4/language_concepts/data_types/06_vectors.mdx
new file mode 100644
index 00000000000..4617e90d038
--- /dev/null
+++ b/docs/versioned_docs/version-v0.19.4/language_concepts/data_types/06_vectors.mdx
@@ -0,0 +1,172 @@
+---
+title: Vectors
+description: Delve into the Vector data type in Noir. Learn about its methods, practical examples, and best practices for using Vectors in your Noir code.
+keywords: [noir, vector type, methods, examples, dynamic arrays]
+---
+
+import Experimental from '@site/src/components/Notes/_experimental.mdx';
+
+<Experimental />
+
+A vector is a collection type similar to Rust's Vector type. It's a convenient way to use slices as mutable arrays.
+
+Example:
+
+```rust
+use dep::std::collections::vec::Vec;
+
+let mut vector: Vec<Field> = Vec::new();
+for i in 0..5 {
+    vector.push(i);
+}
+assert(vector.len() == 5);
+```
+
+## Methods
+
+### new
+
+Creates a new, empty vector.
+
+```rust
+pub fn new() -> Self {
+    Self { slice: [] }
+}
+```
+
+Example:
+
+```rust
+let empty_vector: Vec<Field> = Vec::new();
+assert(empty_vector.len() == 0);
+```
+
+### from_slice
+
+Creates a vector containing each element from a given slice. Mutations to the resulting vector will not affect the original slice.
+
+```rust
+pub fn from_slice(slice: [T]) -> Self {
+    Self { slice }
+}
+```
+
+Example:
+
+```rust
+let arr: [Field] = [1, 2, 3];
+let vector_from_slice = Vec::from_slice(arr);
+assert(vector_from_slice.len() == 3);
+```
+
+### get
+
+Retrieves an element from the vector at a given index. Panics if the index points beyond the vector's end.
+
+```rust
+pub fn get(self, index: Field) -> T {
+    self.slice[index]
+}
+```
+
+Example:
+
+```rust
+let vector: Vec<Field> = Vec::from_slice([10, 20, 30]);
+assert(vector.get(1) == 20);
+```
+
+### push
+
+Adds a new element to the vector's end, increasing the vector's length by one.
+
+```rust
+pub fn push(&mut self, elem: T) {
+    self.slice = self.slice.push_back(elem);
+}
+```
+
+Example:
+
+```rust
+let mut vector: Vec<Field> = Vec::new();
+vector.push(10);
+assert(vector.len() == 1);
+```
+
+### pop
+
+Removes the last element from the vector and returns it, decreasing the vector's length by one. Panics if the vector's length is zero.
+ +```rust +pub fn pop(&mut self) -> T { + let (popped_slice, last_elem) = self.slice.pop_back(); + self.slice = popped_slice; + last_elem +} +``` + +Example: + +```rust +let mut vector = Vec::from_slice([10, 20]); +let popped_elem = vector.pop(); +assert(popped_elem == 20); +assert(vector.len() == 1); +``` + +### insert + +Inserts an element at a specified index, shifting subsequent elements to the right. + +```rust +pub fn insert(&mut self, index: Field, elem: T) { + self.slice = self.slice.insert(index, elem); +} +``` + +Example: + +```rust +let mut vector = Vec::from_slice([10, 30]); +vector.insert(1, 20); +assert(vector.get(1) == 20); +``` + +### remove + +Removes an element at a specified index, shifting subsequent elements to the left, and returns the removed element. + +```rust +pub fn remove(&mut self, index: Field) -> T { + let (new_slice, elem) = self.slice.remove(index); + self.slice = new_slice; + elem +} +``` + +Example: + +```rust +let mut vector = Vec::from_slice([10, 20, 30]); +let removed_elem = vector.remove(1); +assert(removed_elem == 20); +assert(vector.len() == 2); +``` + +### len + +Returns the number of elements in the vector. + +```rust +pub fn len(self) -> Field { + self.slice.len() +} +``` + +Example: + +```rust +let empty_vector: Vec = Vec::new(); +assert(empty_vector.len() == 0); +``` diff --git a/docs/versioned_docs/version-v0.19.4/language_concepts/data_types/07_tuples.md b/docs/versioned_docs/version-v0.19.4/language_concepts/data_types/07_tuples.md new file mode 100644 index 00000000000..5f6cab974a8 --- /dev/null +++ b/docs/versioned_docs/version-v0.19.4/language_concepts/data_types/07_tuples.md @@ -0,0 +1,47 @@ +--- +title: Tuples +description: + Dive into the Tuple data type in Noir. Understand its methods, practical examples, and best practices for efficiently using Tuples in your Noir code. +keywords: + [ + noir, + tuple type, + methods, + examples, + multi-value containers, + ] +--- + +A tuple collects multiple values like an array, but with the added ability to collect values of +different types: + +```rust +fn main() { + let tup: (u8, u64, Field) = (255, 500, 1000); +} +``` + +One way to access tuple elements is via destructuring using pattern matching: + +```rust +fn main() { + let tup = (1, 2); + + let (one, two) = tup; + + let three = one + two; +} +``` + +Another way to access tuple elements is via direct member access, using a period (`.`) followed by +the index of the element we want to access. Index `0` corresponds to the first tuple element, `1` to +the second and so on: + +```rust +fn main() { + let tup = (5, 6, 7, 8); + + let five = tup.0; + let eight = tup.3; +} +``` diff --git a/docs/versioned_docs/version-v0.19.4/language_concepts/data_types/08_structs.md b/docs/versioned_docs/version-v0.19.4/language_concepts/data_types/08_structs.md new file mode 100644 index 00000000000..35421734639 --- /dev/null +++ b/docs/versioned_docs/version-v0.19.4/language_concepts/data_types/08_structs.md @@ -0,0 +1,69 @@ +--- +title: Structs +description: + Explore the Struct data type in Noir. Learn about its methods, see real-world examples, and grasp how to effectively define and use Structs in your Noir programs. +keywords: + [ + noir, + struct type, + methods, + examples, + data structures, + ] +--- + +A struct also allows for grouping multiple values of different types. Unlike tuples, we can also +name each field. + +> **Note:** The usage of _field_ here refers to each element of the struct and is unrelated to the +> field type of Noir. 
+ +Defining a struct requires giving it a name and listing each field within as `: ` pairs: + +```rust +struct Animal { + hands: Field, + legs: Field, + eyes: u8, +} +``` + +An instance of a struct can then be created with actual values in `: ` pairs in any +order. Struct fields are accessible using their given names: + +```rust +fn main() { + let legs = 4; + + let dog = Animal { + eyes: 2, + hands: 0, + legs, + }; + + let zero = dog.hands; +} +``` + +Structs can also be destructured in a pattern, binding each field to a new variable: + +```rust +fn main() { + let Animal { hands, legs: feet, eyes } = get_octopus(); + + let ten = hands + feet + eyes as u8; +} + +fn get_octopus() -> Animal { + let octopus = Animal { + hands: 0, + legs: 8, + eyes: 2, + }; + + octopus +} +``` + +The new variables can be bound with names different from the original struct field names, as +showcased in the `legs --> feet` binding in the example above. diff --git a/docs/versioned_docs/version-v0.19.4/language_concepts/data_types/09_references.md b/docs/versioned_docs/version-v0.19.4/language_concepts/data_types/09_references.md new file mode 100644 index 00000000000..b0c35ce2cb9 --- /dev/null +++ b/docs/versioned_docs/version-v0.19.4/language_concepts/data_types/09_references.md @@ -0,0 +1,22 @@ +--- +title: References +--- + +Noir supports first-class references. References are a bit like pointers: they point to a specific address that can be followed to access the data stored at that address. You can use Rust-like syntax to use pointers in Noir: the `&` operator references the variable, the `*` operator dereferences it. + +Example: + +```rust +fn main() { + let mut x = 2; + + // you can reference x as &mut and pass it to multiplyBy2 + multiplyBy2(&mut x); +} + +// you can access &mut here +fn multiplyBy2(x: &mut Field) { + // and dereference it with * + *x = *x * 2; +} +``` diff --git a/docs/versioned_docs/version-v0.19.4/language_concepts/data_types/10_function_types.md b/docs/versioned_docs/version-v0.19.4/language_concepts/data_types/10_function_types.md new file mode 100644 index 00000000000..1ec92efd594 --- /dev/null +++ b/docs/versioned_docs/version-v0.19.4/language_concepts/data_types/10_function_types.md @@ -0,0 +1,25 @@ +--- +title: Function types +--- + +Noir supports higher-order functions. The syntax for a function type is as follows: + +```rust +fn(arg1_type, arg2_type, ...) -> return_type +``` + +Example: + +```rust +fn assert_returns_100(f: fn() -> Field) { // f takes no args and returns a Field + assert(f() == 100); +} + +fn main() { + assert_returns_100(|| 100); // ok + assert_returns_100(|| 150); // fails +} +``` + +A function type also has an optional capture environment - this is necessary to support closures. +See [Lambdas](../08_lambdas.md) for more details. diff --git a/docs/versioned_docs/version-v0.19.4/migration_notes.md b/docs/versioned_docs/version-v0.19.4/migration_notes.md new file mode 100644 index 00000000000..905bca3d30c --- /dev/null +++ b/docs/versioned_docs/version-v0.19.4/migration_notes.md @@ -0,0 +1,91 @@ +--- +title: Migration notes +description: Read about migration notes from previous versions, which could solve problems while updating +keywords: [Noir, notes, migration, updating, upgrading] +--- + +Noir is in full-speed development. Things break fast, wild, and often. This page attempts to leave some notes on errors you might encounter when upgrading and how to resolve them until proper patches are built. 
+ +## ≥0.19 + +### Enforcing `compiler_version` + +From this version on, the compiler will check for the `compiler_version` field in `Nargo.toml`, and will error if it doesn't match the current Nargo version in use. + +To update, please make sure this field in `Nargo.toml` matches the output of `nargo --version`. + +## ≥0.14 + +The index of the [for loops](./language_concepts/02_control_flow.md#loops) is now of type `u64` instead of `Field`. An example refactor would be: + +```rust +for i in 0..10 { + let i = i as Field; +} +``` + +## ≥v0.11.0 and Nargo backend + +From this version onwards, Nargo starts managing backends through the `nargo backend` command. Upgrading to the versions per usual steps might lead to: + +### `backend encountered an error` + +This is likely due to the existing locally installed version of proving backend (e.g. barretenberg) is incompatible with the version of Nargo in use. + +To fix the issue: + +1. Uninstall the existing backend + +```bash +nargo backend uninstall acvm-backend-barretenberg +``` + +You may replace _acvm-backend-barretenberg_ with the name of your backend listed in `nargo backend ls` or in ~/.nargo/backends. + +2. Reinstall a compatible version of the proving backend. + +If you are using the default barretenberg backend, simply run: + +``` +nargo prove +``` + +with your Noir program. + +This will trigger the download and installation of the latest version of barretenberg compatible with your Nargo in use. + +### `backend encountered an error: illegal instruction` + +On certain Intel-based systems, an `illegal instruction` error may arise due to incompatibility of barretenberg with certain CPU instructions. + +To fix the issue: + +1. Uninstall the existing backend + +```bash +nargo backend uninstall acvm-backend-barretenberg +``` + +You may replace _acvm-backend-barretenberg_ with the name of your backend listed in `nargo backend ls` or in ~/.nargo/backends. + +2. Reinstall a compatible version of the proving backend. + +If you are using the default barretenberg backend, simply run: + +``` +nargo backend install acvm-backend-barretenberg https://github.com/noir-lang/barretenberg-js-binary/raw/master/run-bb.tar.gz +``` + +This downloads and installs a specific bb.js based version of barretenberg binary from GitHub. + +The gzipped file is running [this bash script](https://github.com/noir-lang/barretenberg-js-binary/blob/master/run-bb-js.sh), where we need to gzip it as the Nargo currently expect the backend to be zipped up. + +Then run: + +``` +DESIRED_BINARY_VERSION=0.8.1 nargo info +``` + +This overrides the bb native binary with a bb.js node application instead, which should be compatible with most if not all hardware. This does come with the drawback of being generally slower than native binary. + +0.8.1 indicates bb.js version 0.8.1, so if you change that it will update to a different version or the default version in the script if none was supplied. 
diff --git a/docs/versioned_docs/version-v0.19.4/modules_packages_crates/crates_and_packages.md b/docs/versioned_docs/version-v0.19.4/modules_packages_crates/crates_and_packages.md new file mode 100644 index 00000000000..744de72bb2c --- /dev/null +++ b/docs/versioned_docs/version-v0.19.4/modules_packages_crates/crates_and_packages.md @@ -0,0 +1,42 @@ +--- +title: Crates and Packages +description: Learn how to use Crates and Packages in your Noir project +keywords: [Nargo, dependencies, package management, crates, package] +--- + +## Crates + +A crate is the smallest amount of code that the Noir compiler considers at a time. +Crates can contain modules, and the modules may be defined in other files that get compiled with the crate, as we’ll see in the coming sections. + +### Crate Types + +A Noir crate can come in several forms: binaries, libraries or contracts. + +#### Binaries + +_Binary crates_ are programs which you can compile to an ACIR circuit which you can then create proofs against. Each must have a function called `main` that defines the ACIR circuit which is to be proved. + +#### Libraries + +_Library crates_ don't have a `main` function and they don't compile down to ACIR. Instead they define functionality intended to be shared with multiple projects, and eventually included in a binary crate. + +#### Contracts + +Contract crates are similar to binary crates in that they compile to ACIR which you can create proofs against. They are different in that they do not have a single `main` function, but are a collection of functions to be deployed to the [Aztec network](https://aztec.network). You can learn more about the technical details of Aztec in the [monorepo](https://github.com/AztecProtocol/aztec-packages) or contract [examples](https://github.com/AztecProtocol/aztec-packages/tree/master/yarn-project/noir-contracts/contracts). + +### Crate Root + +Every crate has a root, which is the source file that the compiler starts, this is also known as the root module. The Noir compiler does not enforce any conditions on the name of the file which is the crate root, however if you are compiling via Nargo the crate root must be called `lib.nr` or `main.nr` for library or binary crates respectively. + +## Packages + +A Nargo _package_ is a collection of one of more crates that provides a set of functionality. A package must include a Nargo.toml file. + +A package _must_ contain either a library or a binary crate, but not both. + +### Differences from Cargo Packages + +One notable difference between Rust's Cargo and Noir's Nargo is that while Cargo allows a package to contain an unlimited number of binary crates and a single library crate, Nargo currently only allows a package to contain a single crate. + +In future this restriction may be lifted to allow a Nargo package to contain both a binary and library crate or multiple binary crates. diff --git a/docs/versioned_docs/version-v0.19.4/modules_packages_crates/dependencies.md b/docs/versioned_docs/version-v0.19.4/modules_packages_crates/dependencies.md new file mode 100644 index 00000000000..e91e73a4c4f --- /dev/null +++ b/docs/versioned_docs/version-v0.19.4/modules_packages_crates/dependencies.md @@ -0,0 +1,123 @@ +--- +title: Dependencies +description: + Learn how to specify and manage dependencies in Nargo, allowing you to upload packages to GitHub + and use them easily in your project. 
+keywords: [Nargo, dependencies, GitHub, package management, versioning] +--- + +Nargo allows you to upload packages to GitHub and use them as dependencies. + +## Specifying a dependency + +Specifying a dependency requires a tag to a specific commit and the git url to the url containing +the package. + +Currently, there are no requirements on the tag contents. If requirements are added, it would follow +semver 2.0 guidelines. + +> Note: Without a `tag` , there would be no versioning and dependencies would change each time you +> compile your project. + +For example, to add the [ecrecover-noir library](https://github.com/colinnielsen/ecrecover-noir) to your project, add it to `Nargo.toml`: + +```toml +# Nargo.toml + +[dependencies] +ecrecover = {tag = "v0.8.0", git = "https://github.com/colinnielsen/ecrecover-noir"} +``` + +If the module is in a subdirectory, you can define a subdirectory in your git repository, for example: + +```toml +# Nargo.toml + +[dependencies] +easy_private_token_contract = {tag ="v0.1.0-alpha62", git = "https://github.com/AztecProtocol/aztec-packages", directory = "yarn-project/noir-contracts/contracts/easy_private_token_contract"} +``` + +## Specifying a local dependency + +You can also specify dependencies that are local to your machine. + +For example, this file structure has a library and binary crate + +```tree +├── binary_crate +│   ├── Nargo.toml +│   └── src +│   └── main.nr +└── lib_a + ├── Nargo.toml + └── src + └── lib.nr +``` + +Inside of the binary crate, you can specify: + +```toml +# Nargo.toml + +[dependencies] +lib_a = { path = "../lib_a" } +``` + +## Importing dependencies + +You can import a dependency to a Noir file using the following syntax. For example, to import the +ecrecover-noir library and local lib_a referenced above: + +```rust +use dep::ecrecover; +use dep::lib_a; +``` + +You can also import only the specific parts of dependency that you want to use, like so: + +```rust +use dep::std::hash::sha256; +use dep::std::scalar_mul::fixed_base_embedded_curve; +``` + +Lastly, as demonstrated in the +[elliptic curve example](../standard_library/cryptographic_primitives/ec_primitives#examples), you +can import multiple items in the same line by enclosing them in curly braces: + +```rust +use dep::std::ec::tecurve::affine::{Curve, Point}; +``` + +We don't have a way to consume libraries from inside a [workspace](./workspaces) as external dependencies right now. + +Inside a workspace, these are consumed as `{ path = "../to_lib" }` dependencies in Nargo.toml. + +## Dependencies of Dependencies + +Note that when you import a dependency, you also get access to all of the dependencies of that package. + +For example, the [phy_vector](https://github.com/resurgencelabs/phy_vector) library imports an [fraction](https://github.com/resurgencelabs/fraction) library. If you're importing the phy_vector library, then you can access the functions in fractions library like so: + +```rust +use dep::phy_vector; + +fn main(x : Field, y : pub Field) { + //... + let f = phy_vector::fraction::toFraction(true, 2, 1); + //... +} +``` + +## Available Libraries + +Noir does not currently have an official package manager. You can find a list of available Noir libraries in the [awesome-noir repo here](https://github.com/noir-lang/awesome-noir#libraries). 
+ +Some libraries that are available today include: + +- [Standard Library](https://github.com/noir-lang/noir/tree/master/noir_stdlib) - the Noir Standard Library +- [Ethereum Storage Proof Verification](https://github.com/aragonzkresearch/noir-trie-proofs) - a library that contains the primitives necessary for RLP decoding (in the form of look-up table construction) and Ethereum state and storage proof verification (or verification of any trie proof involving 32-byte long keys) +- [BigInt](https://github.com/shuklaayush/noir-bigint) - a library that provides a custom BigUint56 data type, allowing for computations on large unsigned integers +- [ECrecover](https://github.com/colinnielsen/ecrecover-noir/tree/main) - a library to verify an ECDSA signature and return the source Ethereum address +- [Sparse Merkle Tree Verifier](https://github.com/vocdoni/smtverifier-noir/tree/main) - a library for verification of sparse Merkle trees +- [Signed Int](https://github.com/resurgencelabs/signed_int) - a library for accessing a custom Signed Integer data type, allowing access to negative numbers on Noir +- [Fraction](https://github.com/resurgencelabs/fraction) - a library for accessing fractional number data type in Noir, allowing results that aren't whole numbers diff --git a/docs/versioned_docs/version-v0.19.4/modules_packages_crates/modules.md b/docs/versioned_docs/version-v0.19.4/modules_packages_crates/modules.md new file mode 100644 index 00000000000..11e60cbf35e --- /dev/null +++ b/docs/versioned_docs/version-v0.19.4/modules_packages_crates/modules.md @@ -0,0 +1,104 @@ +--- +title: Modules +description: + Learn how to organize your files using modules in Noir, following the same convention as Rust's + module system. Examples included. +keywords: [Noir, Rust, modules, organizing files, sub-modules] +--- + +Noir's module system follows the same convention as the _newer_ version of Rust's module system. + +## Purpose of Modules + +Modules are used to organize files. Without modules all of your code would need to live in a single +file. In Noir, the compiler does not automatically scan all of your files to detect modules. This +must be done explicitly by the developer. + +## Examples + +### Importing a module in the crate root + +Filename : `src/main.nr` + +```rust +mod foo; + +fn main() { + foo::hello_world(); +} +``` + +Filename : `src/foo.nr` + +```rust +fn from_foo() {} +``` + +In the above snippet, the crate root is the `src/main.nr` file. The compiler sees the module +declaration `mod foo` which prompts it to look for a foo.nr file. + +Visually this module hierarchy looks like the following : + +``` +crate + ├── main + │ + └── foo + └── from_foo + +``` + +### Importing a module throughout the tree + +All modules are accessible from the `crate::` namespace. + +``` +crate + ├── bar + ├── foo + └── main + +``` + +In the above snippet, if `bar` would like to use functions in `foo`, it can do so by `use crate::foo::function_name`. + +### Sub-modules + +Filename : `src/main.nr` + +```rust +mod foo; + +fn main() { + foo::from_foo(); +} +``` + +Filename : `src/foo.nr` + +```rust +mod bar; +fn from_foo() {} +``` + +Filename : `src/foo/bar.nr` + +```rust +fn from_bar() {} +``` + +In the above snippet, we have added an extra module to the module tree; `bar`. `bar` is a submodule +of `foo` hence we declare bar in `foo.nr` with `mod bar`. 
Since `foo` is not the crate root, the +compiler looks for the file associated with the `bar` module in `src/foo/bar.nr` + +Visually the module hierarchy looks as follows: + +``` +crate + ├── main + │ + └── foo + ├── from_foo + └── bar + └── from_bar +``` diff --git a/docs/versioned_docs/version-v0.19.4/modules_packages_crates/workspaces.md b/docs/versioned_docs/version-v0.19.4/modules_packages_crates/workspaces.md new file mode 100644 index 00000000000..a979ef9f0a5 --- /dev/null +++ b/docs/versioned_docs/version-v0.19.4/modules_packages_crates/workspaces.md @@ -0,0 +1,39 @@ +--- +title: Workspaces +--- + +Workspaces are a feature of nargo that allow you to manage multiple related Noir packages in a single repository. A workspace is essentially a group of related projects that share common build output directories and configurations. + +Each Noir project (with it's own Nargo.toml file) can be thought of as a package. Each package is expected to contain exactly one "named circuit", being the "name" defined in Nargo.toml with the program logic defined in `./src/main.nr`. + +For a project with the following structure: + +```tree +├── crates +│   ├── a +│   │   ├── Nargo.toml +│   │   └── src +│   │   └── main.nr +│   └── b +│   ├── Nargo.toml +│   └── src +│   └── main.nr +├── Nargo.toml +└── Prover.toml +``` + +You can define a workspace in Nargo.toml like so: + +```toml +[workspace] +members = ["crates/a", "crates/b"] +default-member = "crates/a" +``` + +`members` indicates which packages are included in the workspace. As such, all member packages of a workspace will be processed when the `--workspace` flag is used with various commands or if a `default-member` is not specified. + +`default-member` indicates which package various commands process by default. + +Libraries can be defined in a workspace. Inside a workspace, these are consumed as `{ path = "../to_lib" }` dependencies in Nargo.toml. + +Inside a workspace, these are consumed as `{ path = "../to_lib" }` dependencies in Nargo.toml. \ No newline at end of file diff --git a/docs/versioned_docs/version-v0.19.4/nargo/01_commands.md b/docs/versioned_docs/version-v0.19.4/nargo/01_commands.md new file mode 100644 index 00000000000..65e2bdb44e3 --- /dev/null +++ b/docs/versioned_docs/version-v0.19.4/nargo/01_commands.md @@ -0,0 +1,250 @@ +--- +title: Commands +description: + Noir CLI Commands for Noir Prover and Verifier to create, execute, prove and verify programs, + generate Solidity verifier smart contract and compile into JSON file containing ACIR + representation and ABI of circuit. +keywords: + [ + Nargo, + Noir CLI, + Noir Prover, + Noir Verifier, + generate Solidity verifier, + compile JSON file, + ACIR representation, + ABI of circuit, + TypeScript, + ] +--- + +## General options + +| Option | Description | +| -------------------- | -------------------------------------------------- | +| `--show-ssa` | Emit debug information for the intermediate SSA IR | +| `--deny-warnings` | Quit execution when warnings are emitted | +| `--silence-warnings` | Suppress warnings | +| `-h, --help` | Print help | + +## `nargo help [subcommand]` + +Prints the list of available commands or specific information of a subcommand. + +_Arguments_ + +| Argument | Description | +| -------------- | -------------------------------------------- | +| `` | The subcommand whose help message to display | + +## `nargo backend` + +Installs and selects custom backends used to generate and verify proofs. 
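For instance, once a backend has been installed you might list the available backends and switch the active one like this (the backend name shown is only illustrative; use a name reported by the `ls` subcommand described below):

```bash
# Show which backends are installed and which one is currently active
nargo backend ls
nargo backend current

# Switch to one of the installed backends (name shown is illustrative)
nargo backend use acvm-backend-barretenberg
```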
+ +### Commands + +| Command | Description | +| ----------- | --------------------------------------------------------- | +| `current` | Prints the name of the currently active backend | +| `ls` | Prints the list of currently installed backends | +| `use` | Select the backend to use | +| `install` | Install a new backend from a URL | +| `uninstall` | Uninstalls a backend | +| `help` | Print this message or the help of the given subcommand(s) | + +### Options + +| Option | Description | +| ------------ | ----------- | +| `-h, --help` | Print help | + +## `nargo check` + +Generate the `Prover.toml` and `Verifier.toml` files for specifying prover and verifier in/output +values of the Noir program respectively. + +### Options + +| Option | Description | +| --------------------- | ------------------------------------- | +| `--package ` | The name of the package to check | +| `--workspace` | Check all packages in the workspace | +| `--print-acir` | Display the ACIR for compiled circuit | +| `--deny-warnings` | Treat all warnings as errors | +| `--silence-warnings` | Suppress warnings | +| `-h, --help` | Print help | + +### `nargo codegen-verifier` + +Generate a Solidity verifier smart contract for the program. + +### Options + +| Option | Description | +| --------------------- | ------------------------------------- | +| `--package ` | The name of the package to codegen | +| `--workspace` | Codegen all packages in the workspace | +| `--print-acir` | Display the ACIR for compiled circuit | +| `--deny-warnings` | Treat all warnings as errors | +| `--silence-warnings` | Suppress warnings | +| `-h, --help` | Print help | + +## `nargo compile` + +Compile the program into a JSON build artifact file containing the ACIR representation and the ABI +of the circuit. This build artifact can then be used to generate and verify proofs. + +You can also use "build" as an alias for compile (e.g. `nargo build`). + +### Options + +| Option | Description | +| --------------------- | ------------------------------------------------------------ | +| `--include-keys` | Include Proving and Verification keys in the build artifacts | +| `--package ` | The name of the package to compile | +| `--workspace` | Compile all packages in the workspace | +| `--print-acir` | Display the ACIR for compiled circuit | +| `--deny-warnings` | Treat all warnings as errors | +| `--silence-warnings` | Suppress warnings | +| `-h, --help` | Print help | + +## `nargo new ` + +Creates a new Noir project in a new folder. + +**Arguments** + +| Argument | Description | +| -------- | -------------------------------- | +| `` | The path to save the new project | + +### Options + +| Option | Description | +| --------------- | ----------------------------------------------------- | +| `--name ` | Name of the package [default: package directory name] | +| `--lib` | Use a library template | +| `--bin` | Use a binary template [default] | +| `--contract` | Use a contract template | +| `-h, --help` | Print help | + +## `nargo init` + +Creates a new Noir project in the current directory. + +### Options + +| Option | Description | +| --------------- | ----------------------------------------------------- | +| `--name ` | Name of the package [default: current directory name] | +| `--lib` | Use a library template | +| `--bin` | Use a binary template [default] | +| `--contract` | Use a contract template | +| `-h, --help` | Print help | + +## `nargo execute [WITNESS_NAME]` + +Runs the Noir program and prints its return value. 
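As a quick usage sketch (the witness file name below is just an example; see the usage notes further down in this section):

```bash
# Execute the program using the inputs in Prover.toml and print its return value
nargo execute

# Additionally write the execution witness to ./target/foo.tr
nargo execute foo
```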
+ +**Arguments** + +| Argument | Description | +| ---------------- | ----------------------------------------- | +| `[WITNESS_NAME]` | Write the execution witness to named file | + +### Options + +| Option | Description | +| --------------------------------- | ------------------------------------------------------------------------------------ | +| `-p, --prover-name ` | The name of the toml file which contains the inputs for the prover [default: Prover] | +| `--package ` | The name of the package to execute | +| `--workspace` | Execute all packages in the workspace | +| `--print-acir` | Display the ACIR for compiled circuit | +| `--deny-warnings` | Treat all warnings as errors | +| `--silence-warnings` | Suppress warnings | +| `-h, --help` | Print help | + +_Usage_ + +The inputs to the circuit are read from the `Prover.toml` file generated by `nargo check`, which +must be filled in. + +To save the witness to file, run the command with a value for the `WITNESS_NAME` argument. A +`.tr` file will then be saved in the `./target` folder. + +## `nargo prove` + +Creates a proof for the program. + +### Options + +| Option | Description | +| ------------------------------------- | ---------------------------------------------------------------------------------------- | +| `-p, --prover-name ` | The name of the toml file which contains the inputs for the prover [default: Prover] | +| `-v, --verifier-name ` | The name of the toml file which contains the inputs for the verifier [default: Verifier] | +| `--verify` | Verify proof after proving | +| `--package ` | The name of the package to prove | +| `--workspace` | Prove all packages in the workspace | +| `--print-acir` | Display the ACIR for compiled circuit | +| `--deny-warnings` | Treat all warnings as errors | +| `--silence-warnings` | Suppress warnings | +| `-h, --help` | Print help | + +## `nargo verify` + +Given a proof and a program, verify whether the proof is valid. + +### Options + +| Option | Description | +| ------------------------------------- | ---------------------------------------------------------------------------------------- | +| `-v, --verifier-name ` | The name of the toml file which contains the inputs for the verifier [default: Verifier] | +| `--package ` | The name of the package to verify | +| `--workspace` | Verify all packages in the workspace | +| `--print-acir` | Display the ACIR for compiled circuit | +| `--deny-warnings` | Treat all warnings as errors | +| `--silence-warnings` | Suppress warnings | +| `-h, --help` | Print help | + +## `nargo test [TEST_NAME]` + +Nargo will automatically compile and run any functions which have the decorator `#[test]` on them if +you run `nargo test`. To print `println` statements in tests, use the `--show-output` flag. + +Takes an optional `--exact` flag which allows you to select tests based on an exact name. + +See an example on the [testing page](./testing). + +### Options + +| Option | Description | +| --------------------- | -------------------------------------- | +| `--show-output` | Display output of `println` statements | +| `--exact` | Only run tests that match exactly | +| `--package ` | The name of the package to test | +| `--workspace` | Test all packages in the workspace | +| `--print-acir` | Display the ACIR for compiled circuit | +| `--deny-warnings` | Treat all warnings as errors | +| `--silence-warnings` | Suppress warnings | +| `-h, --help` | Print help | + +## `nargo info` + +Prints a table containing the information of the package. 
+ +Currently the table provide + +1. The number of ACIR opcodes +2. The final number gates in the circuit used by a backend + +If the file contains a contract the table will provide the +above information about each function of the contract. + +## `nargo lsp` + +Start a long-running Language Server process that communicates over stdin/stdout. +Usually this command is not run by a user, but instead will be run by a Language Client, such as [vscode-noir](https://github.com/noir-lang/vscode-noir). + +## `nargo fmt` + +Automatically formats your Noir source code based on the default formatting settings. diff --git a/docs/versioned_docs/version-v0.19.4/nargo/02_testing.md b/docs/versioned_docs/version-v0.19.4/nargo/02_testing.md new file mode 100644 index 00000000000..5c57ef92705 --- /dev/null +++ b/docs/versioned_docs/version-v0.19.4/nargo/02_testing.md @@ -0,0 +1,61 @@ +--- +title: Testing in Noir +description: Learn how to use Nargo to test your Noir program in a quick and easy way +keywords: [Nargo, testing, Noir, compile, test] +--- + +You can test your Noir programs using Noir circuits. + +Nargo will automatically compile and run any functions which have the decorator `#[test]` on them if +you run `nargo test`. + +For example if you have a program like: + +```rust +fn add(x: u64, y: u64) -> u64 { + x + y +} +#[test] +fn test_add() { + assert(add(2,2) == 4); + assert(add(0,1) == 1); + assert(add(1,0) == 1); +} +``` + +Running `nargo test` will test that the `test_add` function can be executed while satisfying all +the constraints which allows you to test that add returns the expected values. Test functions can't +have any arguments currently. + +### Test fail + +You can write tests that are expected to fail by using the decorator `#[test(should_fail)]`. For example: + +```rust +fn add(x: u64, y: u64) -> u64 { + x + y +} +#[test(should_fail)] +fn test_add() { + assert(add(2,2) == 5); +} +``` + +You can be more specific and make it fail with a specific reason by using `should_fail_with = "`: + +```rust +fn main(african_swallow_avg_speed : Field) { + assert(african_swallow_avg_speed == 65, "What is the airspeed velocity of an unladen swallow"); +} + +#[test] +fn test_king_arthur() { + main(65); +} + +#[test(should_fail_with = "What is the airspeed velocity of an unladen swallow")] +fn test_bridgekeeper() { + main(32); +} + +``` diff --git a/docs/versioned_docs/version-v0.7.1/nargo/03_solidity_verifier.md b/docs/versioned_docs/version-v0.19.4/nargo/03_solidity_verifier.md similarity index 100% rename from docs/versioned_docs/version-v0.7.1/nargo/03_solidity_verifier.md rename to docs/versioned_docs/version-v0.19.4/nargo/03_solidity_verifier.md diff --git a/docs/versioned_docs/version-v0.19.4/nargo/04_language_server.md b/docs/versioned_docs/version-v0.19.4/nargo/04_language_server.md new file mode 100644 index 00000000000..48c01465f6e --- /dev/null +++ b/docs/versioned_docs/version-v0.19.4/nargo/04_language_server.md @@ -0,0 +1,42 @@ +--- +title: Language Server +description: Learn about the Noir Language Server, how to install the components, and configuration that may be required. +keywords: [Nargo, Language Server, LSP, VSCode, Visual Studio Code] +--- + +This section helps you install and configure the Noir Language Server. + +The Language Server Protocol (LSP) has two components, the [Server](#language-server) and the [Client](#language-client). Below we describe each in the context of Noir. 
+ +## Language Server + +The Server component is provided by the Nargo command line tool that you installed at the beginning of this guide. +As long as Nargo is installed and you've used it to run other commands in this guide, it should be good to go! + +If you'd like to verify that the `nargo lsp` command is available, you can run `nargo --help` and look for `lsp` in the list of commands. If you see it, you're using a version of Noir with LSP support. + +## Language Client + +The Client component is usually an editor plugin that launches the Server. It communicates LSP messages between the editor and the Server. For example, when you save a file, the Client will alert the Server, so it can try to compile the project and report any errors. + +Currently, Noir provides a Language Client for Visual Studio Code via the [vscode-noir](https://github.com/noir-lang/vscode-noir) extension. You can install it via the [Visual Studio Marketplace](https://marketplace.visualstudio.com/items?itemName=noir-lang.vscode-noir). + +> **Note:** Noir's Language Server Protocol support currently assumes users' VSCode workspace root to be the same as users' Noir project root (i.e. where Nargo.toml lies). +> +> If LSP features seem to be missing / malfunctioning, make sure you are opening your Noir project directly (instead of as a sub-folder) in your VSCode instance. + +When your language server is running correctly and the VSCode plugin is installed, you should see handy codelens buttons for compilation, measuring circuit size, execution, and tests: + +![Compile and Execute](@site/static/img/codelens_compile_execute.png) +![Run test](@site/static/img/codelens_run_test.png) + +You should also see your tests in the `testing` panel: + +![Testing panel](@site/static/img/codelens_testing_panel.png) + +### Configuration + +- **Noir: Enable LSP** - If checked, the extension will launch the Language Server via `nargo lsp` and communicate with it. +- **Noir: Nargo Flags** - Additional flags may be specified if you require them to be added when the extension calls `nargo lsp`. +- **Noir: Nargo Path** - An absolute path to a Nargo binary with the `lsp` command. This may be useful if Nargo is not within the `PATH` of your editor. +- **Noir > Trace: Server** - Setting this to `"messages"` or `"verbose"` will log LSP messages between the Client and Server. Useful for debugging. diff --git a/docs/versioned_docs/version-v0.19.4/noir_js/getting_started/01_tiny_noir_app.md b/docs/versioned_docs/version-v0.19.4/noir_js/getting_started/01_tiny_noir_app.md new file mode 100644 index 00000000000..c51ed61de52 --- /dev/null +++ b/docs/versioned_docs/version-v0.19.4/noir_js/getting_started/01_tiny_noir_app.md @@ -0,0 +1,260 @@ +--- +title: End-to-end +description: Learn how to setup a new app that uses Noir to generate and verify zero-knowledge SNARK proofs in a typescript or javascript environment +keywords: [how to, guide, javascript, typescript, noir, barretenberg, zero-knowledge, proofs] +--- + +NoirJS works both on the browser and on the server, and works for both ESM and CJS module systems. In this page, we will learn how can we write a simple test and a simple web app to verify the standard Noir example. + +You can find the complete app code for this guide [here](https://github.com/noir-lang/tiny-noirjs-app). + +## Before we start + +:::note + +Feel free to use whatever versions, just keep in mind that Nargo and the NoirJS packages are meant to be in sync. For example, Nargo 0.18.x matches `noir_js@0.18.x`, etc. 
+ +In this guide, we will be pinned to 0.17.0. + +::: + +Make sure you have Node installed on your machine by opening a terminal and executing `node --version`. If you don't see a version, you should install [node](https://github.com/nvm-sh/nvm). You can also use `yarn` if you prefer that package manager over npm (which comes with node). + +First of all, follow the the [Nargo guide](../../getting_started/00_nargo_installation.md) to install nargo version 0.17.0 and create a new project with `nargo new circuit`. Once there, `cd` into the `circuit` folder. You should then be able to compile your circuit into `json` format and see it inside the `target` folder: + +```bash +nargo compile +``` + +Your folder structure should look like: + +```tree +. +└── circuit + ├── Nargo.toml + ├── src + │ └── main.nr + └── target + └── circuit.json +``` + +## Starting a new project + +Go back to the previous folder and start a new project by running run `npm init`. You can configure your project or just leave the defaults, and see a `package.json` appear in your root folder. + +## Installing dependencies + +We'll need two `npm` packages. These packages will provide us the methods we need to run and verify proofs: + +```bash +npm i @noir-lang/backend_barretenberg@^0.17.0 @noir-lang/noir_js@^0.17.0 +``` + +To serve our page, we can use a build tool such as `vite`. Because we're gonna use some `wasm` files, we need to install a plugin as well. Run: + +```bash +npm i --save-dev vite rollup-plugin-copy +``` + +Since we're on the dependency world, we may as well define a nice starting script. Vite makes it easy. Just open `package.json`, find the block "scripts" and add this just below the line with `"test" : "echo......."`: + +```json + "start": "vite --open" +``` + +If you want do build a static website, you can also add some build and preview scripts: + +```json + "build": "vite build", + "preview": "vite preview" +``` + +## Vite plugins + +Vite is great, but support from `wasm` doesn't work out-of-the-box. We're gonna write a quick plugin and use another one. Just copy and paste this into a file named `vite.config.js`. You don't need to understand it, just trust me bro. + +```js +import { defineConfig } from 'vite'; +import copy from 'rollup-plugin-copy'; +import fs from 'fs'; +import path from 'path'; + +const wasmContentTypePlugin = { + name: 'wasm-content-type-plugin', + configureServer(server) { + server.middlewares.use(async (req, res, next) => { + if (req.url.endsWith('.wasm')) { + res.setHeader('Content-Type', 'application/wasm'); + const newPath = req.url.replace('deps', 'dist'); + const targetPath = path.join(__dirname, newPath); + const wasmContent = fs.readFileSync(targetPath); + return res.end(wasmContent); + } + next(); + }); + }, +}; + +export default defineConfig(({ command }) => { + if (command === 'serve') { + return { + plugins: [ + copy({ + targets: [{ src: 'node_modules/**/*.wasm', dest: 'node_modules/.vite/dist' }], + copySync: true, + hook: 'buildStart', + }), + command === 'serve' ? wasmContentTypePlugin : [], + ], + }; + } + + return {}; +}); +``` + +## HTML + +Here's the simplest HTML with some terrible UI. Create a file called `index.html` and paste this: + +```html + + + + + + +

+<!DOCTYPE html>
+<html>
+  <head></head>
+  <body>
+    <script type="module" src="/app.js"></script>
+    <h1>Very basic Noir app</h1>
+    <div id="logs"><h2>Logs</h2></div>
+    <div id="results"><h2>Proof</h2></div>
+  </body>
+</html>
+ + +``` + +## Some good old vanilla Javascript + +Create a new file `app.js`, which is where our javascript code will live. Let's start with this code inside: + +```js +document.addEventListener('DOMContentLoaded', async () => { + // here's where the magic happens +}); + +function display(container, msg) { + const c = document.getElementById(container); + const p = document.createElement('p'); + p.textContent = msg; + c.appendChild(p); +} +``` + +We can manipulate our website with this little function, so we can see our website working. + +## Adding Noir + +If you come from the previous page, your folder structure should look like this: + +```tree +├── app.js +├── circuit +│ ├── Nargo.toml +│ ├── src +│ │ └── main.nr +│ └── target +│ └── circuit.json +├── index.html +├── package.json +└── vite.config.js +``` + +You'll see other files and folders showing up (like `package-lock.json`, `yarn.lock`, `node_modules`) but you shouldn't have to care about those. + +## Importing our dependencies + +We're starting with the good stuff now. At the top of the new javascript file, import the packages: + +```ts +import { BarretenbergBackend } from '@noir-lang/backend_barretenberg'; +import { Noir } from '@noir-lang/noir_js'; +``` + +We also need to import the `circuit` JSON file we created. If you have the suggested folder structure, you can add this line: + +```ts +import circuit from './circuit/target/circuit.json'; +``` + +## Write code + +:::note + +We're gonna be adding code inside the `document.addEventListener...etc` block: + +```js +// forget stuff here +document.addEventListener('DOMContentLoaded', async () => { + // here's where the magic happens +}); +// forget stuff here +``` + +::: + +Our dependencies exported two classes: `BarretenbergBackend` and `Noir`. Let's `init` them and add some logs, just to flex: + +```ts +const backend = new BarretenbergBackend(circuit); +const noir = new Noir(circuit, backend); +``` + +## Proving + +Now we're ready to prove stuff! Let's feed some inputs to our circuit and calculate the proof: + +```js +const input = { x: 1, y: 2 }; +display('logs', 'Generating proof... ⌛'); +const proof = await noir.generateFinalProof(input); +display('logs', 'Generating proof... ✅'); +display('results', proof.proof); +``` + +You're probably eager to see stuff happening, so go and run your app now! + +From your terminal, run `npm start` (or `yarn start`). If it doesn't open a browser for you, just visit `localhost:5173`. On a modern laptop, proof will generate in less than 100ms, and you'll see this: + +![Getting Started 0](@site/static/img/noir_getting_started_1.png) + +If you're human, you shouldn't be able to understand anything on the "proof" box. That's OK. We like you, human. + +In any case, this means your proof was generated! But you shouldn't trust me just yet. Add these lines to see it being verified: + +```js +display('logs', 'Verifying proof... ⌛'); +const verification = await noir.verifyFinalProof(proof); +if (verification) display('logs', 'Verifying proof... ✅'); +``` + +By saving, your app will refresh and here's our complete Tiny Noir App! + +You can find the complete app code for this guide [here](https://github.com/noir-lang/tiny-noirjs-app). + +## Further Reading + +You can see how noirjs is used in a full stack Next.js hardhat application in the [noir-starter repo here](https://github.com/noir-lang/noir-starter/tree/main/next-hardhat). 
The example shows how to calculate a proof in the browser and verify it with a deployed Solidity verifier contract from noirjs. + +You should also check out the more advanced examples in the [noir-examples repo](https://github.com/noir-lang/noir-examples), where you'll find reference usage for some cool apps. diff --git a/docs/versioned_docs/version-v0.19.4/noir_js/noir_js.md b/docs/versioned_docs/version-v0.19.4/noir_js/noir_js.md new file mode 100644 index 00000000000..f895b22eaf8 --- /dev/null +++ b/docs/versioned_docs/version-v0.19.4/noir_js/noir_js.md @@ -0,0 +1,36 @@ +--- +title: NoirJS +description: Interact with Noir in Typescript or Javascript +keywords: [Noir project, javascript, typescript, node.js, browser, react] +--- + +NoirJS is a TypeScript library that make it easy to use Noir on your dapp, webapp, Node.js server, website, etc. + +A typical workflow would be composed of two major elements: + +- NoirJS +- Proving backend of choice's JavaScript package + + + +To install NoirJS, install Node.js if you have not already and run this in your JavaScript project: + +```bash +npm i @noir-lang/noir_js +``` + +## Proving backend + +Since Noir is backend agnostic, you can instantiate NoirJS without any backend (i.e. to execute a function). But for proving, you would have to instantiate NoirJS with any of the supported backends through their own `js` interface. + +### Barretenberg + +Aztec Labs maintains the `barretenberg` proving backend, which you can instantiate and make use of alongside NoirJS. It is also the default proving backend installed and used with Nargo, the Noir CLI tool. + +To install its JavaScript library, run this in your project: + +```bash +npm i @noir-lang/backend_barretenberg +``` + +For more details on how to instantiate and use the libraries, refer to the [Full Noir App Guide](./getting_started/01_tiny_noir_app.md) and [Reference](./reference/noir_js/classes/Noir.md) sections. diff --git a/docs/versioned_docs/version-v0.19.4/noir_js/reference/backend_barretenberg/.nojekyll b/docs/versioned_docs/version-v0.19.4/noir_js/reference/backend_barretenberg/.nojekyll new file mode 100644 index 00000000000..e2ac6616add --- /dev/null +++ b/docs/versioned_docs/version-v0.19.4/noir_js/reference/backend_barretenberg/.nojekyll @@ -0,0 +1 @@ +TypeDoc added this file to prevent GitHub Pages from using Jekyll. You can turn off this behavior by setting the `githubPages` option to false. 
\ No newline at end of file diff --git a/docs/versioned_docs/version-v0.19.4/noir_js/reference/backend_barretenberg/classes/BarretenbergBackend.md b/docs/versioned_docs/version-v0.19.4/noir_js/reference/backend_barretenberg/classes/BarretenbergBackend.md new file mode 100644 index 00000000000..5cbe9421b92 --- /dev/null +++ b/docs/versioned_docs/version-v0.19.4/noir_js/reference/backend_barretenberg/classes/BarretenbergBackend.md @@ -0,0 +1,185 @@ +# BarretenbergBackend + +## Implements + +- [`Backend`](../interfaces/Backend.md) + +## Constructors + +### new BarretenbergBackend(acirCircuit, options) + +```ts +new BarretenbergBackend(acirCircuit, options): BarretenbergBackend +``` + +#### Parameters + +| Parameter | Type | +| :------ | :------ | +| `acirCircuit` | [`CompiledCircuit`](../type-aliases/CompiledCircuit.md) | +| `options` | [`BackendOptions`](../type-aliases/BackendOptions.md) | + +#### Returns + +[`BarretenbergBackend`](BarretenbergBackend.md) + +## Methods + +### destroy() + +```ts +destroy(): Promise +``` + +#### Returns + +`Promise`\<`void`\> + +#### Implementation of + +[`Backend`](../interfaces/Backend.md).[`destroy`](../interfaces/Backend.md#destroy) + +#### Description + +Destroys the backend + +*** + +### generateFinalProof() + +```ts +generateFinalProof(decompressedWitness): Promise +``` + +#### Parameters + +| Parameter | Type | +| :------ | :------ | +| `decompressedWitness` | `Uint8Array` | + +#### Returns + +`Promise`\<[`ProofData`](../type-aliases/ProofData.md)\> + +#### Implementation of + +[`Backend`](../interfaces/Backend.md).[`generateFinalProof`](../interfaces/Backend.md#generatefinalproof) + +#### Description + +Generates a final proof (not meant to be verified in another circuit) + +*** + +### generateIntermediateProof() + +```ts +generateIntermediateProof(witness): Promise +``` + +#### Parameters + +| Parameter | Type | +| :------ | :------ | +| `witness` | `Uint8Array` | + +#### Returns + +`Promise`\<[`ProofData`](../type-aliases/ProofData.md)\> + +#### Implementation of + +[`Backend`](../interfaces/Backend.md).[`generateIntermediateProof`](../interfaces/Backend.md#generateintermediateproof) + +#### Example + +```typescript +const intermediateProof = await backend.generateIntermediateProof(witness); +``` + +*** + +### generateIntermediateProofArtifacts() + +```ts +generateIntermediateProofArtifacts(proofData, numOfPublicInputs): Promise +``` + +#### Parameters + +| Parameter | Type | Default value | +| :------ | :------ | :------ | +| `proofData` | [`ProofData`](../type-aliases/ProofData.md) | `undefined` | +| `numOfPublicInputs` | `number` | `0` | + +#### Returns + +`Promise`\<`object`\> + +#### Implementation of + +[`Backend`](../interfaces/Backend.md).[`generateIntermediateProofArtifacts`](../interfaces/Backend.md#generateintermediateproofartifacts) + +#### Example + +```typescript +const artifacts = await backend.generateIntermediateProofArtifacts(proof, numOfPublicInputs); +``` + +*** + +### verifyFinalProof() + +```ts +verifyFinalProof(proofData): Promise +``` + +#### Parameters + +| Parameter | Type | +| :------ | :------ | +| `proofData` | [`ProofData`](../type-aliases/ProofData.md) | + +#### Returns + +`Promise`\<`boolean`\> + +#### Implementation of + +[`Backend`](../interfaces/Backend.md).[`verifyFinalProof`](../interfaces/Backend.md#verifyfinalproof) + +#### Description + +Verifies a final proof + +*** + +### verifyIntermediateProof() + +```ts +verifyIntermediateProof(proofData): Promise +``` + +#### Parameters + +| Parameter | Type | +| 
:------ | :------ | +| `proofData` | [`ProofData`](../type-aliases/ProofData.md) | + +#### Returns + +`Promise`\<`boolean`\> + +#### Implementation of + +[`Backend`](../interfaces/Backend.md).[`verifyIntermediateProof`](../interfaces/Backend.md#verifyintermediateproof) + +#### Example + +```typescript +const isValidIntermediate = await backend.verifyIntermediateProof(proof); +``` + +*** + +Generated using [typedoc-plugin-markdown](https://www.npmjs.com/package/typedoc-plugin-markdown) and [TypeDoc](https://typedoc.org/) diff --git a/docs/versioned_docs/version-v0.19.4/noir_js/reference/backend_barretenberg/index.md b/docs/versioned_docs/version-v0.19.4/noir_js/reference/backend_barretenberg/index.md new file mode 100644 index 00000000000..93b248b0f65 --- /dev/null +++ b/docs/versioned_docs/version-v0.19.4/noir_js/reference/backend_barretenberg/index.md @@ -0,0 +1,45 @@ +# Backend Barretenberg + +## Exports + +### Classes + +| Class | Description | +| :------ | :------ | +| [BarretenbergBackend](classes/BarretenbergBackend.md) | - | + +### Interfaces + +| Interface | Description | +| :------ | :------ | +| [Backend](interfaces/Backend.md) | - | + +### Type Aliases + +| Type alias | Description | +| :------ | :------ | +| [BackendOptions](type-aliases/BackendOptions.md) | - | +| [CompiledCircuit](type-aliases/CompiledCircuit.md) | - | +| [ProofData](type-aliases/ProofData.md) | - | + +## Functions + +### flattenPublicInputs() + +```ts +flattenPublicInputs(publicInputs): string[] +``` + +#### Parameters + +| Parameter | Type | +| :------ | :------ | +| `publicInputs` | `WitnessMap` | + +#### Returns + +`string`[] + +*** + +Generated using [typedoc-plugin-markdown](https://www.npmjs.com/package/typedoc-plugin-markdown) and [TypeDoc](https://typedoc.org/) diff --git a/docs/versioned_docs/version-v0.19.4/noir_js/reference/backend_barretenberg/interfaces/Backend.md b/docs/versioned_docs/version-v0.19.4/noir_js/reference/backend_barretenberg/interfaces/Backend.md new file mode 100644 index 00000000000..3eb9645c8d2 --- /dev/null +++ b/docs/versioned_docs/version-v0.19.4/noir_js/reference/backend_barretenberg/interfaces/Backend.md @@ -0,0 +1,132 @@ +# Backend + +## Methods + +### destroy() + +```ts +destroy(): Promise +``` + +#### Returns + +`Promise`\<`void`\> + +#### Description + +Destroys the backend + +*** + +### generateFinalProof() + +```ts +generateFinalProof(decompressedWitness): Promise +``` + +#### Parameters + +| Parameter | Type | +| :------ | :------ | +| `decompressedWitness` | `Uint8Array` | + +#### Returns + +`Promise`\<[`ProofData`](../type-aliases/ProofData.md)\> + +#### Description + +Generates a final proof (not meant to be verified in another circuit) + +*** + +### generateIntermediateProof() + +```ts +generateIntermediateProof(decompressedWitness): Promise +``` + +#### Parameters + +| Parameter | Type | +| :------ | :------ | +| `decompressedWitness` | `Uint8Array` | + +#### Returns + +`Promise`\<[`ProofData`](../type-aliases/ProofData.md)\> + +#### Description + +Generates an intermediate proof (meant to be verified in another circuit) + +*** + +### generateIntermediateProofArtifacts() + +```ts +generateIntermediateProofArtifacts(proofData, numOfPublicInputs): Promise +``` + +#### Parameters + +| Parameter | Type | +| :------ | :------ | +| `proofData` | [`ProofData`](../type-aliases/ProofData.md) | +| `numOfPublicInputs` | `number` | + +#### Returns + +`Promise`\<`object`\> + +#### Description + +Retrieves the artifacts from a proof in the Field format + +*** + +### 
verifyFinalProof() + +```ts +verifyFinalProof(proofData): Promise +``` + +#### Parameters + +| Parameter | Type | +| :------ | :------ | +| `proofData` | [`ProofData`](../type-aliases/ProofData.md) | + +#### Returns + +`Promise`\<`boolean`\> + +#### Description + +Verifies a final proof + +*** + +### verifyIntermediateProof() + +```ts +verifyIntermediateProof(proofData): Promise +``` + +#### Parameters + +| Parameter | Type | +| :------ | :------ | +| `proofData` | [`ProofData`](../type-aliases/ProofData.md) | + +#### Returns + +`Promise`\<`boolean`\> + +#### Description + +Verifies an intermediate proof + +*** + +Generated using [typedoc-plugin-markdown](https://www.npmjs.com/package/typedoc-plugin-markdown) and [TypeDoc](https://typedoc.org/) diff --git a/docs/versioned_docs/version-v0.19.4/noir_js/reference/backend_barretenberg/type-aliases/BackendOptions.md b/docs/versioned_docs/version-v0.19.4/noir_js/reference/backend_barretenberg/type-aliases/BackendOptions.md new file mode 100644 index 00000000000..266ade75d17 --- /dev/null +++ b/docs/versioned_docs/version-v0.19.4/noir_js/reference/backend_barretenberg/type-aliases/BackendOptions.md @@ -0,0 +1,19 @@ +# BackendOptions + +```ts +type BackendOptions: object; +``` + +## Description + +An options object, currently only used to specify the number of threads to use. + +## Type declaration + +| Member | Type | Description | +| :------ | :------ | :------ | +| `threads` | `number` | **Description**

Number of threads | + +*** + +Generated using [typedoc-plugin-markdown](https://www.npmjs.com/package/typedoc-plugin-markdown) and [TypeDoc](https://typedoc.org/) diff --git a/docs/versioned_docs/version-v0.19.4/noir_js/reference/backend_barretenberg/type-aliases/CompiledCircuit.md b/docs/versioned_docs/version-v0.19.4/noir_js/reference/backend_barretenberg/type-aliases/CompiledCircuit.md new file mode 100644 index 00000000000..34e0dd04205 --- /dev/null +++ b/docs/versioned_docs/version-v0.19.4/noir_js/reference/backend_barretenberg/type-aliases/CompiledCircuit.md @@ -0,0 +1,20 @@ +# CompiledCircuit + +```ts +type CompiledCircuit: object; +``` + +## Description + +The representation of a compiled circuit + +## Type declaration + +| Member | Type | Description | +| :------ | :------ | :------ | +| `abi` | `Abi` | **Description**

ABI representation of the circuit | +| `bytecode` | `string` | **Description**

The bytecode of the circuit | + +*** + +Generated using [typedoc-plugin-markdown](https://www.npmjs.com/package/typedoc-plugin-markdown) and [TypeDoc](https://typedoc.org/) diff --git a/docs/versioned_docs/version-v0.19.4/noir_js/reference/backend_barretenberg/type-aliases/ProofData.md b/docs/versioned_docs/version-v0.19.4/noir_js/reference/backend_barretenberg/type-aliases/ProofData.md new file mode 100644 index 00000000000..3eb360a78f1 --- /dev/null +++ b/docs/versioned_docs/version-v0.19.4/noir_js/reference/backend_barretenberg/type-aliases/ProofData.md @@ -0,0 +1,20 @@ +# ProofData + +```ts +type ProofData: object; +``` + +## Description + +The representation of a proof + +## Type declaration + +| Member | Type | Description | +| :------ | :------ | :------ | +| `proof` | `Uint8Array` | **Description**

An byte array representing the proof | +| `publicInputs` | `WitnessMap` | **Description**

Public inputs of a proof | + +*** + +Generated using [typedoc-plugin-markdown](https://www.npmjs.com/package/typedoc-plugin-markdown) and [TypeDoc](https://typedoc.org/) diff --git a/docs/versioned_docs/version-v0.19.4/noir_js/reference/backend_barretenberg/typedoc-sidebar.cjs b/docs/versioned_docs/version-v0.19.4/noir_js/reference/backend_barretenberg/typedoc-sidebar.cjs new file mode 100644 index 00000000000..04e662c845f --- /dev/null +++ b/docs/versioned_docs/version-v0.19.4/noir_js/reference/backend_barretenberg/typedoc-sidebar.cjs @@ -0,0 +1,4 @@ +// @ts-check +/** @type {import('@docusaurus/plugin-content-docs').SidebarsConfig} */ +const typedocSidebar = { items: [{"type":"category","label":"Classes","items":[{"type":"doc","id":"noir_js/reference/backend_barretenberg/classes/BarretenbergBackend","label":"BarretenbergBackend"}]},{"type":"category","label":"Interfaces","items":[{"type":"doc","id":"noir_js/reference/backend_barretenberg/interfaces/Backend","label":"Backend"}]},{"type":"category","label":"Type Aliases","items":[{"type":"doc","id":"noir_js/reference/backend_barretenberg/type-aliases/BackendOptions","label":"BackendOptions"},{"type":"doc","id":"noir_js/reference/backend_barretenberg/type-aliases/CompiledCircuit","label":"CompiledCircuit"},{"type":"doc","id":"noir_js/reference/backend_barretenberg/type-aliases/ProofData","label":"ProofData"}]}]}; +module.exports = typedocSidebar.items; \ No newline at end of file diff --git a/docs/versioned_docs/version-v0.19.4/noir_js/reference/noir_js/.nojekyll b/docs/versioned_docs/version-v0.19.4/noir_js/reference/noir_js/.nojekyll new file mode 100644 index 00000000000..e2ac6616add --- /dev/null +++ b/docs/versioned_docs/version-v0.19.4/noir_js/reference/noir_js/.nojekyll @@ -0,0 +1 @@ +TypeDoc added this file to prevent GitHub Pages from using Jekyll. You can turn off this behavior by setting the `githubPages` option to false. \ No newline at end of file diff --git a/docs/versioned_docs/version-v0.19.4/noir_js/reference/noir_js/classes/Noir.md b/docs/versioned_docs/version-v0.19.4/noir_js/reference/noir_js/classes/Noir.md new file mode 100644 index 00000000000..c54468891af --- /dev/null +++ b/docs/versioned_docs/version-v0.19.4/noir_js/reference/noir_js/classes/Noir.md @@ -0,0 +1,131 @@ +# Noir + +## Constructors + +### new Noir(circuit, backend) + +```ts +new Noir(circuit, backend?): Noir +``` + +#### Parameters + +| Parameter | Type | +| :------ | :------ | +| `circuit` | [`CompiledCircuit`](../type-aliases/CompiledCircuit.md) | +| `backend`? | `Backend` | + +#### Returns + +[`Noir`](Noir.md) + +## Methods + +### destroy() + +```ts +destroy(): Promise +``` + +#### Returns + +`Promise`\<`void`\> + +#### Description + +Destroys the underlying backend instance. + +#### Example + +```typescript +await noir.destroy(); +``` + +*** + +### execute() + +```ts +execute(inputs, foreignCallHandler?): Promise +``` + +#### Parameters + +| Parameter | Type | +| :------ | :------ | +| `inputs` | [`InputMap`](../type-aliases/InputMap.md) | +| `foreignCallHandler`? | [`ForeignCallHandler`](../type-aliases/ForeignCallHandler.md) | + +#### Returns + +`Promise`\<`object`\> + +#### Description + +Allows to execute a circuit to get its witness and return value. 
+ +#### Example + +```typescript +async execute(inputs) +``` + +*** + +### generateFinalProof() + +```ts +generateFinalProof(inputs): Promise +``` + +#### Parameters + +| Parameter | Type | +| :------ | :------ | +| `inputs` | [`InputMap`](../type-aliases/InputMap.md) | + +#### Returns + +`Promise`\<[`ProofData`](../type-aliases/ProofData.md)\> + +#### Description + +Generates a witness and a proof given an object as input. + +#### Example + +```typescript +async generateFinalProof(input) +``` + +*** + +### verifyFinalProof() + +```ts +verifyFinalProof(proofData): Promise +``` + +#### Parameters + +| Parameter | Type | +| :------ | :------ | +| `proofData` | [`ProofData`](../type-aliases/ProofData.md) | + +#### Returns + +`Promise`\<`boolean`\> + +#### Description + +Instantiates the verification key and verifies a proof. + +#### Example + +```typescript +async verifyFinalProof(proof) +``` + +*** + +Generated using [typedoc-plugin-markdown](https://www.npmjs.com/package/typedoc-plugin-markdown) and [TypeDoc](https://typedoc.org/) diff --git a/docs/versioned_docs/version-v0.19.4/noir_js/reference/noir_js/functions/and.md b/docs/versioned_docs/version-v0.19.4/noir_js/reference/noir_js/functions/and.md new file mode 100644 index 00000000000..c783283e396 --- /dev/null +++ b/docs/versioned_docs/version-v0.19.4/noir_js/reference/noir_js/functions/and.md @@ -0,0 +1,22 @@ +# and() + +```ts +and(lhs, rhs): string +``` + +Performs a bitwise AND operation between `lhs` and `rhs` + +## Parameters + +| Parameter | Type | Description | +| :------ | :------ | :------ | +| `lhs` | `string` | | +| `rhs` | `string` | | + +## Returns + +`string` + +*** + +Generated using [typedoc-plugin-markdown](https://www.npmjs.com/package/typedoc-plugin-markdown) and [TypeDoc](https://typedoc.org/) diff --git a/docs/versioned_docs/version-v0.19.4/noir_js/reference/noir_js/functions/blake2s256.md b/docs/versioned_docs/version-v0.19.4/noir_js/reference/noir_js/functions/blake2s256.md new file mode 100644 index 00000000000..7882d0da8d5 --- /dev/null +++ b/docs/versioned_docs/version-v0.19.4/noir_js/reference/noir_js/functions/blake2s256.md @@ -0,0 +1,21 @@ +# blake2s256() + +```ts +blake2s256(inputs): Uint8Array +``` + +Calculates the Blake2s256 hash of the input bytes + +## Parameters + +| Parameter | Type | Description | +| :------ | :------ | :------ | +| `inputs` | `Uint8Array` | | + +## Returns + +`Uint8Array` + +*** + +Generated using [typedoc-plugin-markdown](https://www.npmjs.com/package/typedoc-plugin-markdown) and [TypeDoc](https://typedoc.org/) diff --git a/docs/versioned_docs/version-v0.19.4/noir_js/reference/noir_js/functions/ecdsa_secp256k1_verify.md b/docs/versioned_docs/version-v0.19.4/noir_js/reference/noir_js/functions/ecdsa_secp256k1_verify.md new file mode 100644 index 00000000000..0ba5783f0d5 --- /dev/null +++ b/docs/versioned_docs/version-v0.19.4/noir_js/reference/noir_js/functions/ecdsa_secp256k1_verify.md @@ -0,0 +1,29 @@ +# ecdsa\_secp256k1\_verify() + +```ts +ecdsa_secp256k1_verify( + hashed_msg, + public_key_x_bytes, + public_key_y_bytes, + signature): boolean +``` + +Calculates the Blake2s256 hash of the input bytes and represents these as a single field element. +Verifies a ECDSA signature over the secp256k1 curve. 
+ +## Parameters + +| Parameter | Type | Description | +| :------ | :------ | :------ | +| `hashed_msg` | `Uint8Array` | | +| `public_key_x_bytes` | `Uint8Array` | | +| `public_key_y_bytes` | `Uint8Array` | | +| `signature` | `Uint8Array` | | + +## Returns + +`boolean` + +*** + +Generated using [typedoc-plugin-markdown](https://www.npmjs.com/package/typedoc-plugin-markdown) and [TypeDoc](https://typedoc.org/) diff --git a/docs/versioned_docs/version-v0.19.4/noir_js/reference/noir_js/functions/ecdsa_secp256r1_verify.md b/docs/versioned_docs/version-v0.19.4/noir_js/reference/noir_js/functions/ecdsa_secp256r1_verify.md new file mode 100644 index 00000000000..0b20ff68957 --- /dev/null +++ b/docs/versioned_docs/version-v0.19.4/noir_js/reference/noir_js/functions/ecdsa_secp256r1_verify.md @@ -0,0 +1,28 @@ +# ecdsa\_secp256r1\_verify() + +```ts +ecdsa_secp256r1_verify( + hashed_msg, + public_key_x_bytes, + public_key_y_bytes, + signature): boolean +``` + +Verifies a ECDSA signature over the secp256r1 curve. + +## Parameters + +| Parameter | Type | Description | +| :------ | :------ | :------ | +| `hashed_msg` | `Uint8Array` | | +| `public_key_x_bytes` | `Uint8Array` | | +| `public_key_y_bytes` | `Uint8Array` | | +| `signature` | `Uint8Array` | | + +## Returns + +`boolean` + +*** + +Generated using [typedoc-plugin-markdown](https://www.npmjs.com/package/typedoc-plugin-markdown) and [TypeDoc](https://typedoc.org/) diff --git a/docs/versioned_docs/version-v0.19.4/noir_js/reference/noir_js/functions/keccak256.md b/docs/versioned_docs/version-v0.19.4/noir_js/reference/noir_js/functions/keccak256.md new file mode 100644 index 00000000000..d10f155ce86 --- /dev/null +++ b/docs/versioned_docs/version-v0.19.4/noir_js/reference/noir_js/functions/keccak256.md @@ -0,0 +1,21 @@ +# keccak256() + +```ts +keccak256(inputs): Uint8Array +``` + +Calculates the Keccak256 hash of the input bytes + +## Parameters + +| Parameter | Type | Description | +| :------ | :------ | :------ | +| `inputs` | `Uint8Array` | | + +## Returns + +`Uint8Array` + +*** + +Generated using [typedoc-plugin-markdown](https://www.npmjs.com/package/typedoc-plugin-markdown) and [TypeDoc](https://typedoc.org/) diff --git a/docs/versioned_docs/version-v0.19.4/noir_js/reference/noir_js/functions/sha256.md b/docs/versioned_docs/version-v0.19.4/noir_js/reference/noir_js/functions/sha256.md new file mode 100644 index 00000000000..6ba4ecac022 --- /dev/null +++ b/docs/versioned_docs/version-v0.19.4/noir_js/reference/noir_js/functions/sha256.md @@ -0,0 +1,21 @@ +# sha256() + +```ts +sha256(inputs): Uint8Array +``` + +Calculates the SHA256 hash of the input bytes + +## Parameters + +| Parameter | Type | Description | +| :------ | :------ | :------ | +| `inputs` | `Uint8Array` | | + +## Returns + +`Uint8Array` + +*** + +Generated using [typedoc-plugin-markdown](https://www.npmjs.com/package/typedoc-plugin-markdown) and [TypeDoc](https://typedoc.org/) diff --git a/docs/versioned_docs/version-v0.19.4/noir_js/reference/noir_js/functions/xor.md b/docs/versioned_docs/version-v0.19.4/noir_js/reference/noir_js/functions/xor.md new file mode 100644 index 00000000000..8d762b895d3 --- /dev/null +++ b/docs/versioned_docs/version-v0.19.4/noir_js/reference/noir_js/functions/xor.md @@ -0,0 +1,22 @@ +# xor() + +```ts +xor(lhs, rhs): string +``` + +Performs a bitwise XOR operation between `lhs` and `rhs` + +## Parameters + +| Parameter | Type | Description | +| :------ | :------ | :------ | +| `lhs` | `string` | | +| `rhs` | `string` | | + +## Returns + +`string` + +*** + 
+Generated using [typedoc-plugin-markdown](https://www.npmjs.com/package/typedoc-plugin-markdown) and [TypeDoc](https://typedoc.org/) diff --git a/docs/versioned_docs/version-v0.19.4/noir_js/reference/noir_js/index.md b/docs/versioned_docs/version-v0.19.4/noir_js/reference/noir_js/index.md new file mode 100644 index 00000000000..348453c0059 --- /dev/null +++ b/docs/versioned_docs/version-v0.19.4/noir_js/reference/noir_js/index.md @@ -0,0 +1,37 @@ +# Noir JS + +## Exports + +### Classes + +| Class | Description | +| :------ | :------ | +| [Noir](classes/Noir.md) | - | + +### Type Aliases + +| Type alias | Description | +| :------ | :------ | +| [CompiledCircuit](type-aliases/CompiledCircuit.md) | - | +| [ForeignCallHandler](type-aliases/ForeignCallHandler.md) | A callback which performs an foreign call and returns the response. | +| [ForeignCallInput](type-aliases/ForeignCallInput.md) | - | +| [ForeignCallOutput](type-aliases/ForeignCallOutput.md) | - | +| [InputMap](type-aliases/InputMap.md) | - | +| [ProofData](type-aliases/ProofData.md) | - | +| [WitnessMap](type-aliases/WitnessMap.md) | - | + +### Functions + +| Function | Description | +| :------ | :------ | +| [and](functions/and.md) | Performs a bitwise AND operation between `lhs` and `rhs` | +| [blake2s256](functions/blake2s256.md) | Calculates the Blake2s256 hash of the input bytes | +| [ecdsa\_secp256k1\_verify](functions/ecdsa_secp256k1_verify.md) | Calculates the Blake2s256 hash of the input bytes and represents these as a single field element. | +| [ecdsa\_secp256r1\_verify](functions/ecdsa_secp256r1_verify.md) | Verifies a ECDSA signature over the secp256r1 curve. | +| [keccak256](functions/keccak256.md) | Calculates the Keccak256 hash of the input bytes | +| [sha256](functions/sha256.md) | Calculates the SHA256 hash of the input bytes | +| [xor](functions/xor.md) | Performs a bitwise XOR operation between `lhs` and `rhs` | + +*** + +Generated using [typedoc-plugin-markdown](https://www.npmjs.com/package/typedoc-plugin-markdown) and [TypeDoc](https://typedoc.org/) diff --git a/docs/versioned_docs/version-v0.19.4/noir_js/reference/noir_js/type-aliases/CompiledCircuit.md b/docs/versioned_docs/version-v0.19.4/noir_js/reference/noir_js/type-aliases/CompiledCircuit.md new file mode 100644 index 00000000000..34e0dd04205 --- /dev/null +++ b/docs/versioned_docs/version-v0.19.4/noir_js/reference/noir_js/type-aliases/CompiledCircuit.md @@ -0,0 +1,20 @@ +# CompiledCircuit + +```ts +type CompiledCircuit: object; +``` + +## Description + +The representation of a compiled circuit + +## Type declaration + +| Member | Type | Description | +| :------ | :------ | :------ | +| `abi` | `Abi` | **Description**

ABI representation of the circuit | +| `bytecode` | `string` | **Description**

The bytecode of the circuit | + +*** + +Generated using [typedoc-plugin-markdown](https://www.npmjs.com/package/typedoc-plugin-markdown) and [TypeDoc](https://typedoc.org/) diff --git a/docs/versioned_docs/version-v0.19.4/noir_js/reference/noir_js/type-aliases/ForeignCallHandler.md b/docs/versioned_docs/version-v0.19.4/noir_js/reference/noir_js/type-aliases/ForeignCallHandler.md new file mode 100644 index 00000000000..812b8b16481 --- /dev/null +++ b/docs/versioned_docs/version-v0.19.4/noir_js/reference/noir_js/type-aliases/ForeignCallHandler.md @@ -0,0 +1,24 @@ +# ForeignCallHandler + +```ts +type ForeignCallHandler: (name, inputs) => Promise; +``` + +A callback which performs an foreign call and returns the response. + +## Parameters + +| Parameter | Type | Description | +| :------ | :------ | :------ | +| `name` | `string` | The identifier for the type of foreign call being performed. | +| `inputs` | [`ForeignCallInput`](ForeignCallInput.md)[] | An array of hex encoded inputs to the foreign call. | + +## Returns + +`Promise`\<[`ForeignCallOutput`](ForeignCallOutput.md)[]\> + +outputs - An array of hex encoded outputs containing the results of the foreign call. + +*** + +Generated using [typedoc-plugin-markdown](https://www.npmjs.com/package/typedoc-plugin-markdown) and [TypeDoc](https://typedoc.org/) diff --git a/docs/versioned_docs/version-v0.19.4/noir_js/reference/noir_js/type-aliases/ForeignCallInput.md b/docs/versioned_docs/version-v0.19.4/noir_js/reference/noir_js/type-aliases/ForeignCallInput.md new file mode 100644 index 00000000000..dd95809186a --- /dev/null +++ b/docs/versioned_docs/version-v0.19.4/noir_js/reference/noir_js/type-aliases/ForeignCallInput.md @@ -0,0 +1,9 @@ +# ForeignCallInput + +```ts +type ForeignCallInput: string[]; +``` + +*** + +Generated using [typedoc-plugin-markdown](https://www.npmjs.com/package/typedoc-plugin-markdown) and [TypeDoc](https://typedoc.org/) diff --git a/docs/versioned_docs/version-v0.19.4/noir_js/reference/noir_js/type-aliases/ForeignCallOutput.md b/docs/versioned_docs/version-v0.19.4/noir_js/reference/noir_js/type-aliases/ForeignCallOutput.md new file mode 100644 index 00000000000..b71fb78a946 --- /dev/null +++ b/docs/versioned_docs/version-v0.19.4/noir_js/reference/noir_js/type-aliases/ForeignCallOutput.md @@ -0,0 +1,9 @@ +# ForeignCallOutput + +```ts +type ForeignCallOutput: string | string[]; +``` + +*** + +Generated using [typedoc-plugin-markdown](https://www.npmjs.com/package/typedoc-plugin-markdown) and [TypeDoc](https://typedoc.org/) diff --git a/docs/versioned_docs/version-v0.19.4/noir_js/reference/noir_js/type-aliases/InputMap.md b/docs/versioned_docs/version-v0.19.4/noir_js/reference/noir_js/type-aliases/InputMap.md new file mode 100644 index 00000000000..c714e999d93 --- /dev/null +++ b/docs/versioned_docs/version-v0.19.4/noir_js/reference/noir_js/type-aliases/InputMap.md @@ -0,0 +1,13 @@ +# InputMap + +```ts +type InputMap: object; +``` + +## Index signature + + \[`key`: `string`\]: `InputValue` + +*** + +Generated using [typedoc-plugin-markdown](https://www.npmjs.com/package/typedoc-plugin-markdown) and [TypeDoc](https://typedoc.org/) diff --git a/docs/versioned_docs/version-v0.19.4/noir_js/reference/noir_js/type-aliases/ProofData.md b/docs/versioned_docs/version-v0.19.4/noir_js/reference/noir_js/type-aliases/ProofData.md new file mode 100644 index 00000000000..3eb360a78f1 --- /dev/null +++ b/docs/versioned_docs/version-v0.19.4/noir_js/reference/noir_js/type-aliases/ProofData.md @@ -0,0 +1,20 @@ +# ProofData + +```ts +type 
ProofData: object; +``` + +## Description + +The representation of a proof + +## Type declaration + +| Member | Type | Description | +| :------ | :------ | :------ | +| `proof` | `Uint8Array` | **Description**

An byte array representing the proof | +| `publicInputs` | `WitnessMap` | **Description**

Public inputs of a proof | + +*** + +Generated using [typedoc-plugin-markdown](https://www.npmjs.com/package/typedoc-plugin-markdown) and [TypeDoc](https://typedoc.org/) diff --git a/docs/versioned_docs/version-v0.19.4/noir_js/reference/noir_js/type-aliases/WitnessMap.md b/docs/versioned_docs/version-v0.19.4/noir_js/reference/noir_js/type-aliases/WitnessMap.md new file mode 100644 index 00000000000..258c46f9d0c --- /dev/null +++ b/docs/versioned_docs/version-v0.19.4/noir_js/reference/noir_js/type-aliases/WitnessMap.md @@ -0,0 +1,9 @@ +# WitnessMap + +```ts +type WitnessMap: Map; +``` + +*** + +Generated using [typedoc-plugin-markdown](https://www.npmjs.com/package/typedoc-plugin-markdown) and [TypeDoc](https://typedoc.org/) diff --git a/docs/versioned_docs/version-v0.19.4/noir_js/reference/noir_js/typedoc-sidebar.cjs b/docs/versioned_docs/version-v0.19.4/noir_js/reference/noir_js/typedoc-sidebar.cjs new file mode 100644 index 00000000000..077ebeb133e --- /dev/null +++ b/docs/versioned_docs/version-v0.19.4/noir_js/reference/noir_js/typedoc-sidebar.cjs @@ -0,0 +1,4 @@ +// @ts-check +/** @type {import('@docusaurus/plugin-content-docs').SidebarsConfig} */ +const typedocSidebar = { items: [{"type":"category","label":"Classes","items":[{"type":"doc","id":"noir_js/reference/noir_js/classes/Noir","label":"Noir"}]},{"type":"category","label":"Type Aliases","items":[{"type":"doc","id":"noir_js/reference/noir_js/type-aliases/CompiledCircuit","label":"CompiledCircuit"},{"type":"doc","id":"noir_js/reference/noir_js/type-aliases/ForeignCallHandler","label":"ForeignCallHandler"},{"type":"doc","id":"noir_js/reference/noir_js/type-aliases/ForeignCallInput","label":"ForeignCallInput"},{"type":"doc","id":"noir_js/reference/noir_js/type-aliases/ForeignCallOutput","label":"ForeignCallOutput"},{"type":"doc","id":"noir_js/reference/noir_js/type-aliases/InputMap","label":"InputMap"},{"type":"doc","id":"noir_js/reference/noir_js/type-aliases/ProofData","label":"ProofData"},{"type":"doc","id":"noir_js/reference/noir_js/type-aliases/WitnessMap","label":"WitnessMap"}]},{"type":"category","label":"Functions","items":[{"type":"doc","id":"noir_js/reference/noir_js/functions/and","label":"and"},{"type":"doc","id":"noir_js/reference/noir_js/functions/blake2s256","label":"blake2s256"},{"type":"doc","id":"noir_js/reference/noir_js/functions/ecdsa_secp256k1_verify","label":"ecdsa_secp256k1_verify"},{"type":"doc","id":"noir_js/reference/noir_js/functions/ecdsa_secp256r1_verify","label":"ecdsa_secp256r1_verify"},{"type":"doc","id":"noir_js/reference/noir_js/functions/keccak256","label":"keccak256"},{"type":"doc","id":"noir_js/reference/noir_js/functions/sha256","label":"sha256"},{"type":"doc","id":"noir_js/reference/noir_js/functions/xor","label":"xor"}]}]}; +module.exports = typedocSidebar.items; \ No newline at end of file diff --git a/docs/versioned_docs/version-v0.19.4/standard_library/black_box_fns.md b/docs/versioned_docs/version-v0.19.4/standard_library/black_box_fns.md new file mode 100644 index 00000000000..e0c6d475c1f --- /dev/null +++ b/docs/versioned_docs/version-v0.19.4/standard_library/black_box_fns.md @@ -0,0 +1,46 @@ +--- +title: Black Box Functions +description: Black box functions are functions in Noir that rely on backends implementing support for specialized constraints. +keywords: [noir, black box functions] +--- + +Black box functions are functions in Noir that rely on backends implementing support for specialized constraints. 
This makes certain zk-snark unfriendly computations cheaper than if they were implemented in Noir. + +:::warning + +It is likely that not all backends will support a particular black box function. + +::: + +Because it is not guaranteed that all backends will support black box functions, it is possible that certain Noir programs won't compile against a particular backend if they use an unsupported black box function. It is possible to fallback to less efficient implementations written in Noir/ACIR in some cases. + +Black box functions are specified with the `#[foreign(black_box_fn)]` attribute. For example, the SHA256 function in the Noir [source code](https://github.com/noir-lang/noir/blob/v0.5.1/noir_stdlib/src/hash.nr) looks like: + +```rust +#[foreign(sha256)] +fn sha256(_input : [u8; N]) -> [u8; 32] {} +``` + +## Function list + +Here is a list of the current black box functions that are supported by UltraPlonk: + +- AES +- [SHA256](./cryptographic_primitives/hashes#sha256) +- [Schnorr signature verification](./cryptographic_primitives/schnorr) +- [Blake2s](./cryptographic_primitives/hashes#blake2s) +- [Pedersen Hash](./cryptographic_primitives/hashes#pedersen_hash) +- [Pedersen Commitment](./cryptographic_primitives/hashes#pedersen_commitment) +- [HashToField128Security](./cryptographic_primitives/hashes#hash_to_field) +- [ECDSA signature verification](./cryptographic_primitives/ecdsa_sig_verification) +- [Fixed base scalar multiplication](./cryptographic_primitives/scalar) +- [Compute merkle root](./merkle_trees#compute_merkle_root) +- AND +- XOR +- RANGE +- [Keccak256](./cryptographic_primitives/hashes#keccak256) +- [Recursive proof verification](./recursion) + +Most black box functions are included as part of the Noir standard library, however `AND`, `XOR` and `RANGE` are used as part of the Noir language syntax. For instance, using the bitwise operator `&` will invoke the `AND` black box function. To ensure compatibility across backends, the ACVM has fallback implementations of `AND`, `XOR` and `RANGE` defined in its standard library which it can seamlessly fallback to if the backend doesn't support them. + +You can view the black box functions defined in the ACVM code [here](https://github.com/noir-lang/noir/blob/master/acvm-repo/acir/src/circuit/black_box_functions.rs). diff --git a/docs/versioned_docs/version-v0.6.0/standard_library/cryptographic_primitives.md b/docs/versioned_docs/version-v0.19.4/standard_library/cryptographic_primitives.md similarity index 100% rename from docs/versioned_docs/version-v0.6.0/standard_library/cryptographic_primitives.md rename to docs/versioned_docs/version-v0.19.4/standard_library/cryptographic_primitives.md diff --git a/docs/versioned_docs/version-v0.19.4/standard_library/cryptographic_primitives/00_hashes.mdx b/docs/versioned_docs/version-v0.19.4/standard_library/cryptographic_primitives/00_hashes.mdx new file mode 100644 index 00000000000..76745196681 --- /dev/null +++ b/docs/versioned_docs/version-v0.19.4/standard_library/cryptographic_primitives/00_hashes.mdx @@ -0,0 +1,167 @@ +--- +title: Hash methods +description: + Learn about the cryptographic primitives ready to use for any Noir project, including sha256, + blake2s, pedersen, mimc_bn254 and mimc +keywords: + [cryptographic primitives, Noir project, sha256, blake2s, pedersen, mimc_bn254, mimc, hash] +--- + +import BlackBoxInfo from '@site/src/components/Notes/_blackbox.mdx'; + +## sha256 + +Given an array of bytes, returns the resulting sha256 hash. 
+ +```rust +fn sha256(_input : [u8]) -> [u8; 32] +``` + +example: + +```rust +fn main() { + let x = [163, 117, 178, 149]; // some random bytes + let hash = std::hash::sha256(x); +} +``` + + + +## blake2s + +Given an array of bytes, returns an array with the Blake2 hash + +```rust +fn blake2s(_input : [u8]) -> [u8; 32] +``` + +example: + +```rust +fn main() { + let x = [163, 117, 178, 149]; // some random bytes + let hash = std::hash::blake2s(x); +} +``` + + + +## pedersen_hash + +Given an array of Fields, returns the Pedersen hash. + +```rust +fn pedersen_hash(_input : [Field]) -> Field +``` + +example: + +```rust +fn main() { + let x = [163, 117, 178, 149]; // some random bytes + let hash = std::hash::pedersen_hash(x); +} +``` + + + + + +## pedersen_commitment + +Given an array of Fields, returns the Pedersen commitment. + +```rust +fn pedersen_commitment(_input : [Field]) -> [Field; 2] +``` + +example: + +```rust +fn main() { + let x = [163, 117, 178, 149]; // some random bytes + let commitment = std::hash::pedersen_commitment(x); +} +``` + + + +## keccak256 + +Given an array of bytes (`u8`), returns the resulting keccak hash as an array of 32 bytes +(`[u8; 32]`). Specify a message_size to hash only the first `message_size` bytes +of the input. + +```rust +fn keccak256(_input : [u8; N], _message_size: u32) -> [u8; 32] +``` + +example: + +```rust +fn main() { + let x = [163, 117, 178, 149]; // some random bytes + let message_size = 4; + let hash = std::hash::keccak256(x, message_size); +} +``` + + + +## poseidon + +Given an array of Fields, returns a new Field with the Poseidon Hash. Mind that you need to specify +how many inputs are there to your Poseidon function. + +```rust +// example for hash_1, hash_2 accepts an array of length 2, etc +fn hash_1(input: [Field; 1]) -> Field +``` + +example: + +```rust +fn main() +{ + let hash1 = std::hash::poseidon::bn254::hash_2([1, 2]); + assert(hash1 == 0x115cc0f5e7d690413df64c6b9662e9cf2a3617f2743245519e19607a4417189a); +} +``` + +## mimc_bn254 and mimc + +`mimc_bn254` is `mimc`, but with hardcoded parameters for the BN254 curve. You can use it by +providing an array of Fields, and it returns a Field with the hash. You can use the `mimc` method if +you're willing to input your own constants: + +```rust +fn mimc(x: Field, k: Field, constants: [Field; N], exp : Field) -> Field +``` + +otherwise, use the `mimc_bn254` method: + +```rust +fn mimc_bn254(array: [Field; N]) -> Field +``` + +example: + +```rust + +fn main() { + let x = [163, 117, 178, 149]; // some random bytes + let hash = std::hash::mimc::mimc_bn254(x); +} +``` + +## hash_to_field + +```rust +fn hash_to_field(_input : [Field; N]) -> Field {} +``` + +Calculates the `blake2s` hash of the inputs and returns the hash modulo the field modulus to return +a value which can be represented as a `Field`. 
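+example (a minimal sketch; the `std::hash` path is assumed here to mirror the other methods on this page):
+
+```rust
+fn main() {
+    let inputs = [1, 2, 3]; // some field elements
+    // blake2s of the inputs, reduced modulo the field modulus into a single Field
+    let field = std::hash::hash_to_field(inputs);
+    std::println(field);
+}
+```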
+ + diff --git a/docs/versioned_docs/version-v0.19.4/standard_library/cryptographic_primitives/01_scalar.mdx b/docs/versioned_docs/version-v0.19.4/standard_library/cryptographic_primitives/01_scalar.mdx new file mode 100644 index 00000000000..c7eed820a80 --- /dev/null +++ b/docs/versioned_docs/version-v0.19.4/standard_library/cryptographic_primitives/01_scalar.mdx @@ -0,0 +1,27 @@ +--- +title: Scalar multiplication +description: See how you can perform scalar multiplications over a fixed base in Noir +keywords: [cryptographic primitives, Noir project, scalar multiplication] +--- + +import BlackBoxInfo from '@site/src/components/Notes/_blackbox.mdx'; + +## scalar_mul::fixed_base_embedded_curve + +Performs scalar multiplication over the embedded curve whose coordinates are defined by the +configured noir field. For the BN254 scalar field, this is BabyJubJub or Grumpkin. + +```rust +fn fixed_base_embedded_curve(_input : Field) -> [Field; 2] +``` + +example + +```rust +fn main(x : Field) { + let scal = std::scalar_mul::fixed_base_embedded_curve(x); + std::println(scal); +} +``` + + diff --git a/docs/versioned_docs/version-v0.19.4/standard_library/cryptographic_primitives/02_schnorr.mdx b/docs/versioned_docs/version-v0.19.4/standard_library/cryptographic_primitives/02_schnorr.mdx new file mode 100644 index 00000000000..c184ce28120 --- /dev/null +++ b/docs/versioned_docs/version-v0.19.4/standard_library/cryptographic_primitives/02_schnorr.mdx @@ -0,0 +1,37 @@ +--- +title: Schnorr Signatures +description: Learn how you can verify Schnorr signatures using Noir +keywords: [cryptographic primitives, Noir project, schnorr, signatures] +--- + +import BlackBoxInfo from '@site/src/components/Notes/_blackbox.mdx'; + +## schnorr::verify_signature + +Verifier for Schnorr signatures over the embedded curve (for BN254 it is Grumpkin). + +```rust +fn verify_signature(_public_key_x: Field, _public_key_y: Field, _signature: [u8; 64], _message: [u8]) -> bool +``` + +where `_signature` can be generated like so using the npm package +[@noir-lang/barretenberg](https://www.npmjs.com/package/@noir-lang/barretenberg) + +```js +const { BarretenbergWasm } = require('@noir-lang/barretenberg/dest/wasm'); +const { Schnorr } = require('@noir-lang/barretenberg/dest/crypto/schnorr'); + +... + +const barretenberg = await BarretenbergWasm.new(); +const schnorr = new Schnorr(barretenberg); +const pubKey = schnorr.computePublicKey(privateKey); +const message = ... +const signature = Array.from( + schnorr.constructSignature(hash, privateKey).toBuffer() +); + +... +``` + + diff --git a/docs/versioned_docs/version-v0.19.4/standard_library/cryptographic_primitives/03_ecdsa_sig_verification.mdx b/docs/versioned_docs/version-v0.19.4/standard_library/cryptographic_primitives/03_ecdsa_sig_verification.mdx new file mode 100644 index 00000000000..72bce984821 --- /dev/null +++ b/docs/versioned_docs/version-v0.19.4/standard_library/cryptographic_primitives/03_ecdsa_sig_verification.mdx @@ -0,0 +1,45 @@ +--- +title: ECDSA Signature Verification +description: Learn about the cryptographic primitives regarding ECDSA over the secp256k1 and secp256r1 curves +keywords: [cryptographic primitives, Noir project, ecdsa, secp256k1, secp256r1, signatures] +--- + +import BlackBoxInfo from '@site/src/components/Notes/_blackbox.mdx'; + +Noir supports ECDSA signatures verification over the secp256k1 and secp256r1 curves. 
+ +## ecdsa_secp256k1::verify_signature + +Verifier for ECDSA Secp256k1 signatures + +```rust +fn verify_signature(_public_key_x : [u8; 32], _public_key_y : [u8; 32], _signature: [u8; 64], _message: [u8]) -> bool +``` + +example: + +```rust +fn main(hashed_message : [u8;32], pub_key_x : [u8;32], pub_key_y : [u8;32], signature : [u8;64]) { + let valid_signature = std::ecdsa_secp256k1::verify_signature(pub_key_x, pub_key_y, signature, hashed_message); + assert(valid_signature); +} +``` + +## ecdsa_secp256r1::verify_signature + +Verifier for ECDSA Secp256r1 signatures + +```rust +fn verify_signature(_public_key_x : [u8; 32], _public_key_y : [u8; 32], _signature: [u8; 64], _message: [u8]) -> bool +``` + +example: + +```rust +fn main(hashed_message : [u8;32], pub_key_x : [u8;32], pub_key_y : [u8;32], signature : [u8;64]) { + let valid_signature = std::ecdsa_secp256r1::verify_signature(pub_key_x, pub_key_y, signature, hashed_message); + assert(valid_signature); +} +``` + + diff --git a/docs/versioned_docs/version-v0.9.0/standard_library/cryptographic_primitives/04_ec_primitives.md b/docs/versioned_docs/version-v0.19.4/standard_library/cryptographic_primitives/04_ec_primitives.md similarity index 100% rename from docs/versioned_docs/version-v0.9.0/standard_library/cryptographic_primitives/04_ec_primitives.md rename to docs/versioned_docs/version-v0.19.4/standard_library/cryptographic_primitives/04_ec_primitives.md diff --git a/docs/versioned_docs/version-v0.19.4/standard_library/cryptographic_primitives/05_eddsa.mdx b/docs/versioned_docs/version-v0.19.4/standard_library/cryptographic_primitives/05_eddsa.mdx new file mode 100644 index 00000000000..9a5beb55ee9 --- /dev/null +++ b/docs/versioned_docs/version-v0.19.4/standard_library/cryptographic_primitives/05_eddsa.mdx @@ -0,0 +1,17 @@ +--- +title: EdDSA Verification +description: Learn about the cryptographic primitives regarding EdDSA +keywords: [cryptographic primitives, Noir project, eddsa, signatures] +--- + +import BlackBoxInfo from '@site/src/components/Notes/_blackbox.mdx'; + +## eddsa::eddsa_poseidon_verify + +Verifier for EdDSA signatures + +```rust +fn eddsa_poseidon_verify(public_key_x : Field, public_key_y : Field, signature_s: Field, signature_r8_x: Field, signature_r8_y: Field, message: Field) -> bool +``` + + diff --git a/docs/versioned_docs/version-v0.19.4/standard_library/logging.md b/docs/versioned_docs/version-v0.19.4/standard_library/logging.md new file mode 100644 index 00000000000..4ba0fe0e656 --- /dev/null +++ b/docs/versioned_docs/version-v0.19.4/standard_library/logging.md @@ -0,0 +1,62 @@ +--- +title: Logging +description: + Learn how to use the println statement for debugging in Noir with this tutorial. Understand the + basics of logging in Noir and how to implement it in your code. +keywords: + [ + noir logging, + println statement, + debugging in noir, + noir std library, + logging tutorial, + basic logging in noir, + noir logging implementation, + noir debugging techniques, + rust, + ] +--- + +The standard library provides a familiar `println` statement you can use. Despite being a limited +implementation of rust's `println!` macro, this construct can be useful for debugging. + +You can print the output of println statements in your Noir code by using the `nargo execute` command or the `--show-output` flag when using `nargo test` (provided there are println statements in your tests). + +It is recommended to use `nargo execute` if you want to debug failing constraints with `println` statements. 
This is due to every input in a test being a constant rather than a witness, so we issue an error during compilation while we only print during execution (which comes after compilation). `println` will not work for failed constraints caught at compile time. + +The `println` statement is unconstrained, so it works for outputting integers, fields, strings, and even structs or expressions. For example: + +```rust +use dep::std; + +struct Person { + age : Field, + height : Field, +} + +fn main(age : Field, height : Field) { + let person = Person { age : age, height : height }; + std::println(person); + std::println(age + height); + std::println("Hello world!"); +} + +``` + +You can print multiple different types in the same statement and string as well as a new "fmtstr" type. A `fmtstr` can be specified in the same way as a normal string it just should be prepended with an "f" character: + +```rust + let fmt_str = f"i: {i}, j: {j}"; + std::println(fmt_str); + + let s = myStruct { y: x, x: y }; + std::println(s); + + std::println(f"i: {i}, s: {s}"); + + std::println(x); + std::println([x, y]); + + let foo = fooStruct { my_struct: s, foo: 15 }; + std::println(f"s: {s}, foo: {foo}"); +``` diff --git a/docs/versioned_docs/version-v0.19.4/standard_library/merkle_trees.md b/docs/versioned_docs/version-v0.19.4/standard_library/merkle_trees.md new file mode 100644 index 00000000000..dc383a1426b --- /dev/null +++ b/docs/versioned_docs/version-v0.19.4/standard_library/merkle_trees.md @@ -0,0 +1,58 @@ +--- +title: Merkle Trees +description: Learn about Merkle Trees in Noir with this tutorial. Explore the basics of computing a merkle root using a proof, with examples. +keywords: + [ + Merkle trees in Noir, + Noir programming language, + check membership, + computing root from leaf, + Noir Merkle tree implementation, + Merkle tree tutorial, + Merkle tree code examples, + Noir libraries, + pedersen hash., + ] +--- + +## compute_merkle_root + +Returns the root of the tree from the provided leaf and its hash path, using a [Pedersen hash](cryptographic_primitives/00_hashes.mdx#pedersen_hash). + +```rust +fn compute_merkle_root(leaf : Field, index : Field, hash_path: [Field]) -> Field +``` + +example: + +```rust +/** + // these values are for this example only + index = "0" + priv_key = "0x000000000000000000000000000000000000000000000000000000616c696365" + secret = "0x1929ea3ab8d9106a899386883d9428f8256cfedb3c4f6b66bf4aa4d28a79988f" + note_hash_path = [ + "0x1e61bdae0f027b1b2159e1f9d3f8d00fa668a952dddd822fda80dc745d6f65cc", + "0x0e4223f3925f98934393c74975142bd73079ab0621f4ee133cee050a3c194f1a", + "0x2fd7bb412155bf8693a3bd2a3e7581a679c95c68a052f835dddca85fa1569a40" + ] + */ +fn main(index: Field, priv_key: Field, secret: Field, note_hash_path: [Field; 3]) { + + let pubkey = std::scalar_mul::fixed_base_embedded_curve(priv_key); + let pubkey_x = pubkey[0]; + let pubkey_y = pubkey[1]; + let note_commitment = std::hash::pedersen([pubkey_x, pubkey_y, secret]); + + let root = std::merkle::compute_merkle_root(note_commitment[0], index, note_hash_path); + std::println(root); +} +``` + +To check merkle tree membership: + +1. Include a merkle root as a program input. +2. Compute the merkle root of a given leaf, index and hash path. +3. Assert the merkle roots are equal. + +For more info about merkle trees, see the Wikipedia [page](https://en.wikipedia.org/wiki/Merkle_tree). 
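+A minimal membership-check sketch following the three steps above (the input names and the hash path length are illustrative only):
+
+```rust
+fn main(root : pub Field, leaf : Field, index : Field, hash_path : [Field; 2]) {
+    // Recompute the root from the leaf and its hash path...
+    let computed_root = std::merkle::compute_merkle_root(leaf, index, hash_path);
+    // ...and assert it matches the publicly known root.
+    assert(computed_root == root);
+}
+```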
diff --git a/docs/versioned_docs/version-v0.19.4/standard_library/options.md b/docs/versioned_docs/version-v0.19.4/standard_library/options.md new file mode 100644 index 00000000000..3d3139fb98b --- /dev/null +++ b/docs/versioned_docs/version-v0.19.4/standard_library/options.md @@ -0,0 +1,99 @@ +--- +title: Option Type +--- + +The `Option` type is a way to express that a value might be present (`Some(T))` or absent (`None`). It's a safer way to handle potential absence of values, compared to using nulls in many other languages. + +```rust +struct Option { + None, + Some(T), +} +``` + +You can import the Option type into your Noir program like so: + +```rust +use dep::std::option::Option; + +fn main() { + let none = Option::none(); + let some = Option::some(3); +} +``` + +See [this test](https://github.com/noir-lang/noir/blob/5cbfb9c4a06c8865c98ff2b594464b037d821a5c/crates/nargo_cli/tests/test_data/option/src/main.nr) for a more comprehensive set of examples of each of the methods described below. + +## Methods + +### none + +Constructs a none value. + +### some + +Constructs a some wrapper around a given value. + +### is_none + +Returns true if the Option is None. + +### is_some + +Returns true of the Option is Some. + +### unwrap + +Asserts `self.is_some()` and returns the wrapped value. + +### unwrap_unchecked + +Returns the inner value without asserting `self.is_some()`. This method can be useful within an if condition when we already know that `option.is_some()`. If the option is None, there is no guarantee what value will be returned, only that it will be of type T for an `Option`. + +### unwrap_or + +Returns the wrapped value if `self.is_some()`. Otherwise, returns the given default value. + +### unwrap_or_else + +Returns the wrapped value if `self.is_some()`. Otherwise, calls the given function to return a default value. + +### map + +If self is `Some(x)`, this returns `Some(f(x))`. Otherwise, this returns `None`. + +### map_or + +If self is `Some(x)`, this returns `f(x)`. Otherwise, this returns the given default value. + +### map_or_else + +If self is `Some(x)`, this returns `f(x)`. Otherwise, this returns `default()`. + +### and + +Returns None if self is None. Otherwise, this returns `other`. + +### and_then + +If self is None, this returns None. Otherwise, this calls the given function with the Some value contained within self, and returns the result of that call. In some languages this function is called `flat_map` or `bind`. + +### or + +If self is Some, return self. Otherwise, return `other`. + +### or_else + +If self is Some, return self. Otherwise, return `default()`. + +### xor + +If only one of the two Options is Some, return that option. Otherwise, if both options are Some or both are None, None is returned. + +### filter + +Returns `Some(x)` if self is `Some(x)` and `predicate(x)` is true. Otherwise, this returns `None`. + +### flatten + +Flattens an `Option>` into a `Option`. This returns `None` if the outer Option is None. Otherwise, this returns the inner Option. diff --git a/docs/versioned_docs/version-v0.19.4/standard_library/recursion.md b/docs/versioned_docs/version-v0.19.4/standard_library/recursion.md new file mode 100644 index 00000000000..ff4c63acaa7 --- /dev/null +++ b/docs/versioned_docs/version-v0.19.4/standard_library/recursion.md @@ -0,0 +1,96 @@ +--- +title: Recursive Proofs +description: Learn about how to write recursive proofs in Noir. 
+keywords: [recursion, recursive proofs, verification_key, aggregation object, verify_proof] +--- + +Noir supports recursively verifying proofs, meaning you verify the proof of a Noir program in another Noir program. This enables creating proofs of arbitrary size by doing step-wise verification of smaller components of a large proof. + +The `verify_proof` function takes a verification key, proof and public inputs for a zk program, as well as a key hash and an input aggregation object. The key hash is used to check the validity of the verification key and the input aggregation object is required by some proving systems. The `verify_proof` function returns an output aggregation object that can then be fed into future iterations of the proof verification if required. + +```rust +#[foreign(verify_proof)] +fn verify_proof(_verification_key : [Field], _proof : [Field], _public_input : Field, _key_hash : Field, _input_aggregation_object : [Field]) -> [Field] {} +``` + +:::info + +This is a black box function. Read [this section](./black_box_fns) to learn more about black box functions in Noir. + +::: + +## Aggregation Object + +The purpose of the input aggregation object is a little less clear though (and the output aggregation object that is returned from the `std::verify_proof` method). Recursive zkSNARK schemes do not necessarily "verify a proof" in the sense that you expect a true or false to be spit out by the verifier. Rather an aggregation object is built over the public inputs. In the case of PLONK the recursive aggregation object is two G1 points (expressed as 16 witness values). The final verifier (in our case this is most often the smart contract verifier) has to be aware of this aggregation object to execute a pairing and check the validity of these points (thus completing the recursive verification). + +So for example in this circuit: + +```rust +use dep::std; + +fn main( + verification_key : [Field; 114], + proof : [Field; 94], + public_inputs : [Field; 1], + key_hash : Field, + input_aggregation_object : [Field; 16], + proof_b : [Field; 94], +) -> pub [Field; 16] { + let output_aggregation_object_a = std::verify_proof( + verification_key, + proof, + public_inputs, + key_hash, + input_aggregation_object + ); + + let output_aggregation_object = std::verify_proof( + verification_key, + proof_b, + public_inputs, + key_hash, + output_aggregation_object_a + ); + + let mut output = [0; 16]; + for i in 0..16 { + output[i] = output_aggregation_object[i]; + } + output +} +``` + +In this example we have a circuit, that generates proofs A and B, that is being verified in circuit C. Assuming that the proof being passed in is not already a recursive proof, the `input_aggregation_object` will be all zeros. It will then generate an `output_aggregation_object`. This blob of data then becomes the `input_aggregation_object` of the next recursive aggregation we wish to compute. We can see here as the same public inputs, verification key, and key hash are used that we are verifying two proofs generated from the same circuit in this single circuit. `std::verify_proof` returns a `[Field]` because the size of an aggregation object is proof system dependent--in barretenberg, aggregation objects are two G1 points, while in Halo2, the aggregation object is a list of G1 points that is log the circuit size. So for the final step we convert the slice into an array of size 16 because we are generating proofs using UltraPlonk. 
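+As a minimal sketch of that first step, using the same array sizes as the example above and assuming the proof being verified is not itself recursive (so the input aggregation object is all zeros):
+
+```rust
+use dep::std;
+
+fn main(
+    verification_key : [Field; 114],
+    proof : [Field; 94],
+    public_inputs : [Field; 1],
+    key_hash : Field,
+) -> pub [Field; 16] {
+    // No previous recursive aggregation, so the input aggregation object is all zeros.
+    let input_aggregation_object = [0; 16];
+
+    let output_aggregation_object = std::verify_proof(
+        verification_key,
+        proof,
+        public_inputs,
+        key_hash,
+        input_aggregation_object
+    );
+
+    let mut output = [0; 16];
+    for i in 0..16 {
+        output[i] = output_aggregation_object[i];
+    }
+    output
+}
+```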
+ +## Parameters + +### `verification_key` + +The verification key for the zk program that is being verified. + +### `proof` + +The proof for the zk program that is being verified. + +### `public_inputs` + +These represent the public inputs of the proof we are verifying. They should be checked against in the circuit after construction of a new aggregation state. + +### `key_hash` + +A key hash is used to check the validity of the verification key. The circuit implementing this opcode can use this hash to ensure that the key provided to the circuit matches the key produced by the circuit creator. + +### `input_aggregation_object` + +An aggregation object is blob of data that the top-level verifier must run some proof system specific algorithm on to complete verification. The size is proof system specific and will be set by the backend integrating this opcode. The input aggregation object is only not `None` when we are verifying a previous recursive aggregation in the current circuit. If this is the first recursive aggregation there is no input aggregation object. It is left to the backend to determine how to handle when there is no input aggregation object. + +## Return value + +### `output_aggregation_object` + +This is the result of a recursive aggregation and is what will be fed into the next verifier. +The next verifier can either perform a final verification (returning true or false) or perform another recursive aggregation where this output aggregation object will be the input aggregation object of the next recursive aggregation. + +## Example + +You can see an example of how to do recursive proofs in [this example recursion demo repo](https://github.com/noir-lang/noir-examples/tree/master/recursion). diff --git a/docs/versioned_docs/version-v0.7.1/standard_library/zeroed.md b/docs/versioned_docs/version-v0.19.4/standard_library/zeroed.md similarity index 100% rename from docs/versioned_docs/version-v0.7.1/standard_library/zeroed.md rename to docs/versioned_docs/version-v0.19.4/standard_library/zeroed.md diff --git a/docs/versioned_docs/version-v0.22.0/explainers/explainer-recursion.md b/docs/versioned_docs/version-v0.22.0/explainers/explainer-recursion.md new file mode 100644 index 00000000000..9357d3c7341 --- /dev/null +++ b/docs/versioned_docs/version-v0.22.0/explainers/explainer-recursion.md @@ -0,0 +1,175 @@ +--- +title: Recursive proofs +description: Explore the concept of recursive proofs in Zero-Knowledge programming. Understand how recursion works in Noir, a language for writing smart contracts on the EVM blockchain. Learn through practical examples like Alice and Bob's guessing game, Charlie's recursive merkle tree, and Daniel's reusable components. Discover how to use recursive proofs to optimize computational resources and improve efficiency. + +keywords: + [ + "Recursive Proofs", + "Zero-Knowledge Programming", + "Noir", + "EVM Blockchain", + "Smart Contracts", + "Recursion in Noir", + "Alice and Bob Guessing Game", + "Recursive Merkle Tree", + "Reusable Components", + "Optimizing Computational Resources", + "Improving Efficiency", + "Verification Key", + "Aggregation Objects", + "Recursive zkSNARK schemes", + "PLONK", + "Proving and Verification Keys" + ] +sidebar_position: 1 +--- + +In programming, we tend to think of recursion as something calling itself. 
A classic example would be the calculation of the factorial of a number: + +```js +function factorial(n) { + if (n === 0 || n === 1) { + return 1; + } else { + return n * factorial(n - 1); + } +} +``` + +In this case, while `n` is not `1`, this function will keep calling itself until it hits the base case, bubbling up the result on the call stack: + +```md + Is `n` 1? <--------- + /\ / + / \ n = n -1 + / \ / + Yes No -------- +``` + +In Zero-Knowledge, recursion has some similarities. + +It is not a Noir function calling itself, but a proof being used as an input to another circuit. In short, you verify one proof *inside* another proof, returning the proof that both proofs are valid. + +This means that, given enough computational resources, you can prove the correctness of any arbitrary number of proofs in a single proof. This could be useful to design state channels (for which a common example would be [Bitcoin's Lightning Network](https://en.wikipedia.org/wiki/Lightning_Network)), to save on gas costs by settling one proof on-chain, or simply to make business logic less dependent on a consensus mechanism. + +## Examples + +Let us look at some of these examples + +### Alice and Bob - Guessing game + +Alice and Bob are friends, and they like guessing games. They want to play a guessing game online, but for that, they need a trusted third-party that knows both of their secrets and finishes the game once someone wins. + +So, they use zero-knowledge proofs. Alice tries to guess Bob's number, and Bob will generate a ZK proof stating whether she succeeded or failed. + +This ZK proof can go on a smart contract, revealing the winner and even giving prizes. However, this means every turn needs to be verified on-chain. This incurs some cost and waiting time that may simply make the game too expensive or time-consuming to be worth it. + +So, Alice started thinking: "what if Bob generates his proof, and instead of sending it on-chain, I verify it *within* my own proof before playing my own turn?". She can then generate a proof that she verified his proof, and so on. + +```md + Did you fail? <-------------------------- + / \ / + / \ n = n -1 + / \ / + Yes No / + | | / + | | / + | You win / + | / + | / +Generate proof of that / + + / + my own guess ---------------- +``` + +### Charlie - Recursive merkle tree + +Charlie is a concerned citizen, and wants to be sure his vote in an election is accounted for. He votes with a ZK proof, but he has no way of knowing that his ZK proof was included in the total vote count! + +So, the tallier puts all the votes in a merkle tree, and everyone can also prove the verification of two proofs within one proof, as such: + +```md + abcd + __________|______________ + | | + ab cd + _____|_____ ______|______ + | | | | + alice bob charlie daniel +``` + +Doing this recursively allows us to arrive on a final proof `abcd` which if true, verifies the correctness of all the votes. + +### Daniel - Reusable components + +Daniel has a big circuit and a big headache. A part of his circuit is a setup phase that finishes with some assertions that need to be made. But that section alone takes most of the proving time, and is largely independent of the rest of the circuit. + +He could find it more efficient to generate a proof for that setup phase separately, and verifying it in his actual business logic section of the circuit. This will allow for parallelization of both proofs, which results in a considerable speedup. 
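+A rough sketch of Daniel's approach, reusing the `std::verify_proof` call described in the recursion reference (the array sizes, input names and the final assertion here are illustrative only):
+
+```rust
+use dep::std;
+
+fn main(
+    setup_verification_key : [Field; 114],
+    setup_proof : [Field; 94],
+    setup_public_inputs : [Field; 1],
+    setup_key_hash : Field,
+    input_aggregation_object : [Field; 16],
+    business_input : Field,
+) -> pub [Field; 16] {
+    // Verify the separately generated setup-phase proof instead of re-running the setup here.
+    let output_aggregation_object = std::verify_proof(
+        setup_verification_key,
+        setup_proof,
+        setup_public_inputs,
+        setup_key_hash,
+        input_aggregation_object
+    );
+
+    // The (now much smaller) business logic section of the circuit.
+    assert(business_input != 0);
+
+    let mut output = [0; 16];
+    for i in 0..16 {
+        output[i] = output_aggregation_object[i];
+    }
+    output
+}
+```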
+ +## What params do I need + +As you can see in the [recursion reference](noir/standard_library/recursion.md), a simple recursive proof requires: + +- The proof to verify +- The Verification Key of the circuit that generated the proof +- A hash of this verification key, as it's needed for some backends +- The public inputs for the proof +- The input aggregation object + +It also returns the `output aggregation object`. These aggregation objects can be confusing at times, so let's dive in a little bit. + +### Aggregation objects + +Recursive zkSNARK schemes do not necessarily "verify a proof" in the sense that you expect a true or false to be spit out by the verifier. Rather an aggregation object is built over the public inputs. + +In the case of PLONK the recursive aggregation object is two G1 points (expressed as 16 witness values). The final verifier (in our case this is most often the smart contract verifier) has to be aware of this aggregation object to execute a pairing and check the validity of these points. + +So, taking the example of Alice and Bob and their guessing game: + +- Alice makes her guess. Her proof is *not* recursive: it doesn't verify any proof within it! It's just a standard `assert(x != y)` circuit +- Bob verifies Alice's proof and makes his own guess. In this circuit, he is verifying a proof, so it needs to output an `aggregation object`: he is generating a recursive proof! +- Alice verifies Bob's *recursive proof*, and uses Bob's `output aggregation object` as the `input aggregation object` in her proof... Which in turn, generates another `output aggregation object`. + +One should notice that when Bob generates his first proof, he has no input aggregation object. Because he is not verifying an recursive proof, he has no `input aggregation object`. In this case, he may use zeros instead. + +We can imagine the `aggregation object` as the baton in a [relay race](https://en.wikipedia.org/wiki/Relay_race). The first runner doesn't have to receive the baton from anyone else, as he/she already starts with it. But when his/her turn is over, the next runner needs to receive it, run a bit more, and pass it along. Even though every runner could theoretically verify the baton mid-run (why not? 🏃🔍), only at the end of the race does the referee verify that the whole race is valid. + +## Some architecture + +As with everything in computer science, there's no one-size-fits all. But there are some patterns that could help understanding and implementing them. To give three examples: + +### Adding some logic to a proof verification + +This would be an approach for something like our guessing game, where proofs are sent back and forth and are verified by each opponent. This circuit would be divided in two sections: + +- A `recursive verification` section, which would be just the call to `std::verify_proof`, and that would be skipped on the first move (since there's no proof to verify) +- A `guessing` section, which is basically the logic part where the actual guessing happens + +In such a situation, and assuming Alice is first, she would skip the first part and try to guess Bob's number. Bob would then verify her proof on the first section of his run, and try to guess Alice's number on the second part, and so on. + +### Aggregating proofs + +In some one-way interaction situations, recursion would allow for aggregation of simple proofs that don't need to be immediately verified on-chain or elsewhere. 
+ +To give a practical example, a barman wouldn't need to verify a "proof-of-age" on-chain every time he serves alcohol to a customer. Instead, the architecture would comprise two circuits: + +- A `main`, non-recursive circuit with some logic +- A `recursive` circuit meant to verify two proofs in one proof + +The customer's proofs would be intermediate, and made on their phones, and the barman could just verify them locally. He would then aggregate them into a final proof sent on-chain (or elsewhere) at the end of the day. + +### Recursively verifying different circuits + +Nothing prevents you from verifying different circuits in a recursive proof, for example: + +- A `circuit1` circuit +- A `circuit2` circuit +- A `recursive` circuit + +In this example, a regulator could verify that taxes were paid for a specific purchase by aggregating both a `payer` circuit (proving that a purchase was made and taxes were paid), and a `receipt` circuit (proving that the payment was received) + +## How fast is it + +At the time of writing, verifying recursive proofs is surprisingly fast. This is because most of the time is spent on generating the verification key that will be used to generate the next proof. So you are able to cache the verification key and reuse it later. + +Currently, Noir JS packages don't expose the functionality of loading proving and verification keys, but that feature exists in the underlying `bb.js` package. diff --git a/docs/versioned_docs/version-v0.22.0/explanations/noir/traits.md b/docs/versioned_docs/version-v0.22.0/explanations/noir/traits.md new file mode 100644 index 00000000000..7ba07e74f40 --- /dev/null +++ b/docs/versioned_docs/version-v0.22.0/explanations/noir/traits.md @@ -0,0 +1,348 @@ +--- +title: Traits +description: + Traits in Noir can be used to abstract out a common interface for functions across + several data types. +keywords: [noir programming language, traits, interfaces, generic, protocol] +--- + +## Overview + +Traits in Noir are a useful abstraction similar to interfaces or protocols in other languages. Each trait defines +the interface of several methods contained within the trait. Types can then implement this trait by providing +implementations for these methods. For example in the program: + +```rust +struct Rectangle { + width: Field, + height: Field, +} + +impl Rectangle { + fn area(self) -> Field { + self.width * self.height + } +} + +fn log_area(r: Rectangle) { + println(r.area()); +} +``` + +We have a function `log_area` to log the area of a `Rectangle`. Now how should we change the program if we want this +function to work on `Triangle`s as well?: + +```rust +struct Triangle { + width: Field, + height: Field, +} + +impl Triangle { + fn area(self) -> Field { + self.width * self.height / 2 + } +} +``` + +Making `log_area` generic over all types `T` would be invalid since not all types have an `area` method. Instead, we can +introduce a new `Area` trait and make `log_area` generic over all types `T` that implement `Area`: + +```rust +trait Area { + fn area(self) -> Field; +} + +fn log_area(shape: T) where T: Area { + println(shape.area()); +} +``` + +We also need to explicitly implement `Area` for `Rectangle` and `Triangle`. We can do that by changing their existing +impls slightly. Note that the parameter types and return type of each of our `area` methods must match those defined +by the `Area` trait. 
+ +```rust +impl Area for Rectangle { + fn area(self) -> Field { + self.width * self.height + } +} + +impl Area for Triangle { + fn area(self) -> Field { + self.width * self.height / 2 + } +} +``` + +Now we have a working program that is generic over any type of Shape that is used! Others can even use this program +as a library with their own types - such as `Circle` - as long as they also implement `Area` for these types. + +## Where Clauses + +As seen in `log_area` above, when we want to create a function or method that is generic over any type that implements +a trait, we can add a where clause to the generic function. + +```rust +fn log_area(shape: T) where T: Area { + println(shape.area()); +} +``` + +It is also possible to apply multiple trait constraints on the same variable at once by combining traits with the `+` +operator. Similarly, we can have multiple trait constraints by separating each with a comma: + +```rust +fn foo(elements: [T], thing: U) where + T: Default + Add + Eq, + U: Bar, +{ + let mut sum = T::default(); + + for element in elements { + sum += element; + } + + if sum == T::default() { + thing.bar(); + } +} +``` + +## Generic Implementations + +You can add generics to a trait implementation by adding the generic list after the `impl` keyword: + +```rust +trait Second { + fn second(self) -> Field; +} + +impl Second for (T, Field) { + fn second(self) -> Field { + self.1 + } +} +``` + +You can also implement a trait for every type this way: + +```rust +trait Debug { + fn debug(self); +} + +impl Debug for T { + fn debug(self) { + println(self); + } +} + +fn main() { + 1.debug(); +} +``` + +### Generic Trait Implementations With Where Clauses + +Where clauses can also be placed on trait implementations themselves to restrict generics in a similar way. +For example, while `impl Foo for T` implements the trait `Foo` for every type, `impl Foo for T where T: Bar` +will implement `Foo` only for types that also implement `Bar`. This is often used for implementing generic types. +For example, here is the implementation for array equality: + +```rust +impl Eq for [T; N] where T: Eq { + // Test if two arrays have the same elements. + // Because both arrays must have length N, we know their lengths already match. + fn eq(self, other: Self) -> bool { + let mut result = true; + + for i in 0 .. self.len() { + // The T: Eq constraint is needed to call == on the array elements here + result &= self[i] == other[i]; + } + + result + } +} +``` + +## Trait Methods With No `self` + +A trait can contain any number of methods, each of which have access to the `Self` type which represents each type +that eventually implements the trait. Similarly, the `self` variable is available as well but is not required to be used. +For example, we can define a trait to create a default value for a type. This trait will need to return the `Self` type +but doesn't need to take any parameters: + +```rust +trait Default { + fn default() -> Self; +} +``` + +Implementing this trait can be done similarly to any other trait: + +```rust +impl Default for Field { + fn default() -> Field { + 0 + } +} + +struct MyType {} + +impl Default for MyType { + fn default() -> Field { + MyType {} + } +} +``` + +However, since there is no `self` parameter, we cannot call it via the method call syntax `object.method()`. +Instead, we'll need to refer to the function directly. This can be done either by referring to the +specific impl `MyType::default()` or referring to the trait itself `Default::default()`. 
In the later +case, type inference determines the impl that is selected. + +```rust +let my_struct = MyStruct::default(); + +let x: Field = Default::default(); +let result = x + Default::default(); +``` + +:::warning + +```rust +let _ = Default::default(); +``` + +If type inference cannot select which impl to use because of an ambiguous `Self` type, an impl will be +arbitrarily selected. This occurs most often when the result of a trait function call with no parameters +is unused. To avoid this, when calling a trait function with no `self` or `Self` parameters or return type, +always refer to it via the implementation type's namespace - e.g. `MyType::default()`. +This is set to change to an error in future Noir versions. + +::: + +## Default Method Implementations + +A trait can also have default implementations of its methods by giving a body to the desired functions. +Note that this body must be valid for all types that may implement the trait. As a result, the only +valid operations on `self` will be operations valid for any type or other operations on the trait itself. + +```rust +trait Numeric { + fn add(self, other: Self) -> Self; + + // Default implementation of double is (self + self) + fn double(self) -> Self { + self.add(self) + } +} +``` + +When implementing a trait with default functions, a type may choose to implement only the required functions: + +```rust +impl Numeric for Field { + fn add(self, other: Field) -> Field { + self + other + } +} +``` + +Or it may implement the optional methods as well: + +```rust +impl Numeric for u32 { + fn add(self, other: u32) -> u32 { + self + other + } + + fn double(self) -> u32 { + self * 2 + } +} +``` + +## Impl Specialization + +When implementing traits for a generic type it is possible to implement the trait for only a certain combination +of generics. This can be either as an optimization or because those specific generics are required to implement the trait. + +```rust +trait Sub { + fn sub(self, other: Self) -> Self; +} + +struct NonZero { + value: T, +} + +impl Sub for NonZero { + fn sub(self, other: Self) -> Self { + let value = self.value - other.value; + assert(value != 0); + NonZero { value } + } +} +``` + +## Overlapping Implementations + +Overlapping implementations are disallowed by Noir to ensure Noir's decision on which impl to select is never ambiguous. +This means if a trait `Foo` is already implemented +by a type `Bar` for all `T`, then we cannot also have a separate impl for `Bar` (or any other +type argument). Similarly, if there is an impl for all `T` such as `impl Debug for T`, we cannot create +any more impls to `Debug` for other types since it would be ambiguous which impl to choose for any given +method call. + +```rust +trait Trait {} + +// Previous impl defined here +impl Trait for (A, B) {} + +// error: Impl for type `(Field, Field)` overlaps with existing impl +impl Trait for (Field, Field) {} +``` + +## Trait Coherence + +Another restriction on trait implementations is coherence. This restriction ensures other crates cannot create +impls that may overlap with other impls, even if several unrelated crates are used as dependencies in the same +program. + +The coherence restriction is: to implement a trait, either the trait itself or the object type must be declared +in the crate the impl is in. + +In practice this often comes up when using types provided by libraries. 
If a library provides a type `Foo` that does +not implement a trait in the standard library such as `Default`, you may not `impl Default for Foo` in your own crate. +While restrictive, this prevents later issues or silent changes in the program if the `Foo` library later added its +own impl for `Default`. If you are a user of the `Foo` library in this scenario and need a trait not implemented by the +library your choices are to either submit a patch to the library or use the newtype pattern. + +### The Newtype Pattern + +The newtype pattern gets around the coherence restriction by creating a new wrapper type around the library type +that we cannot create `impl`s for. Since the new wrapper type is defined in our current crate, we can create +impls for any trait we need on it. + +```rust +struct Wrapper { + foo: dep::some_library::Foo, +} + +impl Default for Wrapper { + fn default() -> Wrapper { + Wrapper { + foo: dep::some_library::Foo::new(), + } + } +} +``` + +Since we have an impl for our own type, the behavior of this code will not change even if `some_library` is updated +to provide its own `impl Default for Foo`. The downside of this pattern is that it requires extra wrapping and +unwrapping of values when converting to and from the `Wrapper` and `Foo` types. diff --git a/docs/versioned_docs/version-v0.22.0/explanations/standard_library/traits.md b/docs/versioned_docs/version-v0.22.0/explanations/standard_library/traits.md new file mode 100644 index 00000000000..63b4f3d6f0b --- /dev/null +++ b/docs/versioned_docs/version-v0.22.0/explanations/standard_library/traits.md @@ -0,0 +1,140 @@ +--- +title: Traits +description: Noir's stdlib provides a few commonly used traits. +keywords: [traits, trait, interface, protocol, default, add, eq] +--- + +## `std::default` + +### `std::default::Default` + +```rust +trait Default { + fn default() -> Self; +} +``` + +Constructs a default value of a type. + +Implementations: +```rust +impl Default for Field { .. } + +impl Default for i8 { .. } +impl Default for i16 { .. } +impl Default for i32 { .. } +impl Default for i64 { .. } + +impl Default for u8 { .. } +impl Default for u16 { .. } +impl Default for u32 { .. } +impl Default for u64 { .. } + +impl Default for () { .. } +impl Default for bool { .. } + +impl Default for [T; N] + where T: Default { .. } + +impl Default for (A, B) + where A: Default, B: Default { .. } + +impl Default for (A, B, C) + where A: Default, B: Default, C: Default { .. } + +impl Default for (A, B, C, D) + where A: Default, B: Default, C: Default, D: Default { .. } + +impl Default for (A, B, C, D, E) + where A: Default, B: Default, C: Default, D: Default, E: Default { .. } +``` + +For primitive integer types, the return value of `default` is `0`. Container +types such as arrays are filled with default values of their element type. + +## `std::ops` + +### `std::ops::Eq` + +```rust +trait Eq { + fn eq(self, other: Self) -> bool; +} +``` +Returns `true` if `self` is equal to `other`. + +Implementations: +```rust +impl Eq for Field { .. } + +impl Eq for i8 { .. } +impl Eq for i16 { .. } +impl Eq for i32 { .. } +impl Eq for i64 { .. } + +impl Eq for u8 { .. } +impl Eq for u16 { .. } +impl Eq for u32 { .. } +impl Eq for u64 { .. } + +impl Eq for () { .. } +impl Eq for bool { .. } + +impl Eq for [T; N] + where T: Eq { .. } + +impl Eq for (A, B) + where A: Eq, B: Eq { .. } + +impl Eq for (A, B, C) + where A: Eq, B: Eq, C: Eq { .. } + +impl Eq for (A, B, C, D) + where A: Eq, B: Eq, C: Eq, D: Eq { .. 
} + +impl Eq for (A, B, C, D, E) + where A: Eq, B: Eq, C: Eq, D: Eq, E: Eq { .. } +``` + +### `std::ops::Add`, `std::ops::Sub`, `std::ops::Mul`, and `std::ops::Div` + +These traits abstract over addition, subtraction, multiplication, and division respectively. +Although Noir does not currently have operator overloading, in the future implementing these +traits for a given type will also allow that type to be used with the corresponding operator +for that trait (`+` for Add, etc) in addition to the normal method names. + +```rust +trait Add { + fn add(self, other: Self) -> Self; +} + +trait Sub { + fn sub(self, other: Self) -> Self; +} + +trait Mul { + fn mul(self, other: Self) -> Self; +} + +trait Div { + fn div(self, other: Self) -> Self; +} +``` + +The implementations block below is given for the `Add` trait, but the same types that implement +`Add` also implement `Sub`, `Mul`, and `Div`. + +Implementations: +```rust +impl Add for Field { .. } + +impl Add for i8 { .. } +impl Add for i16 { .. } +impl Add for i32 { .. } +impl Add for i64 { .. } + +impl Add for u8 { .. } +impl Add for u16 { .. } +impl Add for u32 { .. } +impl Add for u64 { .. } +``` diff --git a/docs/versioned_docs/version-v0.22.0/getting_started/_category_.json b/docs/versioned_docs/version-v0.22.0/getting_started/_category_.json new file mode 100644 index 00000000000..5d694210bbf --- /dev/null +++ b/docs/versioned_docs/version-v0.22.0/getting_started/_category_.json @@ -0,0 +1,5 @@ +{ + "position": 0, + "collapsible": true, + "collapsed": true +} diff --git a/docs/versioned_docs/version-v0.22.0/getting_started/create_a_project.md b/docs/versioned_docs/version-v0.22.0/getting_started/create_a_project.md new file mode 100644 index 00000000000..f10916c39c5 --- /dev/null +++ b/docs/versioned_docs/version-v0.22.0/getting_started/create_a_project.md @@ -0,0 +1,142 @@ +--- +title: Creating A Project +description: + Learn how to create and verify your first Noir program using Nargo, a programming language for + zero-knowledge proofs. +keywords: + [ + Nargo, + Noir, + zero-knowledge proofs, + programming language, + create Noir program, + verify Noir program, + step-by-step guide, + ] +sidebar_position: 1 + +--- + +Now that we have installed Nargo, it is time to make our first hello world program! + +## Create a Project Directory + +Noir code can live anywhere on your computer. Let us create a _projects_ folder in the home +directory to house our Noir programs. + +For Linux, macOS, and Windows PowerShell, create the directory and change directory into it by +running: + +```sh +mkdir ~/projects +cd ~/projects +``` + +## Create Our First Nargo Project + +Now that we are in the projects directory, create a new Nargo project by running: + +```sh +nargo new hello_world +``` + +> **Note:** `hello_world` can be any arbitrary project name, we are simply using `hello_world` for +> demonstration. +> +> In production, the common practice is to name the project folder as `circuits` for better +> identifiability when sitting alongside other folders in the codebase (e.g. `contracts`, `scripts`, +> `test`). + +A `hello_world` folder would be created. Similar to Rust, the folder houses _src/main.nr_ and +_Nargo.toml_ that contains the source code and environmental options of your Noir program +respectively. + +### Intro to Noir Syntax + +Let us take a closer look at _main.nr_. 
The default _main.nr_ generated should look like this: + +```rust +fn main(x : Field, y : pub Field) { + assert(x != y); +} +``` + +The first line of the program specifies the program's inputs: + +```rust +x : Field, y : pub Field +``` + +Program inputs in Noir are private by default (e.g. `x`), but can be labeled public using the +keyword `pub` (e.g. `y`). To learn more about private and public values, check the +[Data Types](../noir/syntax/data_types/index.md) section. + +The next line of the program specifies its body: + +```rust +assert(x != y); +``` + +The Noir syntax `assert` can be interpreted as something similar to constraints in other zk-contract languages. + +For more Noir syntax, check the [Language Concepts](../noir/syntax/comments.md) chapter. + +## Build In/Output Files + +Change directory into _hello_world_ and build in/output files for your Noir program by running: + +```sh +cd hello_world +nargo check +``` + +Two additional files would be generated in your project directory: + +_Prover.toml_ houses input values, and _Verifier.toml_ houses public values. + +## Prove Our Noir Program + +Now that the project is set up, we can create a proof of correct execution on our Noir program. + +Fill in input values for execution in the _Prover.toml_ file. For example: + +```toml +x = "1" +y = "2" +``` + +Prove the valid execution of your Noir program: + +```sh +nargo prove +``` + +A new folder _proofs_ would then be generated in your project directory, containing the proof file +`.proof`, where the project name is defined in Nargo.toml. + +The _Verifier.toml_ file would also be updated with the public values computed from program +execution (in this case the value of `y`): + +```toml +y = "0x0000000000000000000000000000000000000000000000000000000000000002" +``` + +> **Note:** Values in _Verifier.toml_ are computed as 32-byte hex values. + +## Verify Our Noir Program + +Once a proof is generated, we can verify correct execution of our Noir program by verifying the +proof file. + +Verify your proof by running: + +```sh +nargo verify +``` + +The verification will complete in silence if it is successful. If it fails, it will log the +corresponding error instead. + +Congratulations, you have now created and verified a proof for your very first Noir program! + +In the [next section](./project_breakdown.md), we will go into more detail on each step performed. diff --git a/docs/versioned_docs/version-v0.22.0/getting_started/installation/_category_.json b/docs/versioned_docs/version-v0.22.0/getting_started/installation/_category_.json new file mode 100644 index 00000000000..0c02fb5d4d7 --- /dev/null +++ b/docs/versioned_docs/version-v0.22.0/getting_started/installation/_category_.json @@ -0,0 +1,6 @@ +{ + "position": 0, + "label": "Install Nargo", + "collapsible": true, + "collapsed": true +} diff --git a/docs/versioned_docs/version-v0.22.0/getting_started/installation/index.md b/docs/versioned_docs/version-v0.22.0/getting_started/installation/index.md new file mode 100644 index 00000000000..ddb8a250eb4 --- /dev/null +++ b/docs/versioned_docs/version-v0.22.0/getting_started/installation/index.md @@ -0,0 +1,45 @@ +--- +title: Nargo Installation +description: + nargo is a command line tool for interacting with Noir programs. 
This page is a quick guide on how to install Nargo though the most common and easy method, noirup +keywords: [ + Nargo + Noir + Rust + Cargo + Noirup + Installation + Terminal Commands + Version Check + Nightlies + Specific Versions + Branches + Noirup Repository +] +--- + +`nargo` is the one-stop-shop for almost everything related with Noir. The name comes from our love for Rust and its package manager `cargo`. + +With `nargo`, you can start new projects, compile, execute, prove, verify, test, generate solidity contracts, and do pretty much all that is available in Noir. + +Similarly to `rustup`, we also maintain an easy installation method that covers most machines: `noirup`. + +## Installing Noirup + +Open a terminal on your machine, and write: + +```bash +curl -L https://raw.githubusercontent.com/noir-lang/noirup/main/install | bash +``` + +Close the terminal, open another one, and run + +```bash +noirup +``` + +Done. That's it. You should have the latest version working. You can check with `nargo --version`. + +You can also install nightlies, specific versions +or branches, check out the [noirup repository](https://github.com/noir-lang/noirup) for more +information. diff --git a/docs/versioned_docs/version-v0.22.0/getting_started/installation/other_install_methods.md b/docs/versioned_docs/version-v0.22.0/getting_started/installation/other_install_methods.md new file mode 100644 index 00000000000..a532f83750e --- /dev/null +++ b/docs/versioned_docs/version-v0.22.0/getting_started/installation/other_install_methods.md @@ -0,0 +1,190 @@ +--- +title: Alternative Install Methods +description: + There are different ways to install Nargo, the one-stop shop and command-line tool for developing Noir programs. This guide explains other methods that don't rely on noirup, such as compiling from source, installing from binaries, and using WSL for windows +keywords: [ + Installation + Nargo + Noirup + Binaries + Compiling from Source + WSL for Windows + macOS + Linux + Nix + Direnv + Shell & editor experience + Building and testing + Uninstalling Nargo + Noir vs code extension +] +sidebar_position: 1 +--- + + +## Installation + +The most common method of installing Nargo is through [Noirup](./index.md) + +However, there are other methods for installing Nargo: + +- [Binaries](#binaries) +- [Compiling from Source](#compile-from-source) +- [WSL for Windows](#wsl-for-windows) + +### Binaries + +See [GitHub Releases](https://github.com/noir-lang/noir/releases) for the latest and previous +platform specific binaries. + +#### Step 1 + +Paste and run the following in the terminal to extract and install the binary: + +> **macOS / Linux:** If you are prompted with `Permission denied` when running commands, prepend +> `sudo` and re-run it. 
+ +##### macOS (Apple Silicon) + +```bash +mkdir -p $HOME/.nargo/bin && \ +curl -o $HOME/.nargo/bin/nargo-aarch64-apple-darwin.tar.gz -L https://github.com/noir-lang/noir/releases/download/v0.6.0/nargo-aarch64-apple-darwin.tar.gz && \ +tar -xvf $HOME/.nargo/bin/nargo-aarch64-apple-darwin.tar.gz -C $HOME/.nargo/bin/ && \ +echo '\nexport PATH=$PATH:$HOME/.nargo/bin' >> ~/.zshrc && \ +source ~/.zshrc +``` + +##### macOS (Intel) + +```bash +mkdir -p $HOME/.nargo/bin && \ +curl -o $HOME/.nargo/bin/nargo-x86_64-apple-darwin.tar.gz -L https://github.com/noir-lang/noir/releases/download/v0.6.0/nargo-x86_64-apple-darwin.tar.gz && \ +tar -xvf $HOME/.nargo/bin/nargo-x86_64-apple-darwin.tar.gz -C $HOME/.nargo/bin/ && \ +echo '\nexport PATH=$PATH:$HOME/.nargo/bin' >> ~/.zshrc && \ +source ~/.zshrc +``` + +##### Linux (Bash) + +```bash +mkdir -p $HOME/.nargo/bin && \ +curl -o $HOME/.nargo/bin/nargo-x86_64-unknown-linux-gnu.tar.gz -L https://github.com/noir-lang/noir/releases/download/v0.6.0/nargo-x86_64-unknown-linux-gnu.tar.gz && \ +tar -xvf $HOME/.nargo/bin/nargo-x86_64-unknown-linux-gnu.tar.gz -C $HOME/.nargo/bin/ && \ +echo -e '\nexport PATH=$PATH:$HOME/.nargo/bin' >> ~/.bashrc && \ +source ~/.bashrc +``` + +#### Step 2 + +Check if the installation was successful by running `nargo --version`. You should get a version number. + +> **macOS:** If you are prompted with an OS alert, right-click and open the _nargo_ executable from +> Finder. Close the new terminal popped up and `nargo` should now be accessible. + +### Option 3: Compile from Source + +Due to the large number of native dependencies, Noir projects uses [Nix](https://nixos.org/) and [direnv](https://direnv.net/) to streamline the development experience. It helps mitigating issues commonly associated with dependency management, such as conflicts between required package versions for different projects (often referred to as "dependency hell"). + +Combined with direnv, which automatically sets or clears environment variables based on the directory, it further simplifies the development process by seamlessly integrating with the developer's shell, facilitating an efficient and reliable workflow for managing and deploying Noir projects with multiple dependencies. + +#### Setting up your environment + +For the best experience, please follow these instructions to setup your environment: + +1. Install Nix following [their guide](https://nixos.org/download.html) for your operating system. +2. Create the file `~/.config/nix/nix.conf` with the contents: + +```ini +experimental-features = nix-command +extra-experimental-features = flakes +``` + +3. Install direnv into your Nix profile by running: + +```sh +nix profile install nixpkgs#direnv +``` + +4. Add direnv to your shell following [their guide](https://direnv.net/docs/hook.html). + 1. For bash or zshell, add `eval "$(direnv hook bash)"` or `eval "$(direnv hook zsh)"` to your ~/.bashrc or ~/.zshrc file, respectively. +5. Restart your shell. + +#### Shell & editor experience + +Now that your environment is set up, you can get to work on the project. + +1. Clone the repository, such as: + +```sh +git clone git@github.com:noir-lang/noir +``` + +> Replacing `noir` with whichever repository you want to work on. + +2. Navigate to the directory: + +```sh +cd noir +``` + +> Replacing `noir` with whichever repository you cloned. + +3. You should see a **direnv error** because projects aren't allowed by default. 
Make sure you've reviewed and trust our `.envrc` file, then you need to run: + +```sh +direnv allow +``` + +4. Now, wait awhile for all the native dependencies to be built. This will take some time and direnv will warn you that it is taking a long time, but we just need to let it run. + +5. Once you are presented with your prompt again, you can start your editor within the project directory (we recommend [VSCode](https://code.visualstudio.com/)): + +```sh +code . +``` + +6. (Recommended) When launching VSCode for the first time, you should be prompted to install our recommended plugins. We highly recommend installing these for the best development experience. + +#### Building and testing + +Assuming you are using `direnv` to populate your environment, building and testing the project can be done +with the typical `cargo build`, `cargo test`, and `cargo clippy` commands. You'll notice that the `cargo` version matches the version we specify in `rust-toolchain.toml`, which is 1.71.1 at the time of this writing. + +If you want to build the entire project in an isolated sandbox, you can use Nix commands: + +1. `nix build .` (or `nix build . -L` for verbose output) to build the project in a Nix sandbox. +2. `nix flake check` (or `nix flake check -L` for verbose output) to run clippy and tests in a Nix sandbox. + +#### Without `direnv` + +If you have hesitations with using direnv, you can launch a subshell with `nix develop` and then launch your editor from within the subshell. However, if VSCode was already launched in the project directory, the environment won't be updated. + +Advanced: If you aren't using direnv nor launching your editor within the subshell, you can try to install Barretenberg and other global dependencies the package needs. This is an advanced workflow and likely won't receive support! + +### Option 4: WSL (for Windows) + +The default backend for Noir (Barretenberg) doesn't provide Windows binaries at this time. For that reason, Noir cannot be installed natively. However, it is available by using Windows Subsystem for Linux (WSL). + +Step 1: Follow the instructions [here](https://learn.microsoft.com/en-us/windows/wsl/install) to install and run WSL. + +step 2: Follow the [Noirup instructions](./index.md). + +## Uninstalling Nargo + +### Noirup + +If you installed Noir with `noirup`, you can uninstall Noir by removing the files in `~/.nargo`, `~/nargo` and `~/noir_cache`. + +```bash +rm -r ~/.nargo +rm -r ~/nargo +rm -r ~/noir_cache +``` + +### Nix + +If you installed Noir with Nix or from source, you can remove the binary located at `~/.nix-profile/bin/nargo`. + +```bash +rm ~/.nix-profile/bin/nargo +``` diff --git a/docs/versioned_docs/version-v0.22.0/getting_started/project_breakdown.md b/docs/versioned_docs/version-v0.22.0/getting_started/project_breakdown.md new file mode 100644 index 00000000000..c4e2a9ae003 --- /dev/null +++ b/docs/versioned_docs/version-v0.22.0/getting_started/project_breakdown.md @@ -0,0 +1,199 @@ +--- +title: Project Breakdown +description: + Learn about the anatomy of a Nargo project, including the purpose of the Prover and Verifier TOML + files, and how to prove and verify your program. +keywords: + [Nargo, Nargo project, Prover.toml, Verifier.toml, proof verification, private asset transfer] +sidebar_position: 2 +--- + +This section breaks down our hello world program in section _1.2_. We elaborate on the project +structure and what the `prove` and `verify` commands did in the previous section. 
+ +## Anatomy of a Nargo Project + +Upon creating a new project with `nargo new` and building the in/output files with `nargo check` +commands, you would get a minimal Nargo project of the following structure: + + - src + - Prover.toml + - Verifier.toml + - Nargo.toml + +The source directory _src_ holds the source code for your Noir program. By default only a _main.nr_ +file will be generated within it. + +### Prover.toml + +_Prover.toml_ is used for specifying the input values for executing and proving the program. You can specify `toml` files with different names by using the `--prover-name` or `-p` flags, see the [Prover](#provertoml) section below. Optionally you may specify expected output values for prove-time checking as well. + +### Verifier.toml + +_Verifier.toml_ contains public in/output values computed when executing the Noir program. + +### Nargo.toml + +_Nargo.toml_ contains the environmental options of your project. It contains a "package" section and a "dependencies" section. + +Example Nargo.toml: + +```toml +[package] +name = "noir_starter" +type = "bin" +authors = ["Alice"] +compiler_version = "0.9.0" +description = "Getting started with Noir" +entry = "circuit/main.nr" +license = "MIT" + +[dependencies] +ecrecover = {tag = "v0.9.0", git = "https://github.com/colinnielsen/ecrecover-noir.git"} +``` + +Nargo.toml for a [workspace](../noir/modules_packages_crates/workspaces.md) will look a bit different. For example: + +```toml +[workspace] +members = ["crates/a", "crates/b"] +default-member = "crates/a" +``` + +#### Package section + +The package section requires a number of fields including: + +- `name` (**required**) - the name of the package +- `type` (**required**) - can be "bin", "lib", or "contract" to specify whether its a binary, library or Aztec contract +- `authors` (optional) - authors of the project +- `compiler_version` - specifies the version of the compiler to use. This is enforced by the compiler and follow's [Rust's versioning](https://doc.rust-lang.org/cargo/reference/manifest.html#the-version-field), so a `compiler_version = 0.18.0` will enforce Nargo version 0.18.0, `compiler_version = ^0.18.0` will enforce anything above 0.18.0 but below 0.19.0, etc. For more information, see how [Rust handles these operators](https://docs.rs/semver/latest/semver/enum.Op.html) +- `description` (optional) +- `entry` (optional) - a relative filepath to use as the entry point into your package (overrides the default of `src/lib.nr` or `src/main.nr`) +- `backend` (optional) +- `license` (optional) + +#### Dependencies section + +This is where you will specify any dependencies for your project. See the [Dependencies page](../noir/modules_packages_crates/dependencies.md) for more info. + +`./proofs/` and `./contract/` directories will not be immediately visible until you create a proof or +verifier contract respectively. + +### main.nr + +The _main.nr_ file contains a `main` method, this method is the entry point into your Noir program. + +In our sample program, _main.nr_ looks like this: + +```rust +fn main(x : Field, y : Field) { + assert(x != y); +} +``` + +The parameters `x` and `y` can be seen as the API for the program and must be supplied by the +prover. Since neither `x` nor `y` is marked as public, the verifier does not supply any inputs, when +verifying the proof. + +The prover supplies the values for `x` and `y` in the _Prover.toml_ file. + +As for the program body, `assert` ensures the satisfaction of the condition (e.g. 
`x != y`) is +constrained by the proof of the execution of said program (i.e. if the condition was not met, the +verifier would reject the proof as an invalid proof). + +### Prover.toml + +The _Prover.toml_ file is a file which the prover uses to supply his witness values(both private and +public). + +In our hello world program the _Prover.toml_ file looks like this: + +```toml +x = "1" +y = "2" +``` + +When the command `nargo prove` is executed, two processes happen: + +1. Noir creates a proof that `x` which holds the value of `1` and `y` which holds the value of `2` + is not equal. This not equal constraint is due to the line `assert(x != y)`. + +2. Noir creates and stores the proof of this statement in the _proofs_ directory in a file called your-project.proof. So if your project is named "private_voting" (defined in the project Nargo.toml), the proof will be saved at `./proofs/private_voting.proof`. Opening this file will display the proof in hex format. + +#### Arrays of Structs + +The following code shows how to pass an array of structs to a Noir program to generate a proof. + +```rust +// main.nr +struct Foo { + bar: Field, + baz: Field, +} + +fn main(foos: [Foo; 3]) -> pub Field { + foos[2].bar + foos[2].baz +} +``` + +Prover.toml: + +```toml +[[foos]] # foos[0] +bar = 0 +baz = 0 + +[[foos]] # foos[1] +bar = 0 +baz = 0 + +[[foos]] # foos[2] +bar = 1 +baz = 2 +``` + +#### Custom toml files + +You can specify a `toml` file with a different name to use for proving by using the `--prover-name` or `-p` flags. + +This command looks for proof inputs in the default **Prover.toml** and generates the proof and saves it at `./proofs/.proof`: + +```bash +nargo prove +``` + +This command looks for proof inputs in the custom **OtherProver.toml** and generates proof and saves it at `./proofs/.proof`: + +```bash +nargo prove -p OtherProver +``` + +## Verifying a Proof + +When the command `nargo verify` is executed, two processes happen: + +1. Noir checks in the _proofs_ directory for a proof file with the project name (eg. test_project.proof) + +2. If that file is found, the proof's validity is checked + +> **Note:** The validity of the proof is linked to the current Noir program; if the program is +> changed and the verifier verifies the proof, it will fail because the proof is not valid for the +> _modified_ Noir program. + +In production, the prover and the verifier are usually two separate entities. A prover would +retrieve the necessary inputs, execute the Noir program, generate a proof and pass it to the +verifier. The verifier would then retrieve the public inputs from usually external sources and +verifies the validity of the proof against it. + +Take a private asset transfer as an example: + +A user on browser as the prover would retrieve private inputs (e.g. the user's private key) and +public inputs (e.g. the user's encrypted balance on-chain), compute the transfer, generate a proof +and submit it to the verifier smart contract. + +The verifier contract would then draw the user's encrypted balance directly from the blockchain and +verify the proof submitted against it. If the verification passes, additional functions in the +verifier contract could trigger (e.g. approve the asset transfer). + +Now that you understand the concepts, you'll probably want some editor feedback while you are writing more complex code. 
diff --git a/docs/versioned_docs/version-v0.22.0/getting_started/tooling/_category_.json b/docs/versioned_docs/version-v0.22.0/getting_started/tooling/_category_.json new file mode 100644 index 00000000000..dff520ebc41 --- /dev/null +++ b/docs/versioned_docs/version-v0.22.0/getting_started/tooling/_category_.json @@ -0,0 +1,6 @@ +{ + "position": 3, + "label": "Tooling", + "collapsible": true, + "collapsed": true +} diff --git a/docs/versioned_docs/version-v0.22.0/getting_started/tooling/index.md b/docs/versioned_docs/version-v0.22.0/getting_started/tooling/index.md new file mode 100644 index 00000000000..55df833005a --- /dev/null +++ b/docs/versioned_docs/version-v0.22.0/getting_started/tooling/index.md @@ -0,0 +1,33 @@ +--- +title: Tooling +Description: This section provides information about the various tools and utilities available for Noir development. It covers the Noir playground, IDE tools, Codespaces, and community projects. +Keywords: [Noir, Development, Playground, IDE Tools, Language Service Provider, VS Code Extension, Codespaces, noir-starter, Community Projects, Awesome Noir Repository, Developer Tooling] +--- + +Noir is meant to be easy to develop with. For that reason, a number of utilities have been put together to ease the development process as much as feasible in the zero-knowledge world. + +## Playground + +The Noir playground is an easy way to test small ideas, share snippets, and integrate in other websites. You can access it at [play.noir-lang.org](https://play.noir-lang.org). + +## IDE tools + +When you install Nargo, you're also installing a Language Service Provider (LSP), which can be used by IDEs to provide syntax highlighting, codelens, warnings, and more. + +The easiest way to use these tools is by installing the [Noir VS Code extension](https://marketplace.visualstudio.com/items?itemName=noir-lang.vscode-noir). + +## Codespaces + +Some Noir repos have leveraged Codespaces in order to ease the development process. You can visit the [noir-starter](https://github.com/noir-lang/noir-starter) for an example. + +## GitHub Actions + +You can use `noirup` with GitHub Actions for CI/CD and automated testing. It is as simple as +installing `noirup` and running tests in your GitHub Action `yml` file. + +See the +[config file in the Noir repo](https://github.com/TomAFrench/noir-hashes/blob/master/.github/workflows/noir.yml) for an example usage. + +## Community projects + +As an open-source project, Noir has received many contributions over time. Some of them are related with developer tooling, and you can see some of them in [Awesome Noir repository](https://github.com/noir-lang/awesome-noir#dev-tools) diff --git a/docs/versioned_docs/version-v0.22.0/getting_started/tooling/language_server.md b/docs/versioned_docs/version-v0.22.0/getting_started/tooling/language_server.md new file mode 100644 index 00000000000..81e0356ef8a --- /dev/null +++ b/docs/versioned_docs/version-v0.22.0/getting_started/tooling/language_server.md @@ -0,0 +1,43 @@ +--- +title: Language Server +description: Learn about the Noir Language Server, how to install the components, and configuration that may be required. +keywords: [Nargo, Language Server, LSP, VSCode, Visual Studio Code] +sidebar_position: 0 +--- + +This section helps you install and configure the Noir Language Server. + +The Language Server Protocol (LSP) has two components, the [Server](#language-server) and the [Client](#language-client). Below we describe each in the context of Noir. 
+ +## Language Server + +The Server component is provided by the Nargo command line tool that you installed at the beginning of this guide. +As long as Nargo is installed and you've used it to run other commands in this guide, it should be good to go! + +If you'd like to verify that the `nargo lsp` command is available, you can run `nargo --help` and look for `lsp` in the list of commands. If you see it, you're using a version of Noir with LSP support. + +## Language Client + +The Client component is usually an editor plugin that launches the Server. It communicates LSP messages between the editor and the Server. For example, when you save a file, the Client will alert the Server, so it can try to compile the project and report any errors. + +Currently, Noir provides a Language Client for Visual Studio Code via the [vscode-noir](https://github.com/noir-lang/vscode-noir) extension. You can install it via the [Visual Studio Marketplace](https://marketplace.visualstudio.com/items?itemName=noir-lang.vscode-noir). + +> **Note:** Noir's Language Server Protocol support currently assumes users' VSCode workspace root to be the same as users' Noir project root (i.e. where Nargo.toml lies). +> +> If LSP features seem to be missing / malfunctioning, make sure you are opening your Noir project directly (instead of as a sub-folder) in your VSCode instance. + +When your language server is running correctly and the VSCode plugin is installed, you should see handy codelens buttons for compilation, measuring circuit size, execution, and tests: + +![Compile and Execute](@site/static/img/codelens_compile_execute.png) +![Run test](@site/static/img/codelens_run_test.png) + +You should also see your tests in the `testing` panel: + +![Testing panel](@site/static/img/codelens_testing_panel.png) + +### Configuration + +- **Noir: Enable LSP** - If checked, the extension will launch the Language Server via `nargo lsp` and communicate with it. +- **Noir: Nargo Flags** - Additional flags may be specified if you require them to be added when the extension calls `nargo lsp`. +- **Noir: Nargo Path** - An absolute path to a Nargo binary with the `lsp` command. This may be useful if Nargo is not within the `PATH` of your editor. +- **Noir > Trace: Server** - Setting this to `"messages"` or `"verbose"` will log LSP messages between the Client and Server. Useful for debugging. diff --git a/docs/versioned_docs/version-v0.22.0/getting_started/tooling/testing.md b/docs/versioned_docs/version-v0.22.0/getting_started/tooling/testing.md new file mode 100644 index 00000000000..d3e0c522473 --- /dev/null +++ b/docs/versioned_docs/version-v0.22.0/getting_started/tooling/testing.md @@ -0,0 +1,62 @@ +--- +title: Testing in Noir +description: Learn how to use Nargo to test your Noir program in a quick and easy way +keywords: [Nargo, testing, Noir, compile, test] +sidebar_position: 1 +--- + +You can test your Noir programs using Noir circuits. + +Nargo will automatically compile and run any functions which have the decorator `#[test]` on them if +you run `nargo test`. + +For example if you have a program like: + +```rust +fn add(x: u64, y: u64) -> u64 { + x + y +} +#[test] +fn test_add() { + assert(add(2,2) == 4); + assert(add(0,1) == 1); + assert(add(1,0) == 1); +} +``` + +Running `nargo test` will test that the `test_add` function can be executed while satisfying all +the constraints which allows you to test that add returns the expected values. Test functions can't +have any arguments currently. 
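+
+Because test functions take no arguments, a common pattern is to call the function under test (or even the package's `main`) with literal values inside the test body. A minimal sketch, assuming a package whose `main` is the default `assert(x != y)` program:
+
+```rust
+fn main(x: Field, y: pub Field) {
+    assert(x != y);
+}
+
+#[test]
+fn test_main() {
+    // The constraint x != y holds for these literals, so the test passes.
+    main(1, 2);
+    // Calling `main(4, 4)` here instead would make the test fail,
+    // since `assert(x != y)` could not be satisfied.
+}
+```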
+ +### Test fail + +You can write tests that are expected to fail by using the decorator `#[test(should_fail)]`. For example: + +```rust +fn add(x: u64, y: u64) -> u64 { + x + y +} +#[test(should_fail)] +fn test_add() { + assert(add(2,2) == 5); +} +``` + +You can be more specific and make it fail with a specific reason by using `should_fail_with = "`: + +```rust +fn main(african_swallow_avg_speed : Field) { + assert(african_swallow_avg_speed == 65, "What is the airspeed velocity of an unladen swallow"); +} + +#[test] +fn test_king_arthur() { + main(65); +} + +#[test(should_fail_with = "What is the airspeed velocity of an unladen swallow")] +fn test_bridgekeeper() { + main(32); +} + +``` diff --git a/docs/versioned_docs/version-v0.22.0/how_to/_category_.json b/docs/versioned_docs/version-v0.22.0/how_to/_category_.json new file mode 100644 index 00000000000..23b560f610b --- /dev/null +++ b/docs/versioned_docs/version-v0.22.0/how_to/_category_.json @@ -0,0 +1,5 @@ +{ + "position": 1, + "collapsible": true, + "collapsed": true +} diff --git a/docs/versioned_docs/version-v0.22.0/how_to/how-to-recursion.md b/docs/versioned_docs/version-v0.22.0/how_to/how-to-recursion.md new file mode 100644 index 00000000000..db9ad0e99f8 --- /dev/null +++ b/docs/versioned_docs/version-v0.22.0/how_to/how-to-recursion.md @@ -0,0 +1,184 @@ +--- +title: How to use recursion on NoirJS +description: Learn how to implement recursion with NoirJS, a powerful tool for creating smart contracts on the EVM blockchain. This guide assumes familiarity with NoirJS, solidity verifiers, and the Barretenberg proving backend. Discover how to generate both final and intermediate proofs using `noir_js` and `backend_barretenberg`. +keywords: + [ + "NoirJS", + "EVM blockchain", + "smart contracts", + "recursion", + "solidity verifiers", + "Barretenberg backend", + "noir_js", + "backend_barretenberg", + "intermediate proofs", + "final proofs", + "nargo compile", + "json import", + "recursive circuit", + "recursive app" + ] +sidebar_position: 1 +--- + +This guide shows you how to use recursive proofs in your NoirJS app. For the sake of clarity, it is assumed that: + +- You already have a NoirJS app. If you don't, please visit the [NoirJS tutorial](../tutorials/noirjs_app.md) and the [reference](../reference/NoirJS/noir_js/index.md). +- You are familiar with what are recursive proofs and you have read the [recursion explainer](../explainers/explainer-recursion.md) +- You already built a recursive circuit following [the reference](../noir/standard_library/recursion.md), and understand how it works. + +It is also assumed that you're not using `noir_wasm` for compilation, and instead you've used [`nargo compile`](../reference/nargo_commands.md) to generate the `json` you're now importing into your project. However, the guide should work just the same if you're using `noir_wasm`. + +:::info + +As you've read in the [explainer](../explainers/explainer-recursion.md), a recursive proof is an intermediate proof. Meaning it doesn't necessarily generate the final step that makes it verifiable in a smart contract. However, it is easy to verify within another circuit. + +While "standard" usage of NoirJS packages abstracts final proofs, it currently lacks the necessary interface to abstract away intermediate proofs. Which means these proofs need to be created by using the backend directly. 
+ +In short: + +- `noir_js` generates *only* final proofs +- `backend_barretenberg` generates both types of proofs + +::: + +In a standard recursive app, you're also dealing with at least two circuits. For the purpose of this guide, we will assume these two: + +- `main`: a circuit of type `assert(x != y)` +- `recursive`: a circuit that verifies `main` + +For a full example on how recursive proofs work, please refer to the [noir-examples](https://github.com/noir/noir-examples) repository. We will *not* be using it as a reference for this guide. + +## Step 1: Setup + +In a common NoirJS app, you need to instantiate a backend with something like `const backend = new Backend(circuit)`. Then you feed it to the `noir_js` interface. + +For recursion, this doesn't happen, and the only need for `noir_js` is only to `execute` a circuit and get its witness and return value. Everything else is not interfaced, so it needs to happen on the `backend` object. + +It is also recommended that you instantiate the backend with as many threads as possible, to allow for maximum concurrency: + +```js +const backend = new Backend(circuit, { threads: 8 }) +``` + +:::tip +You can use the [`os.cpus()`](https://nodejs.org/api/os.html#oscpus) object in `nodejs` or [`navigator.hardwareConcurrency`](https://developer.mozilla.org/en-US/docs/Web/API/Navigator/hardwareConcurrency) on the browser to make the most out of those glorious cpu cores +::: + +## Step 2: Generating the witness and the proof for `main` + +After instantiating the backend, you should also instantiate `noir_js`. We will use it to execute the circuit and get the witness. + +```js +const noir = new Noir(circuit, backend) +const { witness } = noir.execute(input) +``` + +With this witness, you are now able to generate the intermediate proof for the main circuit: + +```js +const { proof, publicInputs } = await backend.generateIntermediateProof(witness) +``` + +:::warning + +Always keep in mind what is actually happening on your development process, otherwise you'll quickly become confused about what circuit are we actually running and why! + +In this case, you can imagine that Alice (running the `main` circuit) is proving something to Bob (running the `recursive` circuit), and Bob is verifying her proof within his proof. + +With this in mind, it becomes clear that our intermediate proof is the one *meant to be verified within another circuit*. So it is Alice's. Actually, the only final proof in this theoretical scenario would be the last one, sent on-chain. + +::: + +## Step 3 - Verification and proof artifacts + +Optionally, you are able to verify the intermediate proof: + +```js +const verified = await backend.verifyIntermediateProof({ proof, publicInputs }) +``` + +This can be useful to make sure our intermediate proof was correctly generated. But the real goal is to do it within another circuit. For that, we need to generate the intermediate artifacts: + +```js +const { proofAsFields, vkAsFields, vkHash } = await backend.generateIntermediateProofArtifacts( { publicInputs, proof }, publicInputsCount) +``` + +This call takes the public inputs and the proof, but also the public inputs count. While this is easily retrievable by simply counting the `publicInputs` length, the backend interface doesn't currently abstract it away. + +:::info + +The `proofAsFields` has a constant size `[Field; 93]`. However, currently the backend doesn't remove the public inputs from the proof when converting it. 
+ +This means that if your `main` circuit has two public inputs, then you should also modify the recursive circuit to accept a proof with the public inputs appended. This means that in our example, since `y` is a public input, our `proofAsFields` is of type `[Field; 94]`. + +Verification keys in Barretenberg are always of size 114. + +::: + +:::warning + +One common mistake is to forget *who* makes this call. + +In a situation where Alice is generating the `main` proof, if she generates the proof artifacts and sends them to Bob, which gladly takes them as true, this would mean Alice could prove anything! + +Instead, Bob needs to make sure *he* extracts the proof artifacts, using his own instance of the `main` circuit backend. This way, Alice has to provide a valid proof for the correct `main` circuit. + +::: + +## Step 4 - Recursive proof generation + +With the artifacts, generating a recursive proof is no different from a normal proof. You simply use the `backend` (with the recursive circuit) to generate it: + +```js +const recursiveInputs = { + verification_key: vkAsFields, // array of length 114 + proof: proofAsFields, // array of length 93 + size of public inputs + publicInputs: [mainInput.y], // using the example above, where `y` is the only public input + key_hash: vkHash, + input_aggregation_object: Array(16).fill(0) // this circuit is verifying a non-recursive proof, so there's no input aggregation object: just use zero +} + +const { witness, returnValue } = noir.execute(recursiveInputs) // we're executing the recursive circuit now! +const { proof, publicInputs } = backend.generateFinalProof(witness) +const verified = backend.verifyFinalProof({ proof, publicInputs }) +``` + +You can obviously chain this proof into another proof. In fact, if you're using recursive proofs, you're probably interested of using them this way! In that case, you should keep in mind the `returnValue`, as it will contain the `input_aggregation_object` for the next proof. + +:::tip + +Managing circuits and "who does what" can be confusing. To make sure your naming is consistent, you can keep them in an object. 
For example: + +```js +const circuits = { +main: mainJSON, +recursive: recursiveJSON +} +const backends = { +main: new BarretenbergBackend(circuits.main), +recursive: new BarretenbergBackend(circuits.recursive) +} +const noir_programs = { +main: new Noir(circuits.main, backends.main), +recursive: new Noir(circuits.recursive, backends.recursive) +} +``` + +This allows you to neatly call exactly the method you want without conflicting names: + +```js +// Alice runs this 👇 +const { witness: mainWitness } = await noir_programs.main.execute(input) +const proof = await backends.main.generateIntermediateProof(mainWitness) + +// Bob runs this 👇 +const verified = await backends.main.verifyIntermediateProof(proof) +const { proofAsFields, vkAsFields, vkHash } = await backends.main.generateIntermediateProofArtifacts( + proof, + numPublicInputs, +); +const recursiveProof = await noir_programs.recursive.generateFinalProof(recursiveInputs) +``` + +::: diff --git a/docs/versioned_docs/version-v0.22.0/how_to/merkle-proof.mdx b/docs/versioned_docs/version-v0.22.0/how_to/merkle-proof.mdx new file mode 100644 index 00000000000..34074659ac1 --- /dev/null +++ b/docs/versioned_docs/version-v0.22.0/how_to/merkle-proof.mdx @@ -0,0 +1,48 @@ +--- +title: Prove Merkle Tree Membership +description: + Learn how to use merkle membership proof in Noir to prove that a given leaf is a member of a + merkle tree with a specified root, at a given index. +keywords: + [merkle proof, merkle membership proof, Noir, rust, hash function, Pedersen, sha256, merkle tree] +--- + +Let's walk through an example of a merkle membership proof in Noir that proves that a given leaf is +in a merkle tree. + +```rust +use dep::std; + +fn main(message : [Field; 62], index : Field, hashpath : [Field; 40], root : Field) { + let leaf = std::hash::hash_to_field(message); + let merkle_root = std::merkle::compute_merkle_root(leaf, index, hashpath); + assert(merkle_root == root); +} + +``` + +The message is hashed using `hash_to_field`. The specific hash function that is being used is chosen +by the backend. The only requirement is that this hash function can heuristically be used as a +random oracle. If only collision resistance is needed, then one can call `std::hash::pedersen_hash` +instead. + +```rust +let leaf = std::hash::hash_to_field(message); +``` + +The leaf is then passed to a compute_merkle_root function with the root, index and hashpath. The returned root can then be asserted to be the same as the provided root. + +```rust +let merkle_root = std::merkle::compute_merkle_root(leaf, index, hashpath); +assert (merkle_root == root); +``` + +> **Note:** It is possible to re-implement the merkle tree implementation without standard library. +> However, for most usecases, it is enough. In general, the standard library will always opt to be +> as conservative as possible, while striking a balance with efficiency. + +An example, the merkle membership proof, only requires a hash function that has collision +resistance, hence a hash function like Pedersen is allowed, which in most cases is more efficient +than the even more conservative sha256. 
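+
+If collision resistance is all you need, a minimal sketch of the same membership check, assuming `std::hash::pedersen_hash` is swapped in for `hash_to_field` (the rest of the circuit is unchanged):
+
+```rust
+use dep::std;
+
+fn main(message : [Field; 62], index : Field, hashpath : [Field; 40], root : Field) {
+    // Pedersen only guarantees collision resistance, which is sufficient here
+    // and is typically more efficient to prove than sha256.
+    let leaf = std::hash::pedersen_hash(message);
+    let merkle_root = std::merkle::compute_merkle_root(leaf, index, hashpath);
+    assert(merkle_root == root);
+}
+```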
+ +[View an example on the starter repo](https://github.com/noir-lang/noir-examples/blob/3ea09545cabfa464124ec2f3ea8e60c608abe6df/stealthdrop/circuits/src/main.nr#L20) diff --git a/docs/versioned_docs/version-v0.22.0/how_to/solidity_verifier.md b/docs/versioned_docs/version-v0.22.0/how_to/solidity_verifier.md new file mode 100644 index 00000000000..8022b0e5f20 --- /dev/null +++ b/docs/versioned_docs/version-v0.22.0/how_to/solidity_verifier.md @@ -0,0 +1,130 @@ +--- +title: Generate a Solidity Verifier +description: + Learn how to run the verifier as a smart contract on the blockchain. Compile a Solidity verifier + contract for your Noir program and deploy it on any EVM blockchain acting as a verifier smart + contract. Read more to find out +keywords: + [ + solidity verifier, + smart contract, + blockchain, + compiler, + plonk_vk.sol, + EVM blockchain, + verifying Noir programs, + proving backend, + Barretenberg, + ] +sidebar_position: 0 +--- + +For certain applications, it may be desirable to run the verifier as a smart contract instead of on +a local machine. + +Compile a Solidity verifier contract for your Noir program by running: + +```sh +nargo codegen-verifier +``` + +A new `contract` folder would then be generated in your project directory, containing the Solidity +file `plonk_vk.sol`. It can be deployed on any EVM blockchain acting as a verifier smart contract. + +> **Note:** It is possible to compile verifier contracts of Noir programs for other smart contract +> platforms as long as the proving backend supplies an implementation. +> +> Barretenberg, the default proving backend for Nargo, supports compilation of verifier contracts in +> Solidity only for the time being. + +## Verify + +To verify a proof using the Solidity verifier contract, call the `verify` function with the +following signature: + +```solidity +function verify(bytes calldata _proof, bytes32[] calldata _publicInputs) external view returns (bool) +``` + +You can see an example of how the `verify` function is called in the example zk voting application [here](https://github.com/noir-lang/noir-examples/blob/33e598c257e2402ea3a6b68dd4c5ad492bce1b0a/foundry-voting/src/zkVote.sol#L35): + +```solidity +function castVote(bytes calldata proof, uint proposalId, uint vote, bytes32 nullifierHash) public returns (bool) { + // ... + bytes32[] memory publicInputs = new bytes32[](4); + publicInputs[0] = merkleRoot; + publicInputs[1] = bytes32(proposalId); + publicInputs[2] = bytes32(vote); + publicInputs[3] = nullifierHash; + require(verifier.verify(proof, publicInputs), "Invalid proof"); +``` + +### Public Inputs + +:::tip + +A circuit doesn't have the concept of a return value. Return values are just syntactic sugar in +Noir. + +Under the hood, the return value is passed as an input to the circuit and is checked at the end of +the circuit program. + +::: + +The verifier contract uses the output (return) value of a Noir program as a public input. So if you +have the following function + +```rust +fn main( + // Public inputs + pubkey_x: pub Field, + pubkey_y: pub Field, + // Private inputs + priv_key: Field, +) -> pub Field +``` + +then `verify` in `plonk_vk.sol` will expect 3 public inputs. Passing two inputs will result in an +error like `Reason: PUBLIC_INPUT_COUNT_INVALID(3, 2)`. + +In this case the 3 inputs to `verify` would be ordered as `[pubkey_x, pubkey_y, return]`. 
+ +#### Struct inputs + +Consider the following program: + +```rust +struct Type1 { + val1: Field, + val2: Field, +} + +struct Nested { + t1: Type1, + is_true: bool, +} + +fn main(x: pub Field, nested: pub Nested, y: pub Field) { + //... +} +``` + +Structs will be flattened so that the array of inputs is 1-dimensional array. The order of these inputs would be flattened to: `[x, nested.t1.val1, nested.t1.val2, nested.is_true, y]` + +## Noir for EVM chains + +You can currently deploy the Solidity verifier contracts to most EVM compatible chains. EVM chains that have been tested and are known to work include: + +- Optimism +- Arbitrum +- Polygon PoS +- Scroll +- Celo + +Other EVM chains should work, but have not been tested directly by our team. If you test any other chains, please open a PR on this page to update the list. See [this doc](https://github.com/noir-lang/noir-starter/tree/main/with-foundry#testing-on-chain) for more info about testing verifier contracts on different EVM chains. + +### Unsupported chains + +Unfortunately not all "EVM" chains are supported. + +**zkSync** and the **Polygon zkEVM** do _not_ currently support proof verification via Solidity verifier contracts. They are missing the bn256 precompile contract that the verifier contract requires. Once these chains support this precompile, they may work. diff --git a/docs/versioned_docs/version-v0.22.0/index.md b/docs/versioned_docs/version-v0.22.0/index.md new file mode 100644 index 00000000000..eaf8c59f935 --- /dev/null +++ b/docs/versioned_docs/version-v0.22.0/index.md @@ -0,0 +1,84 @@ +--- +title: Noir +description: + Learn about the public alpha release of Noir, a domain specific language heavily influenced by Rust that compiles to + an intermediate language which can be compiled to an arithmetic circuit or a rank-1 constraint system. +keywords: + [ + Noir, + Domain Specific Language, + Rust, + Intermediate Language, + Arithmetic Circuit, + Rank-1 Constraint System, + Ethereum Developers, + Protocol Developers, + Blockchain Developers, + Proving System, + Smart Contract Language, + ] +sidebar_position: 0 +--- + +## What's new about Noir? + +Noir, a domain-specific language crafted for SNARK proving systems, stands out with its simplicity, flexibility, +and robust capabilities. Unlike conventional approaches that compile directly to a fixed NP-complete language, +Noir takes a two-pronged path. It first compiles to an adaptable intermediate language known as ACIR. From there, +depending on the project's needs, ACIR can be further compiled into an arithmetic circuit for integration with Aztec's +barretenberg backend or transformed into a rank-1 constraint system suitable for R1CS backends like Arkworks' Marlin +backend, among others. + +This innovative design introduces unique challenges, yet it strategically separates the programming language from the +backend. Noir's approach echoes the modular philosophy of LLVM, offering developers a versatile toolkit for cryptographic +programming. + +## Who is Noir for? + +### Solidity Developers + +Noir streamlines the creation of Solidity contracts that interface with SNARK systems. +[`Utilize the nargo codegen-verifier`](./reference/nargo_commands.md#nargo-codegen-verifier) command to construct verifier +contracts efficiently. While the current alpha version offers this as a direct feature, future updates aim +to modularize this process for even greater ease of use. + +Noir currently includes a command to create a Solidity contract which verifies your Noir program. 
This will be +modularized in the future; however, as of the alpha, you can use the `nargo codegen-verifier` command to create a verifier contract. + +### Protocol Developers + +Should the Aztec backend not align with your existing tech stack, or if you're inclined to integrate alternative +proving systems, Noir's agnostic compilation to a proof-agnostic intermediate language offers unmatched flexibility. +This allows protocol engineers the freedom to substitute the default PLONK-based system with an alternative of their +choice, tailoring the proving system to their specific needs. + +### Blockchain developers + +Blockchain developers often face environmental constraints, such as predetermined proving systems and smart contract +languages. Noir addresses this by enabling the implementation of custom proving system backends and smart contract +interfaces, ensuring seamless integration with your blockchain's architecture, and expanding the horizons for innovation +within your projects. + +## Libraries + +Noir does not currently have an official package manager. You can find a list of available Noir libraries in the +[awesome-noir repo here](https://github.com/noir-lang/awesome-noir#libraries). + +Some libraries that are available today include: + +- [Standard Library](https://github.com/noir-lang/noir/tree/master/noir_stdlib) - the Noir Standard Library +- [Ethereum Storage Proof Verification](https://github.com/aragonzkresearch/noir-trie-proofs) - a library that contains + the primitives necessary for RLP decoding (in the form of look-up table construction) and Ethereum state and storage + proof verification (or verification of any trie proof involving 32-byte long keys) +- [BigInt](https://github.com/shuklaayush/noir-bigint) - a library that provides a custom BigUint56 data type, allowing + for computations on large unsigned integers +- [ECrecover](https://github.com/colinnielsen/ecrecover-noir/tree/main) - a library to verify an ECDSA signature and + return the source Ethereum address +- [Sparse Merkle Tree Verifier](https://github.com/vocdoni/smtverifier-noir/tree/main) - a library for verification of + sparse Merkle trees +- [Signed Int](https://github.com/resurgencelabs/signed_int) - a library for accessing a custom Signed Integer data + type, allowing access to negative numbers on Noir +- [Fraction](https://github.com/resurgencelabs/fraction) - a library for accessing fractional number data type in Noir, + allowing results that aren't whole numbers + +See the section on [dependencies](noir/modules_packages_crates/dependencies.md) for more information. diff --git a/docs/versioned_docs/version-v0.22.0/migration_notes.md b/docs/versioned_docs/version-v0.22.0/migration_notes.md new file mode 100644 index 00000000000..184ca283539 --- /dev/null +++ b/docs/versioned_docs/version-v0.22.0/migration_notes.md @@ -0,0 +1,91 @@ +--- +title: Migration notes +description: Read about migration notes from previous versions, which could solve problems while updating +keywords: [Noir, notes, migration, updating, upgrading] +--- + +Noir is in full-speed development. Things break fast, wild, and often. This page attempts to leave some notes on errors you might encounter when upgrading and how to resolve them until proper patches are built. + +## ≥0.19 + +### Enforcing `compiler_version` + +From this version on, the compiler will check for the `compiler_version` field in `Nargo.toml`, and will error if it doesn't match the current Nargo version in use. 
+
+To update, please make sure this field in `Nargo.toml` matches the output of `nargo --version`.
+
+## ≥0.14
+
+The index of the [for loops](noir/syntax/control_flow.md#loops) is now of type `u64` instead of `Field`. An example refactor would be:
+
+```rust
+for i in 0..10 {
+    let i = i as Field;
+}
+```
+
+## ≥v0.11.0 and Nargo backend
+
+From this version onwards, Nargo starts managing backends through the `nargo backend` command. Upgrading via the usual steps might lead to:
+
+### `backend encountered an error`
+
+This is likely because the locally installed version of the proving backend (e.g. barretenberg) is incompatible with the version of Nargo in use.
+
+To fix the issue:
+
+1. Uninstall the existing backend
+
+```bash
+nargo backend uninstall acvm-backend-barretenberg
+```
+
+You may replace _acvm-backend-barretenberg_ with the name of your backend listed in `nargo backend ls` or in ~/.nargo/backends.
+
+2. Reinstall a compatible version of the proving backend.
+
+If you are using the default barretenberg backend, simply run:
+
+```
+nargo prove
+```
+
+with your Noir program.
+
+This will trigger the download and installation of the latest version of barretenberg compatible with your Nargo in use.
+
+### `backend encountered an error: illegal instruction`
+
+On certain Intel-based systems, an `illegal instruction` error may arise due to incompatibility of barretenberg with certain CPU instructions.
+
+To fix the issue:
+
+1. Uninstall the existing backend
+
+```bash
+nargo backend uninstall acvm-backend-barretenberg
+```
+
+You may replace _acvm-backend-barretenberg_ with the name of your backend listed in `nargo backend ls` or in ~/.nargo/backends.
+
+2. Reinstall a compatible version of the proving backend.
+
+If you are using the default barretenberg backend, simply run:
+
+```
+nargo backend install acvm-backend-barretenberg https://github.com/noir-lang/barretenberg-js-binary/raw/master/run-bb.tar.gz
+```
+
+This downloads and installs a specific bb.js-based version of the barretenberg binary from GitHub.
+
+The gzipped file runs [this bash script](https://github.com/noir-lang/barretenberg-js-binary/blob/master/run-bb-js.sh); it needs to be gzipped because Nargo currently expects the backend to be zipped up.
+
+Then run:
+
+```
+DESIRED_BINARY_VERSION=0.8.1 nargo info
+```
+
+This overrides the bb native binary with a bb.js node application instead, which should be compatible with most if not all hardware. This does come with the drawback of being generally slower than the native binary.
+
+0.8.1 indicates bb.js version 0.8.1, so changing it will install a different version (or the default version in the script if none is supplied).
diff --git a/docs/versioned_docs/version-v0.22.0/noir/modules_packages_crates/_category_.json b/docs/versioned_docs/version-v0.22.0/noir/modules_packages_crates/_category_.json new file mode 100644 index 00000000000..1debcfe7675 --- /dev/null +++ b/docs/versioned_docs/version-v0.22.0/noir/modules_packages_crates/_category_.json @@ -0,0 +1,6 @@ +{ + "label": "Modules, Packages and Crates", + "position": 2, + "collapsible": true, + "collapsed": true +} diff --git a/docs/versioned_docs/version-v0.22.0/noir/modules_packages_crates/crates_and_packages.md b/docs/versioned_docs/version-v0.22.0/noir/modules_packages_crates/crates_and_packages.md new file mode 100644 index 00000000000..760a463094c --- /dev/null +++ b/docs/versioned_docs/version-v0.22.0/noir/modules_packages_crates/crates_and_packages.md @@ -0,0 +1,43 @@ +--- +title: Crates and Packages +description: Learn how to use Crates and Packages in your Noir project +keywords: [Nargo, dependencies, package management, crates, package] +sidebar_position: 0 +--- + +## Crates + +A crate is the smallest amount of code that the Noir compiler considers at a time. +Crates can contain modules, and the modules may be defined in other files that get compiled with the crate, as we’ll see in the coming sections. + +### Crate Types + +A Noir crate can come in several forms: binaries, libraries or contracts. + +#### Binaries + +_Binary crates_ are programs which you can compile to an ACIR circuit which you can then create proofs against. Each must have a function called `main` that defines the ACIR circuit which is to be proved. + +#### Libraries + +_Library crates_ don't have a `main` function and they don't compile down to ACIR. Instead they define functionality intended to be shared with multiple projects, and eventually included in a binary crate. + +#### Contracts + +Contract crates are similar to binary crates in that they compile to ACIR which you can create proofs against. They are different in that they do not have a single `main` function, but are a collection of functions to be deployed to the [Aztec network](https://aztec.network). You can learn more about the technical details of Aztec in the [monorepo](https://github.com/AztecProtocol/aztec-packages) or contract [examples](https://github.com/AztecProtocol/aztec-packages/tree/master/yarn-project/noir-contracts/contracts). + +### Crate Root + +Every crate has a root, which is the source file that the compiler starts, this is also known as the root module. The Noir compiler does not enforce any conditions on the name of the file which is the crate root, however if you are compiling via Nargo the crate root must be called `lib.nr` or `main.nr` for library or binary crates respectively. + +## Packages + +A Nargo _package_ is a collection of one of more crates that provides a set of functionality. A package must include a Nargo.toml file. + +A package _must_ contain either a library or a binary crate, but not both. + +### Differences from Cargo Packages + +One notable difference between Rust's Cargo and Noir's Nargo is that while Cargo allows a package to contain an unlimited number of binary crates and a single library crate, Nargo currently only allows a package to contain a single crate. + +In future this restriction may be lifted to allow a Nargo package to contain both a binary and library crate or multiple binary crates. 
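+
+To make the crate types above concrete, here is a minimal sketch of the entry file for each kind of crate (the `square` helper is purely illustrative):
+
+```rust
+// src/main.nr in a binary crate: `main` defines the ACIR circuit to be proved.
+fn main(x: Field, y: pub Field) {
+    assert(x != y);
+}
+```
+
+```rust
+// src/lib.nr in a library crate: no `main`, just functionality to be shared.
+fn square(x: Field) -> Field {
+    x * x
+}
+```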
diff --git a/docs/versioned_docs/version-v0.22.0/noir/modules_packages_crates/dependencies.md b/docs/versioned_docs/version-v0.22.0/noir/modules_packages_crates/dependencies.md new file mode 100644 index 00000000000..a37dc401b7d --- /dev/null +++ b/docs/versioned_docs/version-v0.22.0/noir/modules_packages_crates/dependencies.md @@ -0,0 +1,124 @@ +--- +title: Dependencies +description: + Learn how to specify and manage dependencies in Nargo, allowing you to upload packages to GitHub + and use them easily in your project. +keywords: [Nargo, dependencies, GitHub, package management, versioning] +sidebar_position: 1 +--- + +Nargo allows you to upload packages to GitHub and use them as dependencies. + +## Specifying a dependency + +Specifying a dependency requires a tag to a specific commit and the git url to the url containing +the package. + +Currently, there are no requirements on the tag contents. If requirements are added, it would follow +semver 2.0 guidelines. + +> Note: Without a `tag` , there would be no versioning and dependencies would change each time you +> compile your project. + +For example, to add the [ecrecover-noir library](https://github.com/colinnielsen/ecrecover-noir) to your project, add it to `Nargo.toml`: + +```toml +# Nargo.toml + +[dependencies] +ecrecover = {tag = "v0.8.0", git = "https://github.com/colinnielsen/ecrecover-noir"} +``` + +If the module is in a subdirectory, you can define a subdirectory in your git repository, for example: + +```toml +# Nargo.toml + +[dependencies] +easy_private_token_contract = {tag ="v0.1.0-alpha62", git = "https://github.com/AztecProtocol/aztec-packages", directory = "yarn-project/noir-contracts/contracts/easy_private_token_contract"} +``` + +## Specifying a local dependency + +You can also specify dependencies that are local to your machine. + +For example, this file structure has a library and binary crate + +```tree +├── binary_crate +│   ├── Nargo.toml +│   └── src +│   └── main.nr +└── lib_a + ├── Nargo.toml + └── src + └── lib.nr +``` + +Inside of the binary crate, you can specify: + +```toml +# Nargo.toml + +[dependencies] +lib_a = { path = "../lib_a" } +``` + +## Importing dependencies + +You can import a dependency to a Noir file using the following syntax. For example, to import the +ecrecover-noir library and local lib_a referenced above: + +```rust +use dep::ecrecover; +use dep::lib_a; +``` + +You can also import only the specific parts of dependency that you want to use, like so: + +```rust +use dep::std::hash::sha256; +use dep::std::scalar_mul::fixed_base_embedded_curve; +``` + +Lastly, as demonstrated in the +[elliptic curve example](../standard_library/cryptographic_primitives/ec_primitives#examples), you +can import multiple items in the same line by enclosing them in curly braces: + +```rust +use dep::std::ec::tecurve::affine::{Curve, Point}; +``` + +We don't have a way to consume libraries from inside a [workspace](./workspaces) as external dependencies right now. + +Inside a workspace, these are consumed as `{ path = "../to_lib" }` dependencies in Nargo.toml. + +## Dependencies of Dependencies + +Note that when you import a dependency, you also get access to all of the dependencies of that package. + +For example, the [phy_vector](https://github.com/resurgencelabs/phy_vector) library imports an [fraction](https://github.com/resurgencelabs/fraction) library. 
If you're importing the phy_vector library, then you can access the functions in fractions library like so: + +```rust +use dep::phy_vector; + +fn main(x : Field, y : pub Field) { + //... + let f = phy_vector::fraction::toFraction(true, 2, 1); + //... +} +``` + +## Available Libraries + +Noir does not currently have an official package manager. You can find a list of available Noir libraries in the [awesome-noir repo here](https://github.com/noir-lang/awesome-noir#libraries). + +Some libraries that are available today include: + +- [Standard Library](https://github.com/noir-lang/noir/tree/master/noir_stdlib) - the Noir Standard Library +- [Ethereum Storage Proof Verification](https://github.com/aragonzkresearch/noir-trie-proofs) - a library that contains the primitives necessary for RLP decoding (in the form of look-up table construction) and Ethereum state and storage proof verification (or verification of any trie proof involving 32-byte long keys) +- [BigInt](https://github.com/shuklaayush/noir-bigint) - a library that provides a custom BigUint56 data type, allowing for computations on large unsigned integers +- [ECrecover](https://github.com/colinnielsen/ecrecover-noir/tree/main) - a library to verify an ECDSA signature and return the source Ethereum address +- [Sparse Merkle Tree Verifier](https://github.com/vocdoni/smtverifier-noir/tree/main) - a library for verification of sparse Merkle trees +- [Signed Int](https://github.com/resurgencelabs/signed_int) - a library for accessing a custom Signed Integer data type, allowing access to negative numbers on Noir +- [Fraction](https://github.com/resurgencelabs/fraction) - a library for accessing fractional number data type in Noir, allowing results that aren't whole numbers diff --git a/docs/versioned_docs/version-v0.22.0/noir/modules_packages_crates/modules.md b/docs/versioned_docs/version-v0.22.0/noir/modules_packages_crates/modules.md new file mode 100644 index 00000000000..ae822a1cff4 --- /dev/null +++ b/docs/versioned_docs/version-v0.22.0/noir/modules_packages_crates/modules.md @@ -0,0 +1,105 @@ +--- +title: Modules +description: + Learn how to organize your files using modules in Noir, following the same convention as Rust's + module system. Examples included. +keywords: [Noir, Rust, modules, organizing files, sub-modules] +sidebar_position: 2 +--- + +Noir's module system follows the same convention as the _newer_ version of Rust's module system. + +## Purpose of Modules + +Modules are used to organize files. Without modules all of your code would need to live in a single +file. In Noir, the compiler does not automatically scan all of your files to detect modules. This +must be done explicitly by the developer. + +## Examples + +### Importing a module in the crate root + +Filename : `src/main.nr` + +```rust +mod foo; + +fn main() { + foo::hello_world(); +} +``` + +Filename : `src/foo.nr` + +```rust +fn from_foo() {} +``` + +In the above snippet, the crate root is the `src/main.nr` file. The compiler sees the module +declaration `mod foo` which prompts it to look for a foo.nr file. + +Visually this module hierarchy looks like the following : + +``` +crate + ├── main + │ + └── foo + └── from_foo + +``` + +### Importing a module throughout the tree + +All modules are accessible from the `crate::` namespace. + +``` +crate + ├── bar + ├── foo + └── main + +``` + +In the above snippet, if `bar` would like to use functions in `foo`, it can do so by `use crate::foo::function_name`. 
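+
+A minimal sketch of what that import might look like, assuming `bar` lives in `src/bar.nr`, is declared in the crate root with `mod bar;`, and `foo.nr` defines `from_foo` as in the earlier example:
+
+```rust
+// src/bar.nr
+use crate::foo::from_foo;
+
+fn from_bar() {
+    // Call a function defined in the sibling `foo` module.
+    from_foo();
+}
+```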
+
+### Sub-modules
+
+Filename : `src/main.nr`
+
+```rust
+mod foo;
+
+fn main() {
+    foo::from_foo();
+}
+```
+
+Filename : `src/foo.nr`
+
+```rust
+mod bar;
+fn from_foo() {}
+```
+
+Filename : `src/foo/bar.nr`
+
+```rust
+fn from_bar() {}
+```
+
+In the above snippet, we have added an extra module to the module tree: `bar`. `bar` is a submodule
+of `foo`, hence we declare `bar` in `foo.nr` with `mod bar`. Since `foo` is not the crate root, the
+compiler looks for the file associated with the `bar` module in `src/foo/bar.nr`.
+
+Visually the module hierarchy looks as follows:
+
+```
+crate
+    ├── main
+    │
+    └── foo
+        ├── from_foo
+        └── bar
+            └── from_bar
+```
diff --git a/docs/versioned_docs/version-v0.22.0/noir/modules_packages_crates/workspaces.md b/docs/versioned_docs/version-v0.22.0/noir/modules_packages_crates/workspaces.md
new file mode 100644
index 00000000000..67a1dafa372
--- /dev/null
+++ b/docs/versioned_docs/version-v0.22.0/noir/modules_packages_crates/workspaces.md
@@ -0,0 +1,40 @@
+---
+title: Workspaces
+sidebar_position: 3
+---
+
+Workspaces are a feature of nargo that allows you to manage multiple related Noir packages in a single repository. A workspace is essentially a group of related projects that share common build output directories and configurations.
+
+Each Noir project (with its own Nargo.toml file) can be thought of as a package. Each package is expected to contain exactly one "named circuit", being the "name" defined in Nargo.toml with the program logic defined in `./src/main.nr`.
+
+For a project with the following structure:
+
+```tree
+├── crates
+│   ├── a
+│   │   ├── Nargo.toml
+│   │   └── src
+│   │       └── main.nr
+│   └── b
+│       ├── Nargo.toml
+│       └── src
+│           └── main.nr
+├── Nargo.toml
+└── Prover.toml
+```
+
+You can define a workspace in Nargo.toml like so:
+
+```toml
+[workspace]
+members = ["crates/a", "crates/b"]
+default-member = "crates/a"
+```
+
+`members` indicates which packages are included in the workspace. As such, all member packages of a workspace will be processed when the `--workspace` flag is used with various commands or if a `default-member` is not specified.
+
+`default-member` indicates which package various commands process by default.
+
+Libraries can be defined in a workspace. Inside a workspace, these are consumed as `{ path = "../to_lib" }` dependencies in Nargo.toml.
diff --git a/docs/versioned_docs/version-v0.22.0/noir/standard_library/_category_.json b/docs/versioned_docs/version-v0.22.0/noir/standard_library/_category_.json
new file mode 100644
index 00000000000..af04c0933fd
--- /dev/null
+++ b/docs/versioned_docs/version-v0.22.0/noir/standard_library/_category_.json
@@ -0,0 +1,6 @@
+{
+  "label": "Standard Library",
+  "position": 1,
+  "collapsible": true,
+  "collapsed": true
+}
diff --git a/docs/versioned_docs/version-v0.22.0/noir/standard_library/black_box_fns.md b/docs/versioned_docs/version-v0.22.0/noir/standard_library/black_box_fns.md
new file mode 100644
index 00000000000..e0c6d475c1f
--- /dev/null
+++ b/docs/versioned_docs/version-v0.22.0/noir/standard_library/black_box_fns.md
@@ -0,0 +1,46 @@
+---
+title: Black Box Functions
+description: Black box functions are functions in Noir that rely on backends implementing support for specialized constraints.
+keywords: [noir, black box functions]
+---
+
+Black box functions are functions in Noir that rely on backends implementing support for specialized constraints.
This makes certain zk-snark unfriendly computations cheaper than if they were implemented in Noir. + +:::warning + +It is likely that not all backends will support a particular black box function. + +::: + +Because it is not guaranteed that all backends will support black box functions, it is possible that certain Noir programs won't compile against a particular backend if they use an unsupported black box function. It is possible to fallback to less efficient implementations written in Noir/ACIR in some cases. + +Black box functions are specified with the `#[foreign(black_box_fn)]` attribute. For example, the SHA256 function in the Noir [source code](https://github.com/noir-lang/noir/blob/v0.5.1/noir_stdlib/src/hash.nr) looks like: + +```rust +#[foreign(sha256)] +fn sha256(_input : [u8; N]) -> [u8; 32] {} +``` + +## Function list + +Here is a list of the current black box functions that are supported by UltraPlonk: + +- AES +- [SHA256](./cryptographic_primitives/hashes#sha256) +- [Schnorr signature verification](./cryptographic_primitives/schnorr) +- [Blake2s](./cryptographic_primitives/hashes#blake2s) +- [Pedersen Hash](./cryptographic_primitives/hashes#pedersen_hash) +- [Pedersen Commitment](./cryptographic_primitives/hashes#pedersen_commitment) +- [HashToField128Security](./cryptographic_primitives/hashes#hash_to_field) +- [ECDSA signature verification](./cryptographic_primitives/ecdsa_sig_verification) +- [Fixed base scalar multiplication](./cryptographic_primitives/scalar) +- [Compute merkle root](./merkle_trees#compute_merkle_root) +- AND +- XOR +- RANGE +- [Keccak256](./cryptographic_primitives/hashes#keccak256) +- [Recursive proof verification](./recursion) + +Most black box functions are included as part of the Noir standard library, however `AND`, `XOR` and `RANGE` are used as part of the Noir language syntax. For instance, using the bitwise operator `&` will invoke the `AND` black box function. To ensure compatibility across backends, the ACVM has fallback implementations of `AND`, `XOR` and `RANGE` defined in its standard library which it can seamlessly fallback to if the backend doesn't support them. + +You can view the black box functions defined in the ACVM code [here](https://github.com/noir-lang/noir/blob/master/acvm-repo/acir/src/circuit/black_box_functions.rs). diff --git a/docs/versioned_docs/version-v0.22.0/noir/standard_library/cryptographic_primitives/_category_.json b/docs/versioned_docs/version-v0.22.0/noir/standard_library/cryptographic_primitives/_category_.json new file mode 100644 index 00000000000..5d694210bbf --- /dev/null +++ b/docs/versioned_docs/version-v0.22.0/noir/standard_library/cryptographic_primitives/_category_.json @@ -0,0 +1,5 @@ +{ + "position": 0, + "collapsible": true, + "collapsed": true +} diff --git a/docs/versioned_docs/version-v0.22.0/noir/standard_library/cryptographic_primitives/ec_primitives.md b/docs/versioned_docs/version-v0.22.0/noir/standard_library/cryptographic_primitives/ec_primitives.md new file mode 100644 index 00000000000..8d573adb3be --- /dev/null +++ b/docs/versioned_docs/version-v0.22.0/noir/standard_library/cryptographic_primitives/ec_primitives.md @@ -0,0 +1,102 @@ +--- +title: Elliptic Curve Primitives +keywords: [cryptographic primitives, Noir project] +sidebar_position: 4 +--- + +Data structures and methods on them that allow you to carry out computations involving elliptic +curves over the (mathematical) field corresponding to `Field`. 
For the field currently at our +disposal, applications would involve a curve embedded in BN254, e.g. the +[Baby Jubjub curve](https://eips.ethereum.org/EIPS/eip-2494). + +## Data structures + +### Elliptic curve configurations + +(`std::ec::{tecurve,montcurve,swcurve}::{affine,curvegroup}::Curve`), i.e. the specific elliptic +curve you want to use, which would be specified using any one of the methods +`std::ec::{tecurve,montcurve,swcurve}::{affine,curvegroup}::new` which take the coefficients in the +defining equation together with a generator point as parameters. You can find more detail in the +comments in +[`noir_stdlib/src/ec.nr`](https://github.com/noir-lang/noir/blob/master/noir_stdlib/src/ec.nr), but +the gist of it is that the elliptic curves of interest are usually expressed in one of the standard +forms implemented here (Twisted Edwards, Montgomery and Short Weierstraß), and in addition to that, +you could choose to use `affine` coordinates (Cartesian coordinates - the usual (x,y) - possibly +together with a point at infinity) or `curvegroup` coordinates (some form of projective coordinates +requiring more coordinates but allowing for more efficient implementations of elliptic curve +operations). Conversions between all of these forms are provided, and under the hood these +conversions are done whenever an operation is more efficient in a different representation (or a +mixed coordinate representation is employed). + +### Points + +(`std::ec::{tecurve,montcurve,swcurve}::{affine,curvegroup}::Point`), i.e. points lying on the +elliptic curve. For a curve configuration `c` and a point `p`, it may be checked that `p` +does indeed lie on `c` by calling `c.contains(p1)`. + +## Methods + +(given a choice of curve representation, e.g. use `std::ec::tecurve::affine::Curve` and use +`std::ec::tecurve::affine::Point`) + +- The **zero element** is given by `Point::zero()`, and we can verify whether a point `p: Point` is + zero by calling `p.is_zero()`. +- **Equality**: Points `p1: Point` and `p2: Point` may be checked for equality by calling + `p1.eq(p2)`. +- **Addition**: For `c: Curve` and points `p1: Point` and `p2: Point` on the curve, adding these two + points is accomplished by calling `c.add(p1,p2)`. +- **Negation**: For a point `p: Point`, `p.negate()` is its negation. +- **Subtraction**: For `c` and `p1`, `p2` as above, subtracting `p2` from `p1` is accomplished by + calling `c.subtract(p1,p2)`. +- **Scalar multiplication**: For `c` as above, `p: Point` a point on the curve and `n: Field`, + scalar multiplication is given by `c.mul(n,p)`. If instead `n :: [u1; N]`, i.e. `n` is a bit + array, the `bit_mul` method may be used instead: `c.bit_mul(n,p)` +- **Multi-scalar multiplication**: For `c` as above and arrays `n: [Field; N]` and `p: [Point; N]`, + multi-scalar multiplication is given by `c.msm(n,p)`. +- **Coordinate representation conversions**: The `into_group` method converts a point or curve + configuration in the affine representation to one in the CurveGroup representation, and + `into_affine` goes in the other direction. +- **Curve representation conversions**: `tecurve` and `montcurve` curves and points are equivalent + and may be converted between one another by calling `into_montcurve` or `into_tecurve` on their + configurations or points. 
`swcurve` is more general and a curve c of one of the other two types + may be converted to this representation by calling `c.into_swcurve()`, whereas a point `p` lying + on the curve given by `c` may be mapped to its corresponding `swcurve` point by calling + `c.map_into_swcurve(p)`. +- **Map-to-curve methods**: The Elligator 2 method of mapping a field element `n: Field` into a + `tecurve` or `montcurve` with configuration `c` may be called as `c.elligator2_map(n)`. For all of + the curve configurations, the SWU map-to-curve method may be called as `c.swu_map(z,n)`, where + `z: Field` depends on `Field` and `c` and must be chosen by the user (the conditions it needs to + satisfy are specified in the comments + [here](https://github.com/noir-lang/noir/blob/master/noir_stdlib/src/ec.nr)). + +## Examples + +The +[ec_baby_jubjub test](https://github.com/noir-lang/noir/blob/master/crates/nargo_cli/tests/test_data/ec_baby_jubjub/src/main.nr) +illustrates all of the above primitives on various forms of the Baby Jubjub curve. A couple of more +interesting examples in Noir would be: + +Public-key cryptography: Given an elliptic curve and a 'base point' on it, determine the public key +from the private key. This is a matter of using scalar multiplication. In the case of Baby Jubjub, +for example, this code would do: + +```rust +use dep::std::ec::tecurve::affine::{Curve, Point}; + +fn bjj_pub_key(priv_key: Field) -> Point +{ + + let bjj = Curve::new(168700, 168696, G::new(995203441582195749578291179787384436505546430278305826713579947235728471134,5472060717959818805561601436314318772137091100104008585924551046643952123905)); + + let base_pt = Point::new(5299619240641551281634865583518297030282874472190772894086521144482721001553, 16950150798460657717958625567821834550301663161624707787222815936182638968203); + + bjj.mul(priv_key,base_pt) +} +``` + +This would come in handy in a Merkle proof. + +- EdDSA signature verification: This is a matter of combining these primitives with a suitable hash + function. See + [feat(stdlib): EdDSA sig verification noir#1136](https://github.com/noir-lang/noir/pull/1136) for + the case of Baby Jubjub and the Poseidon hash function. diff --git a/docs/versioned_docs/version-v0.22.0/noir/standard_library/cryptographic_primitives/ecdsa_sig_verification.mdx b/docs/versioned_docs/version-v0.22.0/noir/standard_library/cryptographic_primitives/ecdsa_sig_verification.mdx new file mode 100644 index 00000000000..1376c51dfde --- /dev/null +++ b/docs/versioned_docs/version-v0.22.0/noir/standard_library/cryptographic_primitives/ecdsa_sig_verification.mdx @@ -0,0 +1,46 @@ +--- +title: ECDSA Signature Verification +description: Learn about the cryptographic primitives regarding ECDSA over the secp256k1 and secp256r1 curves +keywords: [cryptographic primitives, Noir project, ecdsa, secp256k1, secp256r1, signatures] +sidebar_position: 3 +--- + +import BlackBoxInfo from '@site/src/components/Notes/_blackbox.mdx'; + +Noir supports ECDSA signatures verification over the secp256k1 and secp256r1 curves. 
+ +## ecdsa_secp256k1::verify_signature + +Verifier for ECDSA Secp256k1 signatures + +```rust +fn verify_signature(_public_key_x : [u8; 32], _public_key_y : [u8; 32], _signature: [u8; 64], _message: [u8]) -> bool +``` + +example: + +```rust +fn main(hashed_message : [u8;32], pub_key_x : [u8;32], pub_key_y : [u8;32], signature : [u8;64]) { + let valid_signature = std::ecdsa_secp256k1::verify_signature(pub_key_x, pub_key_y, signature, hashed_message); + assert(valid_signature); +} +``` + +## ecdsa_secp256r1::verify_signature + +Verifier for ECDSA Secp256r1 signatures + +```rust +fn verify_signature(_public_key_x : [u8; 32], _public_key_y : [u8; 32], _signature: [u8; 64], _message: [u8]) -> bool +``` + +example: + +```rust +fn main(hashed_message : [u8;32], pub_key_x : [u8;32], pub_key_y : [u8;32], signature : [u8;64]) { + let valid_signature = std::ecdsa_secp256r1::verify_signature(pub_key_x, pub_key_y, signature, hashed_message); + assert(valid_signature); +} +``` + + diff --git a/docs/versioned_docs/version-v0.22.0/noir/standard_library/cryptographic_primitives/eddsa.mdx b/docs/versioned_docs/version-v0.22.0/noir/standard_library/cryptographic_primitives/eddsa.mdx new file mode 100644 index 00000000000..a9c10da6c06 --- /dev/null +++ b/docs/versioned_docs/version-v0.22.0/noir/standard_library/cryptographic_primitives/eddsa.mdx @@ -0,0 +1,18 @@ +--- +title: EdDSA Verification +description: Learn about the cryptographic primitives regarding EdDSA +keywords: [cryptographic primitives, Noir project, eddsa, signatures] +sidebar_position: 5 +--- + +import BlackBoxInfo from '@site/src/components/Notes/_blackbox.mdx'; + +## eddsa::eddsa_poseidon_verify + +Verifier for EdDSA signatures + +```rust +fn eddsa_poseidon_verify(public_key_x : Field, public_key_y : Field, signature_s: Field, signature_r8_x: Field, signature_r8_y: Field, message: Field) -> bool +``` + + diff --git a/docs/versioned_docs/version-v0.22.0/noir/standard_library/cryptographic_primitives/hashes.mdx b/docs/versioned_docs/version-v0.22.0/noir/standard_library/cryptographic_primitives/hashes.mdx new file mode 100644 index 00000000000..9250cb4a0c0 --- /dev/null +++ b/docs/versioned_docs/version-v0.22.0/noir/standard_library/cryptographic_primitives/hashes.mdx @@ -0,0 +1,168 @@ +--- +title: Hash methods +description: + Learn about the cryptographic primitives ready to use for any Noir project, including sha256, + blake2s, pedersen, mimc_bn254 and mimc +keywords: + [cryptographic primitives, Noir project, sha256, blake2s, pedersen, mimc_bn254, mimc, hash] +sidebar_position: 0 +--- + +import BlackBoxInfo from '@site/src/components/Notes/_blackbox.mdx'; + +## sha256 + +Given an array of bytes, returns the resulting sha256 hash. + +```rust +fn sha256(_input : [u8]) -> [u8; 32] +``` + +example: + +```rust +fn main() { + let x = [163, 117, 178, 149]; // some random bytes + let hash = std::hash::sha256(x); +} +``` + + + +## blake2s + +Given an array of bytes, returns an array with the Blake2 hash + +```rust +fn blake2s(_input : [u8]) -> [u8; 32] +``` + +example: + +```rust +fn main() { + let x = [163, 117, 178, 149]; // some random bytes + let hash = std::hash::blake2s(x); +} +``` + + + +## pedersen_hash + +Given an array of Fields, returns the Pedersen hash. 
+ +```rust +fn pedersen_hash(_input : [Field]) -> Field +``` + +example: + +```rust +fn main() { + let x = [163, 117, 178, 149]; // some random bytes + let hash = std::hash::pedersen_hash(x); +} +``` + + + + + +## pedersen_commitment + +Given an array of Fields, returns the Pedersen commitment. + +```rust +fn pedersen_commitment(_input : [Field]) -> [Field; 2] +``` + +example: + +```rust +fn main() { + let x = [163, 117, 178, 149]; // some random bytes + let commitment = std::hash::pedersen_commitment(x); +} +``` + + + +## keccak256 + +Given an array of bytes (`u8`), returns the resulting keccak hash as an array of 32 bytes +(`[u8; 32]`). Specify a message_size to hash only the first `message_size` bytes +of the input. + +```rust +fn keccak256(_input : [u8; N], _message_size: u32) -> [u8; 32] +``` + +example: + +```rust +fn main() { + let x = [163, 117, 178, 149]; // some random bytes + let message_size = 4; + let hash = std::hash::keccak256(x, message_size); +} +``` + + + +## poseidon + +Given an array of Fields, returns a new Field with the Poseidon Hash. Mind that you need to specify +how many inputs are there to your Poseidon function. + +```rust +// example for hash_1, hash_2 accepts an array of length 2, etc +fn hash_1(input: [Field; 1]) -> Field +``` + +example: + +```rust +fn main() +{ + let hash_2 = std::hash::poseidon::bn254::hash_2([1, 2]); + assert(hash2 == 0x115cc0f5e7d690413df64c6b9662e9cf2a3617f2743245519e19607a4417189a); +} +``` + +## mimc_bn254 and mimc + +`mimc_bn254` is `mimc`, but with hardcoded parameters for the BN254 curve. You can use it by +providing an array of Fields, and it returns a Field with the hash. You can use the `mimc` method if +you're willing to input your own constants: + +```rust +fn mimc(x: Field, k: Field, constants: [Field; N], exp : Field) -> Field +``` + +otherwise, use the `mimc_bn254` method: + +```rust +fn mimc_bn254(array: [Field; N]) -> Field +``` + +example: + +```rust + +fn main() { + let x = [163, 117, 178, 149]; // some random bytes + let hash = std::hash::mimc::mimc_bn254(x); +} +``` + +## hash_to_field + +```rust +fn hash_to_field(_input : [Field; N]) -> Field {} +``` + +Calculates the `blake2s` hash of the inputs and returns the hash modulo the field modulus to return +a value which can be represented as a `Field`. + + diff --git a/docs/versioned_docs/version-v0.22.0/noir/standard_library/cryptographic_primitives/index.md b/docs/versioned_docs/version-v0.22.0/noir/standard_library/cryptographic_primitives/index.md new file mode 100644 index 00000000000..650f30165d5 --- /dev/null +++ b/docs/versioned_docs/version-v0.22.0/noir/standard_library/cryptographic_primitives/index.md @@ -0,0 +1,14 @@ +--- +title: Cryptographic Primitives +description: + Learn about the cryptographic primitives ready to use for any Noir project +keywords: + [ + cryptographic primitives, + Noir project, + ] +--- + +The Noir team is progressively adding new cryptographic primitives to the standard library. Reach out for news or if you would be interested in adding more of these calculations in Noir. + +Some methods are available thanks to the Aztec backend, not being performed using Noir. When using other backends, these methods may or may not be supplied. 
diff --git a/docs/versioned_docs/version-v0.22.0/noir/standard_library/cryptographic_primitives/scalar.mdx b/docs/versioned_docs/version-v0.22.0/noir/standard_library/cryptographic_primitives/scalar.mdx new file mode 100644 index 00000000000..1e686303c18 --- /dev/null +++ b/docs/versioned_docs/version-v0.22.0/noir/standard_library/cryptographic_primitives/scalar.mdx @@ -0,0 +1,28 @@ +--- +title: Scalar multiplication +description: See how you can perform scalar multiplications over a fixed base in Noir +keywords: [cryptographic primitives, Noir project, scalar multiplication] +sidebar_position: 1 +--- + +import BlackBoxInfo from '@site/src/components/Notes/_blackbox.mdx'; + +## scalar_mul::fixed_base_embedded_curve + +Performs scalar multiplication over the embedded curve whose coordinates are defined by the +configured noir field. For the BN254 scalar field, this is BabyJubJub or Grumpkin. + +```rust +fn fixed_base_embedded_curve(_input : Field) -> [Field; 2] +``` + +example + +```rust +fn main(x : Field) { + let scal = std::scalar_mul::fixed_base_embedded_curve(x); + std::println(scal); +} +``` + + diff --git a/docs/versioned_docs/version-v0.22.0/noir/standard_library/cryptographic_primitives/schnorr.mdx b/docs/versioned_docs/version-v0.22.0/noir/standard_library/cryptographic_primitives/schnorr.mdx new file mode 100644 index 00000000000..7a2c9c20226 --- /dev/null +++ b/docs/versioned_docs/version-v0.22.0/noir/standard_library/cryptographic_primitives/schnorr.mdx @@ -0,0 +1,38 @@ +--- +title: Schnorr Signatures +description: Learn how you can verify Schnorr signatures using Noir +keywords: [cryptographic primitives, Noir project, schnorr, signatures] +sidebar_position: 2 +--- + +import BlackBoxInfo from '@site/src/components/Notes/_blackbox.mdx'; + +## schnorr::verify_signature + +Verifier for Schnorr signatures over the embedded curve (for BN254 it is Grumpkin). + +```rust +fn verify_signature(_public_key_x: Field, _public_key_y: Field, _signature: [u8; 64], _message: [u8]) -> bool +``` + +where `_signature` can be generated like so using the npm package +[@noir-lang/barretenberg](https://www.npmjs.com/package/@noir-lang/barretenberg) + +```js +const { BarretenbergWasm } = require('@noir-lang/barretenberg/dest/wasm'); +const { Schnorr } = require('@noir-lang/barretenberg/dest/crypto/schnorr'); + +... + +const barretenberg = await BarretenbergWasm.new(); +const schnorr = new Schnorr(barretenberg); +const pubKey = schnorr.computePublicKey(privateKey); +const message = ... +const signature = Array.from( + schnorr.constructSignature(hash, privateKey).toBuffer() +); + +... +``` + + diff --git a/docs/versioned_docs/version-v0.22.0/noir/standard_library/logging.md b/docs/versioned_docs/version-v0.22.0/noir/standard_library/logging.md new file mode 100644 index 00000000000..2e163b52ab3 --- /dev/null +++ b/docs/versioned_docs/version-v0.22.0/noir/standard_library/logging.md @@ -0,0 +1,77 @@ +--- +title: Logging +description: + Learn how to use the println statement for debugging in Noir with this tutorial. Understand the + basics of logging in Noir and how to implement it in your code. +keywords: + [ + noir logging, + println statement, + print statement, + debugging in noir, + noir std library, + logging tutorial, + basic logging in noir, + noir logging implementation, + noir debugging techniques, + rust, + ] +--- + +The standard library provides two familiar statements you can use: `println` and `print`. 
Despite being a limited implementation of rust's `println!` and `print!` macros, these constructs can be useful for debugging. + +You can print the output of both statements in your Noir code by using the `nargo execute` command or the `--show-output` flag when using `nargo test` (provided there are print statements in your tests). + +It is recommended to use `nargo execute` if you want to debug failing constraints with `println` or `print` statements. This is due to every input in a test being a constant rather than a witness, so we issue an error during compilation while we only print during execution (which comes after compilation). Neither `println`, nor `print` are callable for failed constraints caught at compile time. + +Both `print` and `println` are generic functions which can work on integers, fields, strings, and even structs or expressions. Note however, that slices are currently unsupported. For example: + +```rust +use dep::std; + +struct Person { + age : Field, + height : Field, +} + +fn main(age : Field, height : Field) { + let person = Person { age : age, height : height }; + std::println(person); + std::println(age + height); + std::println("Hello world!"); +} +``` + +You can print different types in the same statement (including strings) with a type called `fmtstr`. It can be specified in the same way as a normal string, just prepended with an "f" character: + +```rust + let fmt_str = f"i: {i}, j: {j}"; + std::println(fmt_str); + + let s = myStruct { y: x, x: y }; + std::println(s); + + std::println(f"i: {i}, s: {s}"); + + std::println(x); + std::println([x, y]); + + let foo = fooStruct { my_struct: s, foo: 15 }; + std::println(f"s: {s}, foo: {foo}"); + + std::println(15); // prints 0x0f, implicit Field + std::println(-1 as u8); // prints 255 + std::println(-1 as i8); // prints -1 +``` + +Examples shown above are interchangeable between the two `print` statements: + +```rust +let person = Person { age : age, height : height }; + +std::println(person); +std::print(person); + +std::println("Hello world!"); // Prints with a newline at the end of the input +std::print("Hello world!"); // Prints the input and keeps cursor on the same line +``` diff --git a/docs/versioned_docs/version-v0.22.0/noir/standard_library/merkle_trees.md b/docs/versioned_docs/version-v0.22.0/noir/standard_library/merkle_trees.md new file mode 100644 index 00000000000..5b45617812a --- /dev/null +++ b/docs/versioned_docs/version-v0.22.0/noir/standard_library/merkle_trees.md @@ -0,0 +1,58 @@ +--- +title: Merkle Trees +description: Learn about Merkle Trees in Noir with this tutorial. Explore the basics of computing a merkle root using a proof, with examples. +keywords: + [ + Merkle trees in Noir, + Noir programming language, + check membership, + computing root from leaf, + Noir Merkle tree implementation, + Merkle tree tutorial, + Merkle tree code examples, + Noir libraries, + pedersen hash., + ] +--- + +## compute_merkle_root + +Returns the root of the tree from the provided leaf and its hash path, using a [Pedersen hash](./cryptographic_primitives/hashes.mdx#pedersen_hash). 
+ +```rust +fn compute_merkle_root(leaf : Field, index : Field, hash_path: [Field]) -> Field +``` + +example: + +```rust +/** + // these values are for this example only + index = "0" + priv_key = "0x000000000000000000000000000000000000000000000000000000616c696365" + secret = "0x1929ea3ab8d9106a899386883d9428f8256cfedb3c4f6b66bf4aa4d28a79988f" + note_hash_path = [ + "0x1e61bdae0f027b1b2159e1f9d3f8d00fa668a952dddd822fda80dc745d6f65cc", + "0x0e4223f3925f98934393c74975142bd73079ab0621f4ee133cee050a3c194f1a", + "0x2fd7bb412155bf8693a3bd2a3e7581a679c95c68a052f835dddca85fa1569a40" + ] + */ +fn main(index: Field, priv_key: Field, secret: Field, note_hash_path: [Field; 3]) { + + let pubkey = std::scalar_mul::fixed_base_embedded_curve(priv_key); + let pubkey_x = pubkey[0]; + let pubkey_y = pubkey[1]; + let note_commitment = std::hash::pedersen([pubkey_x, pubkey_y, secret]); + + let root = std::merkle::compute_merkle_root(note_commitment[0], index, note_hash_path); + std::println(root); +} +``` + +To check merkle tree membership: + +1. Include a merkle root as a program input. +2. Compute the merkle root of a given leaf, index and hash path. +3. Assert the merkle roots are equal. + +For more info about merkle trees, see the Wikipedia [page](https://en.wikipedia.org/wiki/Merkle_tree). diff --git a/docs/versioned_docs/version-v0.22.0/noir/standard_library/options.md b/docs/versioned_docs/version-v0.22.0/noir/standard_library/options.md new file mode 100644 index 00000000000..3d3139fb98b --- /dev/null +++ b/docs/versioned_docs/version-v0.22.0/noir/standard_library/options.md @@ -0,0 +1,99 @@ +--- +title: Option Type +--- + +The `Option` type is a way to express that a value might be present (`Some(T))` or absent (`None`). It's a safer way to handle potential absence of values, compared to using nulls in many other languages. + +```rust +struct Option { + None, + Some(T), +} +``` + +You can import the Option type into your Noir program like so: + +```rust +use dep::std::option::Option; + +fn main() { + let none = Option::none(); + let some = Option::some(3); +} +``` + +See [this test](https://github.com/noir-lang/noir/blob/5cbfb9c4a06c8865c98ff2b594464b037d821a5c/crates/nargo_cli/tests/test_data/option/src/main.nr) for a more comprehensive set of examples of each of the methods described below. + +## Methods + +### none + +Constructs a none value. + +### some + +Constructs a some wrapper around a given value. + +### is_none + +Returns true if the Option is None. + +### is_some + +Returns true of the Option is Some. + +### unwrap + +Asserts `self.is_some()` and returns the wrapped value. + +### unwrap_unchecked + +Returns the inner value without asserting `self.is_some()`. This method can be useful within an if condition when we already know that `option.is_some()`. If the option is None, there is no guarantee what value will be returned, only that it will be of type T for an `Option`. + +### unwrap_or + +Returns the wrapped value if `self.is_some()`. Otherwise, returns the given default value. + +### unwrap_or_else + +Returns the wrapped value if `self.is_some()`. Otherwise, calls the given function to return a default value. + +### map + +If self is `Some(x)`, this returns `Some(f(x))`. Otherwise, this returns `None`. + +### map_or + +If self is `Some(x)`, this returns `f(x)`. Otherwise, this returns the given default value. + +### map_or_else + +If self is `Some(x)`, this returns `f(x)`. Otherwise, this returns `default()`. + +### and + +Returns None if self is None. Otherwise, this returns `other`. 
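+
+For instance, a short sketch of how `and` behaves, reusing the `Option::some`/`Option::none` constructors shown above (the concrete values are illustrative):
+
+```rust
+let some = Option::some(3);
+let none = Option::none();
+
+// self is Some, so the other option is returned as-is
+assert(some.and(Option::some(5)).unwrap() == 5);
+// self is None, so the result is None regardless of the other option
+assert(none.and(some).is_none());
+```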
+ +### and_then + +If self is None, this returns None. Otherwise, this calls the given function with the Some value contained within self, and returns the result of that call. In some languages this function is called `flat_map` or `bind`. + +### or + +If self is Some, return self. Otherwise, return `other`. + +### or_else + +If self is Some, return self. Otherwise, return `default()`. + +### xor + +If only one of the two Options is Some, return that option. Otherwise, if both options are Some or both are None, None is returned. + +### filter + +Returns `Some(x)` if self is `Some(x)` and `predicate(x)` is true. Otherwise, this returns `None`. + +### flatten + +Flattens an `Option>` into a `Option`. This returns `None` if the outer Option is None. Otherwise, this returns the inner Option. diff --git a/docs/versioned_docs/version-v0.22.0/noir/standard_library/recursion.md b/docs/versioned_docs/version-v0.22.0/noir/standard_library/recursion.md new file mode 100644 index 00000000000..67962082a8f --- /dev/null +++ b/docs/versioned_docs/version-v0.22.0/noir/standard_library/recursion.md @@ -0,0 +1,90 @@ +--- +title: Recursive Proofs +description: Learn about how to write recursive proofs in Noir. +keywords: [recursion, recursive proofs, verification_key, aggregation object, verify_proof] +--- + +Noir supports recursively verifying proofs, meaning you verify the proof of a Noir program in another Noir program. This enables creating proofs of arbitrary size by doing step-wise verification of smaller components of a large proof. + +The `verify_proof` function takes a verification key, proof and public inputs for a zk program, as well as a key hash and an input aggregation object. The key hash is used to check the validity of the verification key and the input aggregation object is required by some proving systems. The `verify_proof` function returns an output aggregation object that can then be fed into future iterations of the proof verification if required. + +```rust +#[foreign(verify_proof)] +fn verify_proof(_verification_key : [Field], _proof : [Field], _public_input : Field, _key_hash : Field, _input_aggregation_object : [Field]) -> [Field] {} +``` + +:::info + +This is a black box function. Read [this section](./black_box_fns) to learn more about black box functions in Noir. + +::: + +## Example usage + +```rust +use dep::std; + +fn main( + verification_key : [Field; 114], + proof : [Field; 94], + public_inputs : [Field; 1], + key_hash : Field, + input_aggregation_object : [Field; 16], + proof_b : [Field; 94], +) -> pub [Field; 16] { + let output_aggregation_object_a = std::verify_proof( + verification_key.as_slice(), + proof.as_slice(), + public_inputs.as_slice(), + key_hash, + input_aggregation_object + ); + + let output_aggregation_object = std::verify_proof( + verification_key.as_slice(), + proof_b.as_slice(), + public_inputs.as_slice(), + key_hash, + output_aggregation_object_a + ); + + let mut output = [0; 16]; + for i in 0..16 { + output[i] = output_aggregation_object[i]; + } + output +} +``` + +## Parameters + +### `verification_key` + +The verification key for the zk program that is being verified. + +### `proof` + +The proof for the zk program that is being verified. + +### `public_inputs` + +These represent the public inputs of the proof we are verifying. They should be checked against in the circuit after construction of a new aggregation state. + +### `key_hash` + +A key hash is used to check the validity of the verification key. 
The circuit implementing this opcode can use this hash to ensure that the key provided to the circuit matches the key produced by the circuit creator. + +### `input_aggregation_object` + +An aggregation object is blob of data that the top-level verifier must run some proof system specific algorithm on to complete verification. The size is proof system specific and will be set by the backend integrating this opcode. The input aggregation object is only not `None` when we are verifying a previous recursive aggregation in the current circuit. If this is the first recursive aggregation there is no input aggregation object. It is left to the backend to determine how to handle when there is no input aggregation object. + +## Return value + +### `output_aggregation_object` + +This is the result of a recursive aggregation and is what will be fed into the next verifier. +The next verifier can either perform a final verification (returning true or false) or perform another recursive aggregation where this output aggregation object will be the input aggregation object of the next recursive aggregation. + +## Example + +You can see an example of how to do recursive proofs in [this example recursion demo repo](https://github.com/noir-lang/noir-examples/tree/master/recursion). diff --git a/docs/versioned_docs/version-v0.9.0/standard_library/zeroed.md b/docs/versioned_docs/version-v0.22.0/noir/standard_library/zeroed.md similarity index 100% rename from docs/versioned_docs/version-v0.9.0/standard_library/zeroed.md rename to docs/versioned_docs/version-v0.22.0/noir/standard_library/zeroed.md diff --git a/docs/versioned_docs/version-v0.22.0/noir/syntax/_category_.json b/docs/versioned_docs/version-v0.22.0/noir/syntax/_category_.json new file mode 100644 index 00000000000..666b691ae91 --- /dev/null +++ b/docs/versioned_docs/version-v0.22.0/noir/syntax/_category_.json @@ -0,0 +1,6 @@ +{ + "label": "Syntax", + "position": 0, + "collapsible": true, + "collapsed": true +} diff --git a/docs/versioned_docs/version-v0.22.0/noir/syntax/assert.md b/docs/versioned_docs/version-v0.22.0/noir/syntax/assert.md new file mode 100644 index 00000000000..c5f9aff139c --- /dev/null +++ b/docs/versioned_docs/version-v0.22.0/noir/syntax/assert.md @@ -0,0 +1,27 @@ +--- +title: Assert Function +description: + Learn about the assert function in Noir, which can be used to explicitly constrain the predicate or + comparison expression that follows to be true, and what happens if the expression is false at + runtime. +keywords: [Noir programming language, assert statement, predicate expression, comparison expression] +sidebar_position: 4 +--- + +Noir includes a special `assert` function which will explicitly constrain the predicate/comparison +expression that follows to be true. If this expression is false at runtime, the program will fail to +be proven. Example: + +```rust +fn main(x : Field, y : Field) { + assert(x == y); +} +``` + +You can optionally provide a message to be logged when the assertion fails: + +```rust +assert(x == y, "x and y are not equal"); +``` + +> Assertions only work for predicate operations, such as `==`. If there's any ambiguity on the operation, the program will fail to compile. For example, it is unclear if `assert(x + y)` would check for `x + y == 0` or simply would return `true`. 
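+
+A small sketch of how to make the intent explicit instead (the constant `10` and the failure message are illustrative):
+
+```rust
+fn main(x : Field, y : Field) {
+    // spell out the comparison rather than writing assert(x + y)
+    assert(x + y == 10, "x + y must equal 10");
+}
+```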
diff --git a/docs/versioned_docs/version-v0.22.0/noir/syntax/comments.md b/docs/versioned_docs/version-v0.22.0/noir/syntax/comments.md new file mode 100644 index 00000000000..f76ab49094b --- /dev/null +++ b/docs/versioned_docs/version-v0.22.0/noir/syntax/comments.md @@ -0,0 +1,33 @@ +--- +title: Comments +description: + Learn how to write comments in Noir programming language. A comment is a line of code that is + ignored by the compiler, but it can be read by programmers. Single-line and multi-line comments + are supported in Noir. +keywords: [Noir programming language, comments, single-line comments, multi-line comments] +sidebar_position: 9 +--- + +A comment is a line in your codebase which the compiler ignores, however it can be read by +programmers. + +Here is a single line comment: + +```rust +// This is a comment and is ignored +``` + +`//` is used to tell the compiler to ignore the rest of the line. + +Noir also supports multi-line block comments. Start a block comment with `/*` and end the block with `*/`. + +Noir does not natively support doc comments. You may be able to use [Rust doc comments](https://doc.rust-lang.org/reference/comments.html) in your code to leverage some Rust documentation build tools with Noir code. + +```rust +/* + This is a block comment describing a complex function. +*/ +fn main(x : Field, y : pub Field) { + assert(x != y); +} +``` diff --git a/docs/versioned_docs/version-v0.22.0/noir/syntax/control_flow.md b/docs/versioned_docs/version-v0.22.0/noir/syntax/control_flow.md new file mode 100644 index 00000000000..4ce65236db3 --- /dev/null +++ b/docs/versioned_docs/version-v0.22.0/noir/syntax/control_flow.md @@ -0,0 +1,45 @@ +--- +title: Control Flow +description: + Learn how to use loops and if expressions in the Noir programming language. Discover the syntax + and examples for for loops and if-else statements. +keywords: [Noir programming language, loops, for loop, if-else statements, Rust syntax] +sidebar_position: 2 +--- + +## Loops + +Noir has one kind of loop: the `for` loop. `for` loops allow you to repeat a block of code multiple +times. + +The following block of code between the braces is run 10 times. + +```rust +for i in 0..10 { + // do something +}; +``` + +The index for loops is of type `u64`. + +## If Expressions + +Noir supports `if-else` statements. The syntax is most similar to Rust's where it is not required +for the statement's conditional to be surrounded by parentheses. + +```rust +let a = 0; +let mut x: u32 = 0; + +if a == 0 { + if a != 0 { + x = 6; + } else { + x = 2; + } +} else { + x = 5; + assert(x == 5); +} +assert(x == 2); +``` diff --git a/docs/versioned_docs/version-v0.22.0/noir/syntax/data_bus.md b/docs/versioned_docs/version-v0.22.0/noir/syntax/data_bus.md new file mode 100644 index 00000000000..6c7e9b60891 --- /dev/null +++ b/docs/versioned_docs/version-v0.22.0/noir/syntax/data_bus.md @@ -0,0 +1,21 @@ +--- +title: Data Bus +sidebar_position: 12 +--- +**Disclaimer** this feature is experimental, do not use it! + +The data bus is an optimization that the backend can use to make recursion more efficient. +In order to use it, you must define some inputs of the program entry points (usually the `main()` +function) with the `call_data` modifier, and the return values with the `return_data` modifier. +These modifiers are incompatible with `pub` and `mut` modifiers. 
+ +## Example + +```rust +fn main(mut x: u32, y: call_data u32, z: call_data [u32;4] ) -> return_data u32 { + let a = z[x]; + a+y +} +``` + +As a result, both call_data and return_data will be treated as private inputs and encapsulated into a read-only array each, for the backend to process. diff --git a/docs/versioned_docs/version-v0.22.0/noir/syntax/data_types/_category_.json b/docs/versioned_docs/version-v0.22.0/noir/syntax/data_types/_category_.json new file mode 100644 index 00000000000..5d694210bbf --- /dev/null +++ b/docs/versioned_docs/version-v0.22.0/noir/syntax/data_types/_category_.json @@ -0,0 +1,5 @@ +{ + "position": 0, + "collapsible": true, + "collapsed": true +} diff --git a/docs/versioned_docs/version-v0.22.0/noir/syntax/data_types/arrays.md b/docs/versioned_docs/version-v0.22.0/noir/syntax/data_types/arrays.md new file mode 100644 index 00000000000..075d39dadd4 --- /dev/null +++ b/docs/versioned_docs/version-v0.22.0/noir/syntax/data_types/arrays.md @@ -0,0 +1,245 @@ +--- +title: Arrays +description: + Dive into the Array data type in Noir. Grasp its methods, practical examples, and best practices for efficiently using Arrays in your Noir code. +keywords: + [ + noir, + array type, + methods, + examples, + indexing, + ] +sidebar_position: 4 +--- + +An array is one way of grouping together values into one compound type. Array types can be inferred +or explicitly specified via the syntax `[; ]`: + +```rust +fn main(x : Field, y : Field) { + let my_arr = [x, y]; + let your_arr: [Field; 2] = [x, y]; +} +``` + +Here, both `my_arr` and `your_arr` are instantiated as an array containing two `Field` elements. + +Array elements can be accessed using indexing: + +```rust +fn main() { + let a = [1, 2, 3, 4, 5]; + + let first = a[0]; + let second = a[1]; +} +``` + +All elements in an array must be of the same type (i.e. homogeneous). That is, an array cannot group +a `Field` value and a `u8` value together for example. + +You can write mutable arrays, like: + +```rust +fn main() { + let mut arr = [1, 2, 3, 4, 5]; + assert(arr[0] == 1); + + arr[0] = 42; + assert(arr[0] == 42); +} +``` + +You can instantiate a new array of a fixed size with the same value repeated for each element. The following example instantiates an array of length 32 where each element is of type Field and has the value 0. + +```rust +let array: [Field; 32] = [0; 32]; +``` + +Like in Rust, arrays in Noir are a fixed size. However, if you wish to convert an array to a [slice](./slices), you can just call `as_slice` on your array: + +```rust +let array: [Field; 32] = [0; 32]; +let sl = array.as_slice() +``` + +You can define multidimensional arrays: + +```rust +let array : [[Field; 2]; 2]; +let element = array[0][0]; +``` + +## Types + +You can create arrays of primitive types or structs. There is not yet support for nested arrays +(arrays of arrays) or arrays of structs that contain arrays. + +## Methods + +For convenience, the STD provides some ready-to-use, common methods for arrays: + +### len + +Returns the length of an array + +```rust +fn len(_array: [T; N]) -> comptime Field +``` + +example + +```rust +fn main() { + let array = [42, 42]; + assert(array.len() == 2); +} +``` + +### sort + +Returns a new sorted array. The original array remains untouched. Notice that this function will +only work for arrays of fields or integers, not for any arbitrary type. This is because the sorting +logic it uses internally is optimized specifically for these values. 
If you need a sort function to +sort any type, you should use the function `sort_via` described below. + +```rust +fn sort(_array: [T; N]) -> [T; N] +``` + +example + +```rust +fn main() { + let arr = [42, 32]; + let sorted = arr.sort(); + assert(sorted == [32, 42]); +} +``` + +### sort_via + +Sorts the array with a custom comparison function + +```rust +fn sort_via(mut a: [T; N], ordering: fn(T, T) -> bool) -> [T; N] +``` + +example + +```rust +fn main() { + let arr = [42, 32] + let sorted_ascending = arr.sort_via(|a, b| a < b); + assert(sorted_ascending == [32, 42]); // verifies + + let sorted_descending = arr.sort_via(|a, b| a > b); + assert(sorted_descending == [32, 42]); // does not verify +} +``` + +### map + +Applies a function to each element of the array, returning a new array containing the mapped elements. + +```rust +fn map(f: fn(T) -> U) -> [U; N] +``` + +example + +```rust +let a = [1, 2, 3]; +let b = a.map(|a| a * 2); // b is now [2, 4, 6] +``` + +### fold + +Applies a function to each element of the array, returning the final accumulated value. The first +parameter is the initial value. + +```rust +fn fold(mut accumulator: U, f: fn(U, T) -> U) -> U +``` + +This is a left fold, so the given function will be applied to the accumulator and first element of +the array, then the second, and so on. For a given call the expected result would be equivalent to: + +```rust +let a1 = [1]; +let a2 = [1, 2]; +let a3 = [1, 2, 3]; + +let f = |a, b| a - b; +a1.fold(10, f) //=> f(10, 1) +a2.fold(10, f) //=> f(f(10, 1), 2) +a3.fold(10, f) //=> f(f(f(10, 1), 2), 3) +``` + +example: + +```rust + +fn main() { + let arr = [2, 2, 2, 2, 2]; + let folded = arr.fold(0, |a, b| a + b); + assert(folded == 10); +} + +``` + +### reduce + +Same as fold, but uses the first element as starting element. + +```rust +fn reduce(f: fn(T, T) -> T) -> T +``` + +example: + +```rust +fn main() { + let arr = [2, 2, 2, 2, 2]; + let reduced = arr.reduce(|a, b| a + b); + assert(reduced == 10); +} +``` + +### all + +Returns true if all the elements satisfy the given predicate + +```rust +fn all(predicate: fn(T) -> bool) -> bool +``` + +example: + +```rust +fn main() { + let arr = [2, 2, 2, 2, 2]; + let all = arr.all(|a| a == 2); + assert(all); +} +``` + +### any + +Returns true if any of the elements satisfy the given predicate + +```rust +fn any(predicate: fn(T) -> bool) -> bool +``` + +example: + +```rust +fn main() { + let arr = [2, 2, 2, 2, 5]; + let any = arr.any(|a| a == 5); + assert(any); +} + +``` diff --git a/docs/versioned_docs/version-v0.22.0/noir/syntax/data_types/booleans.md b/docs/versioned_docs/version-v0.22.0/noir/syntax/data_types/booleans.md new file mode 100644 index 00000000000..69826fcd724 --- /dev/null +++ b/docs/versioned_docs/version-v0.22.0/noir/syntax/data_types/booleans.md @@ -0,0 +1,31 @@ +--- +title: Booleans +description: + Delve into the Boolean data type in Noir. Understand its methods, practical examples, and best practices for using Booleans in your Noir programs. +keywords: + [ + noir, + boolean type, + methods, + examples, + logical operations, + ] +sidebar_position: 2 +--- + + +The `bool` type in Noir has two possible values: `true` and `false`: + +```rust +fn main() { + let t = true; + let f: bool = false; +} +``` + +> **Note:** When returning a boolean value, it will show up as a value of 1 for `true` and 0 for +> `false` in _Verifier.toml_. + +The boolean type is most commonly used in conditionals like `if` expressions and `assert` +statements. 
More about conditionals is covered in the [Control Flow](../control_flow) and +[Assert Function](../assert) sections. diff --git a/docs/versioned_docs/version-v0.22.0/noir/syntax/data_types/fields.md b/docs/versioned_docs/version-v0.22.0/noir/syntax/data_types/fields.md new file mode 100644 index 00000000000..a1c67945d66 --- /dev/null +++ b/docs/versioned_docs/version-v0.22.0/noir/syntax/data_types/fields.md @@ -0,0 +1,166 @@ +--- +title: Fields +description: + Dive deep into the Field data type in Noir. Understand its methods, practical examples, and best practices to effectively use Fields in your Noir programs. +keywords: + [ + noir, + field type, + methods, + examples, + best practices, + ] +sidebar_position: 0 +--- + +The field type corresponds to the native field type of the proving backend. + +The size of a Noir field depends on the elliptic curve's finite field for the proving backend +adopted. For example, a field would be a 254-bit integer when paired with the default backend that +spans the Grumpkin curve. + +Fields support integer arithmetic and are often used as the default numeric type in Noir: + +```rust +fn main(x : Field, y : Field) { + let z = x + y; +} +``` + +`x`, `y` and `z` are all private fields in this example. Using the `let` keyword we defined a new +private value `z` constrained to be equal to `x + y`. + +If proving efficiency is of priority, fields should be used as a default for solving problems. +Smaller integer types (e.g. `u64`) incur extra range constraints. + +## Methods + +After declaring a Field, you can use these common methods on it: + +### to_le_bits + +Transforms the field into an array of bits, Little Endian. + +```rust +fn to_le_bits(_x : Field, _bit_size: u32) -> [u1; N] +``` + +example: + +```rust +fn main() { + let field = 2; + let bits = field.to_le_bits(32); +} +``` + +### to_be_bits + +Transforms the field into an array of bits, Big Endian. + +```rust +fn to_be_bits(_x : Field, _bit_size: u32) -> [u1; N] +``` + +example: + +```rust +fn main() { + let field = 2; + let bits = field.to_be_bits(32); +} +``` + +### to_le_bytes + +Transforms into an array of bytes, Little Endian + +```rust +fn to_le_bytes(_x : Field, byte_size: u32) -> [u8] +``` + +example: + +```rust +fn main() { + let field = 2; + let bytes = field.to_le_bytes(4); +} +``` + +### to_be_bytes + +Transforms into an array of bytes, Big Endian + +```rust +fn to_be_bytes(_x : Field, byte_size: u32) -> [u8] +``` + +example: + +```rust +fn main() { + let field = 2; + let bytes = field.to_be_bytes(4); +} +``` + +### to_le_radix + +Decomposes into a vector over the specified base, Little Endian + +```rust +fn to_le_radix(_x : Field, _radix: u32, _result_len: u32) -> [u8] +``` + +example: + +```rust +fn main() { + let field = 2; + let radix = field.to_le_radix(256, 4); +} +``` + +### to_be_radix + +Decomposes into a vector over the specified base, Big Endian + +```rust +fn to_be_radix(_x : Field, _radix: u32, _result_len: u32) -> [u8] +``` + +example: + +```rust +fn main() { + let field = 2; + let radix = field.to_be_radix(256, 4); +} +``` + +### pow_32 + +Returns the value to the power of the specified exponent + +```rust +fn pow_32(self, exponent: Field) -> Field +``` + +example: + +```rust +fn main() { + let field = 2 + let pow = field.pow_32(4); + assert(pow == 16); +} +``` + +### sgn0 + +Parity of (prime) Field element, i.e. sgn0(x mod p) = 0 if x ∈ \{0, ..., p-1\} is even, otherwise sgn0(x mod p) = 1. 
+ +```rust +fn sgn0(self) -> u1 +``` diff --git a/docs/versioned_docs/version-v0.22.0/noir/syntax/data_types/function_types.md b/docs/versioned_docs/version-v0.22.0/noir/syntax/data_types/function_types.md new file mode 100644 index 00000000000..61e4076adaf --- /dev/null +++ b/docs/versioned_docs/version-v0.22.0/noir/syntax/data_types/function_types.md @@ -0,0 +1,26 @@ +--- +title: Function types +sidebar_position: 10 +--- + +Noir supports higher-order functions. The syntax for a function type is as follows: + +```rust +fn(arg1_type, arg2_type, ...) -> return_type +``` + +Example: + +```rust +fn assert_returns_100(f: fn() -> Field) { // f takes no args and returns a Field + assert(f() == 100); +} + +fn main() { + assert_returns_100(|| 100); // ok + assert_returns_100(|| 150); // fails +} +``` + +A function type also has an optional capture environment - this is necessary to support closures. +See [Lambdas](@site/docs/noir/syntax/lambdas.md) for more details. diff --git a/docs/versioned_docs/version-v0.22.0/noir/syntax/data_types/index.md b/docs/versioned_docs/version-v0.22.0/noir/syntax/data_types/index.md new file mode 100644 index 00000000000..01cd0431a68 --- /dev/null +++ b/docs/versioned_docs/version-v0.22.0/noir/syntax/data_types/index.md @@ -0,0 +1,96 @@ +--- +title: Data Types +description: + Get a clear understanding of the two categories of Noir data types - primitive types and compound + types. Learn about their characteristics, differences, and how to use them in your Noir + programming. +keywords: + [ + noir, + data types, + primitive types, + compound types, + private types, + public types, + ] +--- + +Every value in Noir has a type, which determines which operations are valid for it. + +All values in Noir are fundamentally composed of `Field` elements. For a more approachable +developing experience, abstractions are added on top to introduce different data types in Noir. + +Noir has two category of data types: primitive types (e.g. `Field`, integers, `bool`) and compound +types that group primitive types (e.g. arrays, tuples, structs). Each value can either be private or +public. + +## Private & Public Types + +A **private value** is known only to the Prover, while a **public value** is known by both the +Prover and Verifier. Mark values as `private` when the value should only be known to the prover. All +primitive types (including individual fields of compound types) in Noir are private by default, and +can be marked public when certain values are intended to be revealed to the Verifier. + +> **Note:** For public values defined in Noir programs paired with smart contract verifiers, once +> the proofs are verified on-chain the values can be considered known to everyone that has access to +> that blockchain. + +Public data types are treated no differently to private types apart from the fact that their values +will be revealed in proofs generated. Simply changing the value of a public type will not change the +circuit (where the same goes for changing values of private types as well). + +_Private values_ are also referred to as _witnesses_ sometimes. + +> **Note:** The terms private and public when applied to a type (e.g. `pub Field`) have a different +> meaning than when applied to a function (e.g. `pub fn foo() {}`). 
+> +> The former is a visibility modifier for the Prover to interpret if a value should be made known to +> the Verifier, while the latter is a visibility modifier for the compiler to interpret if a +> function should be made accessible to external Noir programs like in other languages. + +### pub Modifier + +All data types in Noir are private by default. Types are explicitly declared as public using the +`pub` modifier: + +```rust +fn main(x : Field, y : pub Field) -> pub Field { + x + y +} +``` + +In this example, `x` is **private** while `y` and `x + y` (the return value) are **public**. Note +that visibility is handled **per variable**, so it is perfectly valid to have one input that is +private and another that is public. + +> **Note:** Public types can only be declared through parameters on `main`. + +## Type Aliases + +A type alias is a new name for an existing type. Type aliases are declared with the keyword `type`: + +```rust +type Id = u8; + +fn main() { + let id: Id = 1; + let zero: u8 = 0; + assert(zero + 1 == id); +} +``` + +Type aliases can also be used with [generics](@site/docs/noir/syntax/generics.md): + +```rust +type Id = Size; + +fn main() { + let id: Id = 1; + let zero: u32 = 0; + assert(zero + 1 == id); +} +``` + +### BigInt + +You can achieve BigInt functionality using the [Noir BigInt](https://github.com/shuklaayush/noir-bigint) library. diff --git a/docs/versioned_docs/version-v0.22.0/noir/syntax/data_types/integers.md b/docs/versioned_docs/version-v0.22.0/noir/syntax/data_types/integers.md new file mode 100644 index 00000000000..7d1e83cf4e9 --- /dev/null +++ b/docs/versioned_docs/version-v0.22.0/noir/syntax/data_types/integers.md @@ -0,0 +1,113 @@ +--- +title: Integers +description: Explore the Integer data type in Noir. Learn about its methods, see real-world examples, and grasp how to efficiently use Integers in your Noir code. +keywords: [noir, integer types, methods, examples, arithmetic] +sidebar_position: 1 +--- + +An integer type is a range constrained field type. The Noir frontend supports arbitrarily-sized, both unsigned and signed integer types. + +:::info + +When an integer is defined in Noir without a specific type, it will default to `Field`. + +The one exception is for loop indices which default to `u64` since comparisons on `Field`s are not possible. + +::: + +## Unsigned Integers + +An unsigned integer type is specified first with the letter `u` (indicating its unsigned nature) followed by its bit size (e.g. `8`): + +```rust +fn main() { + let x: u8 = 1; + let y: u8 = 1; + let z = x + y; + assert (z == 2); +} +``` + +The bit size determines the maximum value the integer type can store. For example, a `u8` variable can store a value in the range of 0 to 255 (i.e. $\\2^{8}-1\\$). + +## Signed Integers + +A signed integer type is specified first with the letter `i` (which stands for integer) followed by its bit size (e.g. `8`): + +```rust +fn main() { + let x: i8 = -1; + let y: i8 = -1; + let z = x + y; + assert (z == -2); +} +``` + +The bit size determines the maximum and minimum range of value the integer type can store. For example, an `i8` variable can store a value in the range of -128 to 127 (i.e. $\\-2^{7}\\$ to $\\2^{7}-1\\$). + +:::tip + +If you are using the default proving backend with Noir, both even (e.g. _u2_, _i2_) and odd (e.g. _u3_, _i3_) arbitrarily-sized integer types up to 127 bits (i.e. _u127_ and _i127_) are supported. + +::: + +## Overflows + +Computations that exceed the type boundaries will result in overflow errors. 
This happens with both signed and unsigned integers. For example, attempting to prove: + +```rust +fn main(x: u8, y: u8) { + let z = x + y; +} +``` + +With: + +```toml +x = "255" +y = "1" +``` + +Would result in: + +``` +$ nargo prove +error: Assertion failed: 'attempt to add with overflow' +┌─ ~/src/main.nr:9:13 +│ +│ let z = x + y; +│ ----- +│ += Call stack: + ... +``` + +A similar error would happen with signed integers: + +```rust +fn main() { + let x: i8 = -118; + let y: i8 = -11; + let z = x + y; +} +``` + +### Wrapping methods + +Although integer overflow is expected to error, some use-cases rely on wrapping. For these use-cases, the standard library provides `wrapping` variants of certain common operations: + +```rust +fn wrapping_add(x: T, y: T) -> T; +fn wrapping_sub(x: T, y: T) -> T; +fn wrapping_mul(x: T, y: T) -> T; +``` + +Example of how it is used: + +```rust +use dep::std; + +fn main(x: u8, y: u8) -> pub u8 { + std::wrapping_add(x + y) +} +``` diff --git a/docs/versioned_docs/version-v0.22.0/noir/syntax/data_types/references.md b/docs/versioned_docs/version-v0.22.0/noir/syntax/data_types/references.md new file mode 100644 index 00000000000..a5293d11cfb --- /dev/null +++ b/docs/versioned_docs/version-v0.22.0/noir/syntax/data_types/references.md @@ -0,0 +1,23 @@ +--- +title: References +sidebar_position: 9 +--- + +Noir supports first-class references. References are a bit like pointers: they point to a specific address that can be followed to access the data stored at that address. You can use Rust-like syntax to use pointers in Noir: the `&` operator references the variable, the `*` operator dereferences it. + +Example: + +```rust +fn main() { + let mut x = 2; + + // you can reference x as &mut and pass it to multiplyBy2 + multiplyBy2(&mut x); +} + +// you can access &mut here +fn multiplyBy2(x: &mut Field) { + // and dereference it with * + *x = *x * 2; +} +``` diff --git a/docs/versioned_docs/version-v0.22.0/noir/syntax/data_types/slices.mdx b/docs/versioned_docs/version-v0.22.0/noir/syntax/data_types/slices.mdx new file mode 100644 index 00000000000..4a6ee816aa2 --- /dev/null +++ b/docs/versioned_docs/version-v0.22.0/noir/syntax/data_types/slices.mdx @@ -0,0 +1,147 @@ +--- +title: Slices +description: Explore the Slice data type in Noir. Understand its methods, see real-world examples, and learn how to effectively use Slices in your Noir programs. +keywords: [noir, slice type, methods, examples, subarrays] +sidebar_position: 5 +--- + +import Experimental from '@site/src/components/Notes/_experimental.mdx'; + + + +A slice is a dynamically-sized view into a sequence of elements. They can be resized at runtime, but because they don't own the data, they cannot be returned from a circuit. You can treat slices as arrays without a constrained size. + +```rust +use dep::std::slice; + +fn main() -> pub Field { + let mut slice: [Field] = [0; 2]; + + let mut new_slice = slice.push_back(6); + new_slice.len() +} +``` + +View the corresponding test file [here][test-file]. + +[test-file]: https://github.com/noir-lang/noir/blob/f387ec1475129732f72ba294877efdf6857135ac/crates/nargo_cli/tests/test_data_ssa_refactor/slices/src/main.nr + +## Methods + +For convenience, the STD provides some ready-to-use, common methods for slices: + +### push_back + +Pushes a new element to the end of the slice, returning a new slice with a length one greater than the original unmodified slice. 
+ +```rust +fn push_back(_self: [T], _elem: T) -> [T] +``` + +example: + +```rust +fn main() -> pub Field { + let mut slice: [Field] = [0; 2]; + + let mut new_slice = slice.push_back(6); + new_slice.len() +} +``` + +View the corresponding test file [here][test-file]. + +### push_front + +Returns a new array with the specified element inserted at index 0. The existing elements indexes are incremented by 1. + +```rust +fn push_front(_self: Self, _elem: T) -> Self +``` + +Example: + +```rust +let mut new_slice: [Field] = []; +new_slice = new_slice.push_front(20); +assert(new_slice[0] == 20); // returns true +``` + +View the corresponding test file [here][test-file]. + +### pop_front + +Returns a tuple of two items, the first element of the array and the rest of the array. + +```rust +fn pop_front(_self: Self) -> (T, Self) +``` + +Example: + +```rust +let (first_elem, rest_of_slice) = slice.pop_front(); +``` + +View the corresponding test file [here][test-file]. + +### pop_back + +Returns a tuple of two items, the beginning of the array with the last element omitted and the last element. + +```rust +fn pop_back(_self: Self) -> (Self, T) +``` + +Example: + +```rust +let (popped_slice, last_elem) = slice.pop_back(); +``` + +View the corresponding test file [here][test-file]. + +### append + +Loops over a slice and adds it to the end of another. + +```rust +fn append(mut self, other: Self) -> Self +``` + +Example: + +```rust +let append = [1, 2].append([3, 4, 5]); +``` + +### insert + +Inserts an element at a specified index and shifts all following elements by 1. + +```rust +fn insert(_self: Self, _index: Field, _elem: T) -> Self +``` + +Example: + +```rust +new_slice = rest_of_slice.insert(2, 100); +assert(new_slice[2] == 100); +``` + +View the corresponding test file [here][test-file]. + +### remove + +Remove an element at a specified index, shifting all elements after it to the left, returning the altered slice and the removed element. + +```rust +fn remove(_self: Self, _index: Field) -> (Self, T) +``` + +Example: + +```rust +let (remove_slice, removed_elem) = slice.remove(3); +``` diff --git a/docs/versioned_docs/version-v0.22.0/noir/syntax/data_types/strings.md b/docs/versioned_docs/version-v0.22.0/noir/syntax/data_types/strings.md new file mode 100644 index 00000000000..8d76d4ca654 --- /dev/null +++ b/docs/versioned_docs/version-v0.22.0/noir/syntax/data_types/strings.md @@ -0,0 +1,80 @@ +--- +title: Strings +description: + Discover the String data type in Noir. Learn about its methods, see real-world examples, and understand how to effectively manipulate and use Strings in Noir. +keywords: + [ + noir, + string type, + methods, + examples, + concatenation, + ] +sidebar_position: 3 +--- + + +The string type is a fixed length value defined with `str`. + +You can use strings in `assert()` functions or print them with +`std::println()`. See more about [Logging](../../standard_library/logging). + +```rust +use dep::std; + +fn main(message : pub str<11>, hex_as_string : str<4>) { + std::println(message); + assert(message == "hello world"); + assert(hex_as_string == "0x41"); +} +``` + +You can convert a `str` to a byte array by calling `as_bytes()` +or a vector by calling `as_bytes_vec()`. 
+ +```rust +fn main() { + let message = "hello world"; + let message_bytes = message.as_bytes(); + let mut message_vec = message.as_bytes_vec(); + assert(message_bytes.len() == 11); + assert(message_bytes[0] == 104); + assert(message_bytes[0] == message_vec.get(0)); +} +``` + +## Escape characters + +You can use escape characters for your strings: + +| Escape Sequence | Description | +|-----------------|-----------------| +| `\r` | Carriage Return | +| `\n` | Newline | +| `\t` | Tab | +| `\0` | Null Character | +| `\"` | Double Quote | +| `\\` | Backslash | + +Example: + +```rust +let s = "Hello \"world" // prints "Hello "world" +let s = "hey \tyou"; // prints "hey you" +``` + +## Raw strings + +A raw string begins with the letter `r` and is optionally delimited by a number of hashes `#`. + +Escape characters are *not* processed within raw strings. All contents are interpreted literally. + +Example: + +```rust +let s = r"Hello world"; +let s = r#"Simon says "hello world""#; + +// Any number of hashes may be used (>= 1) as long as the string also terminates with the same number of hashes +let s = r#####"One "#, Two "##, Three "###, Four "####, Five will end the string."#####; +``` diff --git a/docs/versioned_docs/version-v0.22.0/noir/syntax/data_types/structs.md b/docs/versioned_docs/version-v0.22.0/noir/syntax/data_types/structs.md new file mode 100644 index 00000000000..dbf68c99813 --- /dev/null +++ b/docs/versioned_docs/version-v0.22.0/noir/syntax/data_types/structs.md @@ -0,0 +1,70 @@ +--- +title: Structs +description: + Explore the Struct data type in Noir. Learn about its methods, see real-world examples, and grasp how to effectively define and use Structs in your Noir programs. +keywords: + [ + noir, + struct type, + methods, + examples, + data structures, + ] +sidebar_position: 8 +--- + +A struct also allows for grouping multiple values of different types. Unlike tuples, we can also +name each field. + +> **Note:** The usage of _field_ here refers to each element of the struct and is unrelated to the +> field type of Noir. + +Defining a struct requires giving it a name and listing each field within as `: ` pairs: + +```rust +struct Animal { + hands: Field, + legs: Field, + eyes: u8, +} +``` + +An instance of a struct can then be created with actual values in `: ` pairs in any +order. Struct fields are accessible using their given names: + +```rust +fn main() { + let legs = 4; + + let dog = Animal { + eyes: 2, + hands: 0, + legs, + }; + + let zero = dog.hands; +} +``` + +Structs can also be destructured in a pattern, binding each field to a new variable: + +```rust +fn main() { + let Animal { hands, legs: feet, eyes } = get_octopus(); + + let ten = hands + feet + eyes as u8; +} + +fn get_octopus() -> Animal { + let octopus = Animal { + hands: 0, + legs: 8, + eyes: 2, + }; + + octopus +} +``` + +The new variables can be bound with names different from the original struct field names, as +showcased in the `legs --> feet` binding in the example above. diff --git a/docs/versioned_docs/version-v0.22.0/noir/syntax/data_types/tuples.md b/docs/versioned_docs/version-v0.22.0/noir/syntax/data_types/tuples.md new file mode 100644 index 00000000000..2ec5c9c4113 --- /dev/null +++ b/docs/versioned_docs/version-v0.22.0/noir/syntax/data_types/tuples.md @@ -0,0 +1,48 @@ +--- +title: Tuples +description: + Dive into the Tuple data type in Noir. Understand its methods, practical examples, and best practices for efficiently using Tuples in your Noir code. 
+keywords: + [ + noir, + tuple type, + methods, + examples, + multi-value containers, + ] +sidebar_position: 7 +--- + +A tuple collects multiple values like an array, but with the added ability to collect values of +different types: + +```rust +fn main() { + let tup: (u8, u64, Field) = (255, 500, 1000); +} +``` + +One way to access tuple elements is via destructuring using pattern matching: + +```rust +fn main() { + let tup = (1, 2); + + let (one, two) = tup; + + let three = one + two; +} +``` + +Another way to access tuple elements is via direct member access, using a period (`.`) followed by +the index of the element we want to access. Index `0` corresponds to the first tuple element, `1` to +the second and so on: + +```rust +fn main() { + let tup = (5, 6, 7, 8); + + let five = tup.0; + let eight = tup.3; +} +``` diff --git a/docs/versioned_docs/version-v0.22.0/noir/syntax/data_types/vectors.mdx b/docs/versioned_docs/version-v0.22.0/noir/syntax/data_types/vectors.mdx new file mode 100644 index 00000000000..10e35711b74 --- /dev/null +++ b/docs/versioned_docs/version-v0.22.0/noir/syntax/data_types/vectors.mdx @@ -0,0 +1,173 @@ +--- +title: Vectors +description: Delve into the Vector data type in Noir. Learn about its methods, practical examples, and best practices for using Vectors in your Noir code. +keywords: [noir, vector type, methods, examples, dynamic arrays] +sidebar_position: 6 +--- + +import Experimental from '@site/src/components/Notes/_experimental.mdx'; + + + +A vector is a collection type similar to Rust's Vector type. It's convenient way to use slices as mutable arrays. + +Example: + +```rust +use dep::std::collections::vec::Vec; + +let mut vector: Vec = Vec::new(); +for i in 0..5 { + vector.push(i); +} +assert(vector.len() == 5); +``` + +## Methods + +### new + +Creates a new, empty vector. + +```rust +pub fn new() -> Self { + Self { slice: [] } +} +``` + +Example: + +```rust +let empty_vector: Vec = Vec::new(); +assert(empty_vector.len() == 0); +``` + +### from_slice + +Creates a vector containing each element from a given slice. Mutations to the resulting vector will not affect the original slice. + +```rust +pub fn from_slice(slice: [T]) -> Self { + Self { slice } +} +``` + +Example: + +```rust +let arr: [Field] = [1, 2, 3]; +let vector_from_slice = Vec::from_slice(arr); +assert(vector_from_slice.len() == 3); +``` + +### get + +Retrieves an element from the vector at a given index. Panics if the index points beyond the vector's end. + +```rust +pub fn get(self, index: Field) -> T { + self.slice[index] +} +``` + +Example: + +```rust +let vector: Vec = Vec::from_slice([10, 20, 30]); +assert(vector.get(1) == 20); +``` + +### push + +Adds a new element to the vector's end, returning a new vector with a length one greater than the original unmodified vector. + +```rust +pub fn push(&mut self, elem: T) { + self.slice = self.slice.push_back(elem); +} +``` + +Example: + +```rust +let mut vector: Vec = Vec::new(); +vector.push(10); +assert(vector.len() == 1); +``` + +### pop + +Removes an element from the vector's end, returning a new vector with a length one less than the original vector, along with the removed element. Panics if the vector's length is zero. 
+ +```rust +pub fn pop(&mut self) -> T { + let (popped_slice, last_elem) = self.slice.pop_back(); + self.slice = popped_slice; + last_elem +} +``` + +Example: + +```rust +let mut vector = Vec::from_slice([10, 20]); +let popped_elem = vector.pop(); +assert(popped_elem == 20); +assert(vector.len() == 1); +``` + +### insert + +Inserts an element at a specified index, shifting subsequent elements to the right. + +```rust +pub fn insert(&mut self, index: Field, elem: T) { + self.slice = self.slice.insert(index, elem); +} +``` + +Example: + +```rust +let mut vector = Vec::from_slice([10, 30]); +vector.insert(1, 20); +assert(vector.get(1) == 20); +``` + +### remove + +Removes an element at a specified index, shifting subsequent elements to the left, and returns the removed element. + +```rust +pub fn remove(&mut self, index: Field) -> T { + let (new_slice, elem) = self.slice.remove(index); + self.slice = new_slice; + elem +} +``` + +Example: + +```rust +let mut vector = Vec::from_slice([10, 20, 30]); +let removed_elem = vector.remove(1); +assert(removed_elem == 20); +assert(vector.len() == 2); +``` + +### len + +Returns the number of elements in the vector. + +```rust +pub fn len(self) -> Field { + self.slice.len() +} +``` + +Example: + +```rust +let empty_vector: Vec = Vec::new(); +assert(empty_vector.len() == 0); +``` diff --git a/docs/versioned_docs/version-v0.22.0/noir/syntax/distinct.md b/docs/versioned_docs/version-v0.22.0/noir/syntax/distinct.md new file mode 100644 index 00000000000..b59e0296b23 --- /dev/null +++ b/docs/versioned_docs/version-v0.22.0/noir/syntax/distinct.md @@ -0,0 +1,64 @@ +--- +title: Distinct Witnesses +sidebar_position: 10 +--- + +The `distinct` keyword prevents repetitions of witness indices in the program's ABI. This ensures +that the witnesses being returned as public inputs are all unique. + +The `distinct` keyword is only used for return values on program entry points (usually the `main()` +function). + +When using `distinct` and `pub` simultaneously, `distinct` comes first. See the example below. + +You can read more about the problem this solves +[here](https://github.com/noir-lang/noir/issues/1183). + +## Example + +Without the `distinct` keyword, the following program + +```rust +fn main(x : pub Field, y : pub Field) -> pub [Field; 4] { + let a = 1; + let b = 1; + [x + 1, y, a, b] +} +``` + +compiles to + +```json +{ + //... + "abi": { + //... + "param_witnesses": { "x": [1], "y": [2] }, + "return_witnesses": [3, 2, 4, 4] + } +} +``` + +Whereas (with the `distinct` keyword) + +```rust +fn main(x : pub Field, y : pub Field) -> distinct pub [Field; 4] { + let a = 1; + let b = 1; + [x + 1, y, a, b] +} +``` + +compiles to + +```json +{ + //... + "abi": { + //... + "param_witnesses": { "x": [1], "y": [2] }, + //... + "return_witnesses": [3, 4, 5, 6] + } +} +``` diff --git a/docs/versioned_docs/version-v0.22.0/noir/syntax/functions.md b/docs/versioned_docs/version-v0.22.0/noir/syntax/functions.md new file mode 100644 index 00000000000..48aba9cd058 --- /dev/null +++ b/docs/versioned_docs/version-v0.22.0/noir/syntax/functions.md @@ -0,0 +1,226 @@ +--- +title: Functions +description: + Learn how to declare functions and methods in Noir, a programming language with Rust semantics. + This guide covers parameter declaration, return types, call expressions, and more. 
+keywords: [Noir, Rust, functions, methods, parameter declaration, return types, call expressions] +sidebar_position: 1 +--- + +Functions in Noir follow the same semantics of Rust, though Noir does not support early returns. + +To declare a function the `fn` keyword is used. + +```rust +fn foo() {} +``` + +By default, functions are visible only within the package they are defined. To make them visible outside of that package (for example, as part of a [library](../modules_packages_crates/crates_and_packages.md#libraries)), you should mark them as `pub`: + +```rust +pub fn foo() {} +``` + +You can also restrict the visibility of the function to only the crate it was defined in, by specifying `pub(crate)`: + +```rust +pub(crate) fn foo() {} //foo can only be called within its crate +``` + +All parameters in a function must have a type and all types are known at compile time. The parameter +is pre-pended with a colon and the parameter type. Multiple parameters are separated using a comma. + +```rust +fn foo(x : Field, y : Field){} +``` + +The return type of a function can be stated by using the `->` arrow notation. The function below +states that the foo function must return a `Field`. If the function returns no value, then the arrow +is omitted. + +```rust +fn foo(x : Field, y : Field) -> Field { + x + y +} +``` + +Note that a `return` keyword is unneeded in this case - the last expression in a function's body is +returned. + +## Main function + +If you're writing a binary, the `main` function is the starting point of your program. You can pass all types of expressions to it, as long as they have a fixed size at compile time: + +```rust +fn main(x : Field) // this is fine: passing a Field +fn main(x : [Field; 2]) // this is also fine: passing a Field with known size at compile-time +fn main(x : (Field, bool)) // 👌: passing a (Field, bool) tuple means size 2 +fn main(x : str<5>) // this is fine, as long as you pass a string of size 5 + +fn main(x : Vec) // can't compile, has variable size +fn main(x : [Field]) // can't compile, has variable size +fn main(....// i think you got it by now +``` + +Keep in mind [tests](../../getting_started/tooling/testing.md) don't differentiate between `main` and any other function. The following snippet passes tests, but won't compile or prove: + +```rust +fn main(x : [Field]) { + assert(x[0] == 1); +} + +#[test] +fn test_one() { + main([1, 2]); +} +``` + +```bash +$ nargo test +[testing] Running 1 test functions +[testing] Testing test_one... ok +[testing] All tests passed + +$ nargo check +The application panicked (crashed). +Message: Cannot have variable sized arrays as a parameter to main +``` + +## Call Expressions + +Calling a function in Noir is executed by using the function name and passing in the necessary +arguments. + +Below we show how to call the `foo` function from the `main` function using a call expression: + +```rust +fn main(x : Field, y : Field) { + let z = foo(x); +} + +fn foo(x : Field) -> Field { + x + x +} +``` + +## Methods + +You can define methods in Noir on any struct type in scope. 
+
+```rust
+struct MyStruct {
+    foo: Field,
+    bar: Field,
+}
+
+impl MyStruct {
+    fn new(foo: Field) -> MyStruct {
+        MyStruct {
+            foo,
+            bar: 2,
+        }
+    }
+
+    fn sum(self) -> Field {
+        self.foo + self.bar
+    }
+}
+
+fn main() {
+    let s = MyStruct::new(40);
+    assert(s.sum() == 42);
+}
+```
+
+Methods are just syntactic sugar for functions, so if we wanted to we could also call `sum` as
+follows:
+
+```rust
+assert(MyStruct::sum(s) == 42);
+```
+
+It is also possible to specialize which method is chosen depending on the [generic](./generics.md) type that is used. In this example, the `foo` function returns different values depending on its type:
+
+```rust
+struct Foo<T> {}
+
+impl Foo<Field> {
+    fn foo(self) -> Field { 1 }
+}
+
+impl Foo<u32> {
+    fn foo(self) -> Field { 2 }
+}
+
+fn main() {
+    let f1: Foo<Field> = Foo{};
+    let f2: Foo<u32> = Foo{};
+    assert(f1.foo() + f2.foo() == 3);
+}
+```
+
+Also note that impls with the same method name defined in them cannot overlap. For example, if we already have `foo` defined for `Foo<Field>` and `Foo<u32>` like we do above, we cannot also define `foo` in an `impl<T> Foo<T>` since it would be ambiguous which version of `foo` to choose.
+
+```rust
+// Including this impl in the same project as the above snippet would
+// cause an overlapping impls error
+impl<T> Foo<T> {
+    fn foo(self) -> Field { 3 }
+}
+```
+
+## Lambdas
+
+Lambdas are anonymous functions. They follow the syntax of Rust - `|arg1, arg2, ..., argN| return_expression`.
+
+```rust
+let add_50 = |val| val + 50;
+assert(add_50(100) == 150);
+```
+
+See [Lambdas](./lambdas.md) for more details.
+
+## Attributes
+
+Attributes are metadata that can be applied to a function, using the following syntax: `#[attribute(value)]`.
+
+Supported attributes include:
+
+- **builtin**: the function is implemented by the compiler, for efficiency purposes.
+- **deprecated**: mark the function as _deprecated_. Calling the function will generate a warning: `warning: use of deprecated function` (see the sketch after this list)
+- **field**: Used to enable conditional compilation of code depending on the field size. See below for more details
+- **oracle**: mark the function as _oracle_; meaning it is an external unconstrained function, implemented in noir_js. See [Unconstrained](./unconstrained.md) and [NoirJS](../../reference/NoirJS/noir_js/index.md) for more details.
- **test**: mark the function as a unit test. See [Tests](../../getting_started/tooling/testing.md) for more details
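+
+As a rough sketch of how the `deprecated` and `test` attributes are applied (the function names below are made up purely for illustration):
+
+```rust
+#[deprecated]
+fn old_double(x: Field) -> Field {
+    x + x
+}
+
+#[test]
+fn test_old_double() {
+    // Still compiles and runs, but emits: warning: use of deprecated function
+    assert(old_double(21) == 42);
+}
+```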
+
+### Field Attribute
+
+The field attribute defines which field the function is compatible with. The function is conditionally compiled, under the condition that the field attribute matches the Noir native field.
+The field can be defined implicitly, by using the name of the elliptic curve usually associated to it - for instance bn254, bls12_381 - or explicitly by using the field (prime) order, in decimal or hexadecimal form.
+As a result, it is possible to define multiple versions of a function with each version specialized for a different field attribute. This can be useful when a function requires different parameters depending on the underlying elliptic curve.
+
+Example: we define the function `foo()` three times below. Once for the default Noir bn254 curve, once for the field $\mathbb F_{23}$, which will normally never be used by Noir, and once again for the bls12_381 curve.
+
+```rust
+#[field(bn254)]
+fn foo() -> u32 {
+    1
+}
+
+#[field(23)]
+fn foo() -> u32 {
+    2
+}
+
+// This commented code would not compile as foo would be defined twice because it is the same field as bn254
+// #[field(21888242871839275222246405745257275088548364400416034343698204186575808495617)]
+// fn foo() -> u32 {
+//     2
+// }
+
+#[field(bls12_381)]
+fn foo() -> u32 {
+    3
+}
+```
+
+If the field name is not known to Noir, it will discard the function. Field names are case insensitive.
diff --git a/docs/versioned_docs/version-v0.22.0/noir/syntax/generics.md b/docs/versioned_docs/version-v0.22.0/noir/syntax/generics.md
new file mode 100644
index 00000000000..443ca2b45a5
--- /dev/null
+++ b/docs/versioned_docs/version-v0.22.0/noir/syntax/generics.md
@@ -0,0 +1,114 @@
+---
+title: Generics
+description: Learn how to use Generics in Noir
+keywords: [Noir, Rust, generics, functions, structs]
+sidebar_position: 6
+---
+
+Generics allow you to use the same functions with multiple different concrete data types. You can
+read more about the concept of generics in the Rust documentation
+[here](https://doc.rust-lang.org/book/ch10-01-syntax.html).
+
+Here is a trivial example showing the identity function that supports any type. In Rust, it is
+common to refer to the most general type as `T`. We follow the same convention in Noir.
+
+```rust
+fn id<T>(x: T) -> T {
+    x
+}
+```
+
+## In Structs
+
+Generics are useful for specifying types in structs. For example, we can specify that a field in a
+struct will be of a certain generic type. In this case `value` is of type `T`.
+
+```rust
+struct RepeatedValue<T> {
+    value: T,
+    count: Field,
+}
+
+impl<T> RepeatedValue<T> {
+    fn new(value: T) -> Self {
+        Self { value, count: 1 }
+    }
+
+    fn increment(mut repeated: Self) -> Self {
+        repeated.count += 1;
+        repeated
+    }
+
+    fn print(self) {
+        for _i in 0 .. self.count {
+            dep::std::println(self.value);
+        }
+    }
+}
+
+fn main() {
+    let mut repeated = RepeatedValue::new("Hello!");
+    repeated = repeated.increment();
+    repeated.print();
+}
+```
+
+The `print` function will print `Hello!` an arbitrary number of times, twice in this case.
+
+If we want to be generic over array lengths (which are type-level integers), we can use numeric
+generics. Using these looks just like using regular generics, but these generics can resolve to
+integers at compile-time, rather than resolving to types. Here's an example of a struct that is
+generic over the size of the array it contains internally:
+
+```rust
+struct BigInt<N> {
+    limbs: [u32; N],
+}
+
+impl<N> BigInt<N> {
+    // `N` is in scope of all methods in the impl
+    fn first(first: BigInt<N>, second: BigInt<N>) -> Self {
+        assert(first.limbs != second.limbs);
+        first
+    }
+
+    fn second(first: BigInt<N>, second: Self) -> Self {
+        assert(first.limbs != second.limbs);
+        second
+    }
+}
+```
+
+## Calling functions on generic parameters
+
+Unlike Rust, Noir does not have traits, so how can one translate the equivalent of a trait bound in
+Rust into Noir? That is, how can we write a function that is generic over some type `T`, while also
+requiring there is a function like `eq: fn(T, T) -> bool` that works on the type?
+
+The answer is that we can translate this by passing in the function manually. Here's an example of
+implementing array equality in Noir:
+
+```rust
+fn array_eq<T, N>(array1: [T; N], array2: [T; N], elem_eq: fn(T, T) -> bool) -> bool {
+    if array1.len() != array2.len() {
+        false
+    } else {
+        let mut result = true;
+        for i in 0 ..
array1.len() { + result &= elem_eq(array1[i], array2[i]); + } + result + } +} + +fn main() { + assert(array_eq([1, 2, 3], [1, 2, 3], |a, b| a == b)); + + // We can use array_eq even for arrays of structs, as long as we have + // an equality function for these structs we can pass in + let array = [MyStruct::new(), MyStruct::new()]; + assert(array_eq(array, array, MyStruct::eq)); +} +``` + +You can see an example of generics in the tests +[here](https://github.com/noir-lang/noir/blob/master/tooling/nargo_cli/tests/execution_success/generics/src/main.nr). diff --git a/docs/versioned_docs/version-v0.22.0/noir/syntax/lambdas.md b/docs/versioned_docs/version-v0.22.0/noir/syntax/lambdas.md new file mode 100644 index 00000000000..e0a267adfda --- /dev/null +++ b/docs/versioned_docs/version-v0.22.0/noir/syntax/lambdas.md @@ -0,0 +1,81 @@ +--- +title: Lambdas +description: Learn how to use anonymous functions in Noir programming language. +keywords: [Noir programming language, lambda, closure, function, anonymous function] +sidebar_position: 8 +--- + +## Introduction + +Lambdas are anonymous functions. The syntax is `|arg1, arg2, ..., argN| return_expression`. + +```rust +let add_50 = |val| val + 50; +assert(add_50(100) == 150); +``` + +A block can be used as the body of a lambda, allowing you to declare local variables inside it: + +```rust +let cool = || { + let x = 100; + let y = 100; + x + y +} + +assert(cool() == 200); +``` + +## Closures + +Inside the body of a lambda, you can use variables defined in the enclosing function. Such lambdas are called **closures**. In this example `x` is defined inside `main` and is accessed from within the lambda: + +```rust +fn main() { + let x = 100; + let closure = || x + 150; + assert(closure() == 250); +} +``` + +## Passing closures to higher-order functions + +It may catch you by surprise that the following code fails to compile: + +```rust +fn foo(f: fn () -> Field) -> Field { + f() +} + +fn main() { + let (x, y) = (50, 50); + assert(foo(|| x + y) == 100); // error :( +} +``` + +The reason is that the closure's capture environment affects its type - we have a closure that captures two Fields and `foo` +expects a regular function as an argument - those are incompatible. +:::note + +Variables contained within the `||` are the closure's parameters, and the expression that follows it is the closure's body. The capture environment is comprised of any variables used in the closure's body that are not parameters. + +E.g. in |x| x + y, y would be a captured variable, but x would not be, since it is a parameter of the closure. + +::: +The syntax for the type of a closure is `fn[env](args) -> ret_type`, where `env` is the capture environment of the closure - +in this example that's `(Field, Field)`. 
+ +The best solution in our case is to make `foo` generic over the environment type of its parameter, so that it can be called +with closures with any environment, as well as with regular functions: + +```rust +fn foo(f: fn[Env]() -> Field) -> Field { + f() +} + +fn main() { + let (x, y) = (50, 50); + assert(foo(|| x + y) == 100); // compiles fine + assert(foo(|| 60) == 60); // compiles fine +} +``` diff --git a/docs/versioned_docs/version-v0.22.0/noir/syntax/mutability.md b/docs/versioned_docs/version-v0.22.0/noir/syntax/mutability.md new file mode 100644 index 00000000000..6abfae3cfa7 --- /dev/null +++ b/docs/versioned_docs/version-v0.22.0/noir/syntax/mutability.md @@ -0,0 +1,93 @@ +--- +title: Mutability +description: + Learn about mutable variables, constants, and globals in Noir programming language. Discover how + to declare, modify, and use them in your programs. +keywords: [noir programming language, mutability in noir, mutable variables, constants, globals] +sidebar_position: 7 +--- + +Variables in noir can be declared mutable via the `mut` keyword. Mutable variables can be reassigned +to via an assignment expression. + +```rust +let x = 2; +x = 3; // error: x must be mutable to be assigned to + +let mut y = 3; +let y = 4; // OK +``` + +The `mut` modifier can also apply to patterns: + +```rust +let (a, mut b) = (1, 2); +a = 11; // error: a must be mutable to be assigned to +b = 12; // OK + +let mut (c, d) = (3, 4); +c = 13; // OK +d = 14; // OK + +// etc. +let MyStruct { x: mut y } = MyStruct { x: a }; +// y is now in scope +``` + +Note that mutability in noir is local and everything is passed by value, so if a called function +mutates its parameters then the parent function will keep the old value of the parameters. + +```rust +fn main() -> pub Field { + let x = 3; + helper(x); + x // x is still 3 +} + +fn helper(mut x: i32) { + x = 4; +} +``` + +## Comptime Values + +:::warning + +The 'comptime' keyword was removed in version 0.10. The comptime keyword and syntax are currently still kept and parsed for backwards compatibility, but are now deprecated and will issue a warning when used. `comptime` has been removed because it is no longer needed for accessing arrays. + +::: + +## Globals + +Noir also supports global variables. However, they must be known at compile-time. The global type can also be inferred by the compiler entirely. Globals can also be used to specify array +annotations for function parameters and can be imported from submodules. + +```rust +global N: Field = 5; // Same as `global N: Field = 5` + +fn main(x : Field, y : [Field; N]) { + let res = x * N; + + assert(res == y[0]); + + let res2 = x * my_submodule::N; + assert(res != res2); +} + +mod my_submodule { + use dep::std; + + global N: Field = 10; + + fn my_helper() -> Field { + let x = N; + x + } +} +``` + +## Why only local mutability? + +Witnesses in a proving system are immutable in nature. Noir aims to _closely_ mirror this setting +without applying additional overhead to the user. Modeling a mutable reference is not as +straightforward as on conventional architectures and would incur some possibly unexpected overhead. 
diff --git a/docs/versioned_docs/version-v0.22.0/noir/syntax/ops.md b/docs/versioned_docs/version-v0.22.0/noir/syntax/ops.md new file mode 100644 index 00000000000..60425cb8994 --- /dev/null +++ b/docs/versioned_docs/version-v0.22.0/noir/syntax/ops.md @@ -0,0 +1,98 @@ +--- +title: Logical Operations +description: + Learn about the supported arithmetic and logical operations in the Noir programming language. + Discover how to perform operations on private input types, integers, and booleans. +keywords: + [ + Noir programming language, + supported operations, + arithmetic operations, + logical operations, + predicate operators, + bitwise operations, + short-circuiting, + backend, + ] +sidebar_position: 3 +--- + +# Operations + +## Table of Supported Operations + +| Operation | Description | Requirements | +| :-------- | :------------------------------------------------------------: | -------------------------------------: | +| + | Adds two private input types together | Types must be private input | +| - | Subtracts two private input types together | Types must be private input | +| \* | Multiplies two private input types together | Types must be private input | +| / | Divides two private input types together | Types must be private input | +| ^ | XOR two private input types together | Types must be integer | +| & | AND two private input types together | Types must be integer | +| \| | OR two private input types together | Types must be integer | +| \<\< | Left shift an integer by another integer amount | Types must be integer | +| >> | Right shift an integer by another integer amount | Types must be integer | +| ! | Bitwise not of a value | Type must be integer or boolean | +| \< | returns a bool if one value is less than the other | Upper bound must have a known bit size | +| \<= | returns a bool if one value is less than or equal to the other | Upper bound must have a known bit size | +| > | returns a bool if one value is more than the other | Upper bound must have a known bit size | +| >= | returns a bool if one value is more than or equal to the other | Upper bound must have a known bit size | +| == | returns a bool if one value is equal to the other | Both types must not be constants | +| != | returns a bool if one value is not equal to the other | Both types must not be constants | + +### Predicate Operators + +`<,<=, !=, == , >, >=` are known as predicate/comparison operations because they compare two values. +This differs from the operations such as `+` where the operands are used in _computation_. + +### Bitwise Operations Example + +```rust +fn main(x : Field) { + let y = x as u32; + let z = y & y; +} +``` + +`z` is implicitly constrained to be the result of `y & y`. The `&` operand is used to denote bitwise +`&`. + +> `x & x` would not compile as `x` is a `Field` and not an integer type. + +### Logical Operators + +Noir has no support for the logical operators `||` and `&&`. This is because encoding the +short-circuiting that these operators require can be inefficient for Noir's backend. Instead you can +use the bitwise operators `|` and `&` which operate identically for booleans, just without the +short-circuiting. + +```rust +let my_val = 5; + +let mut flag = 1; +if (my_val > 6) | (my_val == 0) { + flag = 0; +} +assert(flag == 1); + +if (my_val != 10) & (my_val < 50) { + flag = 0; +} +assert(flag == 0); +``` + +### Shorthand operators + +Noir shorthand operators for most of the above operators, namely `+=, -=, *=, /=, %=, &=, |=, ^=, <<=`, and `>>=`. 
These allow for more concise syntax. For example: + +```rust +let mut i = 0; +i = i + 1; +``` + +could be written as: + +```rust +let mut i = 0; +i += 1; +``` diff --git a/docs/versioned_docs/version-v0.22.0/noir/syntax/shadowing.md b/docs/versioned_docs/version-v0.22.0/noir/syntax/shadowing.md new file mode 100644 index 00000000000..b5a6b6b38b9 --- /dev/null +++ b/docs/versioned_docs/version-v0.22.0/noir/syntax/shadowing.md @@ -0,0 +1,44 @@ +--- +title: Shadowing +sidebar_position: 11 +--- + +Noir allows for inheriting variables' values and re-declaring them with the same name similar to Rust, known as shadowing. + +For example, the following function is valid in Noir: + +```rust +fn main() { + let x = 5; + + { + let x = x * 2; + assert (x == 10); + } + + assert (x == 5); +} +``` + +In this example, a variable x is first defined with the value 5. + +The local scope that follows shadows the original x, i.e. creates a local mutable x based on the value of the original x. It is given a value of 2 times the original x. + +When we return to the main scope, x once again refers to just the original x, which stays at the value of 5. + +## Temporal mutability + +One way that shadowing is useful, in addition to ergonomics across scopes, is for temporarily mutating variables. + +```rust +fn main() { + let age = 30; + // age = age + 5; // Would error as `age` is immutable by default. + + let mut age = age + 5; // Temporarily mutates `age` with a new value. + + let age = age; // Locks `age`'s mutability again. + + assert (age == 35); +} +``` diff --git a/docs/versioned_docs/version-v0.22.0/noir/syntax/unconstrained.md b/docs/versioned_docs/version-v0.22.0/noir/syntax/unconstrained.md new file mode 100644 index 00000000000..6b3424f7993 --- /dev/null +++ b/docs/versioned_docs/version-v0.22.0/noir/syntax/unconstrained.md @@ -0,0 +1,95 @@ +--- +title: Unconstrained Functions +description: "Learn about what unconstrained functions in Noir are, how to use them and when you'd want to." + +keywords: [Noir programming language, unconstrained, open] +sidebar_position: 5 +--- + +Unconstrained functions are functions which do not constrain any of the included computation and allow for non-deterministic computation. + +## Why? + +Zero-knowledge (ZK) domain-specific languages (DSL) enable developers to generate ZK proofs from their programs by compiling code down to the constraints of an NP complete language (such as R1CS or PLONKish languages). However, the hard bounds of a constraint system can be very limiting to the functionality of a ZK DSL. + +Enabling a circuit language to perform unconstrained execution is a powerful tool. Said another way, unconstrained execution lets developers generate witnesses from code that does not generate any constraints. Being able to execute logic outside of a circuit is critical for both circuit performance and constructing proofs on information that is external to a circuit. + +Fetching information from somewhere external to a circuit can also be used to enable developers to improve circuit efficiency. + +A ZK DSL does not just prove computation, but proves that some computation was handled correctly. Thus, it is necessary that when we switch from performing some operation directly inside of a circuit to inside of an unconstrained environment that the appropriate constraints are still laid down elsewhere in the circuit. + +## Example + +An in depth example might help drive the point home. 
This example comes from the excellent [post](https://discord.com/channels/1113924620781883405/1124022445054111926/1128747641853972590) by Tom in the Noir Discord.
+
+Let's look at how we can optimize a function to turn a `u72` into an array of `u8`s.
+
+```rust
+fn main(num: u72) -> pub [u8; 8] {
+    let mut out: [u8; 8] = [0; 8];
+    for i in 0..8 {
+        out[i] = (num >> (56 - (i * 8)) as u72 & 0xff) as u8;
+    }
+
+    out
+}
+```
+
+```
+Total ACIR opcodes generated for language PLONKCSat { width: 3 }: 91
+Backend circuit size: 3619
+```
+
+A lot of the operations in this function are optimized away by the compiler (all the bit-shifts turn into divisions by constants). However, we can save a bunch of gates by casting to u8 a bit earlier. This automatically truncates the bit-shifted value to fit in a u8, which allows us to remove the AND against 0xff. This saves us ~480 gates in total.
+
+```rust
+fn main(num: u72) -> pub [u8; 8] {
+    let mut out: [u8; 8] = [0; 8];
+    for i in 0..8 {
+        out[i] = (num >> (56 - (i * 8))) as u8;
+    }
+
+    out
+}
```
+
+```
+Total ACIR opcodes generated for language PLONKCSat { width: 3 }: 75
+Backend circuit size: 3143
+```
+
+Those are some nice savings already, but we can do better. This code is all constrained, so we're proving every step of calculating `out` using `num`, but we don't actually care about how we calculate this, just that it's correct. This is where brillig comes in.
+
+It turns out that truncating a u72 into a u8 is hard to do inside a snark: each time we do `as u8` we lay down 4 ACIR opcodes, which get converted into multiple gates. It's actually much easier to calculate `num` from `out` than the other way around. All we need to do is multiply each element of `out` by a constant and add them all together, both relatively easy operations inside a snark.
+
+We can then run `u72_to_u8` as unconstrained brillig code in order to calculate `out`, then use that result in our constrained function and assert that if we were to do the reverse calculation we'd get back `num`. This looks a little like the below:
+
+```rust
+fn main(num: u72) -> pub [u8; 8] {
+    let out = u72_to_u8(num);
+
+    let mut reconstructed_num: u72 = 0;
+    for i in 0..8 {
+        reconstructed_num += (out[i] as u72 << (56 - (8 * i)));
+    }
+    assert(num == reconstructed_num);
+    out
+}
+
+unconstrained fn u72_to_u8(num: u72) -> [u8; 8] {
+    let mut out: [u8; 8] = [0; 8];
+    for i in 0..8 {
+        out[i] = (num >> (56 - (i * 8))) as u8;
+    }
+    out
+}
+```
+
+```
+Total ACIR opcodes generated for language PLONKCSat { width: 3 }: 78
+Backend circuit size: 2902
+```
+
+This ends up taking off another ~250 gates from our circuit! We've ended up with more ACIR opcodes than before, but they're easier for the backend to prove (resulting in fewer gates).
+
+Generally we want to use brillig whenever there's something that's easy to verify but hard to compute within the circuit. For example, if you wanted to calculate the square root of a number, it'll be a much better idea to calculate this in brillig and then assert that if you square the result you get back your number.
diff --git a/docs/versioned_docs/version-v0.22.0/reference/NoirJS/backend_barretenberg/.nojekyll b/docs/versioned_docs/version-v0.22.0/reference/NoirJS/backend_barretenberg/.nojekyll
new file mode 100644
index 00000000000..e2ac6616add
--- /dev/null
+++ b/docs/versioned_docs/version-v0.22.0/reference/NoirJS/backend_barretenberg/.nojekyll
@@ -0,0 +1 @@
+TypeDoc added this file to prevent GitHub Pages from using Jekyll.
You can turn off this behavior by setting the `githubPages` option to false. \ No newline at end of file diff --git a/docs/versioned_docs/version-v0.22.0/reference/NoirJS/backend_barretenberg/classes/BarretenbergBackend.md b/docs/versioned_docs/version-v0.22.0/reference/NoirJS/backend_barretenberg/classes/BarretenbergBackend.md new file mode 100644 index 00000000000..5cbe9421b92 --- /dev/null +++ b/docs/versioned_docs/version-v0.22.0/reference/NoirJS/backend_barretenberg/classes/BarretenbergBackend.md @@ -0,0 +1,185 @@ +# BarretenbergBackend + +## Implements + +- [`Backend`](../interfaces/Backend.md) + +## Constructors + +### new BarretenbergBackend(acirCircuit, options) + +```ts +new BarretenbergBackend(acirCircuit, options): BarretenbergBackend +``` + +#### Parameters + +| Parameter | Type | +| :------ | :------ | +| `acirCircuit` | [`CompiledCircuit`](../type-aliases/CompiledCircuit.md) | +| `options` | [`BackendOptions`](../type-aliases/BackendOptions.md) | + +#### Returns + +[`BarretenbergBackend`](BarretenbergBackend.md) + +## Methods + +### destroy() + +```ts +destroy(): Promise +``` + +#### Returns + +`Promise`\<`void`\> + +#### Implementation of + +[`Backend`](../interfaces/Backend.md).[`destroy`](../interfaces/Backend.md#destroy) + +#### Description + +Destroys the backend + +*** + +### generateFinalProof() + +```ts +generateFinalProof(decompressedWitness): Promise +``` + +#### Parameters + +| Parameter | Type | +| :------ | :------ | +| `decompressedWitness` | `Uint8Array` | + +#### Returns + +`Promise`\<[`ProofData`](../type-aliases/ProofData.md)\> + +#### Implementation of + +[`Backend`](../interfaces/Backend.md).[`generateFinalProof`](../interfaces/Backend.md#generatefinalproof) + +#### Description + +Generates a final proof (not meant to be verified in another circuit) + +*** + +### generateIntermediateProof() + +```ts +generateIntermediateProof(witness): Promise +``` + +#### Parameters + +| Parameter | Type | +| :------ | :------ | +| `witness` | `Uint8Array` | + +#### Returns + +`Promise`\<[`ProofData`](../type-aliases/ProofData.md)\> + +#### Implementation of + +[`Backend`](../interfaces/Backend.md).[`generateIntermediateProof`](../interfaces/Backend.md#generateintermediateproof) + +#### Example + +```typescript +const intermediateProof = await backend.generateIntermediateProof(witness); +``` + +*** + +### generateIntermediateProofArtifacts() + +```ts +generateIntermediateProofArtifacts(proofData, numOfPublicInputs): Promise +``` + +#### Parameters + +| Parameter | Type | Default value | +| :------ | :------ | :------ | +| `proofData` | [`ProofData`](../type-aliases/ProofData.md) | `undefined` | +| `numOfPublicInputs` | `number` | `0` | + +#### Returns + +`Promise`\<`object`\> + +#### Implementation of + +[`Backend`](../interfaces/Backend.md).[`generateIntermediateProofArtifacts`](../interfaces/Backend.md#generateintermediateproofartifacts) + +#### Example + +```typescript +const artifacts = await backend.generateIntermediateProofArtifacts(proof, numOfPublicInputs); +``` + +*** + +### verifyFinalProof() + +```ts +verifyFinalProof(proofData): Promise +``` + +#### Parameters + +| Parameter | Type | +| :------ | :------ | +| `proofData` | [`ProofData`](../type-aliases/ProofData.md) | + +#### Returns + +`Promise`\<`boolean`\> + +#### Implementation of + +[`Backend`](../interfaces/Backend.md).[`verifyFinalProof`](../interfaces/Backend.md#verifyfinalproof) + +#### Description + +Verifies a final proof + +*** + +### verifyIntermediateProof() + +```ts 
+verifyIntermediateProof(proofData): Promise +``` + +#### Parameters + +| Parameter | Type | +| :------ | :------ | +| `proofData` | [`ProofData`](../type-aliases/ProofData.md) | + +#### Returns + +`Promise`\<`boolean`\> + +#### Implementation of + +[`Backend`](../interfaces/Backend.md).[`verifyIntermediateProof`](../interfaces/Backend.md#verifyintermediateproof) + +#### Example + +```typescript +const isValidIntermediate = await backend.verifyIntermediateProof(proof); +``` + +*** + +Generated using [typedoc-plugin-markdown](https://www.npmjs.com/package/typedoc-plugin-markdown) and [TypeDoc](https://typedoc.org/) diff --git a/docs/versioned_docs/version-v0.22.0/reference/NoirJS/backend_barretenberg/index.md b/docs/versioned_docs/version-v0.22.0/reference/NoirJS/backend_barretenberg/index.md new file mode 100644 index 00000000000..bfbecb52864 --- /dev/null +++ b/docs/versioned_docs/version-v0.22.0/reference/NoirJS/backend_barretenberg/index.md @@ -0,0 +1,45 @@ +# backend_barretenberg + +## Exports + +### Classes + +| Class | Description | +| :------ | :------ | +| [BarretenbergBackend](classes/BarretenbergBackend.md) | - | + +### Interfaces + +| Interface | Description | +| :------ | :------ | +| [Backend](interfaces/Backend.md) | - | + +### Type Aliases + +| Type alias | Description | +| :------ | :------ | +| [BackendOptions](type-aliases/BackendOptions.md) | - | +| [CompiledCircuit](type-aliases/CompiledCircuit.md) | - | +| [ProofData](type-aliases/ProofData.md) | - | + +## Functions + +### flattenPublicInputs() + +```ts +flattenPublicInputs(publicInputs): string[] +``` + +#### Parameters + +| Parameter | Type | +| :------ | :------ | +| `publicInputs` | `WitnessMap` | + +#### Returns + +`string`[] + +*** + +Generated using [typedoc-plugin-markdown](https://www.npmjs.com/package/typedoc-plugin-markdown) and [TypeDoc](https://typedoc.org/) diff --git a/docs/versioned_docs/version-v0.22.0/reference/NoirJS/backend_barretenberg/interfaces/Backend.md b/docs/versioned_docs/version-v0.22.0/reference/NoirJS/backend_barretenberg/interfaces/Backend.md new file mode 100644 index 00000000000..3eb9645c8d2 --- /dev/null +++ b/docs/versioned_docs/version-v0.22.0/reference/NoirJS/backend_barretenberg/interfaces/Backend.md @@ -0,0 +1,132 @@ +# Backend + +## Methods + +### destroy() + +```ts +destroy(): Promise +``` + +#### Returns + +`Promise`\<`void`\> + +#### Description + +Destroys the backend + +*** + +### generateFinalProof() + +```ts +generateFinalProof(decompressedWitness): Promise +``` + +#### Parameters + +| Parameter | Type | +| :------ | :------ | +| `decompressedWitness` | `Uint8Array` | + +#### Returns + +`Promise`\<[`ProofData`](../type-aliases/ProofData.md)\> + +#### Description + +Generates a final proof (not meant to be verified in another circuit) + +*** + +### generateIntermediateProof() + +```ts +generateIntermediateProof(decompressedWitness): Promise +``` + +#### Parameters + +| Parameter | Type | +| :------ | :------ | +| `decompressedWitness` | `Uint8Array` | + +#### Returns + +`Promise`\<[`ProofData`](../type-aliases/ProofData.md)\> + +#### Description + +Generates an intermediate proof (meant to be verified in another circuit) + +*** + +### generateIntermediateProofArtifacts() + +```ts +generateIntermediateProofArtifacts(proofData, numOfPublicInputs): Promise +``` + +#### Parameters + +| Parameter | Type | +| :------ | :------ | +| `proofData` | [`ProofData`](../type-aliases/ProofData.md) | +| `numOfPublicInputs` | `number` | + +#### Returns + +`Promise`\<`object`\> + +#### 
Description + +Retrieves the artifacts from a proof in the Field format + +*** + +### verifyFinalProof() + +```ts +verifyFinalProof(proofData): Promise +``` + +#### Parameters + +| Parameter | Type | +| :------ | :------ | +| `proofData` | [`ProofData`](../type-aliases/ProofData.md) | + +#### Returns + +`Promise`\<`boolean`\> + +#### Description + +Verifies a final proof + +*** + +### verifyIntermediateProof() + +```ts +verifyIntermediateProof(proofData): Promise +``` + +#### Parameters + +| Parameter | Type | +| :------ | :------ | +| `proofData` | [`ProofData`](../type-aliases/ProofData.md) | + +#### Returns + +`Promise`\<`boolean`\> + +#### Description + +Verifies an intermediate proof + +*** + +Generated using [typedoc-plugin-markdown](https://www.npmjs.com/package/typedoc-plugin-markdown) and [TypeDoc](https://typedoc.org/) diff --git a/docs/versioned_docs/version-v0.22.0/reference/NoirJS/backend_barretenberg/type-aliases/BackendOptions.md b/docs/versioned_docs/version-v0.22.0/reference/NoirJS/backend_barretenberg/type-aliases/BackendOptions.md new file mode 100644 index 00000000000..266ade75d17 --- /dev/null +++ b/docs/versioned_docs/version-v0.22.0/reference/NoirJS/backend_barretenberg/type-aliases/BackendOptions.md @@ -0,0 +1,19 @@ +# BackendOptions + +```ts +type BackendOptions: object; +``` + +## Description + +An options object, currently only used to specify the number of threads to use. + +## Type declaration + +| Member | Type | Description | +| :------ | :------ | :------ | +| `threads` | `number` | **Description**

Number of threads | + +*** + +Generated using [typedoc-plugin-markdown](https://www.npmjs.com/package/typedoc-plugin-markdown) and [TypeDoc](https://typedoc.org/) diff --git a/docs/versioned_docs/version-v0.22.0/reference/NoirJS/backend_barretenberg/type-aliases/CompiledCircuit.md b/docs/versioned_docs/version-v0.22.0/reference/NoirJS/backend_barretenberg/type-aliases/CompiledCircuit.md new file mode 100644 index 00000000000..34e0dd04205 --- /dev/null +++ b/docs/versioned_docs/version-v0.22.0/reference/NoirJS/backend_barretenberg/type-aliases/CompiledCircuit.md @@ -0,0 +1,20 @@ +# CompiledCircuit + +```ts +type CompiledCircuit: object; +``` + +## Description + +The representation of a compiled circuit + +## Type declaration + +| Member | Type | Description | +| :------ | :------ | :------ | +| `abi` | `Abi` | **Description**

ABI representation of the circuit | +| `bytecode` | `string` | **Description**

The bytecode of the circuit | + +*** + +Generated using [typedoc-plugin-markdown](https://www.npmjs.com/package/typedoc-plugin-markdown) and [TypeDoc](https://typedoc.org/) diff --git a/docs/versioned_docs/version-v0.22.0/reference/NoirJS/backend_barretenberg/type-aliases/ProofData.md b/docs/versioned_docs/version-v0.22.0/reference/NoirJS/backend_barretenberg/type-aliases/ProofData.md new file mode 100644 index 00000000000..3eb360a78f1 --- /dev/null +++ b/docs/versioned_docs/version-v0.22.0/reference/NoirJS/backend_barretenberg/type-aliases/ProofData.md @@ -0,0 +1,20 @@ +# ProofData + +```ts +type ProofData: object; +``` + +## Description + +The representation of a proof + +## Type declaration + +| Member | Type | Description | +| :------ | :------ | :------ | +| `proof` | `Uint8Array` | **Description**

An byte array representing the proof | +| `publicInputs` | `WitnessMap` | **Description**

Public inputs of a proof | + +*** + +Generated using [typedoc-plugin-markdown](https://www.npmjs.com/package/typedoc-plugin-markdown) and [TypeDoc](https://typedoc.org/) diff --git a/docs/versioned_docs/version-v0.22.0/reference/NoirJS/backend_barretenberg/typedoc-sidebar.cjs b/docs/versioned_docs/version-v0.22.0/reference/NoirJS/backend_barretenberg/typedoc-sidebar.cjs new file mode 100644 index 00000000000..2aaa55bccf6 --- /dev/null +++ b/docs/versioned_docs/version-v0.22.0/reference/NoirJS/backend_barretenberg/typedoc-sidebar.cjs @@ -0,0 +1,4 @@ +// @ts-check +/** @type {import('@docusaurus/plugin-content-docs').SidebarsConfig} */ +const typedocSidebar = { items: [{"type":"category","label":"Classes","items":[{"type":"doc","id":"reference/NoirJS/backend_barretenberg/classes/BarretenbergBackend","label":"BarretenbergBackend"}]},{"type":"category","label":"Interfaces","items":[{"type":"doc","id":"reference/NoirJS/backend_barretenberg/interfaces/Backend","label":"Backend"}]},{"type":"category","label":"Type Aliases","items":[{"type":"doc","id":"reference/NoirJS/backend_barretenberg/type-aliases/BackendOptions","label":"BackendOptions"},{"type":"doc","id":"reference/NoirJS/backend_barretenberg/type-aliases/CompiledCircuit","label":"CompiledCircuit"},{"type":"doc","id":"reference/NoirJS/backend_barretenberg/type-aliases/ProofData","label":"ProofData"}]}]}; +module.exports = typedocSidebar.items; \ No newline at end of file diff --git a/docs/versioned_docs/version-v0.22.0/reference/NoirJS/noir_js/.nojekyll b/docs/versioned_docs/version-v0.22.0/reference/NoirJS/noir_js/.nojekyll new file mode 100644 index 00000000000..e2ac6616add --- /dev/null +++ b/docs/versioned_docs/version-v0.22.0/reference/NoirJS/noir_js/.nojekyll @@ -0,0 +1 @@ +TypeDoc added this file to prevent GitHub Pages from using Jekyll. You can turn off this behavior by setting the `githubPages` option to false. \ No newline at end of file diff --git a/docs/versioned_docs/version-v0.22.0/reference/NoirJS/noir_js/classes/Noir.md b/docs/versioned_docs/version-v0.22.0/reference/NoirJS/noir_js/classes/Noir.md new file mode 100644 index 00000000000..34e20d99684 --- /dev/null +++ b/docs/versioned_docs/version-v0.22.0/reference/NoirJS/noir_js/classes/Noir.md @@ -0,0 +1,132 @@ +# Noir + +## Constructors + +### new Noir(circuit, backend) + +```ts +new Noir(circuit, backend?): Noir +``` + +#### Parameters + +| Parameter | Type | +| :------ | :------ | +| `circuit` | [`CompiledCircuit`](../type-aliases/CompiledCircuit.md) | +| `backend`? | `Backend` | + +#### Returns + +[`Noir`](Noir.md) + +## Methods + +### destroy() + +```ts +destroy(): Promise +``` + +#### Returns + +`Promise`\<`void`\> + +#### Description + +Destroys the underlying backend instance. + +#### Example + +```typescript +await noir.destroy(); +``` + +*** + +### execute() + +```ts +execute(inputs, foreignCallHandler?): Promise +``` + +#### Parameters + +| Parameter | Type | +| :------ | :------ | +| `inputs` | [`InputMap`](../type-aliases/InputMap.md) | +| `foreignCallHandler`? | [`ForeignCallHandler`](../type-aliases/ForeignCallHandler.md) | + +#### Returns + +`Promise`\<`object`\> + +#### Description + +Allows to execute a circuit to get its witness and return value. 
+ +#### Example + +```typescript +async execute(inputs) +``` + +*** + +### generateFinalProof() + +```ts +generateFinalProof(inputs, foreignCallHandler?): Promise +``` + +#### Parameters + +| Parameter | Type | +| :------ | :------ | +| `inputs` | [`InputMap`](../type-aliases/InputMap.md) | +| `foreignCallHandler`? | [`ForeignCallHandler`](../type-aliases/ForeignCallHandler.md) | + +#### Returns + +`Promise`\<[`ProofData`](../type-aliases/ProofData.md)\> + +#### Description + +Generates a witness and a proof given an object as input. + +#### Example + +```typescript +async generateFinalProof(input) +``` + +*** + +### verifyFinalProof() + +```ts +verifyFinalProof(proofData): Promise +``` + +#### Parameters + +| Parameter | Type | +| :------ | :------ | +| `proofData` | [`ProofData`](../type-aliases/ProofData.md) | + +#### Returns + +`Promise`\<`boolean`\> + +#### Description + +Instantiates the verification key and verifies a proof. + +#### Example + +```typescript +async verifyFinalProof(proof) +``` + +*** + +Generated using [typedoc-plugin-markdown](https://www.npmjs.com/package/typedoc-plugin-markdown) and [TypeDoc](https://typedoc.org/) diff --git a/docs/versioned_docs/version-v0.22.0/reference/NoirJS/noir_js/functions/and.md b/docs/versioned_docs/version-v0.22.0/reference/NoirJS/noir_js/functions/and.md new file mode 100644 index 00000000000..c783283e396 --- /dev/null +++ b/docs/versioned_docs/version-v0.22.0/reference/NoirJS/noir_js/functions/and.md @@ -0,0 +1,22 @@ +# and() + +```ts +and(lhs, rhs): string +``` + +Performs a bitwise AND operation between `lhs` and `rhs` + +## Parameters + +| Parameter | Type | Description | +| :------ | :------ | :------ | +| `lhs` | `string` | | +| `rhs` | `string` | | + +## Returns + +`string` + +*** + +Generated using [typedoc-plugin-markdown](https://www.npmjs.com/package/typedoc-plugin-markdown) and [TypeDoc](https://typedoc.org/) diff --git a/docs/versioned_docs/version-v0.22.0/reference/NoirJS/noir_js/functions/blake2s256.md b/docs/versioned_docs/version-v0.22.0/reference/NoirJS/noir_js/functions/blake2s256.md new file mode 100644 index 00000000000..7882d0da8d5 --- /dev/null +++ b/docs/versioned_docs/version-v0.22.0/reference/NoirJS/noir_js/functions/blake2s256.md @@ -0,0 +1,21 @@ +# blake2s256() + +```ts +blake2s256(inputs): Uint8Array +``` + +Calculates the Blake2s256 hash of the input bytes + +## Parameters + +| Parameter | Type | Description | +| :------ | :------ | :------ | +| `inputs` | `Uint8Array` | | + +## Returns + +`Uint8Array` + +*** + +Generated using [typedoc-plugin-markdown](https://www.npmjs.com/package/typedoc-plugin-markdown) and [TypeDoc](https://typedoc.org/) diff --git a/docs/versioned_docs/version-v0.22.0/reference/NoirJS/noir_js/functions/ecdsa_secp256k1_verify.md b/docs/versioned_docs/version-v0.22.0/reference/NoirJS/noir_js/functions/ecdsa_secp256k1_verify.md new file mode 100644 index 00000000000..0ba5783f0d5 --- /dev/null +++ b/docs/versioned_docs/version-v0.22.0/reference/NoirJS/noir_js/functions/ecdsa_secp256k1_verify.md @@ -0,0 +1,29 @@ +# ecdsa\_secp256k1\_verify() + +```ts +ecdsa_secp256k1_verify( + hashed_msg, + public_key_x_bytes, + public_key_y_bytes, + signature): boolean +``` + +Calculates the Blake2s256 hash of the input bytes and represents these as a single field element. +Verifies a ECDSA signature over the secp256k1 curve. 
+ +## Parameters + +| Parameter | Type | Description | +| :------ | :------ | :------ | +| `hashed_msg` | `Uint8Array` | | +| `public_key_x_bytes` | `Uint8Array` | | +| `public_key_y_bytes` | `Uint8Array` | | +| `signature` | `Uint8Array` | | + +## Returns + +`boolean` + +*** + +Generated using [typedoc-plugin-markdown](https://www.npmjs.com/package/typedoc-plugin-markdown) and [TypeDoc](https://typedoc.org/) diff --git a/docs/versioned_docs/version-v0.22.0/reference/NoirJS/noir_js/functions/ecdsa_secp256r1_verify.md b/docs/versioned_docs/version-v0.22.0/reference/NoirJS/noir_js/functions/ecdsa_secp256r1_verify.md new file mode 100644 index 00000000000..0b20ff68957 --- /dev/null +++ b/docs/versioned_docs/version-v0.22.0/reference/NoirJS/noir_js/functions/ecdsa_secp256r1_verify.md @@ -0,0 +1,28 @@ +# ecdsa\_secp256r1\_verify() + +```ts +ecdsa_secp256r1_verify( + hashed_msg, + public_key_x_bytes, + public_key_y_bytes, + signature): boolean +``` + +Verifies a ECDSA signature over the secp256r1 curve. + +## Parameters + +| Parameter | Type | Description | +| :------ | :------ | :------ | +| `hashed_msg` | `Uint8Array` | | +| `public_key_x_bytes` | `Uint8Array` | | +| `public_key_y_bytes` | `Uint8Array` | | +| `signature` | `Uint8Array` | | + +## Returns + +`boolean` + +*** + +Generated using [typedoc-plugin-markdown](https://www.npmjs.com/package/typedoc-plugin-markdown) and [TypeDoc](https://typedoc.org/) diff --git a/docs/versioned_docs/version-v0.22.0/reference/NoirJS/noir_js/functions/keccak256.md b/docs/versioned_docs/version-v0.22.0/reference/NoirJS/noir_js/functions/keccak256.md new file mode 100644 index 00000000000..d10f155ce86 --- /dev/null +++ b/docs/versioned_docs/version-v0.22.0/reference/NoirJS/noir_js/functions/keccak256.md @@ -0,0 +1,21 @@ +# keccak256() + +```ts +keccak256(inputs): Uint8Array +``` + +Calculates the Keccak256 hash of the input bytes + +## Parameters + +| Parameter | Type | Description | +| :------ | :------ | :------ | +| `inputs` | `Uint8Array` | | + +## Returns + +`Uint8Array` + +*** + +Generated using [typedoc-plugin-markdown](https://www.npmjs.com/package/typedoc-plugin-markdown) and [TypeDoc](https://typedoc.org/) diff --git a/docs/versioned_docs/version-v0.22.0/reference/NoirJS/noir_js/functions/sha256.md b/docs/versioned_docs/version-v0.22.0/reference/NoirJS/noir_js/functions/sha256.md new file mode 100644 index 00000000000..6ba4ecac022 --- /dev/null +++ b/docs/versioned_docs/version-v0.22.0/reference/NoirJS/noir_js/functions/sha256.md @@ -0,0 +1,21 @@ +# sha256() + +```ts +sha256(inputs): Uint8Array +``` + +Calculates the SHA256 hash of the input bytes + +## Parameters + +| Parameter | Type | Description | +| :------ | :------ | :------ | +| `inputs` | `Uint8Array` | | + +## Returns + +`Uint8Array` + +*** + +Generated using [typedoc-plugin-markdown](https://www.npmjs.com/package/typedoc-plugin-markdown) and [TypeDoc](https://typedoc.org/) diff --git a/docs/versioned_docs/version-v0.22.0/reference/NoirJS/noir_js/functions/xor.md b/docs/versioned_docs/version-v0.22.0/reference/NoirJS/noir_js/functions/xor.md new file mode 100644 index 00000000000..8d762b895d3 --- /dev/null +++ b/docs/versioned_docs/version-v0.22.0/reference/NoirJS/noir_js/functions/xor.md @@ -0,0 +1,22 @@ +# xor() + +```ts +xor(lhs, rhs): string +``` + +Performs a bitwise XOR operation between `lhs` and `rhs` + +## Parameters + +| Parameter | Type | Description | +| :------ | :------ | :------ | +| `lhs` | `string` | | +| `rhs` | `string` | | + +## Returns + +`string` + +*** + +Generated 
using [typedoc-plugin-markdown](https://www.npmjs.com/package/typedoc-plugin-markdown) and [TypeDoc](https://typedoc.org/) diff --git a/docs/versioned_docs/version-v0.22.0/reference/NoirJS/noir_js/index.md b/docs/versioned_docs/version-v0.22.0/reference/NoirJS/noir_js/index.md new file mode 100644 index 00000000000..8b9e35bc9a1 --- /dev/null +++ b/docs/versioned_docs/version-v0.22.0/reference/NoirJS/noir_js/index.md @@ -0,0 +1,37 @@ +# noir_js + +## Exports + +### Classes + +| Class | Description | +| :------ | :------ | +| [Noir](classes/Noir.md) | - | + +### Type Aliases + +| Type alias | Description | +| :------ | :------ | +| [CompiledCircuit](type-aliases/CompiledCircuit.md) | - | +| [ForeignCallHandler](type-aliases/ForeignCallHandler.md) | A callback which performs an foreign call and returns the response. | +| [ForeignCallInput](type-aliases/ForeignCallInput.md) | - | +| [ForeignCallOutput](type-aliases/ForeignCallOutput.md) | - | +| [InputMap](type-aliases/InputMap.md) | - | +| [ProofData](type-aliases/ProofData.md) | - | +| [WitnessMap](type-aliases/WitnessMap.md) | - | + +### Functions + +| Function | Description | +| :------ | :------ | +| [and](functions/and.md) | Performs a bitwise AND operation between `lhs` and `rhs` | +| [blake2s256](functions/blake2s256.md) | Calculates the Blake2s256 hash of the input bytes | +| [ecdsa\_secp256k1\_verify](functions/ecdsa_secp256k1_verify.md) | Calculates the Blake2s256 hash of the input bytes and represents these as a single field element. | +| [ecdsa\_secp256r1\_verify](functions/ecdsa_secp256r1_verify.md) | Verifies a ECDSA signature over the secp256r1 curve. | +| [keccak256](functions/keccak256.md) | Calculates the Keccak256 hash of the input bytes | +| [sha256](functions/sha256.md) | Calculates the SHA256 hash of the input bytes | +| [xor](functions/xor.md) | Performs a bitwise XOR operation between `lhs` and `rhs` | + +*** + +Generated using [typedoc-plugin-markdown](https://www.npmjs.com/package/typedoc-plugin-markdown) and [TypeDoc](https://typedoc.org/) diff --git a/docs/versioned_docs/version-v0.22.0/reference/NoirJS/noir_js/type-aliases/CompiledCircuit.md b/docs/versioned_docs/version-v0.22.0/reference/NoirJS/noir_js/type-aliases/CompiledCircuit.md new file mode 100644 index 00000000000..34e0dd04205 --- /dev/null +++ b/docs/versioned_docs/version-v0.22.0/reference/NoirJS/noir_js/type-aliases/CompiledCircuit.md @@ -0,0 +1,20 @@ +# CompiledCircuit + +```ts +type CompiledCircuit: object; +``` + +## Description + +The representation of a compiled circuit + +## Type declaration + +| Member | Type | Description | +| :------ | :------ | :------ | +| `abi` | `Abi` | **Description**

ABI representation of the circuit | +| `bytecode` | `string` | **Description**

The bytecode of the circuit | + +*** + +Generated using [typedoc-plugin-markdown](https://www.npmjs.com/package/typedoc-plugin-markdown) and [TypeDoc](https://typedoc.org/) diff --git a/docs/versioned_docs/version-v0.22.0/reference/NoirJS/noir_js/type-aliases/ForeignCallHandler.md b/docs/versioned_docs/version-v0.22.0/reference/NoirJS/noir_js/type-aliases/ForeignCallHandler.md new file mode 100644 index 00000000000..812b8b16481 --- /dev/null +++ b/docs/versioned_docs/version-v0.22.0/reference/NoirJS/noir_js/type-aliases/ForeignCallHandler.md @@ -0,0 +1,24 @@ +# ForeignCallHandler + +```ts +type ForeignCallHandler: (name, inputs) => Promise; +``` + +A callback which performs an foreign call and returns the response. + +## Parameters + +| Parameter | Type | Description | +| :------ | :------ | :------ | +| `name` | `string` | The identifier for the type of foreign call being performed. | +| `inputs` | [`ForeignCallInput`](ForeignCallInput.md)[] | An array of hex encoded inputs to the foreign call. | + +## Returns + +`Promise`\<[`ForeignCallOutput`](ForeignCallOutput.md)[]\> + +outputs - An array of hex encoded outputs containing the results of the foreign call. + +*** + +Generated using [typedoc-plugin-markdown](https://www.npmjs.com/package/typedoc-plugin-markdown) and [TypeDoc](https://typedoc.org/) diff --git a/docs/versioned_docs/version-v0.22.0/reference/NoirJS/noir_js/type-aliases/ForeignCallInput.md b/docs/versioned_docs/version-v0.22.0/reference/NoirJS/noir_js/type-aliases/ForeignCallInput.md new file mode 100644 index 00000000000..dd95809186a --- /dev/null +++ b/docs/versioned_docs/version-v0.22.0/reference/NoirJS/noir_js/type-aliases/ForeignCallInput.md @@ -0,0 +1,9 @@ +# ForeignCallInput + +```ts +type ForeignCallInput: string[]; +``` + +*** + +Generated using [typedoc-plugin-markdown](https://www.npmjs.com/package/typedoc-plugin-markdown) and [TypeDoc](https://typedoc.org/) diff --git a/docs/versioned_docs/version-v0.22.0/reference/NoirJS/noir_js/type-aliases/ForeignCallOutput.md b/docs/versioned_docs/version-v0.22.0/reference/NoirJS/noir_js/type-aliases/ForeignCallOutput.md new file mode 100644 index 00000000000..b71fb78a946 --- /dev/null +++ b/docs/versioned_docs/version-v0.22.0/reference/NoirJS/noir_js/type-aliases/ForeignCallOutput.md @@ -0,0 +1,9 @@ +# ForeignCallOutput + +```ts +type ForeignCallOutput: string | string[]; +``` + +*** + +Generated using [typedoc-plugin-markdown](https://www.npmjs.com/package/typedoc-plugin-markdown) and [TypeDoc](https://typedoc.org/) diff --git a/docs/versioned_docs/version-v0.22.0/reference/NoirJS/noir_js/type-aliases/InputMap.md b/docs/versioned_docs/version-v0.22.0/reference/NoirJS/noir_js/type-aliases/InputMap.md new file mode 100644 index 00000000000..c714e999d93 --- /dev/null +++ b/docs/versioned_docs/version-v0.22.0/reference/NoirJS/noir_js/type-aliases/InputMap.md @@ -0,0 +1,13 @@ +# InputMap + +```ts +type InputMap: object; +``` + +## Index signature + + \[`key`: `string`\]: `InputValue` + +*** + +Generated using [typedoc-plugin-markdown](https://www.npmjs.com/package/typedoc-plugin-markdown) and [TypeDoc](https://typedoc.org/) diff --git a/docs/versioned_docs/version-v0.22.0/reference/NoirJS/noir_js/type-aliases/ProofData.md b/docs/versioned_docs/version-v0.22.0/reference/NoirJS/noir_js/type-aliases/ProofData.md new file mode 100644 index 00000000000..3eb360a78f1 --- /dev/null +++ b/docs/versioned_docs/version-v0.22.0/reference/NoirJS/noir_js/type-aliases/ProofData.md @@ -0,0 +1,20 @@ +# ProofData + +```ts +type ProofData: object; 
+``` + +## Description + +The representation of a proof + +## Type declaration + +| Member | Type | Description | +| :------ | :------ | :------ | +| `proof` | `Uint8Array` | **Description**

A byte array representing the proof | +| `publicInputs` | `WitnessMap` | **Description**

Public inputs of a proof | + +*** + +Generated using [typedoc-plugin-markdown](https://www.npmjs.com/package/typedoc-plugin-markdown) and [TypeDoc](https://typedoc.org/) diff --git a/docs/versioned_docs/version-v0.22.0/reference/NoirJS/noir_js/type-aliases/WitnessMap.md b/docs/versioned_docs/version-v0.22.0/reference/NoirJS/noir_js/type-aliases/WitnessMap.md new file mode 100644 index 00000000000..258c46f9d0c --- /dev/null +++ b/docs/versioned_docs/version-v0.22.0/reference/NoirJS/noir_js/type-aliases/WitnessMap.md @@ -0,0 +1,9 @@ +# WitnessMap + +```ts +type WitnessMap: Map; +``` + +*** + +Generated using [typedoc-plugin-markdown](https://www.npmjs.com/package/typedoc-plugin-markdown) and [TypeDoc](https://typedoc.org/) diff --git a/docs/versioned_docs/version-v0.22.0/reference/NoirJS/noir_js/typedoc-sidebar.cjs b/docs/versioned_docs/version-v0.22.0/reference/NoirJS/noir_js/typedoc-sidebar.cjs new file mode 100644 index 00000000000..fe2629ddc9f --- /dev/null +++ b/docs/versioned_docs/version-v0.22.0/reference/NoirJS/noir_js/typedoc-sidebar.cjs @@ -0,0 +1,4 @@ +// @ts-check +/** @type {import('@docusaurus/plugin-content-docs').SidebarsConfig} */ +const typedocSidebar = { items: [{"type":"category","label":"Classes","items":[{"type":"doc","id":"reference/NoirJS/noir_js/classes/Noir","label":"Noir"}]},{"type":"category","label":"Type Aliases","items":[{"type":"doc","id":"reference/NoirJS/noir_js/type-aliases/CompiledCircuit","label":"CompiledCircuit"},{"type":"doc","id":"reference/NoirJS/noir_js/type-aliases/ForeignCallHandler","label":"ForeignCallHandler"},{"type":"doc","id":"reference/NoirJS/noir_js/type-aliases/ForeignCallInput","label":"ForeignCallInput"},{"type":"doc","id":"reference/NoirJS/noir_js/type-aliases/ForeignCallOutput","label":"ForeignCallOutput"},{"type":"doc","id":"reference/NoirJS/noir_js/type-aliases/InputMap","label":"InputMap"},{"type":"doc","id":"reference/NoirJS/noir_js/type-aliases/ProofData","label":"ProofData"},{"type":"doc","id":"reference/NoirJS/noir_js/type-aliases/WitnessMap","label":"WitnessMap"}]},{"type":"category","label":"Functions","items":[{"type":"doc","id":"reference/NoirJS/noir_js/functions/and","label":"and"},{"type":"doc","id":"reference/NoirJS/noir_js/functions/blake2s256","label":"blake2s256"},{"type":"doc","id":"reference/NoirJS/noir_js/functions/ecdsa_secp256k1_verify","label":"ecdsa_secp256k1_verify"},{"type":"doc","id":"reference/NoirJS/noir_js/functions/ecdsa_secp256r1_verify","label":"ecdsa_secp256r1_verify"},{"type":"doc","id":"reference/NoirJS/noir_js/functions/keccak256","label":"keccak256"},{"type":"doc","id":"reference/NoirJS/noir_js/functions/sha256","label":"sha256"},{"type":"doc","id":"reference/NoirJS/noir_js/functions/xor","label":"xor"}]}]}; +module.exports = typedocSidebar.items; \ No newline at end of file diff --git a/docs/versioned_docs/version-v0.22.0/reference/_category_.json b/docs/versioned_docs/version-v0.22.0/reference/_category_.json new file mode 100644 index 00000000000..5b6a20a609a --- /dev/null +++ b/docs/versioned_docs/version-v0.22.0/reference/_category_.json @@ -0,0 +1,5 @@ +{ + "position": 4, + "collapsible": true, + "collapsed": true +} diff --git a/docs/versioned_docs/version-v0.22.0/reference/nargo_commands.md b/docs/versioned_docs/version-v0.22.0/reference/nargo_commands.md new file mode 100644 index 00000000000..ff3dee8973f --- /dev/null +++ b/docs/versioned_docs/version-v0.22.0/reference/nargo_commands.md @@ -0,0 +1,250 @@ +--- +title: Nargo +description: + Noir CLI Commands for Noir Prover and Verifier to 
create, execute, prove and verify programs, + generate Solidity verifier smart contract and compile into JSON file containing ACIR + representation and ABI of circuit. +keywords: + [ + Nargo, + Noir CLI, + Noir Prover, + Noir Verifier, + generate Solidity verifier, + compile JSON file, + ACIR representation, + ABI of circuit, + TypeScript, + ] +sidebar_position: 0 +--- + +## General options + +| Option | Description | +| -------------------- | -------------------------------------------------- | +| `--show-ssa` | Emit debug information for the intermediate SSA IR | +| `--deny-warnings` | Quit execution when warnings are emitted | +| `--silence-warnings` | Suppress warnings | +| `-h, --help` | Print help | + +## `nargo help [subcommand]` + +Prints the list of available commands or specific information of a subcommand. + +_Arguments_ + +| Argument | Description | +| -------------- | -------------------------------------------- | +| `` | The subcommand whose help message to display | + +## `nargo backend` + +Installs and selects custom backends used to generate and verify proofs. + +### Commands + +| Command | Description | +| ----------- | --------------------------------------------------------- | +| `current` | Prints the name of the currently active backend | +| `ls` | Prints the list of currently installed backends | +| `use` | Select the backend to use | +| `install` | Install a new backend from a URL | +| `uninstall` | Uninstalls a backend | +| `help` | Print this message or the help of the given subcommand(s) | + +### Options + +| Option | Description | +| ------------ | ----------- | +| `-h, --help` | Print help | + +## `nargo check` + +Generate the `Prover.toml` and `Verifier.toml` files for specifying prover and verifier in/output +values of the Noir program respectively. + +### Options + +| Option | Description | +| --------------------- | ------------------------------------- | +| `--package ` | The name of the package to check | +| `--workspace` | Check all packages in the workspace | +| `--print-acir` | Display the ACIR for compiled circuit | +| `--deny-warnings` | Treat all warnings as errors | +| `--silence-warnings` | Suppress warnings | +| `-h, --help` | Print help | + +### `nargo codegen-verifier` + +Generate a Solidity verifier smart contract for the program. + +### Options + +| Option | Description | +| --------------------- | ------------------------------------- | +| `--package ` | The name of the package to codegen | +| `--workspace` | Codegen all packages in the workspace | +| `--print-acir` | Display the ACIR for compiled circuit | +| `--deny-warnings` | Treat all warnings as errors | +| `--silence-warnings` | Suppress warnings | +| `-h, --help` | Print help | + +## `nargo compile` + +Compile the program into a JSON build artifact file containing the ACIR representation and the ABI +of the circuit. This build artifact can then be used to generate and verify proofs. + +You can also use "build" as an alias for compile (e.g. `nargo build`). + +### Options + +| Option | Description | +| --------------------- | ------------------------------------------------------------ | +| `--package ` | The name of the package to compile | +| `--workspace` | Compile all packages in the workspace | +| `--print-acir` | Display the ACIR for compiled circuit | +| `--deny-warnings` | Treat all warnings as errors | +| `--silence-warnings` | Suppress warnings | +| `-h, --help` | Print help | + +## `nargo new ` + +Creates a new Noir project in a new folder. 
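+
+For instance (the project name below is purely illustrative), a new binary package or a new library package could be created with:
+
+```bash
+# create a binary (default) project in a new ./hello_world folder
+nargo new hello_world
+
+# create a library project instead
+nargo new my_lib --lib
+```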
+ +**Arguments** + +| Argument | Description | +| -------- | -------------------------------- | +| `` | The path to save the new project | + +### Options + +| Option | Description | +| --------------- | ----------------------------------------------------- | +| `--name ` | Name of the package [default: package directory name] | +| `--lib` | Use a library template | +| `--bin` | Use a binary template [default] | +| `--contract` | Use a contract template | +| `-h, --help` | Print help | + +## `nargo init` + +Creates a new Noir project in the current directory. + +### Options + +| Option | Description | +| --------------- | ----------------------------------------------------- | +| `--name ` | Name of the package [default: current directory name] | +| `--lib` | Use a library template | +| `--bin` | Use a binary template [default] | +| `--contract` | Use a contract template | +| `-h, --help` | Print help | + +## `nargo execute [WITNESS_NAME]` + +Runs the Noir program and prints its return value. + +**Arguments** + +| Argument | Description | +| ---------------- | ----------------------------------------- | +| `[WITNESS_NAME]` | Write the execution witness to named file | + +### Options + +| Option | Description | +| --------------------------------- | ------------------------------------------------------------------------------------ | +| `-p, --prover-name ` | The name of the toml file which contains the inputs for the prover [default: Prover] | +| `--package ` | The name of the package to execute | +| `--workspace` | Execute all packages in the workspace | +| `--print-acir` | Display the ACIR for compiled circuit | +| `--deny-warnings` | Treat all warnings as errors | +| `--silence-warnings` | Suppress warnings | +| `-h, --help` | Print help | + +_Usage_ + +The inputs to the circuit are read from the `Prover.toml` file generated by `nargo check`, which +must be filled in. + +To save the witness to file, run the command with a value for the `WITNESS_NAME` argument. A +`.tr` file will then be saved in the `./target` folder. + +## `nargo prove` + +Creates a proof for the program. + +### Options + +| Option | Description | +| ------------------------------------- | ---------------------------------------------------------------------------------------- | +| `-p, --prover-name ` | The name of the toml file which contains the inputs for the prover [default: Prover] | +| `-v, --verifier-name ` | The name of the toml file which contains the inputs for the verifier [default: Verifier] | +| `--verify` | Verify proof after proving | +| `--package ` | The name of the package to prove | +| `--workspace` | Prove all packages in the workspace | +| `--print-acir` | Display the ACIR for compiled circuit | +| `--deny-warnings` | Treat all warnings as errors | +| `--silence-warnings` | Suppress warnings | +| `-h, --help` | Print help | + +## `nargo verify` + +Given a proof and a program, verify whether the proof is valid. 
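+
+A minimal sketch of the prove-then-verify flow (assuming the inputs in `Prover.toml` have already been filled in):
+
+```bash
+# create the proof, optionally checking it right away
+nargo prove --verify
+
+# verify the stored proof against the public values in Verifier.toml
+nargo verify
+```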
+ +### Options + +| Option | Description | +| ------------------------------------- | ---------------------------------------------------------------------------------------- | +| `-v, --verifier-name ` | The name of the toml file which contains the inputs for the verifier [default: Verifier] | +| `--package ` | The name of the package to verify | +| `--workspace` | Verify all packages in the workspace | +| `--print-acir` | Display the ACIR for compiled circuit | +| `--deny-warnings` | Treat all warnings as errors | +| `--silence-warnings` | Suppress warnings | +| `-h, --help` | Print help | + +## `nargo test [TEST_NAME]` + +Nargo will automatically compile and run any functions which have the decorator `#[test]` on them if +you run `nargo test`. To print `println` statements in tests, use the `--show-output` flag. + +Takes an optional `--exact` flag which allows you to select tests based on an exact name. + +See an example on the [testing page](../getting_started/tooling/testing.md). + +### Options + +| Option | Description | +| --------------------- | -------------------------------------- | +| `--show-output` | Display output of `println` statements | +| `--exact` | Only run tests that match exactly | +| `--package ` | The name of the package to test | +| `--workspace` | Test all packages in the workspace | +| `--print-acir` | Display the ACIR for compiled circuit | +| `--deny-warnings` | Treat all warnings as errors | +| `--silence-warnings` | Suppress warnings | +| `-h, --help` | Print help | + +## `nargo info` + +Prints a table containing the information of the package. + +Currently the table provide + +1. The number of ACIR opcodes +2. The final number gates in the circuit used by a backend + +If the file contains a contract the table will provide the +above information about each function of the contract. + +## `nargo lsp` + +Start a long-running Language Server process that communicates over stdin/stdout. +Usually this command is not run by a user, but instead will be run by a Language Client, such as [vscode-noir](https://github.com/noir-lang/vscode-noir). + +## `nargo fmt` + +Automatically formats your Noir source code based on the default formatting settings. diff --git a/docs/versioned_docs/version-v0.22.0/tutorials/noirjs_app.md b/docs/versioned_docs/version-v0.22.0/tutorials/noirjs_app.md new file mode 100644 index 00000000000..302ee4aeade --- /dev/null +++ b/docs/versioned_docs/version-v0.22.0/tutorials/noirjs_app.md @@ -0,0 +1,261 @@ +--- +title: Tiny NoirJS app +description: Learn how to setup a new app that uses Noir to generate and verify zero-knowledge SNARK proofs in a typescript or javascript environment +keywords: [how to, guide, javascript, typescript, noir, barretenberg, zero-knowledge, proofs] +sidebar_position: 0 +--- + +NoirJS works both on the browser and on the server, and works for both ESM and CJS module systems. In this page, we will learn how can we write a simple test and a simple web app to verify the standard Noir example. + +You can find the complete app code for this guide [here](https://github.com/noir-lang/tiny-noirjs-app). + +## Before we start + +:::note + +Feel free to use whatever versions, just keep in mind that Nargo and the NoirJS packages are meant to be in sync. For example, Nargo 0.18.x matches `noir_js@0.18.x`, etc. + +In this guide, we will be pinned to 0.17.0. + +::: + +Make sure you have Node installed on your machine by opening a terminal and executing `node --version`. 
If you don't see a version, you should install [node](https://github.com/nvm-sh/nvm). You can also use `yarn` if you prefer that package manager over npm (which comes with node). + +First of all, follow the the [Nargo guide](../getting_started/installation/index.md) to install nargo version 0.17.0 and create a new project with `nargo new circuit`. Once there, `cd` into the `circuit` folder. You should then be able to compile your circuit into `json` format and see it inside the `target` folder: + +```bash +nargo compile +``` + +Your folder structure should look like: + +```tree +. +└── circuit + ├── Nargo.toml + ├── src + │ └── main.nr + └── target + └── circuit.json +``` + +## Starting a new project + +Go back to the previous folder and start a new project by running run `npm init`. You can configure your project or just leave the defaults, and see a `package.json` appear in your root folder. + +## Installing dependencies + +We'll need two `npm` packages. These packages will provide us the methods we need to run and verify proofs: + +```bash +npm i @noir-lang/backend_barretenberg@^0.17.0 @noir-lang/noir_js@^0.17.0 +``` + +To serve our page, we can use a build tool such as `vite`. Because we're gonna use some `wasm` files, we need to install a plugin as well. Run: + +```bash +npm i --save-dev vite rollup-plugin-copy +``` + +Since we're on the dependency world, we may as well define a nice starting script. Vite makes it easy. Just open `package.json`, find the block "scripts" and add this just below the line with `"test" : "echo......."`: + +```json + "start": "vite --open" +``` + +If you want do build a static website, you can also add some build and preview scripts: + +```json + "build": "vite build", + "preview": "vite preview" +``` + +## Vite plugins + +Vite is great, but support from `wasm` doesn't work out-of-the-box. We're gonna write a quick plugin and use another one. Just copy and paste this into a file named `vite.config.js`. You don't need to understand it, just trust me bro. + +```js +import { defineConfig } from 'vite'; +import copy from 'rollup-plugin-copy'; +import fs from 'fs'; +import path from 'path'; + +const wasmContentTypePlugin = { + name: 'wasm-content-type-plugin', + configureServer(server) { + server.middlewares.use(async (req, res, next) => { + if (req.url.endsWith('.wasm')) { + res.setHeader('Content-Type', 'application/wasm'); + const newPath = req.url.replace('deps', 'dist'); + const targetPath = path.join(__dirname, newPath); + const wasmContent = fs.readFileSync(targetPath); + return res.end(wasmContent); + } + next(); + }); + }, +}; + +export default defineConfig(({ command }) => { + if (command === 'serve') { + return { + plugins: [ + copy({ + targets: [{ src: 'node_modules/**/*.wasm', dest: 'node_modules/.vite/dist' }], + copySync: true, + hook: 'buildStart', + }), + command === 'serve' ? wasmContentTypePlugin : [], + ], + }; + } + + return {}; +}); +``` + +## HTML + +Here's the simplest HTML with some terrible UI. Create a file called `index.html` and paste this: + +```html + + + + + + +

+<!DOCTYPE html>
+<html>
+  <head>
+    <meta charset="UTF-8" />
+    <title>Noir app</title>
+  </head>
+  <body>
+    <!-- app.js writes into the #logs and #results containers below -->
+    <script type="module" src="/app.js"></script>
+    <h1>Very basic Noir app</h1>
+    <div id="logs"><h2>Logs</h2></div>
+    <div id="results"><h2>Proof</h2></div>
+  </body>
+</html>
+ + +``` + +## Some good old vanilla Javascript + +Create a new file `app.js`, which is where our javascript code will live. Let's start with this code inside: + +```js +document.addEventListener('DOMContentLoaded', async () => { + // here's where the magic happens +}); + +function display(container, msg) { + const c = document.getElementById(container); + const p = document.createElement('p'); + p.textContent = msg; + c.appendChild(p); +} +``` + +We can manipulate our website with this little function, so we can see our website working. + +## Adding Noir + +If you come from the previous page, your folder structure should look like this: + +```tree +├── app.js +├── circuit +│ ├── Nargo.toml +│ ├── src +│ │ └── main.nr +│ └── target +│ └── circuit.json +├── index.html +├── package.json +└── vite.config.js +``` + +You'll see other files and folders showing up (like `package-lock.json`, `yarn.lock`, `node_modules`) but you shouldn't have to care about those. + +## Importing our dependencies + +We're starting with the good stuff now. At the top of the new javascript file, import the packages: + +```ts +import { BarretenbergBackend } from '@noir-lang/backend_barretenberg'; +import { Noir } from '@noir-lang/noir_js'; +``` + +We also need to import the `circuit` JSON file we created. If you have the suggested folder structure, you can add this line: + +```ts +import circuit from './circuit/target/circuit.json'; +``` + +## Write code + +:::note + +We're gonna be adding code inside the `document.addEventListener...etc` block: + +```js +// forget stuff here +document.addEventListener('DOMContentLoaded', async () => { + // here's where the magic happens +}); +// forget stuff here +``` + +::: + +Our dependencies exported two classes: `BarretenbergBackend` and `Noir`. Let's `init` them and add some logs, just to flex: + +```ts +const backend = new BarretenbergBackend(circuit); +const noir = new Noir(circuit, backend); +``` + +## Proving + +Now we're ready to prove stuff! Let's feed some inputs to our circuit and calculate the proof: + +```js +const input = { x: 1, y: 2 }; +display('logs', 'Generating proof... ⌛'); +const proof = await noir.generateFinalProof(input); +display('logs', 'Generating proof... ✅'); +display('results', proof.proof); +``` + +You're probably eager to see stuff happening, so go and run your app now! + +From your terminal, run `npm start` (or `yarn start`). If it doesn't open a browser for you, just visit `localhost:5173`. On a modern laptop, proof will generate in less than 100ms, and you'll see this: + +![Getting Started 0](@site/static/img/noir_getting_started_1.png) + +If you're human, you shouldn't be able to understand anything on the "proof" box. That's OK. We like you, human. + +In any case, this means your proof was generated! But you shouldn't trust me just yet. Add these lines to see it being verified: + +```js +display('logs', 'Verifying proof... ⌛'); +const verification = await noir.verifyFinalProof(proof); +if (verification) display('logs', 'Verifying proof... ✅'); +``` + +By saving, your app will refresh and here's our complete Tiny Noir App! + +You can find the complete app code for this guide [here](https://github.com/noir-lang/tiny-noirjs-app). + +## Further Reading + +You can see how noirjs is used in a full stack Next.js hardhat application in the [noir-starter repo here](https://github.com/noir-lang/noir-starter/tree/main/next-hardhat). 
The example shows how to calculate a proof in the browser and verify it with a deployed Solidity verifier contract from noirjs. + +You should also check out the more advanced examples in the [noir-examples repo](https://github.com/noir-lang/noir-examples), where you'll find reference usage for some cool apps. diff --git a/docs/versioned_docs/version-v0.6.0/examples/merkle-proof.md b/docs/versioned_docs/version-v0.6.0/examples/merkle-proof.md deleted file mode 100644 index 4696b4a1426..00000000000 --- a/docs/versioned_docs/version-v0.6.0/examples/merkle-proof.md +++ /dev/null @@ -1,48 +0,0 @@ ---- -title: Merkle Proof Membership -description: - Learn how to use merkle membership proof in Noir to prove that a given leaf is a member of a - merkle tree with a specified root, at a given index. -keywords: - [merkle proof, merkle membership proof, Noir, rust, hash function, Pedersen, sha256, merkle tree] ---- - -Let's walk through an example of a merkle membership proof in Noir that proves that a given leaf is -in a merkle tree. - -```rust -use dep::std; - -fn main(message : [Field; 62], index : Field, hashpath : [Field; 40], root : Field) { - let leaf = std::hash::hash_to_field(message); - let merkle_root = std::merkle::compute_merkle_root(leaf, index, hashpath); - assert(merkle_root == root); -} - -``` - -The message is hashed using `hash_to_field`. The specific hash function that is being used is chosen -by the backend. The only requirement is that this hash function can heuristically be used as a -random oracle. If only collision resistance is needed, then one can call `std::hash::pedersen` -instead. - -```rust -let leaf = std::hash::hash_to_field(message); -``` - -The leaf is then passed to a compute_merkle_root function with the root, index and hashpath. The returned root can then be asserted to be the same as the provided root. - -```rust -let merkle_root = std::merkle::compute_merkle_root(leaf, index, hashpath); -assert (merkle_root == root); -``` - -> **Note:** It is possible to re-implement the merkle tree implementation without standard library. -> However, for most usecases, it is enough. In general, the standard library will always opt to be -> as conservative as possible, while striking a balance with efficiency. - -An example, the merkle membership proof, only requires a hash function that has collision -resistance, hence a hash function like Pedersen is allowed, which in most cases is more efficient -than the even more conservative sha256. - -[view an example on the starter repo](https://github.com/noir-lang/noir-examples/blob/3ea09545cabfa464124ec2f3ea8e60c608abe6df/stealthdrop/circuits/src/main.nr#L20) diff --git a/docs/versioned_docs/version-v0.6.0/getting_started/00_nargo_installation.md b/docs/versioned_docs/version-v0.6.0/getting_started/00_nargo_installation.md deleted file mode 100644 index 432884b709e..00000000000 --- a/docs/versioned_docs/version-v0.6.0/getting_started/00_nargo_installation.md +++ /dev/null @@ -1,283 +0,0 @@ ---- -title: Nargo Installation -description: - nargo is a command line tool for interacting with Noir programs (e.g. compiling, proving, - verifying and more). Learn how to install and use Nargo for your projects with this comprehensive - guide. -keywords: [Nargo, command line tool, Noir programs, installation guide, how to use Nargo] ---- - -`nargo` is a command line tool for interacting with Noir programs (e.g. compiling, proving, -verifying and more). - -Alternatively, the interactions can also be performed in [TypeScript](../typescript). 
- -### UltraPlonk - -Nargo versions <0.5.0 of `aztec_backend` and `aztec_wasm_backend` are based on the TurboPlonk -version of Aztec Backend, which lacks efficient implementations of useful primitives (e.g. Keccak256 in 18k constraints, ECDSA verification in 36k constraints) that the UltraPlonk version offers. - -## Installation - -There are four approaches for installing Nargo: - -- [Option 1: Noirup](#option-1-noirup) -- [Option 2: Binaries](#option-2-binaries) -- [Option 3: Install via Nix](#option-3-install-via-nix) -- [Option 4: Compile from Source](#option-4-compile-from-source) - -Optionally you can also install [Noir VS Code extension] for syntax highlighting. - -### Option 1: Noirup - -If you're on OSX or Linux, the easiest way to start using Noir and Nargo is via noirup. Just open a -terminal and run: - -```bash -curl -L https://raw.githubusercontent.com/noir-lang/noirup/main/install | bash -``` - -Close the terminal, open another one, and run - -```bash -noirup -v 0.6.0 -``` - -Done, you should have the latest version working. You can check with `nargo --version`. - -You can also install nightlies, specific versions -or branches, check out the [noirup repository](https://github.com/noir-lang/noirup) for more -information. - -#### GitHub Actions - -You can use `noirup` with GitHub Actions for CI/CD and automated testing. It is as simple as -installing `noirup` and running tests in your GitHub Action `yml` file. - -See the -[config file](https://github.com/TomAFrench/noir-hashes/blob/master/.github/workflows/noir.yml) in -this repo containing hash functions in Noir for an example. - -#### Nightly versions - -To install the nightly version of Noir (updated daily) run: - -```bash -noirup -n -``` - -### Option 2: Binaries - -See [GitHub Releases](https://github.com/noir-lang/noir/releases) for the latest and previous -platform specific binaries. - -#### Step 1 - -Paste and run the following in the terminal to extract and install the binary: - -> **macOS / Linux:** If you are prompted with `Permission denied` when running commands, prepend -> `sudo` and re-run it. 
- -##### macOS (Apple Silicon) - -```bash -mkdir -p $HOME/.nargo/bin && \ -curl -o $HOME/.nargo/bin/nargo-aarch64-apple-darwin.tar.gz -L https://github.com/noir-lang/noir/releases/download/v0.6.0/nargo-aarch64-apple-darwin.tar.gz && \ -tar -xvf $HOME/.nargo/bin/nargo-aarch64-apple-darwin.tar.gz -C $HOME/.nargo/bin/ && \ -echo '\nexport PATH=$PATH:$HOME/.nargo/bin' >> ~/.zshrc && \ -source ~/.zshrc -``` - -##### macOS (Intel) - -```bash -mkdir -p $HOME/.nargo/bin && \ -curl -o $HOME/.nargo/bin/nargo-x86_64-apple-darwin.tar.gz -L https://github.com/noir-lang/noir/releases/download/v0.6.0/nargo-x86_64-apple-darwin.tar.gz && \ -tar -xvf $HOME/.nargo/bin/nargo-x86_64-apple-darwin.tar.gz -C $HOME/.nargo/bin/ && \ -echo '\nexport PATH=$PATH:$HOME/.nargo/bin' >> ~/.zshrc && \ -source ~/.zshrc -``` - -##### Windows (PowerShell) - -Open PowerShell as Administrator and run: - -```powershell -mkdir -f -p "$env:USERPROFILE\.nargo\bin\"; ` -Invoke-RestMethod -Method Get -Uri https://github.com/noir-lang/noir/releases/download/v0.4.1/nargo-x86_64-pc-windows-msvc.zip -Outfile "$env:USERPROFILE\.nargo\bin\nargo-x86_64-pc-windows-msvc.zip"; ` -Expand-Archive -Path "$env:USERPROFILE\.nargo\bin\nargo-x86_64-pc-windows-msvc.zip" -DestinationPath "$env:USERPROFILE\.nargo\bin\"; ` -$Reg = "Registry::HKLM\System\CurrentControlSet\Control\Session Manager\Environment"; ` -$OldPath = (Get-ItemProperty -Path "$Reg" -Name PATH).Path; ` -$NewPath = $OldPath + ’;’ + "$env:USERPROFILE\.nargo\bin\"; ` -Set-ItemProperty -Path "$Reg" -Name PATH –Value "$NewPath"; ` -$env:Path = [System.Environment]::GetEnvironmentVariable("Path","Machine") + ";" + [System.Environment]::GetEnvironmentVariable("Path","User") -``` - -##### Linux (Bash) - -```bash -mkdir -p $HOME/.nargo/bin && \ -curl -o $HOME/.nargo/bin/nargo-x86_64-unknown-linux-gnu.tar.gz -L https://github.com/noir-lang/noir/releases/download/v0.6.0/nargo-x86_64-unknown-linux-gnu.tar.gz && \ -tar -xvf $HOME/.nargo/bin/nargo-x86_64-unknown-linux-gnu.tar.gz -C $HOME/.nargo/bin/ && \ -echo -e '\nexport PATH=$PATH:$HOME/.nargo/bin' >> ~/.bashrc && \ -source ~/.bashrc -``` - -#### Step 2 - -Check if the installation was successful by running `nargo --help`. - -> **macOS:** If you are prompted with an OS alert, right-click and open the _nargo_ executable from -> Finder. Close the new terminal popped up and `nargo` should now be accessible. - -For a successful installation, you should see something similar to the following after running the -command: - -```sh -$ nargo --help - -Noir's package manager - -Usage: nargo - -Commands: - check Checks the constraint system for errors - codegen-verifier Generates a Solidity verifier smart contract for the program - compile Compile the program and its secret execution trace into ACIR format - new Create a new binary project - execute Executes a circuit to calculate its return value - prove Create proof for this program. The proof is returned as a hex encoded string - verify Given a proof and a program, verify whether the proof is valid - test Run the tests for this program - gates Counts the occurrences of different gates in circuit - help Print this message or the help of the given subcommand(s) -``` - -### Option 3: Install via Nix - -Due to the large number of native dependencies, Noir projects can be installed via [Nix](https://nixos.org/). - -#### Installing Nix - -For the best experience, please follow these instructions to setup Nix: - -1. Install Nix following [their guide](https://nixos.org/download.html) for your operating system. 
-2. Create the file `~/.config/nix/nix.conf` with the contents: - -```ini -experimental-features = nix-command -extra-experimental-features = flakes -``` - -#### Install Nargo into your Nix profile - -1. Use `nix profile` to install Nargo - -```sh -nix profile install github:noir-lang/noir -``` - -### Option 4: Compile from Source - -Due to the large number of native dependencies, Noir projects uses [Nix](https://nixos.org/) and [direnv](https://direnv.net/) to streamline the development experience. - -#### Setting up your environment - -For the best experience, please follow these instructions to setup your environment: - -1. Install Nix following [their guide](https://nixos.org/download.html) for your operating system. -2. Create the file `~/.config/nix/nix.conf` with the contents: - -```ini -experimental-features = nix-command -extra-experimental-features = flakes -``` - -3. Install direnv into your Nix profile by running: - -```sh -nix profile install nixpkgs#direnv -``` - -4. Add direnv to your shell following [their guide](https://direnv.net/docs/hook.html). -5. Restart your shell. - -#### Shell & editor experience - -Now that your environment is set up, you can get to work on the project. - -1. Clone the repository, such as: - -```sh -git clone git@github.com:noir-lang/noir -``` -> Replacing `noir` with whichever repository you want to work on. - -2. Navigate to the directory: - -```sh -cd noir -``` -> Replacing `noir` with whichever repository you cloned. - -3. You should see a __direnv error__ because projects aren't allowed by default. Make sure you've reviewed and trust our `.envrc` file, then you need to run: - -```sh -direnv allow -``` - -4. Now, wait awhile for all the native dependencies to be built. This will take some time and direnv will warn you that it is taking a long time, but we just need to let it run. - -5. Once you are presented with your prompt again, you can start your editor within the project directory (we recommend [VSCode](https://code.visualstudio.com/)): - -```sh -code . -``` - -6. (Recommended) When launching VSCode for the first time, you should be prompted to install our recommended plugins. We highly recommend installing these for the best development experience. - -#### Building and testing - -Assuming you are using `direnv` to populate your environment, building and testing the project can be done -with the typical `cargo build`, `cargo test`, and `cargo clippy` commands. You'll notice that the `cargo` version matches the version we specify in `flake.nix`, which is 1.66.0 at the time of this writing. - -If you want to build the entire project in an isolated sandbox, you can use Nix commands: - -1. `nix build .` (or `nix build . -L` for verbose output) to build the project in a Nix sandbox. -2. `nix flake check` (or `nix flake check -L` for verbose output) to run clippy and tests in a Nix sandbox. - -#### Without `direnv` - -If you have hesitations with using direnv, you can launch a subshell with `nix develop` and then launch your editor from within the subshell. However, if VSCode was already launched in the project directory, the environment won't be updated. - -Advanced: If you aren't using direnv nor launching your editor within the subshell, you can try to install Barretenberg and other global dependencies the package needs. This is an advanced workflow and likely won't receive support! 
- -## Uninstalling Nargo - -### Noirup - -If you installed Noir with `noirup`, you can uninstall Noir by removing the files in `~/.nargo`, `~/nargo` and `~/noir_cache`. - -```bash -rm -r ~/.nargo -rm -r ~/nargo -rm -r ~/noir_cache -``` - -### Nix - -If you installed Noir with Nix or from source, you can remove the binary located at `~/.nix-profile/bin/nargo`. - -```bash -rm ~/.nix-profile/bin/nargo -``` - -[git]: https://git-scm.com/book/en/v2/Getting-Started-Installing-Git -[rust]: https://www.rust-lang.org/tools/install -[noir vs code extension]: - https://marketplace.visualstudio.com/items?itemName=noir-lang.noir-programming-language-syntax-highlighter -[homebrew]: https://brew.sh/ -[cmake]: https://cmake.org/install/ -[llvm]: https://llvm.org/docs/GettingStarted.html -[openmp]: https://openmp.llvm.org/ -[barretenberg]: https://github.com/AztecProtocol/barretenberg diff --git a/docs/versioned_docs/version-v0.6.0/getting_started/01_hello_world.md b/docs/versioned_docs/version-v0.6.0/getting_started/01_hello_world.md deleted file mode 100644 index 0f21ad45569..00000000000 --- a/docs/versioned_docs/version-v0.6.0/getting_started/01_hello_world.md +++ /dev/null @@ -1,147 +0,0 @@ ---- -title: Create A Project -description: - Learn how to create and verify your first Noir program using Nargo, a programming language for - zero-knowledge proofs. -keywords: - [ - Nargo, - Noir, - zero-knowledge proofs, - programming language, - create Noir program, - verify Noir program, - step-by-step guide, - ] ---- - -Now that we have installed Nargo, it is time to make our first hello world program! - -## Create a Project Directory - -Noir code can live anywhere on your computer. Let us create a _projects_ folder in the home -directory to house our Noir programs. - -For Linux, macOS, and Windows PowerShell, create the directory and change directory into it by -running: - -```sh -mkdir ~/projects -cd ~/projects -``` - -For Windows CMD, run: - -```sh -> mkdir "%USERPROFILE%\projects" -> cd /d "%USERPROFILE%\projects" -``` - -## Create Our First Nargo Project - -Now that we are in the projects directory, create a new Nargo project by running: - -```sh -nargo new hello_world -``` - -> **Note:** `hello_world` can be any arbitrary project name, we are simply using `hello_world` for -> demonstration. -> -> In production, the common practice is to name the project folder as `circuits` for better -> identifiability when sitting alongside other folders in the codebase (e.g. `contracts`, `scripts`, -> `test`). - -A `hello_world` folder would be created. Similar to Rust, the folder houses _src/main.nr_ and -_Nargo.toml_ that contains the source code and environmental options of your Noir program -respectively. - -### Intro to Noir Syntax - -Let us take a closer look at _main.nr_. The default _main.nr_ generated should look like this: - -```rust -fn main(x : Field, y : pub Field) { - assert(x != y); -} -``` - -The first line of the program specifies the program's inputs: - -```rust -x : Field, y : pub Field -``` - -Program inputs in Noir are private by default (e.g. `x`), but can be labeled public using the -keyword `pub` (e.g. `y`). To learn more about private and public values, check the -[Data Types](../language_concepts/data_types) section. - -The next line of the program specifies its body: - -```rust -assert(x != y); -``` - -The Noir syntax `assert` can be interpreted as something similar to constraints in other zk-contract languages. 
- -For more Noir syntax, check the [Language Concepts](../language_concepts/comments) chapter. - -## Build In/Output Files - -Change directory into _hello_world_ and build in/output files for your Noir program by running: - -```sh -cd hello_world -nargo check -``` - -Two additional files would be generated in your project directory: - -_Prover.toml_ houses input values, and _Verifier.toml_ houses public values. - -## Prove Our Noir Program - -Now that the project is set up, we can create a proof of correct execution on our Noir program. - -Fill in input values for execution in the _Prover.toml_ file. For example: - -```toml -x = "1" -y = "2" -``` - -Prove the valid execution of your Noir program with your preferred proof name, for example `p`: - -```sh -nargo prove p -``` - -A new folder _proofs_ would then be generated in your project directory, containing the proof file -`p.proof`. - -The _Verifier.toml_ file would also be updated with the public values computed from program -execution (in this case the value of `y`): - -```toml -y = "0x0000000000000000000000000000000000000000000000000000000000000002" -``` - -> **Note:** Values in _Verifier.toml_ are computed as 32-byte hex values. - -## Verify Our Noir Program - -Once a proof is generated, we can verify correct execution of our Noir program by verifying the -proof file. - -Verify your proof of name `p` by running: - -```sh -nargo verify p -``` - -The verification will complete in silence if it is successful. If it fails, it will log the -corresponding error instead. - -Congratulations, you have now created and verified a proof for your very first Noir program! - -In the [next section](breakdown), we will go into more detail on each step performed. diff --git a/docs/versioned_docs/version-v0.6.0/getting_started/02_breakdown.md b/docs/versioned_docs/version-v0.6.0/getting_started/02_breakdown.md deleted file mode 100644 index 5f4f00a3f36..00000000000 --- a/docs/versioned_docs/version-v0.6.0/getting_started/02_breakdown.md +++ /dev/null @@ -1,103 +0,0 @@ ---- -title: Project Breakdown -description: - Learn about the anatomy of a Nargo project, including the purpose of the Prover and Verifier TOML - files, and how to prove and verify your program. -keywords: - [Nargo, Nargo project, Prover.toml, Verifier.toml, proof verification, private asset transfer] ---- - -This section breaks down our hello world program in section _1.2_. We elaborate on the project -structure and what the `prove` and `verify` commands did in the previous section. - -## Anatomy of a Nargo Project - -Upon creating a new project with `nargo new` and building the in/output files with `nargo check` -commands, you would get a minimal Nargo project of the following structure: - - - src - - Prover.toml - - Verifier.toml - - Nargo.toml - -The source directory _src_ holds the source code for your Noir program. By default only a _main.nr_ -file will be generated within it. - -_Prover.toml_ is used for specifying the input values for executing and proving the program. -Optionally you may specify expected output values for prove-time checking as well. - -_Verifier.toml_ contains public in/output values computed when executing the Noir program. - -_Nargo.toml_ contains the environmental options of your project. - -_proofs_ and _contract_ directories will not be immediately visible until you create a proof or -verifier contract respectively. - -### main.nr - -The _main.nr_ file contains a `main` method, this method is the entry point into your Noir program. 
- -In our sample program, _main.nr_ looks like this: - -```rust -fn main(x : Field, y : Field) { - constrain x != y; -} -``` - -The parameters `x` and `y` can be seen as the API for the program and must be supplied by the -prover. Since neither `x` nor `y` is marked as public, the verifier does not supply any inputs, when -verifying the proof. - -The prover supplies the values for `x` and `y` in the _Prover.toml_ file. - -As for the program body, `constrain` ensures the satisfaction of the condition (e.g. `x != y`) is -constrained by the proof of the execution of said program (i.e. if the condition was not met, the -verifier would reject the proof as an invalid proof). - -### Prover.toml - -The _Prover.toml_ file is a file which the prover uses to supply his witness values(both private and -public). - -In our hello world program the _Prover.toml_ file looks like this: - -```toml -x = "1" -y = "2" -``` - -When the command `nargo prove my_proof` is executed, two processes happen: - -1. Noir creates a proof that `x` which holds the value of `1` and `y` which holds the value of `2` - is not equal. This not equal constraint is due to the line `constrain x != y`. - -2. Noir creates and stores the proof of this statement in the _proofs_ directory and names the proof - file _my_proof_. Opening this file will display the proof in hex format. - -## Verifying a Proof - -When the command `nargo verify my_proof` is executed, two processes happen: - -1. Noir checks in the _proofs_ directory for a file called _my_proof_ - -2. If that file is found, the proof's validity is checked - -> **Note:** The validity of the proof is linked to the current Noir program; if the program is -> changed and the verifier verifies the proof, it will fail because the proof is not valid for the -> _modified_ Noir program. - -In production, the prover and the verifier are usually two separate entities. A prover would -retrieve the necessary inputs, execute the Noir program, generate a proof and pass it to the -verifier. The verifier would then retrieve the public inputs from usually external sources and -verifies the validity of the proof against it. - -Take a private asset transfer as an example: - -A user on browser as the prover would retrieve private inputs (e.g. the user's private key) and -public inputs (e.g. the user's encrypted balance on-chain), compute the transfer, generate a proof -and submit it to the verifier smart contract. - -The verifier contract would then draw the user's encrypted balance directly from the blockchain and -verify the proof submitted against it. If the verification passes, additional functions in the -verifier contract could trigger (e.g. approve the asset transfer). diff --git a/docs/versioned_docs/version-v0.6.0/index.md b/docs/versioned_docs/version-v0.6.0/index.md deleted file mode 100644 index f4706182ffa..00000000000 --- a/docs/versioned_docs/version-v0.6.0/index.md +++ /dev/null @@ -1,65 +0,0 @@ ---- -title: Introducing Noir -description: - Learn about the public alpha release of Noir, a domain specific language heavily influenced by - Rust that compiles to an intermediate language which can be compiled to an arithmetic circuit or a - rank-1 constraint system. 
-keywords: - [ - Noir, - Domain Specific Language, - Rust, - Intermediate Language, - Arithmetic Circuit, - Rank-1 Constraint System, - Ethereum Developers, - Protocol Developers, - Blockchain Developers, - Proving System, - Smart Contract Language, - ] -slug: / ---- - -This version of the book is being released with the public alpha. There will be a lot of features -that are missing in this version, however the syntax and the feel of the language will mostly be -completed. - -## What is Noir? - -Noir is a domain specific language for creating and verifying proofs. It's design choices are -influenced heavily by Rust. - -## What's new about Noir? - -Noir is simple and flexible in its design, as it does not compile immediately to a fixed -NP-complete language. Instead, Noir compiles to an intermediate language (ACIR), which itself can be compiled -to an arithmetic circuit (if choosing to target Aztec's barretenberg backend) or a rank-1 constraint system (if choosing to target an R1CS backend like Arkwork's Marlin backend, or others). - -This in itself brings up a few challenges -within the design process, but allows one to decouple the programming language completely from the -backend. This is similar in theory to LLVM. - -## Who is Noir for? - -Noir can be used for a variety of purposes. - -### Ethereum Developers - -Noir currently includes a command to publish a contract which verifies your Noir program. This will -be modularised in the future; however, as of the alpha, you can use the `contract` command to create -a verifier contract. - -### Protocol Developers - -As a protocol developer, you may not want to use the Aztec backend due to it not being a fit for -your stack, or maybe you simply want to use a different proving system. Since Noir does not compile -to a specific proof system, it is possible for protocol developers to replace the PLONK-based -proving system with a different proving system altogether. - -### Blockchain developers - -As a blockchain developer, you will be constrained by parameters set by your blockchain (for example, the -proving system and smart contract language has been pre-defined). In order for you to use Noir in -your blockchain, a proving system backend and a smart contract interface -must be implemented for it. diff --git a/docs/versioned_docs/version-v0.6.0/language_concepts/00_data_types.md b/docs/versioned_docs/version-v0.6.0/language_concepts/00_data_types.md deleted file mode 100644 index 3a711fc8922..00000000000 --- a/docs/versioned_docs/version-v0.6.0/language_concepts/00_data_types.md +++ /dev/null @@ -1,301 +0,0 @@ ---- -title: Data Types -description: - Get a clear understanding of the two categories of Noir data types - primitive types and compound - types. Learn about their characteristics, differences, and how to use them in your Noir - programming. -keywords: - [ - noir, - data types, - primitive types, - compound types, - private types, - public types, - field type, - integer types, - boolean type, - array type, - tuple type, - struct type, - ] ---- - -Every value in Noir has a type, which determines which operations are valid for it. - -All values in Noir are fundamentally composed of `Field` elements. For a more approachable -developing experience, abstractions are added on top to introduce different data types in Noir. - -Noir has two category of data types: primitive types (e.g. `Field`, integers, `bool`) and compound -types that group primitive types (e.g. arrays, tuples, structs). Each value can either be private or -public. 
- -## Private & Public Types - -A **private value** is known only to the Prover, while a **public value** is known by both the -Prover and Verifier. Mark values as `private` when the value should only be known to the prover. All -primitive types (including individual fields of compound types) in Noir are private by default, and -can be marked public when certain values are intended to be revealed to the Verifier. - -> **Note:** For public values defined in Noir programs paired with smart contract verifiers, once -> the proofs are verified on-chain the values can be considered known to everyone that has access to -> that blockchain. - -Public data types are treated no differently to private types apart from the fact that their values -will be revealed in proofs generated. Simply changing the value of a public type will not change the -circuit (where the same goes for changing values of private types as well). - -_Private values_ are also referred to as _witnesses_ sometimes. - -> **Note:** The terms private and public when applied to a type (e.g. `pub Field`) have a different -> meaning than when applied to a function (e.g. `pub fn foo() {}`). -> -> The former is a visibility modifier for the Prover to interpret if a value should be made known to -> the Verifier, while the latter is a visibility modifier for the compiler to interpret if a -> function should be made accessible to external Noir programs like in other languages. - -### pub Modifier - -All data types in Noir are private by default. Types are explicitly declared as public using the -`pub` modifier: - -```rust -fn main(x : Field, y : pub Field) -> pub Field { - x + y -} -``` - -In this example, `x` is **private** while `y` and `x + y` (the return value) are **public**. Note -that visibility is handled **per variable**, so it is perfectly valid to have one input that is -private and another that is public. - -> **Note:** Public types can only be declared through parameters on `main`. - -## Primitive Types - -A primitive type represents a single value. They can be private or public. - -### Fields - -The field type corresponds to the native field type of the proving backend. - -The size of a Noir field depends on the elliptic curve's finite field for the proving backend -adopted. For example, a field would be a 254-bit integer when paired with the default backend that -spans the Grumpkin curve. - -Fields support integer arithmetic and are often used as the default numeric type in Noir: - -```rust -fn main(x : Field, y : Field) { - let z = x + y; -} -``` - -`x`, `y` and `z` are all private fields in this example. Using the `let` keyword we defined a new -private value `z` constrained to be equal to `x + y`. - -If proving efficiency is of priority, fields should be used as a default for solving problems. -Smaller integer types (e.g. `u64`) incur extra range constraints. - -### Integers - -An integer type is a range constrained field type. The Noir frontend currently supports unsigned, -arbitrary-sized integer types. - -An integer type is specified first with the letter `u`, indicating its unsigned nature, followed by -its length in bits (e.g. `32`). For example, a `u32` variable can store a value in the range of -$\\([0,2^{32}-1]\\)$: - -```rust -fn main(x : Field, y : u32) { - let z = x as u32 + y; -} -``` - -`x`, `y` and `z` are all private values in this example. However, `x` is a field while `y` and `z` -are unsigned 32-bit integers. 
If `y` or `z` exceeds the range $\\([0,2^{32}-1]\\)$, proofs created -will be rejected by the verifier. - -> **Note:** The default backend supports both even (e.g. `u16`, `u48`) and odd (e.g. `u5`, `u3`) -> sized integer types. - -### Booleans - -The `bool` type in Noir has two possible values: `true` and `false`: - -```rust -fn main() { - let t = true; - let f: bool = false; -} -``` - -> **Note:** When returning a boolean value, it will show up as a value of 1 for `true` and 0 for -> `false` in _Verifier.toml_. - -The boolean type is most commonly used in conditionals like `if` expressions and `constrain` -statements. More about conditionals is covered in the [Control Flow](./control_flow) and -[Constrain Statement](./constrain) sections. - -### Strings - -The string type is a fixed length value defined with `str`. - -You can use strings in `constrain` statements, `assert()` functions or print them with -`std::println()`. - -```rust -fn main(message : pub str<11>, hex_as_string : str<4>) { - std::println(message); - assert(message == "hello world"); - assert(hex_as_string == "0x41"); -} -``` - -## Compound Types - -A compound type groups together multiple values into one type. Elements within a compound type can -be private or public. - -### Arrays - -An array is one way of grouping together values into one compound type. Array types can be inferred -or explicitly specified via the syntax `[; ]`: - -```rust -fn main(x : Field, y : Field) { - let my_arr = [x, y]; - let your_arr: [Field; 2] = [x, y]; -} -``` - -Here, both `my_arr` and `your_arr` are instantiated as an array containing two `Field` elements. - -Array elements can be accessed using indexing: - -```rust -fn main() { - let a = [1, 2, 3, 4, 5]; - - let first = a[0]; - let second = a[1]; -} -``` - -All elements in an array must be of the same type (i.e. homogeneous). That is, an array cannot group -a `Field` value and a `u8` value together for example. - -You can write mutable arrays, like: - -```rust -fn main() { - let mut arr = [1, 2, 3, 4, 5]; - assert(arr[0] == 1); - - arr[0] = 42; - assert(arr[0] == 42); -} -``` - -#### Types - -You can create arrays of primitive types or structs. There is not yet support for nested arrays -(arrays of arrays) or arrays of structs that contain arrays. - -### Tuples - -A tuple collects multiple values like an array, but with the added ability to collect values of -different types: - -```rust -fn main() { - let tup: (u8, u64, Field) = (255, 500, 1000); -} -``` - -One way to access tuple elements is via destructuring using pattern matching: - -```rust -fn main() { - let tup = (1, 2); - - let (one, two) = tup; - - let three = one + two; -} -``` - -Another way to access tuple elements is via direct member access, using a period (`.`) followed by -the index of the element we want to access. Index `0` corresponds to the first tuple element, `1` to -the second and so on: - -```rust -fn main() { - let tup = (5, 6, 7, 8); - - let five = tup.0; - let eight = tup.3; -} -``` - -### Structs - -A struct also allows for grouping multiple values of different types. Unlike tuples, we can also -name each field. - -> **Note:** The usage of _field_ here refers to each element of the struct and is unrelated to the -> field type of Noir. - -Defining a struct requires giving it a name and listing each field within as `: ` pairs: - -```rust -struct Animal { - hands: Field, - legs: Field, - eyes: u8, -} -``` - -An instance of a struct can then be created with actual values in `: ` pairs in any -order. 
Struct fields are accessible using their given names: - -```rust -fn main() { - let legs = 4; - - let dog = Animal { - eyes: 2, - hands: 0, - legs, - }; - - let zero = dog.hands; -} -``` - -Structs can also be destructured in a pattern, binding each field to a new variable: - -```rust -fn main() { - let Animal { hands, legs: feet, eyes } = get_octopus(); - - let ten = hands + feet + eyes as u8; -} - -fn get_octopus() -> Animal { - let octopus = Animal { - hands: 0, - legs: 8, - eyes: 2, - }; - - octopus -} -``` - -The new variables can be bound with names different from the original struct field names, as -showcased in the `legs --> feet` binding in the example above. - -:::note -You can use Structs as inputs to the `main` function, but you can't output them -::: diff --git a/docs/versioned_docs/version-v0.6.0/language_concepts/01_functions.md b/docs/versioned_docs/version-v0.6.0/language_concepts/01_functions.md deleted file mode 100644 index c4bc0545a1c..00000000000 --- a/docs/versioned_docs/version-v0.6.0/language_concepts/01_functions.md +++ /dev/null @@ -1,88 +0,0 @@ ---- -title: Functions -description: - Learn how to declare functions and methods in Noir, a programming language with Rust semantics. - This guide covers parameter declaration, return types, call expressions, and more. -keywords: [Noir, Rust, functions, methods, parameter declaration, return types, call expressions] ---- - -Functions in Noir follow the same semantics of Rust, though Noir does not support early returns. - -To declare a function the `fn` keyword is used. - -```rust -fn foo() {} -``` - -All parameters in a function must have a type and all types are known at compile time. The parameter -is pre-pended with a colon and the parameter type. Multiple parameters are separated using a comma. - -```rust -fn foo(x : Field, y : pub Field){} -``` - -The return type of a function can be stated by using the `->` arrow notation. The function below -states that the foo function must return a `Field`. If the function returns no value, then the arrow -is omitted. - -```rust -fn foo(x : Field, y : pub Field) -> Field { - x + y -} -``` - -Note that a `return` keyword is unneeded in this case - the last expression in a function's body is -returned. - -## Call Expressions - -Calling a function in Noir is executed by using the function name and passing in the necessary -arguments. - -Below we show how to call the `foo` function from the `main` function using a call expression: - -```rust -fn main(x : Field, y : Field) { - let z = foo(x); -} - -fn foo(x : Field) -> Field { - x + x -} -``` - -## Methods - -You can define methods in Noir on any struct type in scope. 
- -```rust -struct MyStruct { - foo: Field, - bar: Field, -} - -impl MyStruct { - fn new(foo: Field) -> MyStruct { - MyStruct { - foo, - bar: 2, - } - } - - fn sum(self) -> Field { - self.foo + self.bar - } -} - -fn main() { - let s = MyStruct::new(40); - constrain s.sum() == 42; -} -``` - -Methods are just syntactic sugar for functions, so if we wanted to we could also call `sum` as -follows: - -```rust -constrain MyStruct::sum(s) == 42 -``` diff --git a/docs/versioned_docs/version-v0.6.0/language_concepts/02_control_flow.md b/docs/versioned_docs/version-v0.6.0/language_concepts/02_control_flow.md deleted file mode 100644 index 29108dd2634..00000000000 --- a/docs/versioned_docs/version-v0.6.0/language_concepts/02_control_flow.md +++ /dev/null @@ -1,42 +0,0 @@ ---- -title: Control Flow -description: - Learn how to use loops and if expressions in the Noir programming language. Discover the syntax - and examples for for loops and if-else statements. -keywords: [Noir programming language, loops, for loop, if-else statements, Rust syntax] ---- - -## Loops - -Noir has one kind of loop: the `for` loop. `for` loops allow you to repeat a block of code multiple -times. - -The following block of code between the braces is run 10 times. - -```rust -for i in 0..10 { - // do something -}; -``` - -## If Expressions - -Noir supports `if-else` statements. The syntax is most similar to Rust's where it is not required -for the statement's conditonal to be surrounded by parentheses. - -```rust -let a = 0; -let mut x: u32 = 0; - -if a == 0 { - if a != 0 { - x = 6; - } else { - x = 2; - } -} else { - x = 5; - constrain x == 5; -} -constrain x == 2; -``` diff --git a/docs/versioned_docs/version-v0.6.0/language_concepts/03_ops.md b/docs/versioned_docs/version-v0.6.0/language_concepts/03_ops.md deleted file mode 100644 index d08df2094a5..00000000000 --- a/docs/versioned_docs/version-v0.6.0/language_concepts/03_ops.md +++ /dev/null @@ -1,96 +0,0 @@ ---- -title: Logical Operations -description: - Learn about the supported arithmetic and logical operations in the Noir programming language. - Discover how to perform operations on private input types, integers, and booleans. -keywords: - [ - Noir programming language, - supported operations, - arithmetic operations, - logical operations, - predicate operators, - bitwise operations, - short-circuiting, - backend, - ] ---- - -# Operations - -## Table of Supported Operations - -| Operation | Description | Requirements | -| :-------- | :------------------------------------------------------------: | -------------------------------------: | -| + | Adds two private input types together | Types must be private input | -| - | Subtracts two private input types together | Types must be private input | -| \* | Multiplies two private input types together | Types must be private input | -| / | Divides two private input types together | Types must be private input | -| ^ | XOR two private input types together | Types must be integer | -| & | AND two private input types together | Types must be integer | -| << | Left shift an integer by another integer amount | Types must be integer | -| >> | Right shift an integer by another integer amount | Types must be integer | -| ! 
| Bitwise not of a value | Type must be integer or boolean | -| < | returns a bool if one value is less than the other | Upper bound must have a known bit size | -| <= | returns a bool if one value is less than or equal to the other | Upper bound must have a known bit size | -| > | returns a bool if one value is more than the other | Upper bound must have a known bit size | -| >= | returns a bool if one value is more than or equal to the other | Upper bound must have a known bit size | -| == | returns a bool if one value is equal to the other | Both types must not be constants | -| != | returns a bool if one value is not equal to the other | Both types must not be constants | - -### Predicate Operators - -`<,<=, !=, == , >, >=` are known as predicate/comparison operations because they compare two values. -This differs from the operations such as `+` where the operands are used in _computation_. - -### Bitwise Operations Example - -```rust -fn main(x : Field) { - let y = x as u32; - let z = y & y; -} -``` - -`z` is implicitly constrained to be the result of `y & y`. The `&` operand is used to denote bitwise -`&`. - -> `x & x` would not compile as `x` is a `Field` and not an integer type. - -### Logical Operators - -Noir has no support for the logical operators `||` and `&&`. This is because encoding the -short-circuiting that these operators require can be inefficient for Noir's backend. Instead you can -use the bitwise operators `|` and `&` which operate indentically for booleans, just without the -short-circuiting. - -```rust -let my_val = 5; - -let mut flag = 1; -if (my_val > 6) | (my_val == 0) { - flag = 0; -} -constrain flag == 1; - -if (my_val != 10) & (my_val < 50) { - flag = 0; -} -constrain flag == 0; -``` - -### Shorthand operators - -Noir shorthand operators for most of the above operators, namely `+=, -=, *=, /=, %=, &=, |=, ^=, <<=`, and `>>=`. These allow for more concise syntax. For example: - -```rust -let mut i = 0; -i = i + 1; -``` - -could be written as: - -```rust -let mut i = 0; -i += 1; -``` diff --git a/docs/versioned_docs/version-v0.6.0/language_concepts/04_assert.md b/docs/versioned_docs/version-v0.6.0/language_concepts/04_assert.md deleted file mode 100644 index a25a946123d..00000000000 --- a/docs/versioned_docs/version-v0.6.0/language_concepts/04_assert.md +++ /dev/null @@ -1,34 +0,0 @@ ---- -title: Assert Function -description: - Learn about the assert function in Noir, which can be used to explicitly constrain the predicate or - comparison expression that follows to be true, and what happens if the expression is false at - runtime. -keywords: [Noir programming language, assert statement, predicate expression, comparison expression] ---- - -Noir includes a special `assert` function which will explicitly constrain the predicate/comparison -expression that follows to be true. If this expression is false at runtime, the program will fail to -be proven. - -### Example - -```rust -fn main(x : Field, y : Field) { - assert(x == y); -} -``` - -The above snippet compiles because `==` is a predicate operation. Conversely, the following will not -compile: - -```rust -// INCORRECT - -fn main(x : Field, y : Field) { - assert(x + y); -} -``` - -> The rationale behind this not compiling is due to ambiguity. It is not clear if the above should -> equate to `x + y == 0` or if it should check the truthiness of the result. 
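One way to make the intent explicit is to state the comparison directly, as in this minimal sketch:

```rust
fn main(x : Field, y : Field) {
    // Spell out the predicate instead of passing a bare arithmetic expression
    assert(x + y == 0);
}
```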
diff --git a/docs/versioned_docs/version-v0.6.0/language_concepts/05_constrain.md b/docs/versioned_docs/version-v0.6.0/language_concepts/05_constrain.md deleted file mode 100644 index 9ba1e260e8c..00000000000 --- a/docs/versioned_docs/version-v0.6.0/language_concepts/05_constrain.md +++ /dev/null @@ -1,40 +0,0 @@ ---- -title: Constrain Statements -description: - Learn about the constrain keyword in Noir, which can be used to explicitly constrain the predicate - or comparison expression that follows to be true, and what happens if the expression is false at - runtime. -keywords: - [Noir programming language, constrain statement, predicate expression, comparison expression] ---- - -:::danger - -In versions >=0.5.0 use the [`assert`](./04_assert.md) syntax. The `constrain` statement will be -maintained for some time for backwards compatibility but will be deprecated in the future. - -::: - -Noir includes a special keyword `constrain` which will explicitly constrain the predicate/comparison -expression that follows to be true. If this expression is false at runtime, the program will fail to -be proven. - -### Constrain statement example - -```rust -fn main(x : Field, y : Field) { - constrain x == y; -} -``` - -The above snippet compiles because `==` is a predicate operation. Conversely, the following will not -compile: - -```rust -fn main(x : Field, y : Field) { - constrain x + y; -} -``` - -> The rationale behind this not compiling is due to ambiguity. It is not clear if the above should -> equate to `x + y == 0` or if it should check the truthiness of the result. diff --git a/docs/versioned_docs/version-v0.6.0/language_concepts/06_generics.md b/docs/versioned_docs/version-v0.6.0/language_concepts/06_generics.md deleted file mode 100644 index 4d6c01fd797..00000000000 --- a/docs/versioned_docs/version-v0.6.0/language_concepts/06_generics.md +++ /dev/null @@ -1,116 +0,0 @@ ---- -title: Generics -description: - Learn how to use Generics in Noir -keywords: [Noir, Rust, generics, functions, structs] ---- - -# Generics - -Generics allow you to use the same functions with multiple different concrete data types. You can -read more about the concept of generics in the Rust documentation -[here](https://doc.rust-lang.org/book/ch10-01-syntax.html). - -Here is a trivial example showing the identity function that supports any type. In Rust, it is -common to refer to the most general type as `T`. We follow the same convention in Noir. - -```rust -fn id<T>(x: T) -> T { - x -} -``` - -## In Structs - -Generics are useful for specifying types in structs. For example, we can specify that a field in a -struct will be of a certain generic type. In this case `value` is of type `T`. - -```rust -struct RepeatedValue<T> { - value: T, - count: comptime Field, -} - -impl<T> RepeatedValue<T> { - fn new(value: T) -> Self { - Self { value, count: 1 } - } - - fn increment(mut repeated: Self) -> Self { - repeated.count += 1; - repeated - } - - fn print(self) { - for _i in 0 .. self.count { - dep::std::println(self.value); - } - } -} - -fn main() { - let mut repeated = RepeatedValue::new("Hello!"); - repeated = repeated.increment(); - repeated.print(); -} -``` - -The `print` function will print `Hello!` an arbitrary number of times, twice in this case. - -If we want to be generic over array lengths (which are type-level integers), we can use numeric -generics. Using these looks just like using regular generics, but these generics can resolve to -integers at compile-time, rather than resolving to types.
Here's an example of a struct that is -generic over the size of the array it contains internally: - -```rust -struct BigInt<N> { - limbs: [u32; N], -} - -impl<N> BigInt<N> { - // `N` is in scope of all methods in the impl - fn first(first: BigInt<N>, second: BigInt<N>) -> Self { - constrain first.limbs != second.limbs; - first - } - - fn second(first: BigInt<N>, second: Self) -> Self { - constrain first.limbs != second.limbs; - second - } -} -``` - -## Calling functions on generic parameters - -Unlike Rust, Noir does not have traits, so how can one translate the equivalent of a trait bound in -Rust into Noir? That is, how can we write a function that is generic over some type `T`, while also -requiring there is a function like `eq: fn(T, T) -> bool` that works on the type? - -The answer is that we can translate this by passing in the function manually. Here's an example of -implementing array equality in Noir: - -```rust -fn array_eq<T, N>(array1: [T; N], array2: [T; N], elem_eq: fn(T, T) -> bool) -> bool { - if array1.len() != array2.len() { - false - } else { - let mut result = true; - for i in 0 .. array1.len() { - result &= elem_eq(array1[i], array2[i]); - } - result - } -} - -fn main() { - constrain array_eq([1, 2, 3], [1, 2, 3], |a, b| a == b); - - // We can use array_eq even for arrays of structs, as long as we have - // an equality function for these structs we can pass in - let array = [MyStruct::new(), MyStruct::new()]; - constrain array_eq(array, array, MyStruct::eq); -} -``` - -You can see an example of generics in the tests -[here](https://github.com/noir-lang/noir/blob/master/tooling/nargo_cli/tests/execution_success/generics/src/main.nr). diff --git a/docs/versioned_docs/version-v0.6.0/language_concepts/07_mutability.md b/docs/versioned_docs/version-v0.6.0/language_concepts/07_mutability.md deleted file mode 100644 index c8ccb4f8b9f..00000000000 --- a/docs/versioned_docs/version-v0.6.0/language_concepts/07_mutability.md +++ /dev/null @@ -1,118 +0,0 @@ ---- -title: Mutability -description: - Learn about mutable variables, constants, and globals in Noir programming language. Discover how - to declare, modify, and use them in your programs. -keywords: [noir programming language, mutability in noir, mutable variables, constants, globals] ---- - -# Mutability - -Variables in noir can be declared mutable via the `mut` keyword. Mutable variables can be reassigned -to via an assignment expression. - -```rust -let x = 2; -x = 3; // error: x must be mutable to be assigned to - -let mut y = 3; -let y = 4; // OK -``` - -The `mut` modifier can also apply to patterns: - -```rust -let (a, mut b) = (1, 2); -a = 11; // error: a must be mutable to be assigned to -b = 12; // OK - -let mut (c, d) = (3, 4); -c = 13; // OK -d = 14; // OK - -// etc. -let MyStruct { x: mut y } = MyStruct { x: a }; -// y is now in scope -``` - -Note that mutability in noir is local and everything is passed by value, so if a called function -mutates its parameters then the parent function will keep the old value of the parameters. - -```rust -fn main() -> Field { - let x = 3; - helper(x); - x // x is still 3 -} - -fn helper(mut x: Field) { - x = 4; -} -``` - -## Comptime values - -Comptime values are values that are known at compile-time. This is different to a witness -which changes per proof. If a comptime value that is being used in your program is changed, then your -circuit will also change.
- -Below we show how to declare a comptime value: - -```rust -fn main() { - let a: comptime Field = 5; - - // `comptime Field` can also be inferred: - let a = 5; -} -``` - -Note that variables declared as mutable may not be comptime: - -```rust -fn main() { - // error: Cannot mark a comptime type as mutable - let mut a: comptime Field = 5; - - // a inferred as a private Field here - let mut a = 5; -} -``` - -## Globals - -Noir also supports global variables. However, they must be compile-time variables. If `comptime` is -not explicitly written in the type annotation the compiler will implicitly specify the declaration -as compile-time. They can then be used like any other compile-time variable inside functions. The -global type can also be inferred by the compiler entirely. Globals can also be used to specify array -annotations for function parameters and can be imported from submodules. - -```rust -global N: Field = 5; // Same as `global N: comptime Field = 5` - -fn main(x : Field, y : [Field; N]) { - let res = x * N; - - constrain res == y[0]; - - let res2 = x * mysubmodule::N; - constrain res != res2; -} - -mod mysubmodule { - use dep::std; - - global N: Field = 10; - - fn my_helper() -> comptime Field { - let x = N; - x - } -} -``` - -## Why only local mutability? - -Witnesses in a proving system are immutable in nature. Noir aims to _closely_ mirror this setting -without applying additional overhead to the user. Modeling a mutable reference is not as -straightforward as on conventional architectures and would incur some possibly unexpected overhead. diff --git a/docs/versioned_docs/version-v0.6.0/language_concepts/08_comments.md b/docs/versioned_docs/version-v0.6.0/language_concepts/08_comments.md deleted file mode 100644 index 5b1d9fa38f2..00000000000 --- a/docs/versioned_docs/version-v0.6.0/language_concepts/08_comments.md +++ /dev/null @@ -1,27 +0,0 @@ ---- -title: Comments -description: - Learn how to write comments in Noir programming language. A comment is a line of code that is - ignored by the compiler, but it can be read by programmers. Single-line and multi-line comments - are supported in Noir. -keywords: [Noir programming language, comments, single-line comments, multi-line comments] ---- - -A comment is a line in your codebase which the compiler ignores, however it can be read by -programmers. - -Here is a single line comment: - -```rust -// This is a comment and is ignored -``` - -`//` is used to tell the compiler to ignore the rest of the line. - -Noir doesn't have multi-line comments, but you can emulate them via using `//` on each line - -```rust -// This is a multi line -// comment, that is ignored by -// the compiler -``` diff --git a/docs/versioned_docs/version-v0.6.0/language_concepts/09_distinct.md b/docs/versioned_docs/version-v0.6.0/language_concepts/09_distinct.md deleted file mode 100644 index 03759d4bb4a..00000000000 --- a/docs/versioned_docs/version-v0.6.0/language_concepts/09_distinct.md +++ /dev/null @@ -1,63 +0,0 @@ ---- -title: Distinct Witnesses ---- - -The `distinct` keyword prevents repetitions of witness indices in the program's ABI. This ensures -that the witnesses being returned as public inputs are all unique. - -The `distinct` keyword is only used for return values on program entry points (usually the `main()` -function). - -When using `disctinct` and `pub` simultaneously, `distinct` comes first. See the example below. - -You can read more about the problem this solves -[here](https://github.com/noir-lang/noir/issues/1183). 
- -## Example - -Without the `distinct` keyword, the following program - -```rust -fn main(x : pub Field, y : pub Field) -> pub [Field; 4] { - let a = 1; - let b = 1; - [x + 1, y, a, b] -} -``` - -compiles to - -```json -{ - //... - "abi": { - //... - "param_witnesses": { "x": [1], "y": [2] }, - "return_witnesses": [3, 2, 4, 4] - } -} -``` - -Whereas (with the `distinct` keyword) - -```rust -fn main(x : pub Field, y : pub Field) -> distinct pub [Field; 4] { - let a = 1; - let b = 1; - [x + 1, y, a, b] -} -``` - -compiles to - -```json -{ - //... - "abi": { - //... - "param_witnesses": { "x": [1], "y": [2] }, - //... - "return_witnesses": [3, 4, 5, 6] - } -} -``` diff --git a/docs/versioned_docs/version-v0.6.0/modules_packages_crates/crates_and_packages.md b/docs/versioned_docs/version-v0.6.0/modules_packages_crates/crates_and_packages.md deleted file mode 100644 index 34f28a71148..00000000000 --- a/docs/versioned_docs/version-v0.6.0/modules_packages_crates/crates_and_packages.md +++ /dev/null @@ -1,35 +0,0 @@ ---- -title: Crates and Packages -description: - Learn how to use Crates and Packages in your Noir project -keywords: [Nargo, dependencies, package management, crates, package] ---- - -## Crates - -A crate is the smallest amount of code that the Noir compiler considers at a time. -Crates can contain modules, and the modules may be defined in other files that get compiled with the crate, as we’ll see in the coming sections. - -### Crate Types - -A Noir crate can come in one of two forms: a binary crate or a library crate. - -_Binary crates_ are programs which you can compile to an ACIR circuit which you can then create proofs against. Each must have a function called `main` that defines the ACIR circuit which is to be proved. - -_Library crates_ don't have a `main` function and they don't compile down to ACIR. Instead they define functionality intended to be shared with multiple projects, and eventually included in a binary crate. - -### Crate Root - -Every crate has a root, which is the source file that the compiler starts, this is also known as the root module. The Noir compiler does not enforce any conditions on the name of the file which is the crate root, however if you are compiling via Nargo the crate root must be called `lib.nr` or `main.nr` for library or binary crates respectively. - -## Packages - -A Nargo _package_ is a collection of one of more crates that provides a set of functionality. A package must include a Nargo.toml file. - -A package _must_ contain either a library or a binary crate, but not both. - -### Differences from Cargo Packages - -One notable difference between Rust's Cargo and Noir's Nargo is that while Cargo allows a package to contain an unlimited number of binary crates and a single library crate, Nargo currently only allows a package to contain a single crate. - -In future this restriction may be lifted to allow a Nargo package to contain both a binary and library crate or multiple binary crates. diff --git a/docs/versioned_docs/version-v0.6.0/modules_packages_crates/dependencies.md b/docs/versioned_docs/version-v0.6.0/modules_packages_crates/dependencies.md deleted file mode 100644 index f3b40eb849b..00000000000 --- a/docs/versioned_docs/version-v0.6.0/modules_packages_crates/dependencies.md +++ /dev/null @@ -1,85 +0,0 @@ ---- -title: Managing Dependencies -description: - Learn how to specify and manage dependencies in Nargo, allowing you to upload packages to GitHub - and use them easily in your project. 
-keywords: [Nargo, dependencies, GitHub, package management, versioning] ---- - -Nargo allows you to upload packages to GitHub and use them as dependencies. - -## Specifying a dependency - -Specifying a dependency requires a tag to a specific commit and the git url to the url containing -the package. - -Currently, there are no requirements on the tag contents. If requirements are added, it would follow -semver 2.0 guidelines. - -> Note: Without a `tag` , there would be no versioning and dependencies would change each time you -> compile your project. - -For example, to add the [ecrecover-noir library](https://github.com/colinnielsen/ecrecover-noir) to your project, add it to `Nargo.toml`: - -```toml -# Nargo.toml - -[dependencies] -ecrecover = {tag = "v0.2.0", git = "https://github.com/colinnielsen/ecrecover-noir"} -``` - -## Specifying a local dependency - -You can also specify dependencies that are local to your machine. - -For example, this file structure has a library and binary crate - -``` -├── binary_crate -│   ├── Nargo.toml -│   └── src -│   └── main.nr -└── liba - ├── Nargo.toml - └── src - └── lib.nr -``` - -Inside of the binary crate, you can specify: - -```toml -# Nargo.toml - -[dependencies] -libA = { path = "../liba" } -``` - -## Importing dependencies - -You can import a dependency to a Noir file using the following syntax. For example, to import the -ecrecover-noir library and local liba referenced above: - -```rust -use dep::ecrecover; -use dep::libA; -``` - -You can also import only the specific parts of dependency that you want to use. For example, -demonstrated in the -[elliptic curve example](../standard_library/cryptographic_primitives/ec_primitives#examples), you -can import just the `Point` and the `Curve` that you want to use. - -```rust -use dep::std::ec::tecurve::affine::Curve; -use dep::std::ec::tecurve::affine::Point; -``` - -## Available Libraries - -Noir does not currently have an official package manager. You can find a list of available Noir libraries in the [awesome-noir repo here](https://github.com/noir-lang/awesome-noir#libraries). - -Some libraries that are available today include: - -- BigInt -- "`ecrecover`" -- sparse merkle tree verifier diff --git a/docs/versioned_docs/version-v0.6.0/modules_packages_crates/modules.md b/docs/versioned_docs/version-v0.6.0/modules_packages_crates/modules.md deleted file mode 100644 index e429b336511..00000000000 --- a/docs/versioned_docs/version-v0.6.0/modules_packages_crates/modules.md +++ /dev/null @@ -1,104 +0,0 @@ ---- -title: Understanding Modules -description: - Learn how to organize your files using modules in Noir, following the same convention as Rust's - module system. Examples included. -keywords: [Noir, Rust, modules, organizing files, sub-modules] ---- - -# Modules - -Noir's module system follows the same convention as the _newer_ version of Rust's module system. - -## Purpose of Modules - -Modules are used to organise files. Without modules all of your code would need to live in a single -file. In Noir, the compiler does not automatically scan all of your files to detect modules. This -must be done explicitly by the developer. - -## Examples - -### Importing a module in the crate root - -Filename : `src/main.nr` - -```rust -mod foo; - -fn main() { - foo::hello_world(); -} -``` - -Filename : `src/foo.nr` - -```rust -fn from_foo() {} -``` - -In the above snippet, the crate root is the `src/main.nr` file. The compiler sees the module -declaration `mod foo` which prompts it to look for a foo.nr file. 
- -Visually this module hierarchy looks like the following : - -``` -crate - ├── main - │ - └── foo - └── from_foo - -``` - -### Importing a module throughout the tree -All modules are accessible from the ``crate::`` namespace. - -``` -crate - ├── bar - ├── foo - └── main - -``` -In the above snippet, if ``bar`` would like to use functions in ``foo``, it can do so by ``use crate::foo::function_name``. - -### Sub-modules - -Filename : `src/main.nr` - -```rust -mod foo; - -fn main() { - foo::from_foo(); -} -``` - -Filename : `src/foo.nr` - -```rust -mod bar; -fn from_foo() {} -``` - -Filename : `src/foo/bar.nr` - -```rust -fn from_bar() {} -``` - -In the above snippet, we have added an extra module to the module tree; `bar`. `bar` is a submodule -of `foo` hence we declare bar in `foo.nr` with `mod bar`. Since `foo` is not the crate root, the -compiler looks for the file associated with the `bar` module in `src/foo/bar.nr` - -Visually the module hierarchy looks as follows: - -``` -crate - ├── main - │ - └── foo - ├── from_foo - └── bar - └── from_bar -``` diff --git a/docs/versioned_docs/version-v0.6.0/nargo/01_commands.md b/docs/versioned_docs/version-v0.6.0/nargo/01_commands.md deleted file mode 100644 index f9d9ddb77ff..00000000000 --- a/docs/versioned_docs/version-v0.6.0/nargo/01_commands.md +++ /dev/null @@ -1,130 +0,0 @@ ---- -title: Commands -description: - Noir CLI Commands for Noir Prover and Verifier to create, execute, prove and verify programs, - generate Solidity verifier smart contract and compile into JSON file containing ACIR - representation and ABI of circuit. -keywords: - [ - Nargo, - Noir CLI, - Noir Prover, - Noir Verifier, - generate Solidity verifier, - compile JSON file, - ACIR representation, - ABI of circuit, - TypeScript, - ] ---- - -## General options - -``` -Options: - -s, --show-ssa Emit debug information for the intermediate SSA IR - -d, --deny-warnings Quit execution when warnings are emitted - --show-output Display output of `println` statements during tests - -h, --help Print help -``` - -## `nargo help [subcommand]` - -Prints the list of available commands or specific information of a subcommand. - -_Arguments_ - -- `<subcommand>` - The subcommand whose help message to display - -## `nargo check` - -Generate the `Prover.toml` and `Verifier.toml` files for specifying prover and verifier in/output -values of the Noir program respectively. - -## `nargo codegen-verifier` - -Generate a Solidity verifier smart contract for the program. - -## `nargo compile <circuit_name>` - -Compile the program into a JSON build artifact file containing the ACIR representation and the ABI -of the circuit. This build artifact can then be used to generate and verify proofs. - -_Arguments_ - -- `<circuit_name>` - The name of the circuit file - -_Options_ - -- `-c, --contracts` - Compile each contract function used within the program - -## `nargo new <package_name> [path]` - -Creates a new Noir project. - -_Arguments_ - -- `<package_name>` - Name of the package -- `[path]` - The path to save the new project - -## `nargo execute [witness_name]` - -Runs the Noir program and prints its return value. - -_Arguments_ - -- `[witness_name]` - The name of the witness - -_Usage_ - -The inputs to the circuit are read from the `Prover.toml` file generated by `nargo check`, which -must be filled in. - -To save the witness to file, run the command with a value for the `witness-name` argument. A -`.tr` file will then be saved in the `build` folder. - -> **Info:** The `.tr` file is the witness file.
The witness file can be considered as program inputs -> parsed for your program's ACIR. -> -> This file can be passed along with the circuit's ACIR into a TypeScript project for proving and -> verification. See the [TypeScript](../typescript#proving-and-verifying-externally-compiled-files) -> section to learn more. - -## `nargo prove <proof_name>` - -Creates a proof for the program. - -_Arguments_ - -- `<proof_name>` - The name of the proof - -_Options_ - -- `-v, --verify` - Verify proof after proving - -## `nargo verify <proof>` - -Given a proof and a program, verify whether the proof is valid. - -_Arguments_ - -- `<proof>` - The proof to verify - -## `nargo test <test_name>` - -Nargo will automatically compile and run any functions which have the decorator `#[test]` on them if -you run `nargo test`. - -See an example on the [testing page](./testing). - -_Arguments_ - -- `<test_name>` - a pattern to indicate to only run tests with names containing the pattern - -## `nargo gates` - -Counts the occurrences of different gates in the circuit - -## `nargo print-acir` - -Print a compiled circuit to stdout such that the ACIR can be inspected. diff --git a/docs/versioned_docs/version-v0.6.0/nargo/02_testing.md b/docs/versioned_docs/version-v0.6.0/nargo/02_testing.md deleted file mode 100644 index 73f91c72bfd..00000000000 --- a/docs/versioned_docs/version-v0.6.0/nargo/02_testing.md +++ /dev/null @@ -1,32 +0,0 @@ ---- -title: Testing in Noir -description: Learn how to use Nargo to test your Noir program in a quick and easy way -keywords: [Nargo, testing, Noir, compile, test] ---- - -You can test your Noir programs using Noir circuits. - -Nargo will automatically compile and run any functions which have the decorator `#[test]` on them if -you run `nargo test`. - -For example if you have a program like: - -```rust -fn add(x: u64, y: u64) -> u64 { - x + y -} -#[test] -fn test_add() { - constrain add(2,2) == 4; - constrain add(0,1) == 1; - constrain add(1,0) == 1; -} -``` - -Running `nargo test` will test that the `test_add` function can be executed while satisfying all -the constraints, which allows you to test that `add` returns the expected values. Test functions can't -have any arguments currently. - -This is much faster compared to testing in Typescript but the only downside is that you can't -explicitly test that a certain set of inputs is invalid, i.e. you can't say that you want -add(2^64-1, 2^64-1) to fail. diff --git a/docs/versioned_docs/version-v0.6.0/nargo/03_solidity_verifier.md b/docs/versioned_docs/version-v0.6.0/nargo/03_solidity_verifier.md deleted file mode 100644 index 69a5607f1b2..00000000000 --- a/docs/versioned_docs/version-v0.6.0/nargo/03_solidity_verifier.md +++ /dev/null @@ -1,116 +0,0 @@ ---- -title: Solidity Verifier -description: - Learn how to run the verifier as a smart contract on the blockchain. Compile a Solidity verifier - contract for your Noir program and deploy it on any EVM blockchain acting as a verifier smart - contract. Read more to find out! -keywords: - [ - solidity verifier, - smart contract, - blockchain, - compiler, - plonk_vk.sol, - EVM blockchain, - verifying Noir programs, - proving backend, - Barretenberg, - ] ---- - -For certain applications, it may be desirable to run the verifier as a smart contract instead of on -a local machine. - -Compile a Solidity verifier contract for your Noir program by running: - -```sh -nargo codegen-verifier -``` - -A new `contract` folder would then be generated in your project directory, containing the Solidity -file `plonk_vk.sol`.
It can be deployed on any EVM blockchain acting as a verifier smart contract. - -> **Note:** It is possible to compile verifier contracts of Noir programs for other smart contract -> platforms as long as the proving backend supplies an implementation. -> -> Barretenberg, the default proving backend for Nargo, supports compilation of verifier contracts in -> Solidity only for the time being. - -## Verify - -To verify a proof using the Solidity verifier contract, call the `verify` function with the -following signature: - -```solidity -function verify(bytes calldata _proof, bytes32[] calldata _publicInputs) external view returns (bool) -``` - -### Public Inputs - -:::tip - -A circuit doesn't have the concept of a return value. Return values are just syntactic sugar in -Noir. - -Under the hood, the return value is passed as an input to the circuit and is checked at the end of -the circuit program. - -::: - -The verifier contract uses the output (return) value of a Noir program as a public input. So if you -have the following function - -```rust -fn main( - // Public inputs - pubkey_x: pub Field, - pubkey_y: pub Field, - // Private inputs - priv_key: Field, -) -> pub Field -``` - -then `verify` in `plonk_vk.sol` will expect 3 public inputs. Passing two inputs will result in an -error like `Reason: PUBLIC_INPUT_COUNT_INVALID(3, 2)`. - -In this case the 3 inputs to `verify` would be ordered as `[pubkey_x, pubkey_y, return]`. - -#### Struct inputs - -Consider the following program: - -```rust -struct Type1 { - val1: Field, - val2: Field, -} - -struct Nested { - t1: Type1, - is_true: bool, -} - -fn main(x: pub Field, nested: pub Nested, y: pub Field) { - //... -} -``` - -Structs will be flattened so that the array of inputs is 1-dimensional array. The order of these inputs would be flattened to: `[x, nested.t1.val1, nested.t1.val2, nested.is_true, y]` - -## Noir for EVM chains - -You can currently deploy the Solidity verifier contracts to most EVM compatible chains. EVM chains that have been tested and are known to work include: - -- Optimism -- Arbitrum -- Polygon PoS -- Scroll -- Celo - -Other EVM chains should work, but have not been tested directly by our team. If you test any other chains, please open a PR on this page to update the list. See [this doc](https://github.com/noir-lang/noir-starter/tree/main/with-foundry#testing-on-chain) for more info about testing verifier contracts on different EVM chains. - -### Unsupported chains - -Unfortunately not all "EVM" chains are supported. - -**zkSync** and the **Polygon zkEVM** do *not* currently support proof verification via Solidity verifier contracts. They are missing the bn256 precompile contract that the verifier contract requires. Once these chains support this precompile, they may work. diff --git a/docs/versioned_docs/version-v0.6.0/standard_library/array_methods.md b/docs/versioned_docs/version-v0.6.0/standard_library/array_methods.md deleted file mode 100644 index 701590ccf54..00000000000 --- a/docs/versioned_docs/version-v0.6.0/standard_library/array_methods.md +++ /dev/null @@ -1,180 +0,0 @@ ---- -title: Array Methods -description: - Learn about the commonly used methods available for arrays in Noir, including len, sort, fold, - reduce, all, and any. 
-keywords: [rust, array, methods, len, sort, fold, reduce, all, any] ---- - -# Array - -For convenience, the STD provides some ready-to-use, common methods for arrays[^migrationnote]: - -## len - -Returns the length of an array - -```rust -fn len(_array: [T; N]) -> comptime Field -``` - -example - -```rust -fn main() { - let array = [42, 42]; - constrain array.len() == 2; -} -``` - -## sort - -Returns a new sorted array. The original array remains untouched. Notice that this function will -only work for arrays of fields or integers, not for any arbitrary type. This is because the sorting -logic it uses internally is optimized specifically for these values. If you need a sort function to -sort any type, you should use the function `sort_via` described below. - -```rust -fn sort(_array: [T; N]) -> [T; N] -``` - -example - -```rust -fn main() { - let arr = [42, 32] - let sorted = arr.sort(); - constrain sorted == [32, 42]; -} -``` - -## sort_via - -Sorts the array with a custom comparison function - -```rust -fn sort_via(mut a: [T; N], ordering: fn(T, T) -> bool) -> [T; N] -``` - -example - -```rust -fn main() { - let arr = [42, 32] - let sorted_ascending = arr.sort_via(|a, b| a < b); - constrain sorted_ascending == [32, 42]; // verifies - - let sorted_descending = arr.sort_via(|a, b| a > b); - constrain sorted_descending == [32, 42]; // does not verify -} -``` - -## map - -Applies a function to each element of the array, returning a new array containing the mapped elements. - -```rust -fn map(f: fn(T) -> U) -> [U; N] -``` - -example - -```rust -let a = [1, 2, 3]; -let b = a.map(|a| a * 2) // b is now [2, 4, 6] -``` - -## fold - -Applies a function to each element of the array, returning the final accumulated value. The first -parameter is the initial value. - -```rust -fn fold(mut accumulator: U, f: fn(U, T) -> U) -> U -``` - -This is a left fold, so the given function will be applied to the accumulator and first element of -the array, then the second, and so on. For a given call the expected result would be equivalent to: - -```rust -let a1 = [1]; -let a2 = [1, 2]; -let a3 = [1, 2, 3]; - -let f = |a, b| a - b; -a1.fold(10, f) //=> f(10, 1) -a2.fold(10, f) //=> f(f(10, 1), 2) -a3.fold(10, f) //=> f(f(f(10, 1), 2), 3) -``` - -example: - -```rust - -fn main() { - let arr = [2,2,2,2,2] - let folded = arr.fold(0, |a, b| a + b); - constrain folded == 10; -} - -``` - -## reduce - -Same as fold, but uses the first element as starting element. - -```rust -fn reduce(f: fn(T, T) -> T) -> T -``` - -example: - -```rust -fn main() { - let arr = [2,2,2,2,2] - let reduced = arr.reduce(|a, b| a + b); - constrain reduced == 10; -} -``` - -## all - -Returns true if all the elements satisfy the given predicate - -```rust -fn all(predicate: fn(T) -> bool) -> bool -``` - -example: - -```rust -fn main() { - let arr = [2,2,2,2,2] - let all = arr.all(|a| a == 2); - constrain all; -} -``` - -## any - -Returns true if any of the elements satisfy the given predicate - -```rust -fn any(predicate: fn(T) -> bool) -> bool -``` - -example: - -```rust -fn main() { - let arr = [2,2,2,2,5] - let any = arr.any(|a| a == 5); - constrain any; -} - -``` - -[^migrationnote]: - Migration Note: These methods were previously free functions, called via `std::array::len()`. - For the sake of ease of use and readability, these functions are now methods and the old syntax - for them is now deprecated. 
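Because these are methods, they can also be chained. A minimal sketch combining `map` and `fold` using the signatures shown above:

```rust
fn main() {
    let arr = [1, 2, 3, 4];
    // double each element, then sum the doubled values
    let doubled_sum = arr.map(|a| a * 2).fold(0, |acc, x| acc + x);
    constrain doubled_sum == 20;
}
```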
diff --git a/docs/versioned_docs/version-v0.6.0/standard_library/black_box_fns.md b/docs/versioned_docs/version-v0.6.0/standard_library/black_box_fns.md deleted file mode 100644 index 3063e71c147..00000000000 --- a/docs/versioned_docs/version-v0.6.0/standard_library/black_box_fns.md +++ /dev/null @@ -1,44 +0,0 @@ ---- -title: Black Box Functions -description: Black box functions are functions in Noir that rely on backends implementing support for specialized constraints. -keywords: [noir, black box functions] ---- - -Black box functions are functions in Noir that rely on backends implementing support for specialized constraints. This makes certain zk-snark unfriendly computations cheaper than if they were implemented in Noir. - -:::warning - -It is likely that not all backends will support a particular black box function. - -::: - -Because it is not guaranteed that all backends will support black box functions, it is possible that certain Noir programs won't compile against a particular backend if they use an unsupported black box function. It is possible to fallback to less efficient implementations written in Noir/ACIR in some cases. - -Black box functions are specified with the `#[foreign(black_box_fn)]` attribute. For example, the SHA256 function in the Noir [source code](https://github.com/noir-lang/noir/blob/v0.5.1/noir_stdlib/src/hash.nr) looks like: - -```rust -#[foreign(sha256)] -fn sha256(_input : [u8; N]) -> [u8; 32] {} -``` - -## Function list - -Here is a list of the current black box functions that are supported by UltraPlonk: - -- AES -- [SHA256](./cryptographic_primitives/hashes#sha256) -- [Schnorr signature verification](./cryptographic_primitives/schnorr) -- [Blake2s](./cryptographic_primitives/hashes#blake2s) -- [Pedersen](./cryptographic_primitives/hashes#pedersen) -- [HashToField128Security](./cryptographic_primitives/hashes#hash_to_field) -- [ECDSA signature verification](./cryptographic_primitives/ecdsa_secp256k1) -- [Fixed base scalar multiplication](./cryptographic_primitives/scalar) -- [Compute merkle root](./merkle_trees#compute_merkle_root) -- AND -- XOR -- RANGE -- [Keccak256](./cryptographic_primitives/hashes#keccak256) - -Most black box functions are included as part of the Noir standard library, however `AND`, `XOR` and `RANGE` are used as part of the Noir language syntax. For instance, using the bitwise operator `&` will invoke the `AND` black box function. To ensure compatibility across backends, the ACVM has fallback implementations of `AND`, `XOR` and `RANGE` defined in its standard library which it can seamlessly fallback to if the backend doesn't support them. - -You can view the black box functions defined in the ACVM code [here](https://github.com/noir-lang/acvm/blob/acir-v0.12.0/acir/src/circuit/black_box_functions.rs). 
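From the caller's side, invoking a black box function looks like any other standard library call; the backend supplies the underlying constraints. A minimal sketch using `std::hash::sha256` as documented in the hashes section:

```rust
use dep::std;

fn main(preimage : [u8; 4]) -> pub [u8; 32] {
    // This call lowers to the SHA256 black box function,
    // so the proving backend provides the actual constraints.
    std::hash::sha256(preimage)
}
```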
diff --git a/docs/versioned_docs/version-v0.6.0/standard_library/cryptographic_primitives/00_hashes.mdx b/docs/versioned_docs/version-v0.6.0/standard_library/cryptographic_primitives/00_hashes.mdx deleted file mode 100644 index c373f10ca6a..00000000000 --- a/docs/versioned_docs/version-v0.6.0/standard_library/cryptographic_primitives/00_hashes.mdx +++ /dev/null @@ -1,149 +0,0 @@ ---- -title: Hash methods -description: - Learn about the cryptographic primitives ready to use for any Noir project, including sha256, blake2s, pedersen, mimc_bn254 and mimc -keywords: - [ - cryptographic primitives, - Noir project, - sha256, - blake2s, - pedersen, - mimc_bn254, - mimc, - hash - ] ---- - -import BlackBoxInfo from './common/\_blackbox.mdx'; - -## sha256 - -Given an array of bytes, returns the resulting sha256 hash. - -```rust -fn sha256(_input : [u8]) -> [u8; 32] -``` - -example: - -```rust -fn main() { - let x = [163, 117, 178, 149] // some random bytes - let hash = std::hash::sha256(x); -} -``` - - - -## blake2s - -Given an array of bytes, returns an array with the Blake2 hash - -```rust -fn blake2s(_input : [u8]) -> [u8; 32] -``` - -example: - -```rust -fn main() { - let x = [163, 117, 178, 149] // some random bytes - let hash = std::hash::blake2s(x); -} -``` - - - -## pedersen - -Given an array of Fields, returns the Pedersen hash. - -```rust -fn pedersen(_input : [Field]) -> [Field; 2] -``` - -example: - -```rust -fn main() { - let x = [163, 117, 178, 149] // some random bytes - let hash = std::hash::pedersen(x); -} -``` - - - -## keccak256 - -Given an array of bytes (`u8`), returns the resulting keccak hash as an array of 32 bytes (`[u8; 32]`). - -```rust -fn keccak256(_input : [u8; N]) -> [u8; 32] -``` - -example: - -```rust -fn main() { - let x = [163, 117, 178, 149] // some random bytes - let hash = std::hash::keccak256(x); -} -``` - - - -## poseidon - -Given an array of Fields, returns a new Field with the Poseidon Hash. Mind that you need to specify how many inputs are there to your Poseidon function. - -```rust -// example for hash_1, hash_2 accepts an array of length 2, etc -fn hash_1(input: [Field; 1]) -> Field -``` - -example: - -```rust -fn main() -{ - let hash1 = std::hash::poseidon::bn254::hash_2([1, 2]); - constrain hash1 == 0x115cc0f5e7d690413df64c6b9662e9cf2a3617f2743245519e19607a4417189a; -} -``` - -## mimc_bn254 and mimc - -`mimc_bn254` is `mimc`, but with hardcoded parameters for the BN254 curve. You can use it by -providing an array of Fields, and it returns a Field with the hash. You can use the `mimc` method if -you're willing to input your own constants: - -```rust -fn mimc(x: Field, k: Field, constants: [Field; N], exp : Field) -> Field -``` - -otherwise, use the `mimc_bn254` method: - -```rust -fn mimc_bn254(array: [Field; N]) -> Field -``` - -example: - -```rust - -fn main() { - let x = [163, 117, 178, 149] // some random bytes - let hash = std::hash::mimc_bn254(x); -} -``` - -## hash_to_field - -```rust -fn hash_to_field(_input : [Field; N]) -> Field {} -``` - -Calculates the `blake2s` hash of the inputs and returns the hash modulo the field modulus to return a value which can be represented as a `Field`. 
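A usage sketch for `hash_to_field`, assuming it is exposed under `std::hash` like the other functions on this page:

```rust
use dep::std;

fn main() {
    // assumption: hash_to_field lives under std::hash alongside the other hash methods
    let inputs = [1, 2, 3];
    // blake2s-hash the inputs, then reduce the digest modulo the field modulus
    let h = std::hash::hash_to_field(inputs);
    std::println(h);
}
```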
- - diff --git a/docs/versioned_docs/version-v0.6.0/standard_library/cryptographic_primitives/01_scalar.mdx b/docs/versioned_docs/version-v0.6.0/standard_library/cryptographic_primitives/01_scalar.mdx deleted file mode 100644 index 62265cddb1e..00000000000 --- a/docs/versioned_docs/version-v0.6.0/standard_library/cryptographic_primitives/01_scalar.mdx +++ /dev/null @@ -1,33 +0,0 @@ ---- -title: Scalar multiplication -description: - See how you can perform scalar multiplications over a fixed base in Noir -keywords: - [ - cryptographic primitives, - Noir project, - scalar multiplication, - ] ---- - -import BlackBoxInfo from './common/\_blackbox.mdx'; - -## scalar_mul::fixed_base - -Performs scalar multiplication over the embedded curve whose coordinates are defined by the -configured noir field. For the BN254 scalar field, this is BabyJubJub or Grumpkin. - -```rust -fn fixed_base(_input : Field) -> [Field; 2] -``` - -example - -```rust -fn main(x : Field) { - let scal = std::scalar_mul::fixed_base(x); - std::println(scal); -} -``` - - diff --git a/docs/versioned_docs/version-v0.6.0/standard_library/cryptographic_primitives/02_schnorr.mdx b/docs/versioned_docs/version-v0.6.0/standard_library/cryptographic_primitives/02_schnorr.mdx deleted file mode 100644 index c2c6f3ae19a..00000000000 --- a/docs/versioned_docs/version-v0.6.0/standard_library/cryptographic_primitives/02_schnorr.mdx +++ /dev/null @@ -1,37 +0,0 @@ ---- -title: Schnorr Signatures -description: Learn how you can verify Schnorr signatures using Noir -keywords: [cryptographic primitives, Noir project, schnorr, signatures] ---- - -import BlackBoxInfo from './common/\_blackbox.mdx'; - -## schnorr::verify_signature - -Verifier for Schnorr signatures over the embedded curve (for BN254 it is Grumpkin). - -```rust -fn verify_signature(_public_key_x: Field, _public_key_y: Field, _signature: [u8; 64], _message: [u8]) -> Field -``` - -where `_signature` can be generated like so using the npm package -[@noir-lang/barretenberg](https://www.npmjs.com/package/@noir-lang/barretenberg) - -```js -const { BarretenbergWasm } = require('@noir-lang/barretenberg/dest/wasm'); -const { Schnorr } = require('@noir-lang/barretenberg/dest/crypto/schnorr'); - -... - -const barretenberg = await BarretenbergWasm.new(); -const schnorr = new Schnorr(barretenberg); -const pubKey = schnorr.computePublicKey(privateKey); -const message = ... -const signature = Array.from( - schnorr.constructSignature(hash, privateKey).toBuffer() -); - -... 
-``` - - diff --git a/docs/versioned_docs/version-v0.6.0/standard_library/cryptographic_primitives/03_ecdsa_secp256k1.mdx b/docs/versioned_docs/version-v0.6.0/standard_library/cryptographic_primitives/03_ecdsa_secp256k1.mdx deleted file mode 100644 index 88892712fb6..00000000000 --- a/docs/versioned_docs/version-v0.6.0/standard_library/cryptographic_primitives/03_ecdsa_secp256k1.mdx +++ /dev/null @@ -1,25 +0,0 @@ ---- -title: ECDSA Verification -description: - Learn about the cryptographic primitives regarding ECDSA over the secp256k1 curve -keywords: - [ - cryptographic primitives, - Noir project, - ecdsa, - secp256k1, - signatures, - ] ---- - -import BlackBoxInfo from './common/\_blackbox.mdx'; - -## ecdsa_secp256k1::verify_signature - -Verifier for ECDSA Secp256k1 signatures - -```rust -fn verify_signature(_public_key_x : [u8; 32], _public_key_y : [u8; 32], _signature: [u8; 64], _message: [u8]) -> Field -``` - - diff --git a/docs/versioned_docs/version-v0.6.0/standard_library/cryptographic_primitives/04_ec_primitives.md b/docs/versioned_docs/version-v0.6.0/standard_library/cryptographic_primitives/04_ec_primitives.md deleted file mode 100644 index 2d4ed0f8fae..00000000000 --- a/docs/versioned_docs/version-v0.6.0/standard_library/cryptographic_primitives/04_ec_primitives.md +++ /dev/null @@ -1,102 +0,0 @@ ---- -title: Elliptic Curve Primitives -keywords: [cryptographic primitives, Noir project] ---- - -Data structures and methods on them that allow you to carry out computations involving elliptic -curves over the (mathematical) field corresponding to `Field`. For the field currently at our -disposal, applications would involve a curve embedded in BN254, e.g. the -[Baby Jubjub curve](https://eips.ethereum.org/EIPS/eip-2494). - -## Data structures - -### Elliptic curve configurations - -(`std::ec::{tecurve,montcurve,swcurve}::{affine,curvegroup}::Curve`), i.e. the specific elliptic -curve you want to use, which would be specified using any one of the methods -`std::ec::{tecurve,montcurve,swcurve}::{affine,curvegroup}::new` which take the coefficients in the -defining equation together with a generator point as parameters. You can find more detail in the -comments in -[`noir_stdlib/src/ec.nr`](https://github.com/noir-lang/noir/blob/master/noir_stdlib/src/ec.nr), but -the gist of it is that the elliptic curves of interest are usually expressed in one of the standard -forms implemented here (Twisted Edwards, Montgomery and Short Weierstraß), and in addition to that, -you could choose to use `affine` coordinates (Cartesian coordinates - the usual (x,y) - possibly -together with a point at infinity) or `curvegroup` coordinates (some form of projective coordinates -requiring more coordinates but allowing for more efficient implementations of elliptic curve -operations). Conversions between all of these forms are provided, and under the hood these -conversions are done whenever an operation is more efficient in a different representation (or a -mixed coordinate representation is employed). - -### Points - -(`std::ec::{tecurve,montcurve,swcurve}::{affine,curvegroup}::Point`), i.e. points lying on the -elliptic curve. For a curve configuration `c` and a point `p`, it may be checked checked that `p` -does indeed lie on `c` by calling `c.contains(p1)`. - -## Methods - -(given a choice of curve representation, e.g. 
use `std::ec::tecurve::affine::Curve` and use -`std::ec::tecurve::affine::Point`) - -- The **zero element** is given by `Point::zero()`, and we can verify whether a point `p: Point` is - zero by calling `p.is_zero()`. -- **Equality**: Points `p1: Point` and `p2: Point` may be checked for equality by calling - `p1.eq(p2)`. -- **Addition**: For `c: Curve` and points `p1: Point` and `p2: Point` on the curve, adding these two - points is accomplished by calling `c.add(p1,p2)`. -- **Negation**: For a point `p: Point`, `p.negate()` is its negation. -- **Subtraction**: For `c` and `p1`, `p2` as above, subtracting `p2` from `p1` is accomplished by - calling `c.subtract(p1,p2)`. -- **Scalar multiplication**: For `c` as above, `p: Point` a point on the curve and `n: Field`, - scalar multiplication is given by `c.mul(n,p)`. If instead `n :: [u1; N]`, i.e. `n` is a bit - array, the `bit_mul` method may be used instead: `c.bit_mul(n,p)` -- **Multi-scalar multiplication**: For `c` as above and arrays `n: [Field; N]` and `p: [Point; N]`, - multi-scalar multiplication is given by `c.msm(n,p)`. -- **Coordinate representation conversions**: The `into_group` method converts a point or curve - configuration in the affine representation to one in the CurveGroup representation, and - `into_affine` goes in the other direction. -- **Curve representation conversions**: `tecurve` and `montcurve` curves and points are equivalent - and may be converted between one another by calling `into_montcurve` or `into_tecurve` on their - configurations or points. `swcurve` is more general and a curve c of one of the other two types - may be converted to this representation by calling `c.into_swcurve()`, whereas a point `p` lying - on the curve given by `c` may be mapped to its corresponding `swcurve` point by calling - `c.map_into_swcurve(p)`. -- **Map-to-curve methods**: The Elligator 2 method of mapping a field element `n: Field` into a - `tecurve` or `montcurve` with configuration `c` may be called as `c.elligator2_map(n)`. For all of - the curve configurations, the SWU map-to-curve method may be called as `c.swu_map(z,n)`, where - `z: Field` depends on `Field` and `c` and must be chosen by the user (the conditions it needs to - satisfy are specified in the comments - [here](https://github.com/noir-lang/noir/blob/master/noir_stdlib/src/ec.nr)). - -## Examples - -The -[ec_baby_jubjub test](https://github.com/noir-lang/noir/blob/master/crates/nargo_cli/tests/test_data/ec_baby_jubjub/src/main.nr) -illustrates all of the above primitives on various forms of the Baby Jubjub curve. A couple of more -interesting examples in Noir would be: - -Public-key cryptography: Given an elliptic curve and a 'base point' on it, determine the public key -from the private key. This is a matter of using scalar multiplication. In the case of Baby Jubjub, -for example, this code would do: - -```rust -use dep::std::ec::tecurve::affine::Curve; -use dep::std::ec::tecurve::affine::Point; - -fn bjj_pub_key(priv_key: Field) -> Point -{ - - let bjj = Curve::new(168700, 168696, G::new(995203441582195749578291179787384436505546430278305826713579947235728471134,5472060717959818805561601436314318772137091100104008585924551046643952123905)); - - let base_pt = Point::new(5299619240641551281634865583518297030282874472190772894086521144482721001553, 16950150798460657717958625567821834550301663161624707787222815936182638968203); - - bjj.mul(priv_key,base_pt) -} -``` - -This would come in handy in a Merkle proof. 
- -- EdDSA signature verification: This is a matter of combining these primitives with a suitable hash - function. See - [feat(stdlib): EdDSA sig verification noir#1136](https://github.com/noir-lang/noir/pull/1136) for - the case of Baby Jubjub and the Poseidon hash function. diff --git a/docs/versioned_docs/version-v0.6.0/standard_library/cryptographic_primitives/common/_blackbox.mdx b/docs/versioned_docs/version-v0.6.0/standard_library/cryptographic_primitives/common/_blackbox.mdx deleted file mode 100644 index 9fe9b48fbff..00000000000 --- a/docs/versioned_docs/version-v0.6.0/standard_library/cryptographic_primitives/common/_blackbox.mdx +++ /dev/null @@ -1,5 +0,0 @@ -:::info - -This is a black box function. Read [this section](../black_box_fns) to learn more about black box functions in Noir. - -::: \ No newline at end of file diff --git a/docs/versioned_docs/version-v0.6.0/standard_library/field_methods.md b/docs/versioned_docs/version-v0.6.0/standard_library/field_methods.md deleted file mode 100644 index 7cea9846102..00000000000 --- a/docs/versioned_docs/version-v0.6.0/standard_library/field_methods.md +++ /dev/null @@ -1,149 +0,0 @@ ---- -title: Field Methods -description: - Learn about common methods on Noir Field, including to_le_bits, to_le_bytes, to_le_radix, - to_be_radix, pow_32, etc, and see code examples. -keywords: - [ - Noir Field, - to_le_bits, - to_le_bytes, - to_le_radix, - to_be_radix, - pow_32, - Little Endian, - Big Endian, - Vector, - Exponent, - ] ---- - -After declaring a Field, you can use these common methods on it: - -## to_le_bits - -Transforms the field into an array of bits, Little Endian. - -```rust -fn to_le_bits(_x : Field, _bit_size: u32) -> [u1; N] -``` - -example: - -```rust -fn main() { - let field = 2 - let bits = field.to_le_bits(32); -} -``` - -## to_be_bits - -Transforms the field into an array of bits, Big Endian. - -```rust -fn to_be_bits(_x : Field, _bit_size: u32) -> [u1; N] -``` - -example: - -```rust -fn main() { - let field = 2 - let bits = field.to_be_bits(32); -} -``` - -## to_le_bytes - -Transforms into an array of bytes, Little Endian - -```rust -fn to_le_bytes(_x : Field, byte_size: u32) -> [u8] -``` - -example: - -```rust -fn main() { - let field = 2 - let bytes = field.to_le_bytes(4); -} -``` - -## to_be_bytes - -Transforms into an array of bytes, Big Endian - -```rust -fn to_be_bytes(_x : Field, byte_size: u32) -> [u8] -``` - -example: - -```rust -fn main() { - let field = 2 - let bytes = field.to_be_bytes(4); -} -``` - -## to_le_radix - -Decomposes into a vector over the specified base, Little Endian - -```rust -fn to_le_radix(_x : Field, _radix: u32, _result_len: u32) -> [u8] -``` - -example: - -```rust -fn main() { - let field = 2 - let radix = field.to_le_radix(256, 4); -} -``` - -## to_be_radix - -Decomposes into a vector over the specified base, Big Endian - -```rust -fn to_be_radix(_x : Field, _radix: u32, _result_len: u32) -> [u8] -``` - -example: - -```rust -fn main() { - let field = 2 - let radix = field.to_be_radix(256, 4); -} -``` - -## pow_32 - -Returns the value to the power of the specified exponent - -```rust -fn pow_32(self, exponent: Field) -> Field -``` - -example: - -```rust -fn main() { - let field = 2 - let pow = field.pow_32(4); - constrain pow == 16; -} -``` - -## sgn0 - -Parity of (prime) Field element, i.e. sgn0(x mod p) = 0 if x ∈ {0, ..., p-1} is even, otherwise sgn0(x mod p) = 1. 
- -```rust -fn sgn0(self) -> u1 -``` diff --git a/docs/versioned_docs/version-v0.6.0/standard_library/logging.md b/docs/versioned_docs/version-v0.6.0/standard_library/logging.md deleted file mode 100644 index 649d35a3f0b..00000000000 --- a/docs/versioned_docs/version-v0.6.0/standard_library/logging.md +++ /dev/null @@ -1,55 +0,0 @@ ---- -title: Logging -description: - Learn how to use the println statement for debugging in Noir with this tutorial. Understand the - basics of logging in Noir and how to implement it in your code. -keywords: - [ - noir logging, - println statement, - debugging in noir, - noir std library, - logging tutorial, - basic logging in noir, - noir logging implementation, - noir debugging techniques, - rust, - ] ---- - -# Logging - -The standard library provides a familiar `println` statement you can use. Despite being a limited -implementation of rust's `println!` macro, this construct can be useful for debugging. - -The `println` statement only works for fields, integers and arrays (including strings). - -```rust -use dep::std; - -fn main(string: pub str<5>) { - let x = 5; - std::println(x) -} - -``` - -To view the output of the `println` statement you need to set the `--show-output` flag. - -``` -$ nargo prove --help -Create proof for this program. The proof is returned as a hex encoded string - -Usage: nargo prove [OPTIONS] [PROOF_NAME] [CIRCUIT_NAME] - -Arguments: - [PROOF_NAME] The name of the proof - [CIRCUIT_NAME] The name of the circuit build files (ACIR, proving and verification keys) - -Options: - -v, --verify Verify proof after proving - -s, --show-ssa Emit debug information for the intermediate SSA IR - -d, --deny-warnings Quit execution when warnings are emitted - --show-output Display output of `println` statements during tests - -h, --help Print help -``` diff --git a/docs/versioned_docs/version-v0.6.0/standard_library/merkle_trees.md b/docs/versioned_docs/version-v0.6.0/standard_library/merkle_trees.md deleted file mode 100644 index fc8909a4795..00000000000 --- a/docs/versioned_docs/version-v0.6.0/standard_library/merkle_trees.md +++ /dev/null @@ -1,59 +0,0 @@ ---- -title: Merkle Trees -description: - Learn about Merkle Trees in Noir with this tutorial. Explore the basics of computing a merkle root using a proof, with examples. -keywords: - [ - Merkle trees in Noir, - Noir programming language, - check membership, - computing root from leaf, - Noir Merkle tree implementation, - Merkle tree tutorial, - Merkle tree code examples, - Noir libraries, - pedersen hash., - ] ---- - -## compute_merkle_root - -Returns the root of the tree from the provided leaf and its hash path, using a [Pedersen hash](cryptographic_primitives/00_hashes.mdx#pedersen). 
- -```rust -fn compute_merkle_root(leaf : Field, index : Field, hash_path: [Field]) -> Field -``` - -example: - -```rust -/** - // these values are for this example only - index = "0" - priv_key = "0x000000000000000000000000000000000000000000000000000000616c696365" - secret = "0x1929ea3ab8d9106a899386883d9428f8256cfedb3c4f6b66bf4aa4d28a79988f" - note_hash_path = [ - "0x1e61bdae0f027b1b2159e1f9d3f8d00fa668a952dddd822fda80dc745d6f65cc", - "0x0e4223f3925f98934393c74975142bd73079ab0621f4ee133cee050a3c194f1a", - "0x2fd7bb412155bf8693a3bd2a3e7581a679c95c68a052f835dddca85fa1569a40" - ] - */ -fn main(index: Field, priv_key: Field, secret: Field, note_hash_path: [Field; 3]) { - - let pubkey = std::scalar_mul::fixed_base(priv_key); - let pubkey_x = pubkey[0]; - let pubkey_y = pubkey[1]; - let note_commitment = std::hash::pedersen([pubkey_x, pubkey_y, secret]); - - let root = std::merkle::compute_merkle_root(note_commitment[0], index, note_hash_path); - std::println(root); -} -``` - -To check merkle tree membership: - -1. Include a merkle root as a program input. -2. Compute the merkle root of a given leaf, index and hash path. -3. Assert the merkle roots are equal. - -For more info about merkle trees, see the Wikipedia [page](https://en.wikipedia.org/wiki/Merkle_tree). \ No newline at end of file diff --git a/docs/versioned_docs/version-v0.6.0/typescript.md b/docs/versioned_docs/version-v0.6.0/typescript.md deleted file mode 100644 index fae002dfd28..00000000000 --- a/docs/versioned_docs/version-v0.6.0/typescript.md +++ /dev/null @@ -1,262 +0,0 @@ ---- -title: Working with TypeScript -description: - Learn how to interact with Noir programs using TypeScript. Follow this tutorial to compile your - program, specify inputs, initialize a prover & verifier, and prove and verify your program. -keywords: [TypeScript, Noir, tutorial, compile, inputs, prover, verifier, proof] ---- - -Interactions with Noir programs can also be performed in TypeScript, which can come in handy when -writing tests or when working in TypeScript-based projects like [Hardhat](https://hardhat.org/). - -This guide is based on the [noir-starter](https://github.com/signorecello/noir-starter) example. -Please refer to it for an example implementation. - -:::note - -You may find unexpected errors working with some frameworks such as `vite`. This is due to the -nature of `wasm` files and the way Noir uses web workers. As we figure it out, we suggest using -[Create React App](https://create-react-app.dev/), or [Next.js](https://nextjs.org/) for a quick -start. - -::: - -## Setup - -We're assuming you're using ES6 for both browser (for example with React), or nodejs. - -Install [Yarn](https://yarnpkg.com/) or [Node.js](https://nodejs.org/en). Init a new project with -`npm init`. Install Noir dependencies in your project by running: - -```bash -npm i @noir-lang/noir_wasm@0.3.2-fa0e9cff github:noir-lang/barretenberg#39a1547875f941ef6640217a42d8f34972425c97 @noir-lang/aztec_backend@0.1.0-0c3b2f2 -``` - -:::note - -While Noir is in rapid development, some packages could interfere with others. For that reason, you -should use these specified versions. Let us know if for some reason you need to use other ones. - -::: - -As for the circuit, we will use the _Standard Noir Example_ and place it in the `src` folder. Feel -free to use any other, as long as you refactor the below examples accordingly. 
- -This standard example is a program that multiplies input `x` with input `y` and returns the result: - -```rust -// src/main.nr -fn main(x: u32, y: pub u32) -> pub u32 { - let z = x * y; - z -} -``` - -One valid scenario for proving could be `x = 3`, `y = 4` and `return = 12` - -## Imports - -We need some imports, for both the `noir_wasm` library (which will compile the circuit into `wasm` -executables) and `aztec_backend` which is the actual proving backend we will be using. - -We also need to tell the compiler where to find the `.nr` files, so we need to import -`initialiseResolver`. - -```ts -import initNoirWasm, { acir_read_bytes, compile } from '@noir-lang/noir_wasm'; -import initialiseAztecBackend from '@noir-lang/aztec_backend'; -import { initialiseResolver } from '@noir-lang/noir-source-resolver'; -``` - -## Compiling - -We'll go over the code line-by-line later: - -```ts -export const compileCircuit = async () => { - await initNoirWasm(); - - return await fetch(new URL('../src/main.nr', import.meta.url)) - .then(r => r.text()) - .then(code => { - initialiseResolver((id: any) => { - return code; - }); - }) - .then(() => { - try { - const compiled_noir = compile({}); - return compiled_noir; - } catch (e) { - console.log('Error while compiling:', e); - } - }); -}; -``` - -1. First we're calling `initNoirWasm`. This is required on the browser only. -2. We then pass an URL that points to our `main.nr` file, and call `.then` on it so we can get the - actual text of the source code -3. We call `initialiseResolver` returning the source code -4. Finally, we can call the `compile` function - -This function should return us the compiled circuit. - -:::note - -You can use as many files as you need, -[importing them as you would do with Nargo](./modules_packages_crates/dependencies), and you don't -need to set them up in the `src` folder. Just mind the following particularities about -`initialiseResolver`: - -1. The `compile` function expects a `main.nr` file as an entry point. If you need another one, just - pass it as a `entry_point` parameter to `compile`. Check the - [noir starter](https://github.com/signorecello/noir-starter) for an example on multiple files and - a non-default entry point. -2. `initialiseResolver` needs to be synchronous -3. Different frameworks use different ways of fetching files. It's beyond the scope of this guide to - explain why and how, but for reference, - [noir starter](https://github.com/signorecello/noir-starter) uses both Next.js and node.js for - testing. - -Quick tip: an easy way to deal with `initialiseResolver` is just to prepare a -`{fileName: "literally_the_code"}` object beforehand - -::: - -## ACIR - -Noir compiles to two properties: - -1. The ACIR, which is the intermediate language used by backends such as Barretenberg -2. The ABI, which tells you which inputs are to be read - -Let's write a little function that gets us both, initializes the backend, and returns the ACIR as -bytes: - -```ts -export const getAcir = async () => { - const { circuit, abi } = await compileCircuit(); - await initialiseAztecBackend(); - - let acir_bytes = new Uint8Array(Buffer.from(circuit, 'hex')); - return acir_read_bytes(acir_bytes); -}; -``` - -Calling `getAcir()` now should return us the ACIR of the circuit, ready to be used in proofs. 
- -## Initializing Prover & Verifier - -Prior to proving and verifying, the prover and verifier have to first be initialized by calling -`barretenberg`'s `setup_generic_prover_and_verifier` with your Noir program's ACIR: - -```ts -let [prover, verifier] = await setup_generic_prover_and_verifier(acir); -``` - -This is probably a good time to store this prover and verifier into your state like React Context, -Redux, or others. - -## Proving - -The Noir program can then be executed and proved by calling `barretenberg`'s `create_proof` -function: - -```ts -const proof = await create_proof(prover, acir, abi); -``` - -On the browser, this proof can fail as it requires heavy loads to be run on worker threads. Here's a -quick example of a worker: - -```ts -// worker.ts -onmessage = async event => { - try { - await initializeAztecBackend(); - const { acir, input } = event.data; - const [prover, verifier] = await setup_generic_prover_and_verifier(acir); - const proof = await create_proof(prover, acir, input); - postMessage(proof); - } catch (er) { - postMessage(er); - } finally { - close(); - } -}; -``` - -Which would be called like this, for example: - -```ts -// index.ts -const worker = new Worker(new URL('./worker.ts', import.meta.url)); -worker.onmessage = e => { - if (e.data instanceof Error) { - // oh no! - } else { - // yey! - } -}; -worker.postMessage({ acir, input: { x: 3, y: 4 } }); -``` - -## Verifying - -The `proof` obtained can be verified by calling `barretenberg`'s `verify_proof` function: - -```ts -// 1_mul.ts -const verified = await verify_proof(verifier, proof); -``` - -The function should return `true` if the entire process is working as intended, which can be -asserted if you are writing a test script: - -```ts -expect(verified).eq(true); -``` - -## Verifying with Smart Contract - -Alternatively, a verifier smart contract can be generated and used for verifying Noir proofs in -TypeScript as well. - -This could be useful if the Noir program is designed to be decentrally verified and/or make use of -decentralized states and logics that is handled at the smart contract level. - -To generate the verifier smart contract using typescript: - -```ts -// generator.ts -import { writeFileSync } from 'fs'; - -const sc = verifier.SmartContract(); -syncWriteFile('../contracts/plonk_vk.sol', sc); - -function syncWriteFile(filename: string, data: any) { - writeFileSync(join(__dirname, filename), data, { - flag: 'w', - }); -} -``` - -You can then verify a Noir proof using the verifier contract, for example using Hardhat: - -```ts -// verifier.ts -import { ethers } from 'hardhat'; -import { Contract, ContractFactory, utils } from 'ethers'; - -let Verifier: ContractFactory; -let verifierContract: Contract; - -before(async () => { - Verifier = await ethers.getContractFactory('TurboVerifier'); - verifierContract = await Verifier.deploy(); -}); - -// Verify proof -const sc_verified = await verifierContract.verify(proof); -``` diff --git a/docs/versioned_docs/version-v0.7.1/examples/merkle-proof.md b/docs/versioned_docs/version-v0.7.1/examples/merkle-proof.md deleted file mode 100644 index 4696b4a1426..00000000000 --- a/docs/versioned_docs/version-v0.7.1/examples/merkle-proof.md +++ /dev/null @@ -1,48 +0,0 @@ ---- -title: Merkle Proof Membership -description: - Learn how to use merkle membership proof in Noir to prove that a given leaf is a member of a - merkle tree with a specified root, at a given index. 
-keywords: - [merkle proof, merkle membership proof, Noir, rust, hash function, Pedersen, sha256, merkle tree] ---- - -Let's walk through an example of a merkle membership proof in Noir that proves that a given leaf is -in a merkle tree. - -```rust -use dep::std; - -fn main(message : [Field; 62], index : Field, hashpath : [Field; 40], root : Field) { - let leaf = std::hash::hash_to_field(message); - let merkle_root = std::merkle::compute_merkle_root(leaf, index, hashpath); - assert(merkle_root == root); -} - -``` - -The message is hashed using `hash_to_field`. The specific hash function that is being used is chosen -by the backend. The only requirement is that this hash function can heuristically be used as a -random oracle. If only collision resistance is needed, then one can call `std::hash::pedersen` -instead. - -```rust -let leaf = std::hash::hash_to_field(message); -``` - -The leaf is then passed to a compute_merkle_root function with the root, index and hashpath. The returned root can then be asserted to be the same as the provided root. - -```rust -let merkle_root = std::merkle::compute_merkle_root(leaf, index, hashpath); -assert (merkle_root == root); -``` - -> **Note:** It is possible to re-implement the merkle tree implementation without standard library. -> However, for most usecases, it is enough. In general, the standard library will always opt to be -> as conservative as possible, while striking a balance with efficiency. - -An example, the merkle membership proof, only requires a hash function that has collision -resistance, hence a hash function like Pedersen is allowed, which in most cases is more efficient -than the even more conservative sha256. - -[view an example on the starter repo](https://github.com/noir-lang/noir-examples/blob/3ea09545cabfa464124ec2f3ea8e60c608abe6df/stealthdrop/circuits/src/main.nr#L20) diff --git a/docs/versioned_docs/version-v0.7.1/getting_started/00_nargo_installation.md b/docs/versioned_docs/version-v0.7.1/getting_started/00_nargo_installation.md deleted file mode 100644 index fb86a966e75..00000000000 --- a/docs/versioned_docs/version-v0.7.1/getting_started/00_nargo_installation.md +++ /dev/null @@ -1,284 +0,0 @@ ---- -title: Nargo Installation -description: - nargo is a command line tool for interacting with Noir programs (e.g. compiling, proving, - verifying and more). Learn how to install and use Nargo for your projects with this comprehensive - guide. -keywords: [Nargo, command line tool, Noir programs, installation guide, how to use Nargo] ---- - -`nargo` is a command line tool for interacting with Noir programs (e.g. compiling, proving, -verifying and more). - -Alternatively, the interactions can also be performed in [TypeScript](../typescript). - -### UltraPlonk - -Nargo versions <0.5.0 of `aztec_backend` and `aztec_wasm_backend` are based on the TurboPlonk -version of Aztec Backend, which lacks efficient implementations of useful primitives (e.g. Keccak256 in 18k constraints, ECDSA verification in 36k constraints) that the UltraPlonk version offers. - -## Installation - -There are four approaches for installing Nargo: - -- [Option 1: Noirup](#option-1-noirup) -- [Option 2: Binaries](#option-2-binaries) -- [Option 3: Install via Nix](#option-3-install-via-nix) -- [Option 4: Compile from Source](#option-4-compile-from-source) - -Optionally you can also install [Noir VS Code extension] for syntax highlighting. - -### Option 1: Noirup - -If you're on OSX or Linux, the easiest way to start using Noir and Nargo is via noirup. 
Just open a -terminal and run: - -```bash -curl -L https://raw.githubusercontent.com/noir-lang/noirup/main/install | bash -``` - -Close the terminal, open another one, and run - -```bash -noirup -``` - -Done, you should have the latest version working. You can check with `nargo --version`. - -You can also install nightlies, specific versions -or branches, check out the [noirup repository](https://github.com/noir-lang/noirup) for more -information. - -#### GitHub Actions - -You can use `noirup` with GitHub Actions for CI/CD and automated testing. It is as simple as -installing `noirup` and running tests in your GitHub Action `yml` file. - -See the -[config file](https://github.com/TomAFrench/noir-hashes/blob/master/.github/workflows/noir.yml) in -this repo containing hash functions in Noir for an example. - -#### Nightly versions - -To install the nightly version of Noir (updated daily) run: - -```bash -noirup -n -``` - -### Option 2: Binaries - -See [GitHub Releases](https://github.com/noir-lang/noir/releases) for the latest and previous -platform specific binaries. - -#### Step 1 - -Paste and run the following in the terminal to extract and install the binary: - -> **macOS / Linux:** If you are prompted with `Permission denied` when running commands, prepend -> `sudo` and re-run it. - -##### macOS (Apple Silicon) - -```bash -mkdir -p $HOME/.nargo/bin && \ -curl -o $HOME/.nargo/bin/nargo-aarch64-apple-darwin.tar.gz -L https://github.com/noir-lang/noir/releases/download/v0.6.0/nargo-aarch64-apple-darwin.tar.gz && \ -tar -xvf $HOME/.nargo/bin/nargo-aarch64-apple-darwin.tar.gz -C $HOME/.nargo/bin/ && \ -echo '\nexport PATH=$PATH:$HOME/.nargo/bin' >> ~/.zshrc && \ -source ~/.zshrc -``` - -##### macOS (Intel) - -```bash -mkdir -p $HOME/.nargo/bin && \ -curl -o $HOME/.nargo/bin/nargo-x86_64-apple-darwin.tar.gz -L https://github.com/noir-lang/noir/releases/download/v0.6.0/nargo-x86_64-apple-darwin.tar.gz && \ -tar -xvf $HOME/.nargo/bin/nargo-x86_64-apple-darwin.tar.gz -C $HOME/.nargo/bin/ && \ -echo '\nexport PATH=$PATH:$HOME/.nargo/bin' >> ~/.zshrc && \ -source ~/.zshrc -``` - -##### Windows (PowerShell) - -Open PowerShell as Administrator and run: - -```powershell -mkdir -f -p "$env:USERPROFILE\.nargo\bin\"; ` -Invoke-RestMethod -Method Get -Uri https://github.com/noir-lang/noir/releases/download/v0.4.1/nargo-x86_64-pc-windows-msvc.zip -Outfile "$env:USERPROFILE\.nargo\bin\nargo-x86_64-pc-windows-msvc.zip"; ` -Expand-Archive -Path "$env:USERPROFILE\.nargo\bin\nargo-x86_64-pc-windows-msvc.zip" -DestinationPath "$env:USERPROFILE\.nargo\bin\"; ` -$Reg = "Registry::HKLM\System\CurrentControlSet\Control\Session Manager\Environment"; ` -$OldPath = (Get-ItemProperty -Path "$Reg" -Name PATH).Path; ` -$NewPath = $OldPath + ’;’ + "$env:USERPROFILE\.nargo\bin\"; ` -Set-ItemProperty -Path "$Reg" -Name PATH –Value "$NewPath"; ` -$env:Path = [System.Environment]::GetEnvironmentVariable("Path","Machine") + ";" + [System.Environment]::GetEnvironmentVariable("Path","User") -``` - -##### Linux (Bash) - -```bash -mkdir -p $HOME/.nargo/bin && \ -curl -o $HOME/.nargo/bin/nargo-x86_64-unknown-linux-gnu.tar.gz -L https://github.com/noir-lang/noir/releases/download/v0.6.0/nargo-x86_64-unknown-linux-gnu.tar.gz && \ -tar -xvf $HOME/.nargo/bin/nargo-x86_64-unknown-linux-gnu.tar.gz -C $HOME/.nargo/bin/ && \ -echo -e '\nexport PATH=$PATH:$HOME/.nargo/bin' >> ~/.bashrc && \ -source ~/.bashrc -``` - -#### Step 2 - -Check if the installation was successful by running `nargo --help`. 
- -> **macOS:** If you are prompted with an OS alert, right-click and open the _nargo_ executable from -> Finder. Close the new terminal popped up and `nargo` should now be accessible. - -For a successful installation, you should see something similar to the following after running the -command: - -```sh -$ nargo --help - -Noir's package manager - -Usage: nargo - -Commands: - check Checks the constraint system for errors - codegen-verifier Generates a Solidity verifier smart contract for the program - compile Compile the program and its secret execution trace into ACIR format - new Create a new binary project - execute Executes a circuit to calculate its return value - prove Create proof for this program. The proof is returned as a hex encoded string - verify Given a proof and a program, verify whether the proof is valid - test Run the tests for this program - gates Counts the occurrences of different gates in circuit - help Print this message or the help of the given subcommand(s) -``` - -### Option 3: Install via Nix - -Due to the large number of native dependencies, Noir projects can be installed via [Nix](https://nixos.org/). - -#### Installing Nix - -For the best experience, please follow these instructions to setup Nix: - -1. Install Nix following [their guide](https://nixos.org/download.html) for your operating system. -2. Create the file `~/.config/nix/nix.conf` with the contents: - -```ini -experimental-features = nix-command -extra-experimental-features = flakes -``` - -#### Install Nargo into your Nix profile - -1. Use `nix profile` to install Nargo - -```sh -nix profile install github:noir-lang/noir -``` - -### Option 4: Compile from Source - -Due to the large number of native dependencies, Noir projects uses [Nix](https://nixos.org/) and [direnv](https://direnv.net/) to streamline the development experience. - -#### Setting up your environment - -For the best experience, please follow these instructions to setup your environment: - -1. Install Nix following [their guide](https://nixos.org/download.html) for your operating system. -2. Create the file `~/.config/nix/nix.conf` with the contents: - -```ini -experimental-features = nix-command -extra-experimental-features = flakes -``` - -3. Install direnv into your Nix profile by running: - -```sh -nix profile install nixpkgs#direnv -``` - -4. Add direnv to your shell following [their guide](https://direnv.net/docs/hook.html). -5. Restart your shell. - -#### Shell & editor experience - -Now that your environment is set up, you can get to work on the project. - -1. Clone the repository, such as: - -```sh -git clone git@github.com:noir-lang/noir -``` - -> Replacing `noir` with whichever repository you want to work on. - -2. Navigate to the directory: - -```sh -cd noir -``` - -> Replacing `noir` with whichever repository you cloned. - -3. You should see a **direnv error** because projects aren't allowed by default. Make sure you've reviewed and trust our `.envrc` file, then you need to run: - -```sh -direnv allow -``` - -4. Now, wait awhile for all the native dependencies to be built. This will take some time and direnv will warn you that it is taking a long time, but we just need to let it run. - -5. Once you are presented with your prompt again, you can start your editor within the project directory (we recommend [VSCode](https://code.visualstudio.com/)): - -```sh -code . -``` - -6. (Recommended) When launching VSCode for the first time, you should be prompted to install our recommended plugins. 
We highly recommend installing these for the best development experience. - -#### Building and testing - -Assuming you are using `direnv` to populate your environment, building and testing the project can be done -with the typical `cargo build`, `cargo test`, and `cargo clippy` commands. You'll notice that the `cargo` version matches the version we specify in `flake.nix`, which is 1.66.0 at the time of this writing. - -If you want to build the entire project in an isolated sandbox, you can use Nix commands: - -1. `nix build .` (or `nix build . -L` for verbose output) to build the project in a Nix sandbox. -2. `nix flake check` (or `nix flake check -L` for verbose output) to run clippy and tests in a Nix sandbox. - -#### Without `direnv` - -If you have hesitations with using direnv, you can launch a subshell with `nix develop` and then launch your editor from within the subshell. However, if VSCode was already launched in the project directory, the environment won't be updated. - -Advanced: If you aren't using direnv nor launching your editor within the subshell, you can try to install Barretenberg and other global dependencies the package needs. This is an advanced workflow and likely won't receive support! - -## Uninstalling Nargo - -### Noirup - -If you installed Noir with `noirup`, you can uninstall Noir by removing the files in `~/.nargo`, `~/nargo` and `~/noir_cache`. - -```bash -rm -r ~/.nargo -rm -r ~/nargo -rm -r ~/noir_cache -``` - -### Nix - -If you installed Noir with Nix or from source, you can remove the binary located at `~/.nix-profile/bin/nargo`. - -```bash -rm ~/.nix-profile/bin/nargo -``` - -[git]: https://git-scm.com/book/en/v2/Getting-Started-Installing-Git -[rust]: https://www.rust-lang.org/tools/install -[noir vs code extension]: https://marketplace.visualstudio.com/items?itemName=noir-lang.vscode-noir -[homebrew]: https://brew.sh/ -[cmake]: https://cmake.org/install/ -[llvm]: https://llvm.org/docs/GettingStarted.html -[openmp]: https://openmp.llvm.org/ -[barretenberg]: https://github.com/AztecProtocol/barretenberg diff --git a/docs/versioned_docs/version-v0.7.1/getting_started/01_hello_world.md b/docs/versioned_docs/version-v0.7.1/getting_started/01_hello_world.md deleted file mode 100644 index 0f21ad45569..00000000000 --- a/docs/versioned_docs/version-v0.7.1/getting_started/01_hello_world.md +++ /dev/null @@ -1,147 +0,0 @@ ---- -title: Create A Project -description: - Learn how to create and verify your first Noir program using Nargo, a programming language for - zero-knowledge proofs. -keywords: - [ - Nargo, - Noir, - zero-knowledge proofs, - programming language, - create Noir program, - verify Noir program, - step-by-step guide, - ] ---- - -Now that we have installed Nargo, it is time to make our first hello world program! - -## Create a Project Directory - -Noir code can live anywhere on your computer. Let us create a _projects_ folder in the home -directory to house our Noir programs. - -For Linux, macOS, and Windows PowerShell, create the directory and change directory into it by -running: - -```sh -mkdir ~/projects -cd ~/projects -``` - -For Windows CMD, run: - -```sh -> mkdir "%USERPROFILE%\projects" -> cd /d "%USERPROFILE%\projects" -``` - -## Create Our First Nargo Project - -Now that we are in the projects directory, create a new Nargo project by running: - -```sh -nargo new hello_world -``` - -> **Note:** `hello_world` can be any arbitrary project name, we are simply using `hello_world` for -> demonstration. 
-> -> In production, the common practice is to name the project folder as `circuits` for better -> identifiability when sitting alongside other folders in the codebase (e.g. `contracts`, `scripts`, -> `test`). - -A `hello_world` folder would be created. Similar to Rust, the folder houses _src/main.nr_ and -_Nargo.toml_ that contains the source code and environmental options of your Noir program -respectively. - -### Intro to Noir Syntax - -Let us take a closer look at _main.nr_. The default _main.nr_ generated should look like this: - -```rust -fn main(x : Field, y : pub Field) { - assert(x != y); -} -``` - -The first line of the program specifies the program's inputs: - -```rust -x : Field, y : pub Field -``` - -Program inputs in Noir are private by default (e.g. `x`), but can be labeled public using the -keyword `pub` (e.g. `y`). To learn more about private and public values, check the -[Data Types](../language_concepts/data_types) section. - -The next line of the program specifies its body: - -```rust -assert(x != y); -``` - -The Noir syntax `assert` can be interpreted as something similar to constraints in other zk-contract languages. - -For more Noir syntax, check the [Language Concepts](../language_concepts/comments) chapter. - -## Build In/Output Files - -Change directory into _hello_world_ and build in/output files for your Noir program by running: - -```sh -cd hello_world -nargo check -``` - -Two additional files would be generated in your project directory: - -_Prover.toml_ houses input values, and _Verifier.toml_ houses public values. - -## Prove Our Noir Program - -Now that the project is set up, we can create a proof of correct execution on our Noir program. - -Fill in input values for execution in the _Prover.toml_ file. For example: - -```toml -x = "1" -y = "2" -``` - -Prove the valid execution of your Noir program with your preferred proof name, for example `p`: - -```sh -nargo prove p -``` - -A new folder _proofs_ would then be generated in your project directory, containing the proof file -`p.proof`. - -The _Verifier.toml_ file would also be updated with the public values computed from program -execution (in this case the value of `y`): - -```toml -y = "0x0000000000000000000000000000000000000000000000000000000000000002" -``` - -> **Note:** Values in _Verifier.toml_ are computed as 32-byte hex values. - -## Verify Our Noir Program - -Once a proof is generated, we can verify correct execution of our Noir program by verifying the -proof file. - -Verify your proof of name `p` by running: - -```sh -nargo verify p -``` - -The verification will complete in silence if it is successful. If it fails, it will log the -corresponding error instead. - -Congratulations, you have now created and verified a proof for your very first Noir program! - -In the [next section](breakdown), we will go into more detail on each step performed. diff --git a/docs/versioned_docs/version-v0.7.1/getting_started/02_breakdown.md b/docs/versioned_docs/version-v0.7.1/getting_started/02_breakdown.md deleted file mode 100644 index 64d04c4e062..00000000000 --- a/docs/versioned_docs/version-v0.7.1/getting_started/02_breakdown.md +++ /dev/null @@ -1,122 +0,0 @@ ---- -title: Project Breakdown -description: - Learn about the anatomy of a Nargo project, including the purpose of the Prover and Verifier TOML - files, and how to prove and verify your program. 
-keywords: - [Nargo, Nargo project, Prover.toml, Verifier.toml, proof verification, private asset transfer] ---- - -This section breaks down our hello world program in section _1.2_. We elaborate on the project -structure and what the `prove` and `verify` commands did in the previous section. - -## Anatomy of a Nargo Project - -Upon creating a new project with `nargo new` and building the in/output files with `nargo check` -commands, you would get a minimal Nargo project of the following structure: - - - src - - Prover.toml - - Verifier.toml - - Nargo.toml - -The source directory _src_ holds the source code for your Noir program. By default only a _main.nr_ -file will be generated within it. - -_Prover.toml_ is used for specifying the input values for executing and proving the program. You can specify `toml` files with different names by using the `--prover-name` or `-p` flags, see the [Prover](#provertoml) section below. Optionally you may specify expected output values for prove-time checking as well. - -_Verifier.toml_ contains public in/output values computed when executing the Noir program. - -_Nargo.toml_ contains the environmental options of your project. - -_proofs_ and _contract_ directories will not be immediately visible until you create a proof or -verifier contract respectively. - -### main.nr - -The _main.nr_ file contains a `main` method, this method is the entry point into your Noir program. - -In our sample program, _main.nr_ looks like this: - -```rust -fn main(x : Field, y : Field) { - assert(x != y); -} -``` - -The parameters `x` and `y` can be seen as the API for the program and must be supplied by the -prover. Since neither `x` nor `y` is marked as public, the verifier does not supply any inputs, when -verifying the proof. - -The prover supplies the values for `x` and `y` in the _Prover.toml_ file. - -As for the program body, `assert` ensures the satisfaction of the condition (e.g. `x != y`) is -constrained by the proof of the execution of said program (i.e. if the condition was not met, the -verifier would reject the proof as an invalid proof). - -### Prover.toml - -The _Prover.toml_ file is a file which the prover uses to supply his witness values(both private and -public). - -In our hello world program the _Prover.toml_ file looks like this: - -```toml -x = "1" -y = "2" -``` - -When the command `nargo prove my_proof` is executed, two processes happen: - -1. Noir creates a proof that `x` which holds the value of `1` and `y` which holds the value of `2` - is not equal. This not equal constraint is due to the line `assert(x != y)`. - -2. Noir creates and stores the proof of this statement in the _proofs_ directory and names the proof - file _my_proof_. Opening this file will display the proof in hex format. - -#### Custom toml files - -You can specify a `toml` file with a different name to use for proving by using the `--prover-name` or `-p` flags. - -This command looks for proof inputs in the default **Prover.toml** and generates proof `p`: - -```bash -nargo prove p -``` - -This command looks for proof inputs in the custom **OtherProver.toml** and generates proof `pp`: - -```bash -nargo prove -p OtherProver pp -``` - -## Verifying a Proof - -When the command `nargo verify my_proof` is executed, two processes happen: - -1. Noir checks in the _proofs_ directory for a file called _my_proof_ - -2. 
If that file is found, the proof's validity is checked - -> **Note:** The validity of the proof is linked to the current Noir program; if the program is -> changed and the verifier verifies the proof, it will fail because the proof is not valid for the -> _modified_ Noir program. - -In production, the prover and the verifier are usually two separate entities. A prover would -retrieve the necessary inputs, execute the Noir program, generate a proof and pass it to the -verifier. The verifier would then retrieve the public inputs from usually external sources and -verifies the validity of the proof against it. - -Take a private asset transfer as an example: - -A user on browser as the prover would retrieve private inputs (e.g. the user's private key) and -public inputs (e.g. the user's encrypted balance on-chain), compute the transfer, generate a proof -and submit it to the verifier smart contract. - -The verifier contract would then draw the user's encrypted balance directly from the blockchain and -verify the proof submitted against it. If the verification passes, additional functions in the -verifier contract could trigger (e.g. approve the asset transfer). - -Now that you understand the concepts, you'll probably want some editor feedback while you are writing more complex code. - -In the [next section](language_server), we will explain how to utilize the Noir Language Server. diff --git a/docs/versioned_docs/version-v0.7.1/getting_started/03_language_server.md b/docs/versioned_docs/version-v0.7.1/getting_started/03_language_server.md deleted file mode 100644 index e6f5dfc2faa..00000000000 --- a/docs/versioned_docs/version-v0.7.1/getting_started/03_language_server.md +++ /dev/null @@ -1,31 +0,0 @@ ---- -title: Language Server -description: - Learn about the Noir Language Server, how to install the components, and configuration that may be required. -keywords: - [Nargo, Language Server, LSP, VSCode, Visual Studio Code] ---- - -This section helps you install and configure the Noir Language Server. - -The Language Server Protocol (LSP) has two components, the [Server](#language-server) and the [Client](#language-client). Below we describe each in the context of Noir. - -## Language Server - -The Server component is provided by the Nargo command line tool that you installed at the beginning of this guide. -As long as Nargo is installed and you've used it to run other commands in this guide, it should be good to go! - -If you'd like to verify that the `nargo lsp` command is available, you can run `nargo --help` and look for `lsp` in the list of commands. If you see it, you're using a version of Noir with LSP support. - -## Language Client - -The Client component is usually an editor plugin that launches the Server. It communicates LSP messages between the editor and the Server. For example, when you save a file, the Client will alert the Server, so it can try to compile the project and report any errors. - -Currently, Noir provides a Language Client for Visual Studio Code via the [vscode-noir](https://github.com/noir-lang/vscode-noir) extension. You can install it via the [Visual Studio Marketplace](https://marketplace.visualstudio.com/items?itemName=noir-lang.vscode-noir). - -### Configuration - -* __Noir: Enable LSP__ - If checked, the extension will launch the Language Server via `nargo lsp` and communicate with it. -* __Noir: Nargo Flags__ - Additional flags may be specified if you require them to be added when the extension calls `nargo lsp`. 
-* __Noir: Nargo Path__ - An absolute path to a Nargo binary with the `lsp` command. This may be useful if Nargo is not within the `PATH` of your editor. -* __Noir > Trace: Server__ - Setting this to `"messages"` or `"verbose"` will log LSP messages between the Client and Server. Useful for debugging. diff --git a/docs/versioned_docs/version-v0.7.1/index.md b/docs/versioned_docs/version-v0.7.1/index.md deleted file mode 100644 index e56b24bccd8..00000000000 --- a/docs/versioned_docs/version-v0.7.1/index.md +++ /dev/null @@ -1,103 +0,0 @@ ---- -title: Introducing Noir -description: - Learn about the public alpha release of Noir, a domain specific language heavily influenced by - Rust that compiles to an intermediate language which can be compiled to an arithmetic circuit or a - rank-1 constraint system. -keywords: - [ - Noir, - Domain Specific Language, - Rust, - Intermediate Language, - Arithmetic Circuit, - Rank-1 Constraint System, - Ethereum Developers, - Protocol Developers, - Blockchain Developers, - Proving System, - Smart Contract Language, - ] -slug: / ---- - -This version of the book is being released with the public alpha. There will be a lot of features -that are missing in this version, however the syntax and the feel of the language will mostly be -completed. - -## What is Noir? - -Noir is a Domain Specific Language for SNARK proving systems. It has been designed to use any ACIR compatible proving system. - -It's design choices are influenced heavily by Rust and focuses on a simple, familiar syntax. - -## Who is Noir for? - -Noir can be used for a variety of purposes. - -### Solidity Developers - -Noir currently includes a command to create a Solidity contract which verifies your Noir program. This will -be modularised in the future; however, as of the alpha, you can use the [`nargo codegen-verifier`](./nargo/commands#nargo-codegen-verifier) command to create -a verifier contract. - -### Protocol Developers - -As a protocol developer, you may not want to use the Aztec backend due to it not being a fit for -your stack, or maybe you simply want to use a different proving system. Since Noir does not compile -to a specific proof system, it is possible for protocol developers to replace the PLONK-based -proving system with a different proving system altogether. - -### Blockchain developers - -As a blockchain developer, you will be constrained by parameters set by your blockchain (for example, the -proving system and smart contract language has been pre-defined). In order for you to use Noir in -your blockchain, a proving system backend and a smart contract interface -must be implemented for it. - -## What's new about Noir? - -Noir is simple and flexible in its design, as it does not compile immediately to a fixed -NP-complete language. Instead, Noir compiles to an intermediate language (ACIR), which itself can be compiled -to an arithmetic circuit (if choosing to target Aztec's barretenberg backend) or a rank-1 constraint system (if choosing to target an R1CS backend like Arkwork's Marlin backend, or others). - -This in itself brings up a few challenges within the design process, but allows one to decouple the programming language completely from the backend. This is similar in theory to LLVM. 
- -## Current Features - -Compiler: - -- Module System -- For expressions -- Arrays -- Bit Operations -- Binary operations (<, <=, >, >=, +, -, \*, /, %) [See documentation for an extensive list] -- Unsigned integers -- If statements -- Structures and Tuples -- Generics - -ACIR Supported OPCODES: - -- Sha256 -- Blake2s -- Schnorr signature verification -- MerkleMembership -- Pedersen -- HashToField - -## Libraries - -Noir does not currently have an official package manager. You can find a list of available Noir libraries in the [awesome-noir repo here](https://github.com/noir-lang/awesome-noir#libraries). - -Some libraries that are available today include: - -- [Standard Library](https://github.com/noir-lang/noir/tree/master/noir_stdlib) - the Noir Standard Library -- [Ethereum Storage Proof Verification](https://github.com/aragonzkresearch/noir-trie-proofs) - a library that contains the primitives necessary for RLP decoding (in the form of look-up table construction) and Ethereum state and storage proof verification (or verification of any trie proof involving 32-byte long keys) -- [BigInt](https://github.com/shuklaayush/noir-bigint) - a library that provides a custom BigUint56 data type, allowing for computations on large unsigned integers -- [ECrecover](https://github.com/colinnielsen/ecrecover-noir/tree/main) - a library to verify an ECDSA signature and return the source Ethereum address -- [Sparse Merkle Tree Verifier](https://github.com/vocdoni/smtverifier-noir/tree/main) - a library for verification of sparse Merkle trees -- [Signed Int](https://github.com/resurgencelabs/signed_int) - a library for accessing a custom Signed Integer data type, allowing access to negative numbers on Noir -- [Fraction](https://github.com/resurgencelabs/fraction) - a library for accessing fractional number data type in Noir, allowing results that aren't whole numbers - -See the section on [dependencies](./modules_packages_crates/dependencies) for more information. diff --git a/docs/versioned_docs/version-v0.7.1/language_concepts/00_data_types.md b/docs/versioned_docs/version-v0.7.1/language_concepts/00_data_types.md deleted file mode 100644 index 6ac494ad404..00000000000 --- a/docs/versioned_docs/version-v0.7.1/language_concepts/00_data_types.md +++ /dev/null @@ -1,305 +0,0 @@ ---- -title: Data Types -description: - Get a clear understanding of the two categories of Noir data types - primitive types and compound - types. Learn about their characteristics, differences, and how to use them in your Noir - programming. -keywords: - [ - noir, - data types, - primitive types, - compound types, - private types, - public types, - field type, - integer types, - boolean type, - array type, - tuple type, - struct type, - ] ---- - -Every value in Noir has a type, which determines which operations are valid for it. - -All values in Noir are fundamentally composed of `Field` elements. For a more approachable -developing experience, abstractions are added on top to introduce different data types in Noir. - -Noir has two category of data types: primitive types (e.g. `Field`, integers, `bool`) and compound -types that group primitive types (e.g. arrays, tuples, structs). Each value can either be private or -public. - -## Private & Public Types - -A **private value** is known only to the Prover, while a **public value** is known by both the -Prover and Verifier. Mark values as `private` when the value should only be known to the prover. 
All -primitive types (including individual fields of compound types) in Noir are private by default, and -can be marked public when certain values are intended to be revealed to the Verifier. - -> **Note:** For public values defined in Noir programs paired with smart contract verifiers, once -> the proofs are verified on-chain the values can be considered known to everyone that has access to -> that blockchain. - -Public data types are treated no differently to private types apart from the fact that their values -will be revealed in proofs generated. Simply changing the value of a public type will not change the -circuit (where the same goes for changing values of private types as well). - -_Private values_ are also referred to as _witnesses_ sometimes. - -> **Note:** The terms private and public when applied to a type (e.g. `pub Field`) have a different -> meaning than when applied to a function (e.g. `pub fn foo() {}`). -> -> The former is a visibility modifier for the Prover to interpret if a value should be made known to -> the Verifier, while the latter is a visibility modifier for the compiler to interpret if a -> function should be made accessible to external Noir programs like in other languages. - -### pub Modifier - -All data types in Noir are private by default. Types are explicitly declared as public using the -`pub` modifier: - -```rust -fn main(x : Field, y : pub Field) -> pub Field { - x + y -} -``` - -In this example, `x` is **private** while `y` and `x + y` (the return value) are **public**. Note -that visibility is handled **per variable**, so it is perfectly valid to have one input that is -private and another that is public. - -> **Note:** Public types can only be declared through parameters on `main`. - -## Primitive Types - -A primitive type represents a single value. They can be private or public. - -### Fields - -The field type corresponds to the native field type of the proving backend. - -The size of a Noir field depends on the elliptic curve's finite field for the proving backend -adopted. For example, a field would be a 254-bit integer when paired with the default backend that -spans the Grumpkin curve. - -Fields support integer arithmetic and are often used as the default numeric type in Noir: - -```rust -fn main(x : Field, y : Field) { - let z = x + y; -} -``` - -`x`, `y` and `z` are all private fields in this example. Using the `let` keyword we defined a new -private value `z` constrained to be equal to `x + y`. - -If proving efficiency is of priority, fields should be used as a default for solving problems. -Smaller integer types (e.g. `u64`) incur extra range constraints. - -### Integers - -An integer type is a range constrained field type. The Noir frontend currently supports unsigned, -arbitrary-sized integer types. - -An integer type is specified first with the letter `u`, indicating its unsigned nature, followed by -its length in bits (e.g. `32`). For example, a `u32` variable can store a value in the range of -$\\([0,2^{32}-1]\\)$: - -```rust -fn main(x : Field, y : u32) { - let z = x as u32 + y; -} -``` - -`x`, `y` and `z` are all private values in this example. However, `x` is a field while `y` and `z` -are unsigned 32-bit integers. If `y` or `z` exceeds the range $\\([0,2^{32}-1]\\)$, proofs created -will be rejected by the verifier. - -> **Note:** The default backend supports both even (e.g. `u16`, `u48`) and odd (e.g. `u5`, `u3`) -> sized integer types. 
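A minimal sketch using one of the odd-sized types mentioned in the note above (syntax follows the earlier `u32` example; the names and values are illustrative only):

```rust
use dep::std;

fn main(x : u5, y : u16) {
    // x is range constrained to [0, 31] and y to [0, 65535] by their declared bit widths
    let sum = x as u16 + y;
    std::println(sum);
}
```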
- -### Booleans - -The `bool` type in Noir has two possible values: `true` and `false`: - -```rust -fn main() { - let t = true; - let f: bool = false; -} -``` - -> **Note:** When returning a boolean value, it will show up as a value of 1 for `true` and 0 for -> `false` in _Verifier.toml_. - -The boolean type is most commonly used in conditionals like `if` expressions and `assert` -statements. More about conditionals is covered in the [Control Flow](./control_flow) and -[Assert Function](./assert) sections. - -### Strings - -The string type is a fixed length value defined with `str`. - -You can use strings in `assert()` functions or print them with -`std::println()`. - -```rust -fn main(message : pub str<11>, hex_as_string : str<4>) { - std::println(message); - assert(message == "hello world"); - assert(hex_as_string == "0x41"); -} -``` - -## Compound Types - -A compound type groups together multiple values into one type. Elements within a compound type can -be private or public. - -### Arrays - -An array is one way of grouping together values into one compound type. Array types can be inferred -or explicitly specified via the syntax `[; ]`: - -```rust -fn main(x : Field, y : Field) { - let my_arr = [x, y]; - let your_arr: [Field; 2] = [x, y]; -} -``` - -Here, both `my_arr` and `your_arr` are instantiated as an array containing two `Field` elements. - -Array elements can be accessed using indexing: - -```rust -fn main() { - let a = [1, 2, 3, 4, 5]; - - let first = a[0]; - let second = a[1]; -} -``` - -All elements in an array must be of the same type (i.e. homogeneous). That is, an array cannot group -a `Field` value and a `u8` value together for example. - -You can write mutable arrays, like: - -```rust -fn main() { - let mut arr = [1, 2, 3, 4, 5]; - assert(arr[0] == 1); - - arr[0] = 42; - assert(arr[0] == 42); -} -``` - -#### Types - -You can create arrays of primitive types or structs. There is not yet support for nested arrays -(arrays of arrays) or arrays of structs that contain arrays. - -### Tuples - -A tuple collects multiple values like an array, but with the added ability to collect values of -different types: - -```rust -fn main() { - let tup: (u8, u64, Field) = (255, 500, 1000); -} -``` - -One way to access tuple elements is via destructuring using pattern matching: - -```rust -fn main() { - let tup = (1, 2); - - let (one, two) = tup; - - let three = one + two; -} -``` - -Another way to access tuple elements is via direct member access, using a period (`.`) followed by -the index of the element we want to access. Index `0` corresponds to the first tuple element, `1` to -the second and so on: - -```rust -fn main() { - let tup = (5, 6, 7, 8); - - let five = tup.0; - let eight = tup.3; -} -``` - -### Structs - -A struct also allows for grouping multiple values of different types. Unlike tuples, we can also -name each field. - -> **Note:** The usage of _field_ here refers to each element of the struct and is unrelated to the -> field type of Noir. - -Defining a struct requires giving it a name and listing each field within as `: ` pairs: - -```rust -struct Animal { - hands: Field, - legs: Field, - eyes: u8, -} -``` - -An instance of a struct can then be created with actual values in `: ` pairs in any -order. 
Struct fields are accessible using their given names: - -```rust -fn main() { - let legs = 4; - - let dog = Animal { - eyes: 2, - hands: 0, - legs, - }; - - let zero = dog.hands; -} -``` - -Structs can also be destructured in a pattern, binding each field to a new variable: - -```rust -fn main() { - let Animal { hands, legs: feet, eyes } = get_octopus(); - - let ten = hands + feet + eyes as u8; -} - -fn get_octopus() -> Animal { - let octopus = Animal { - hands: 0, - legs: 8, - eyes: 2, - }; - - octopus -} -``` - -The new variables can be bound with names different from the original struct field names, as -showcased in the `legs --> feet` binding in the example above. - -:::note -You can use Structs as inputs to the `main` function, but you can't output them -::: - -### BigInt - -You can acheive BigInt functionality using the [Noir BigInt](https://github.com/shuklaayush/noir-bigint) library. diff --git a/docs/versioned_docs/version-v0.7.1/language_concepts/01_functions.md b/docs/versioned_docs/version-v0.7.1/language_concepts/01_functions.md deleted file mode 100644 index 54c618599d2..00000000000 --- a/docs/versioned_docs/version-v0.7.1/language_concepts/01_functions.md +++ /dev/null @@ -1,88 +0,0 @@ ---- -title: Functions -description: - Learn how to declare functions and methods in Noir, a programming language with Rust semantics. - This guide covers parameter declaration, return types, call expressions, and more. -keywords: [Noir, Rust, functions, methods, parameter declaration, return types, call expressions] ---- - -Functions in Noir follow the same semantics of Rust, though Noir does not support early returns. - -To declare a function the `fn` keyword is used. - -```rust -fn foo() {} -``` - -All parameters in a function must have a type and all types are known at compile time. The parameter -is pre-pended with a colon and the parameter type. Multiple parameters are separated using a comma. - -```rust -fn foo(x : Field, y : pub Field){} -``` - -The return type of a function can be stated by using the `->` arrow notation. The function below -states that the foo function must return a `Field`. If the function returns no value, then the arrow -is omitted. - -```rust -fn foo(x : Field, y : pub Field) -> Field { - x + y -} -``` - -Note that a `return` keyword is unneeded in this case - the last expression in a function's body is -returned. - -## Call Expressions - -Calling a function in Noir is executed by using the function name and passing in the necessary -arguments. - -Below we show how to call the `foo` function from the `main` function using a call expression: - -```rust -fn main(x : Field, y : Field) { - let z = foo(x); -} - -fn foo(x : Field) -> Field { - x + x -} -``` - -## Methods - -You can define methods in Noir on any struct type in scope. 
- -```rust -struct MyStruct { - foo: Field, - bar: Field, -} - -impl MyStruct { - fn new(foo: Field) -> MyStruct { - MyStruct { - foo, - bar: 2, - } - } - - fn sum(self) -> Field { - self.foo + self.bar - } -} - -fn main() { - let s = MyStruct::new(40); - assert(s.sum() == 42); -} -``` - -Methods are just syntactic sugar for functions, so if we wanted to we could also call `sum` as -follows: - -```rust -assert(MyStruct::sum(s) == 42); -``` diff --git a/docs/versioned_docs/version-v0.7.1/language_concepts/02_control_flow.md b/docs/versioned_docs/version-v0.7.1/language_concepts/02_control_flow.md deleted file mode 100644 index 691c514d9a8..00000000000 --- a/docs/versioned_docs/version-v0.7.1/language_concepts/02_control_flow.md +++ /dev/null @@ -1,42 +0,0 @@ ---- -title: Control Flow -description: - Learn how to use loops and if expressions in the Noir programming language. Discover the syntax - and examples for for loops and if-else statements. -keywords: [Noir programming language, loops, for loop, if-else statements, Rust syntax] ---- - -## Loops - -Noir has one kind of loop: the `for` loop. `for` loops allow you to repeat a block of code multiple -times. - -The following block of code between the braces is run 10 times. - -```rust -for i in 0..10 { - // do something -}; -``` - -## If Expressions - -Noir supports `if-else` statements. The syntax is most similar to Rust's where it is not required -for the statement's conditional to be surrounded by parentheses. - -```rust -let a = 0; -let mut x: u32 = 0; - -if a == 0 { - if a != 0 { - x = 6; - } else { - x = 2; - } -} else { - x = 5; - assert(x == 5); -} -assert(x == 2); -``` diff --git a/docs/versioned_docs/version-v0.7.1/language_concepts/03_ops.md b/docs/versioned_docs/version-v0.7.1/language_concepts/03_ops.md deleted file mode 100644 index da02b126059..00000000000 --- a/docs/versioned_docs/version-v0.7.1/language_concepts/03_ops.md +++ /dev/null @@ -1,97 +0,0 @@ ---- -title: Logical Operations -description: - Learn about the supported arithmetic and logical operations in the Noir programming language. - Discover how to perform operations on private input types, integers, and booleans. -keywords: - [ - Noir programming language, - supported operations, - arithmetic operations, - logical operations, - predicate operators, - bitwise operations, - short-circuiting, - backend, - ] ---- - -# Operations - -## Table of Supported Operations - -| Operation | Description | Requirements | -| :-------- | :------------------------------------------------------------: | -------------------------------------: | -| + | Adds two private input types together | Types must be private input | -| - | Subtracts two private input types together | Types must be private input | -| \* | Multiplies two private input types together | Types must be private input | -| / | Divides two private input types together | Types must be private input | -| ^ | XOR two private input types together | Types must be integer | -| & | AND two private input types together | Types must be integer | -| \| | OR two private input types together | Types must be integer | -| << | Left shift an integer by another integer amount | Types must be integer | -| >> | Right shift an integer by another integer amount | Types must be integer | -| ! 
| Bitwise not of a value | Type must be integer or boolean | -| < | returns a bool if one value is less than the other | Upper bound must have a known bit size | -| <= | returns a bool if one value is less than or equal to the other | Upper bound must have a known bit size | -| > | returns a bool if one value is more than the other | Upper bound must have a known bit size | -| >= | returns a bool if one value is more than or equal to the other | Upper bound must have a known bit size | -| == | returns a bool if one value is equal to the other | Both types must not be constants | -| != | returns a bool if one value is not equal to the other | Both types must not be constants | - -### Predicate Operators - -`<,<=, !=, == , >, >=` are known as predicate/comparison operations because they compare two values. -This differs from the operations such as `+` where the operands are used in _computation_. - -### Bitwise Operations Example - -```rust -fn main(x : Field) { - let y = x as u32; - let z = y & y; -} -``` - -`z` is implicitly constrained to be the result of `y & y`. The `&` operand is used to denote bitwise -`&`. - -> `x & x` would not compile as `x` is a `Field` and not an integer type. - -### Logical Operators - -Noir has no support for the logical operators `||` and `&&`. This is because encoding the -short-circuiting that these operators require can be inefficient for Noir's backend. Instead you can -use the bitwise operators `|` and `&` which operate indentically for booleans, just without the -short-circuiting. - -```rust -let my_val = 5; - -let mut flag = 1; -if (my_val > 6) | (my_val == 0) { - flag = 0; -} -assert(flag == 1); - -if (my_val != 10) & (my_val < 50) { - flag = 0; -} -assert(flag == 0); -``` - -### Shorthand operators - -Noir shorthand operators for most of the above operators, namely `+=, -=, *=, /=, %=, &=, |=, ^=, <<=`, and `>>=`. These allow for more concise syntax. For example: - -```rust -let mut i = 0; -i = i + 1; -``` - -could be written as: - -```rust -let mut i = 0; -i += 1; -``` diff --git a/docs/versioned_docs/version-v0.7.1/language_concepts/04_assert.md b/docs/versioned_docs/version-v0.7.1/language_concepts/04_assert.md deleted file mode 100644 index a25a946123d..00000000000 --- a/docs/versioned_docs/version-v0.7.1/language_concepts/04_assert.md +++ /dev/null @@ -1,34 +0,0 @@ ---- -title: Assert Function -description: - Learn about the assert function in Noir, which can be used to explicitly constrain the predicate or - comparison expression that follows to be true, and what happens if the expression is false at - runtime. -keywords: [Noir programming language, assert statement, predicate expression, comparison expression] ---- - -Noir includes a special `assert` function which will explicitly constrain the predicate/comparison -expression that follows to be true. If this expression is false at runtime, the program will fail to -be proven. - -### Example - -```rust -fn main(x : Field, y : Field) { - assert(x == y); -} -``` - -The above snippet compiles because `==` is a predicate operation. Conversely, the following will not -compile: - -```rust -// INCORRECT - -fn main(x : Field, y : Field) { - assert(x + y); -} -``` - -> The rationale behind this not compiling is due to ambiguity. It is not clear if the above should -> equate to `x + y == 0` or if it should check the truthiness of the result. 
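A minimal sketch of the unambiguous form (assuming the zero-sum reading is the intended constraint) is simply to state the comparison explicitly:

```rust
fn main(x : Field, y : Field) {
    // spell out the predicate so the constraint is unambiguous
    assert(x + y == 0);
}
```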
diff --git a/docs/versioned_docs/version-v0.7.1/language_concepts/06_generics.md b/docs/versioned_docs/version-v0.7.1/language_concepts/06_generics.md deleted file mode 100644 index 66f2e85e16b..00000000000 --- a/docs/versioned_docs/version-v0.7.1/language_concepts/06_generics.md +++ /dev/null @@ -1,116 +0,0 @@ ---- -title: Generics -description: - Learn how to use Generics in Noir -keywords: [Noir, Rust, generics, functions, structs] ---- - -# Generics - -Generics allow you to use the same functions with multiple different concrete data types. You can -read more about the concept of generics in the Rust documentation -[here](https://doc.rust-lang.org/book/ch10-01-syntax.html). - -Here is a trivial example showing the identity function that supports any type. In Rust, it is -common to refer to the most general type as `T`. We follow the same convention in Noir. - -```rust -fn id(x: T) -> T { - x -} -``` - -## In Structs - -Generics are useful for specifying types in structs. For example, we can specify that a field in a -struct will be of a certain generic type. In this case `value` is of type `T`. - -```rust -struct RepeatedValue { - value: T, - count: comptime Field, -} - -impl RepeatedValue { - fn new(value: T) -> Self { - Self { value, count: 1 } - } - - fn increment(mut repeated: Self) -> Self { - repeated.count += 1; - repeated - } - - fn print(self) { - for _i in 0 .. self.count { - dep::std::println(self.value); - } - } -} - -fn main() { - let mut repeated = RepeatedValue::new("Hello!"); - repeated = repeated.increment(); - repeated.print(); -} -``` - -The `print` function will print `Hello!` an arbitrary number of times, twice in this case. - -If we want to be generic over array lengths (which are type-level integers), we can use numeric -generics. Using these looks just like using regular generics, but these generics can resolve to -integers at compile-time, rather than resolving to types. Here's an example of a struct that is -generic over the size of the array it contains internally: - -```rust -struct BigInt { - limbs: [u32; N], -} - -impl BigInt { - // `N` is in scope of all methods in the impl - fn first(first: BigInt, second: BigInt) -> Self { - assert(first.limbs != second.limbs); - first - } - - fn second(first: BigInt, second: Self) -> Self { - assert(first.limbs != second.limbs); - second - } -} -``` - -## Calling functions on generic parameters - -Unlike Rust, Noir does not have traits, so how can one translate the equivalent of a trait bound in -Rust into Noir? That is, how can we write a function that is generic over some type `T`, while also -requiring there is a function like `eq: fn(T, T) -> bool` that works on the type? - -The answer is that we can translate this by passing in the function manually. Here's an example of -implementing array equality in Noir: - -```rust -fn array_eq(array1: [T; N], array2: [T; N], elem_eq: fn(T, T) -> bool) -> bool { - if array1.len() != array2.len() { - false - } else { - let mut result = true; - for i in 0 .. 
array1.len() { - result &= elem_eq(array1[i], array2[i]); - } - result - } -} - -fn main() { - assert(array_eq([1, 2, 3], [1, 2, 3], |a, b| a == b)); - - // We can use array_eq even for arrays of structs, as long as we have - // an equality function for these structs we can pass in - let array = [MyStruct::new(), MyStruct::new()]; - assert(array_eq(array, array, MyStruct::eq)); -} -``` - -You can see an example of generics in the tests -[here](https://github.com/noir-lang/noir/blob/master/tooling/nargo_cli/tests/execution_success/generics/src/main.nr). diff --git a/docs/versioned_docs/version-v0.7.1/language_concepts/07_mutability.md b/docs/versioned_docs/version-v0.7.1/language_concepts/07_mutability.md deleted file mode 100644 index 5631a322659..00000000000 --- a/docs/versioned_docs/version-v0.7.1/language_concepts/07_mutability.md +++ /dev/null @@ -1,118 +0,0 @@ ---- -title: Mutability -description: - Learn about mutable variables, constants, and globals in Noir programming language. Discover how - to declare, modify, and use them in your programs. -keywords: [noir programming language, mutability in noir, mutable variables, constants, globals] ---- - -Variables in Noir can be declared mutable via the `mut` keyword. Mutable variables can be reassigned -to via an assignment expression. - -```rust -let x = 2; -x = 3; // error: x must be mutable to be assigned to - -let mut y = 3; -let y = 4; // OK -``` - -The `mut` modifier can also apply to patterns: - -```rust -let (a, mut b) = (1, 2); -a = 11; // error: a must be mutable to be assigned to -b = 12; // OK - -let mut (c, d) = (3, 4); -c = 13; // OK -d = 14; // OK - -// etc. -let MyStruct { x: mut y } = MyStruct { x: a } -// y is now in scope -``` - -Note that mutability in noir is local and everything is passed by value, so if a called function -mutates its parameters then the parent function will keep the old value of the parameters. - -```rust -fn main() -> Field { - let x = 3; - helper(x); - x // x is still 3 -} - -fn helper(mut x: i32) { - x = 4; -} -``` - -## Comptime Values - -Comptime values are values that are known at compile-time. This is different to a witness -which changes per proof. If a comptime value that is being used in your program is changed, then your -circuit will also change. - -Comptime is slightly different from Rust's `const`. Namely, it is a bit more flexible in that normal functions can accept comptime parameters. For example, this is used to verify an array index is known at compile-time. Note that the "known at compile-time" here means "known after function inlining is performed while optimizing the program" and not "known during type-checking." - -Below we show how to declare a comptime value: - -```rust -fn main() { - let a: comptime Field = 5; - - // `comptime Field` can also be inferred: - let a = 5; -} -``` - -Note that variables declared as mutable may not be comptime: - -```rust -fn main() { - // error: Cannot mark a comptime type as mutable - let mut a: comptime Field = 5; - - // a inferred as a private Field here - let mut a = 5; -} -``` - -## Globals - -Noir also supports global variables. However, they must be compile-time variables. If `comptime` is -not explicitly written in the type annotation the compiler will implicitly specify the declaration -as compile-time. They can then be used like any other compile-time variable inside functions. The -global type can also be inferred by the compiler entirely. 
Globals can also be used to specify array -annotations for function parameters and can be imported from submodules. - -```rust -global N: Field = 5; // Same as `global N: comptime Field = 5` - -fn main(x : Field, y : [Field; N]) { - let res = x * N; - - assert(res == y[0]); - - let res2 = x * mysubmodule::N; - assert(res != res2); -} - -mod mysubmodule { - use dep::std; - - global N: Field = 10; - - fn my_helper() -> comptime Field { - let x = N; - x - } -} -``` - -## Why only local mutability? - -Witnesses in a proving system are immutable in nature. Noir aims to _closely_ mirror this setting -without applying additional overhead to the user. Modeling a mutable reference is not as -straightforward as on conventional architectures and would incur some possibly unexpected overhead. diff --git a/docs/versioned_docs/version-v0.7.1/language_concepts/08_comments.md b/docs/versioned_docs/version-v0.7.1/language_concepts/08_comments.md deleted file mode 100644 index 5b1d9fa38f2..00000000000 --- a/docs/versioned_docs/version-v0.7.1/language_concepts/08_comments.md +++ /dev/null @@ -1,27 +0,0 @@ ---- -title: Comments -description: - Learn how to write comments in Noir programming language. A comment is a line of code that is - ignored by the compiler, but it can be read by programmers. Single-line and multi-line comments - are supported in Noir. -keywords: [Noir programming language, comments, single-line comments, multi-line comments] ---- - -A comment is a line in your codebase which the compiler ignores; however, it can be read by -programmers. - -Here is a single line comment: - -```rust -// This is a comment and is ignored -``` - -`//` is used to tell the compiler to ignore the rest of the line. - -Noir doesn't have multi-line comments, but you can emulate them by using `//` on each line - -```rust -// This is a multi line -// comment, that is ignored by -// the compiler -``` diff --git a/docs/versioned_docs/version-v0.7.1/language_concepts/09_distinct.md b/docs/versioned_docs/version-v0.7.1/language_concepts/09_distinct.md deleted file mode 100644 index 03759d4bb4a..00000000000 --- a/docs/versioned_docs/version-v0.7.1/language_concepts/09_distinct.md +++ /dev/null @@ -1,63 +0,0 @@ ---- -title: Distinct Witnesses ---- - -The `distinct` keyword prevents repetitions of witness indices in the program's ABI. This ensures -that the witnesses being returned as public inputs are all unique. - -The `distinct` keyword is only used for return values on program entry points (usually the `main()` -function). - -When using `distinct` and `pub` simultaneously, `distinct` comes first. See the example below. - -You can read more about the problem this solves -[here](https://github.com/noir-lang/noir/issues/1183). - -## Example - -Without the `distinct` keyword, the following program - -```rust -fn main(x : pub Field, y : pub Field) -> pub [Field; 4] { - let a = 1; - let b = 1; - [x + 1, y, a, b] -} -``` - -compiles to - -```json -{ - //... - "abi": { - //... - "param_witnesses": { "x": [1], "y": [2] }, - "return_witnesses": [3, 2, 4, 4] - } -} -``` - -Whereas (with the `distinct` keyword) - -```rust -fn main(x : pub Field, y : pub Field) -> distinct pub [Field; 4] { - let a = 1; - let b = 1; - [x + 1, y, a, b] -} -``` - -compiles to - -```json -{ - //... - "abi": { - //... - "param_witnesses": { "x": [1], "y": [2] }, - //... 
- "return_witnesses": [3, 4, 5, 6] - } -} -``` diff --git a/docs/versioned_docs/version-v0.7.1/modules_packages_crates/crates_and_packages.md b/docs/versioned_docs/version-v0.7.1/modules_packages_crates/crates_and_packages.md deleted file mode 100644 index 34f28a71148..00000000000 --- a/docs/versioned_docs/version-v0.7.1/modules_packages_crates/crates_and_packages.md +++ /dev/null @@ -1,35 +0,0 @@ ---- -title: Crates and Packages -description: - Learn how to use Crates and Packages in your Noir project -keywords: [Nargo, dependencies, package management, crates, package] ---- - -## Crates - -A crate is the smallest amount of code that the Noir compiler considers at a time. -Crates can contain modules, and the modules may be defined in other files that get compiled with the crate, as we’ll see in the coming sections. - -### Crate Types - -A Noir crate can come in one of two forms: a binary crate or a library crate. - -_Binary crates_ are programs which you can compile to an ACIR circuit which you can then create proofs against. Each must have a function called `main` that defines the ACIR circuit which is to be proved. - -_Library crates_ don't have a `main` function and they don't compile down to ACIR. Instead they define functionality intended to be shared with multiple projects, and eventually included in a binary crate. - -### Crate Root - -Every crate has a root, which is the source file that the compiler starts, this is also known as the root module. The Noir compiler does not enforce any conditions on the name of the file which is the crate root, however if you are compiling via Nargo the crate root must be called `lib.nr` or `main.nr` for library or binary crates respectively. - -## Packages - -A Nargo _package_ is a collection of one of more crates that provides a set of functionality. A package must include a Nargo.toml file. - -A package _must_ contain either a library or a binary crate, but not both. - -### Differences from Cargo Packages - -One notable difference between Rust's Cargo and Noir's Nargo is that while Cargo allows a package to contain an unlimited number of binary crates and a single library crate, Nargo currently only allows a package to contain a single crate. - -In future this restriction may be lifted to allow a Nargo package to contain both a binary and library crate or multiple binary crates. diff --git a/docs/versioned_docs/version-v0.7.1/modules_packages_crates/dependencies.md b/docs/versioned_docs/version-v0.7.1/modules_packages_crates/dependencies.md deleted file mode 100644 index 2807ad52046..00000000000 --- a/docs/versioned_docs/version-v0.7.1/modules_packages_crates/dependencies.md +++ /dev/null @@ -1,110 +0,0 @@ ---- -title: Managing Dependencies -description: - Learn how to specify and manage dependencies in Nargo, allowing you to upload packages to GitHub - and use them easily in your project. -keywords: [Nargo, dependencies, GitHub, package management, versioning] ---- - -Nargo allows you to upload packages to GitHub and use them as dependencies. - -## Specifying a dependency - -Specifying a dependency requires a tag to a specific commit and the git url to the url containing -the package. - -Currently, there are no requirements on the tag contents. If requirements are added, it would follow -semver 2.0 guidelines. - -> Note: Without a `tag` , there would be no versioning and dependencies would change each time you -> compile your project. 
- -For example, to add the [ecrecover-noir library](https://github.com/colinnielsen/ecrecover-noir) to your project, add it to `Nargo.toml`: - -```toml -# Nargo.toml - -[dependencies] -ecrecover = {tag = "v0.8.0", git = "https://github.com/colinnielsen/ecrecover-noir"} -``` - -## Specifying a local dependency - -You can also specify dependencies that are local to your machine. - -For example, this file structure has a library and binary crate - -``` -├── binary_crate -│   ├── Nargo.toml -│   └── src -│   └── main.nr -└── liba - ├── Nargo.toml - └── src - └── lib.nr -``` - -Inside of the binary crate, you can specify: - -```toml -# Nargo.toml - -[dependencies] -libA = { path = "../liba" } -``` - -## Importing dependencies - -You can import a dependency to a Noir file using the following syntax. For example, to import the -ecrecover-noir library and local liba referenced above: - -```rust -use dep::ecrecover; -use dep::libA; -``` - -You can also import only the specific parts of dependency that you want to use, like so: - -```rust -use dep::std::hash::sha256; -use dep::std::scalar_mul::fixed_base; -``` - -Lastly, as demonstrated in the -[elliptic curve example](../standard_library/cryptographic_primitives/ec_primitives#examples), you -can import multiple items in the same line by enclosing them in curly braces: - -```rust -use dep::std::ec::tecurve::affine::{Curve, Point}; -``` - -## Dependencies of Dependencies - -Note that when you import a dependency, you also get access to all of the dependencies of that package. - -For example, the [phy_vector](https://github.com/resurgencelabs/phy_vector) library imports an [fraction](https://github.com/resurgencelabs/fraction) library. If you're importing the phy_vector library, then you can access the functions in fractions library like so: - -```rust -use dep::phy_vector; - -fn main(x : Field, y : pub Field) { - //... - let f = phy_vector::fraction::toFraction(true, 2, 1); - //... -} -``` - -## Available Libraries - -Noir does not currently have an official package manager. You can find a list of available Noir libraries in the [awesome-noir repo here](https://github.com/noir-lang/awesome-noir#libraries). 
- -Some libraries that are available today include: - -- [Standard Library](https://github.com/noir-lang/noir/tree/master/noir_stdlib) - the Noir Standard Library -- [Ethereum Storage Proof Verification](https://github.com/aragonzkresearch/noir-trie-proofs) - a library that contains the primitives necessary for RLP decoding (in the form of look-up table construction) and Ethereum state and storage proof verification (or verification of any trie proof involving 32-byte long keys) -- [BigInt](https://github.com/shuklaayush/noir-bigint) - a library that provides a custom BigUint56 data type, allowing for computations on large unsigned integers -- [ECrecover](https://github.com/colinnielsen/ecrecover-noir/tree/main) - a library to verify an ECDSA signature and return the source Ethereum address -- [Sparse Merkle Tree Verifier](https://github.com/vocdoni/smtverifier-noir/tree/main) - a library for verification of sparse Merkle trees -- [Signed Int](https://github.com/resurgencelabs/signed_int) - a library for accessing a custom Signed Integer data type, allowing access to negative numbers on Noir -- [Fraction](https://github.com/resurgencelabs/fraction) - a library for accessing fractional number data type in Noir, allowing results that aren't whole numbers diff --git a/docs/versioned_docs/version-v0.7.1/modules_packages_crates/modules.md b/docs/versioned_docs/version-v0.7.1/modules_packages_crates/modules.md deleted file mode 100644 index e429b336511..00000000000 --- a/docs/versioned_docs/version-v0.7.1/modules_packages_crates/modules.md +++ /dev/null @@ -1,104 +0,0 @@ ---- -title: Understanding Modules -description: - Learn how to organize your files using modules in Noir, following the same convention as Rust's - module system. Examples included. -keywords: [Noir, Rust, modules, organizing files, sub-modules] ---- - -# Modules - -Noir's module system follows the same convention as the _newer_ version of Rust's module system. - -## Purpose of Modules - -Modules are used to organise files. Without modules all of your code would need to live in a single -file. In Noir, the compiler does not automatically scan all of your files to detect modules. This -must be done explicitly by the developer. - -## Examples - -### Importing a module in the crate root - -Filename : `src/main.nr` - -```rust -mod foo; - -fn main() { - foo::hello_world(); -} -``` - -Filename : `src/foo.nr` - -```rust -fn from_foo() {} -``` - -In the above snippet, the crate root is the `src/main.nr` file. The compiler sees the module -declaration `mod foo` which prompts it to look for a foo.nr file. - -Visually this module hierarchy looks like the following : - -``` -crate - ├── main - │ - └── foo - └── from_foo - -``` - -### Importing a module throughout the tree -All modules are accessible from the ``crate::`` namespace. - -``` -crate - ├── bar - ├── foo - └── main - -``` -In the above snippet, if ``bar`` would like to use functions in ``foo``, it can do so by ``use crate::foo::function_name``. - -### Sub-modules - -Filename : `src/main.nr` - -```rust -mod foo; - -fn main() { - foo::from_foo(); -} -``` - -Filename : `src/foo.nr` - -```rust -mod bar; -fn from_foo() {} -``` - -Filename : `src/foo/bar.nr` - -```rust -fn from_bar() {} -``` - -In the above snippet, we have added an extra module to the module tree; `bar`. `bar` is a submodule -of `foo` hence we declare bar in `foo.nr` with `mod bar`. 
Since `foo` is not the crate root, the -compiler looks for the file associated with the `bar` module in `src/foo/bar.nr` - -Visually the module hierarchy looks as follows: - -``` -crate - ├── main - │ - └── foo - ├── from_foo - └── bar - └── from_bar -``` diff --git a/docs/versioned_docs/version-v0.7.1/nargo/01_commands.md b/docs/versioned_docs/version-v0.7.1/nargo/01_commands.md deleted file mode 100644 index 8efa286b5f0..00000000000 --- a/docs/versioned_docs/version-v0.7.1/nargo/01_commands.md +++ /dev/null @@ -1,132 +0,0 @@ ---- -title: Commands -description: - Noir CLI Commands for Noir Prover and Verifier to create, execute, prove and verify programs, - generate Solidity verifier smart contract and compile into JSON file containing ACIR - representation and ABI of circuit. -keywords: - [ - Nargo, - Noir CLI, - Noir Prover, - Noir Verifier, - generate Solidity verifier, - compile JSON file, - ACIR representation, - ABI of circuit, - TypeScript, - ] ---- - -## General options - -``` -Options: - -s, --show-ssa Emit debug information for the intermediate SSA IR - -d, --deny-warnings Quit execution when warnings are emitted - --show-output Display output of `println` statements during tests - -h, --help Print help -``` - -## `nargo help [subcommand]` - -Prints the list of available commands or specific information of a subcommand. - -_Arguments_ - -- `` - The subcommand whose help message to display - -## `nargo check` - -Generate the `Prover.toml` and `Verifier.toml` files for specifying prover and verifier in/output -values of the Noir program respectively. - -## `nargo codegen-verifier` - -Generate a Solidity verifier smart contract for the program. - -## `nargo compile ` - -Compile the program into a JSON build artifact file containing the ACIR representation and the ABI -of the circuit. This build artifact can then be used to generate and verify proofs. - -_Arguments_ - -- `` - The name of the circuit file - -_Options_ - -- `-c, --contracts` - Compile each contract function used within the program -- `--print-acir` - Displays the ACIR for the compiled circuit - -## `nargo new [path]` - -Creates a new Noir project. - -_Arguments_ - -- `` - Name of the package -- `[path]` - The path to save the new project - -## `nargo execute [witness_name]` - -Runs the Noir program and prints its return value. - -_Arguments_ - -- `[witness_name]` - The name of the witness - -_Usage_ - -The inputs to the circuit are read from the `Prover.toml` file generated by `nargo check`, which -must be filled in. - -To save the witness to file, run the command with a value for the `witness-name` argument. A -`.tr` file will then be saved in the `build` folder. - -> **Info:** The `.tr` file is the witness file. The witness file can be considered as program inputs -> parsed for your program's ACIR. -> -> This file can be passed along with circuit's ACIR into a TypeScript project for proving and -> verification. See the [TypeScript](../typescript#proving-and-verifying-externally-compiled-files) -> section to learn more. - -## `nargo prove ` - -Creates a proof for the program. - -_Arguments_ - -- `` - The name of the proof - -_Options_ - -- `-v, --verify` - Verify proof after proving - -## `nargo verify ` - -Given a proof and a program, verify whether the proof is valid. - -_Arguments_ - -- `` - The proof to verify - -## `nargo test ` - -Nargo will automatically compile and run any functions which have the decorator `#[test]` on them if -you run `nargo test`. - -See an example on the [testing page](./testing). 
- -_Arguments_ - -- `` - a pattern to indicate to only run tests with names containing the pattern - -## `nargo gates` - -Counts the occurrences of different gates in the circuit - -## `nargo lsp` - -Start a long-running Language Server process that communicates over stdin/stdout. -Usually this command is not run by a user, but instead will be run by a Language Client, such as [vscode-noir](https://github.com/noir-lang/vscode-noir). diff --git a/docs/versioned_docs/version-v0.7.1/nargo/02_testing.md b/docs/versioned_docs/version-v0.7.1/nargo/02_testing.md deleted file mode 100644 index ba0bebd658b..00000000000 --- a/docs/versioned_docs/version-v0.7.1/nargo/02_testing.md +++ /dev/null @@ -1,32 +0,0 @@ ---- -title: Testing in Noir -description: Learn how to use Nargo to test your Noir program in a quick and easy way -keywords: [Nargo, testing, Noir, compile, test] ---- - -You can test your Noir programs using Noir circuits. - -Nargo will automatically compile and run any functions which have the decorator `#[test]` on them if -you run `nargo test`. - -For example if you have a program like: - -```rust -fn add(x: u64, y: u64) -> u64 { - x + y -} -#[test] -fn test_add() { - assert(add(2,2) == 4); - assert(add(0,1) == 1); - assert(add(1,0) == 1); -} -``` - -Running `nargo test` will test that the `test_add` function can be executed while satisfying all -the constraints, which allows you to test that `add` returns the expected values. Test functions can't -have any arguments currently. - -This is much faster than testing in TypeScript, but the only downside is that you can't -explicitly test that a certain set of inputs is invalid, i.e. you can't say that you want -add(2^64-1, 2^64-1) to fail. diff --git a/docs/versioned_docs/version-v0.7.1/standard_library/array_methods.md b/docs/versioned_docs/version-v0.7.1/standard_library/array_methods.md deleted file mode 100644 index 8d8a9531243..00000000000 --- a/docs/versioned_docs/version-v0.7.1/standard_library/array_methods.md +++ /dev/null @@ -1,180 +0,0 @@ ---- -title: Array Methods -description: - Learn about the commonly used methods available for arrays in Noir, including len, sort, fold, - reduce, all, and any. -keywords: [rust, array, methods, len, sort, fold, reduce, all, any] ---- - -# Array - -For convenience, the STD provides some ready-to-use, common methods for arrays[^migrationnote]: - -## len - -Returns the length of an array - -```rust -fn len(_array: [T; N]) -> comptime Field -``` - -example - -```rust -fn main() { - let array = [42, 42]; - assert(array.len() == 2); -} -``` - -## sort - -Returns a new sorted array. The original array remains untouched. Notice that this function will -only work for arrays of fields or integers, not for any arbitrary type. This is because the sorting -logic it uses internally is optimized specifically for these values. If you need a sort function to -sort any type, you should use the function `sort_via` described below. 
- -```rust -fn sort(_array: [T; N]) -> [T; N] -``` - -example - -```rust -fn main() { - let arr = [42, 32]; - let sorted = arr.sort(); - assert(sorted == [32, 42]); -} -``` - -## sort_via - -Sorts the array with a custom comparison function - -```rust -fn sort_via(mut a: [T; N], ordering: fn(T, T) -> bool) -> [T; N] -``` - -example - -```rust -fn main() { - let arr = [42, 32] - let sorted_ascending = arr.sort_via(|a, b| a < b); - assert(sorted_ascending == [32, 42]); // verifies - - let sorted_descending = arr.sort_via(|a, b| a > b); - assert(sorted_descending == [32, 42]); // does not verify -} -``` - -## map - -Applies a function to each element of the array, returning a new array containing the mapped elements. - -```rust -fn map(f: fn(T) -> U) -> [U; N] -``` - -example - -```rust -let a = [1, 2, 3]; -let b = a.map(|a| a * 2); // b is now [2, 4, 6] -``` - -## fold - -Applies a function to each element of the array, returning the final accumulated value. The first -parameter is the initial value. - -```rust -fn fold(mut accumulator: U, f: fn(U, T) -> U) -> U -``` - -This is a left fold, so the given function will be applied to the accumulator and first element of -the array, then the second, and so on. For a given call the expected result would be equivalent to: - -```rust -let a1 = [1]; -let a2 = [1, 2]; -let a3 = [1, 2, 3]; - -let f = |a, b| a - b; -a1.fold(10, f) //=> f(10, 1) -a2.fold(10, f) //=> f(f(10, 1), 2) -a3.fold(10, f) //=> f(f(f(10, 1), 2), 3) -``` - -example: - -```rust - -fn main() { - let arr = [2, 2, 2, 2, 2]; - let folded = arr.fold(0, |a, b| a + b); - assert(folded == 10); -} - -``` - -## reduce - -Same as fold, but uses the first element as starting element. - -```rust -fn reduce(f: fn(T, T) -> T) -> T -``` - -example: - -```rust -fn main() { - let arr = [2, 2, 2, 2, 2]; - let reduced = arr.reduce(|a, b| a + b); - assert(reduced == 10); -} -``` - -## all - -Returns true if all the elements satisfy the given predicate - -```rust -fn all(predicate: fn(T) -> bool) -> bool -``` - -example: - -```rust -fn main() { - let arr = [2, 2, 2, 2, 2]; - let all = arr.all(|a| a == 2); - assert(all); -} -``` - -## any - -Returns true if any of the elements satisfy the given predicate - -```rust -fn any(predicate: fn(T) -> bool) -> bool -``` - -example: - -```rust -fn main() { - let arr = [2, 2, 2, 2, 5]; - let any = arr.any(|a| a == 5); - assert(any); -} - -``` - -[^migrationnote]: - Migration Note: These methods were previously free functions, called via `std::array::len()`. - For the sake of ease of use and readability, these functions are now methods and the old syntax - for them is now deprecated. diff --git a/docs/versioned_docs/version-v0.7.1/standard_library/black_box_fns.md b/docs/versioned_docs/version-v0.7.1/standard_library/black_box_fns.md deleted file mode 100644 index 3063e71c147..00000000000 --- a/docs/versioned_docs/version-v0.7.1/standard_library/black_box_fns.md +++ /dev/null @@ -1,44 +0,0 @@ ---- -title: Black Box Functions -description: Black box functions are functions in Noir that rely on backends implementing support for specialized constraints. -keywords: [noir, black box functions] ---- - -Black box functions are functions in Noir that rely on backends implementing support for specialized constraints. This makes certain zk-snark unfriendly computations cheaper than if they were implemented in Noir. - -:::warning - -It is likely that not all backends will support a particular black box function. 
- -::: - -Because it is not guaranteed that all backends will support black box functions, it is possible that certain Noir programs won't compile against a particular backend if they use an unsupported black box function. It is possible to fallback to less efficient implementations written in Noir/ACIR in some cases. - -Black box functions are specified with the `#[foreign(black_box_fn)]` attribute. For example, the SHA256 function in the Noir [source code](https://github.com/noir-lang/noir/blob/v0.5.1/noir_stdlib/src/hash.nr) looks like: - -```rust -#[foreign(sha256)] -fn sha256(_input : [u8; N]) -> [u8; 32] {} -``` - -## Function list - -Here is a list of the current black box functions that are supported by UltraPlonk: - -- AES -- [SHA256](./cryptographic_primitives/hashes#sha256) -- [Schnorr signature verification](./cryptographic_primitives/schnorr) -- [Blake2s](./cryptographic_primitives/hashes#blake2s) -- [Pedersen](./cryptographic_primitives/hashes#pedersen) -- [HashToField128Security](./cryptographic_primitives/hashes#hash_to_field) -- [ECDSA signature verification](./cryptographic_primitives/ecdsa_secp256k1) -- [Fixed base scalar multiplication](./cryptographic_primitives/scalar) -- [Compute merkle root](./merkle_trees#compute_merkle_root) -- AND -- XOR -- RANGE -- [Keccak256](./cryptographic_primitives/hashes#keccak256) - -Most black box functions are included as part of the Noir standard library, however `AND`, `XOR` and `RANGE` are used as part of the Noir language syntax. For instance, using the bitwise operator `&` will invoke the `AND` black box function. To ensure compatibility across backends, the ACVM has fallback implementations of `AND`, `XOR` and `RANGE` defined in its standard library which it can seamlessly fallback to if the backend doesn't support them. - -You can view the black box functions defined in the ACVM code [here](https://github.com/noir-lang/acvm/blob/acir-v0.12.0/acir/src/circuit/black_box_functions.rs). diff --git a/docs/versioned_docs/version-v0.7.1/standard_library/cryptographic_primitives.md b/docs/versioned_docs/version-v0.7.1/standard_library/cryptographic_primitives.md deleted file mode 100644 index 2df4f929474..00000000000 --- a/docs/versioned_docs/version-v0.7.1/standard_library/cryptographic_primitives.md +++ /dev/null @@ -1,14 +0,0 @@ ---- -title: Cryptographic primitives in Noir -description: - Learn about the cryptographic primitives ready to use for any Noir project -keywords: - [ - cryptographic primitives, - Noir project, - ] ---- - -The Noir team is progressively adding new cryptographic primitives to the standard library. Reach out for news or if you would be interested in adding more of these calculations in Noir. - -Some methods are available thanks to the Aztec backend, not being performed using Noir. When using other backends, these methods may or may not be supplied. 
diff --git a/docs/versioned_docs/version-v0.7.1/standard_library/cryptographic_primitives/00_hashes.mdx b/docs/versioned_docs/version-v0.7.1/standard_library/cryptographic_primitives/00_hashes.mdx deleted file mode 100644 index 31a84cdb753..00000000000 --- a/docs/versioned_docs/version-v0.7.1/standard_library/cryptographic_primitives/00_hashes.mdx +++ /dev/null @@ -1,146 +0,0 @@ ---- -title: Hash methods -description: - Learn about the cryptographic primitives ready to use for any Noir project, including sha256, - blake2s, pedersen, mimc_bn254 and mimc -keywords: - [cryptographic primitives, Noir project, sha256, blake2s, pedersen, mimc_bn254, mimc, hash] ---- - -import BlackBoxInfo from './common/_blackbox.mdx'; - -## sha256 - -Given an array of bytes, returns the resulting sha256 hash. - -```rust -fn sha256(_input : [u8]) -> [u8; 32] -``` - -example: - -```rust -fn main() { - let x = [163, 117, 178, 149] // some random bytes - let hash = std::hash::sha256(x); -} -``` - - - -## blake2s - -Given an array of bytes, returns an array with the Blake2 hash - -```rust -fn blake2s(_input : [u8]) -> [u8; 32] -``` - -example: - -```rust -fn main() { - let x = [163, 117, 178, 149] // some random bytes - let hash = std::hash::blake2s(x); -} -``` - - - -## pedersen - -Given an array of Fields, returns the Pedersen hash. - -```rust -fn pedersen(_input : [Field]) -> [Field; 2] -``` - -example: - -```rust -fn main() { - let x = [163, 117, 178, 149] // some random bytes - let hash = std::hash::pedersen(x); -} -``` - - - -## keccak256 - -Given an array of bytes (`u8`), returns the resulting keccak hash as an array of 32 bytes -(`[u8; 32]`). Specify a message_size to hash only the first `message_size` bytes -of the input. - -```rust -fn keccak256(_input : [u8; N], _message_size: u32) -> [u8; 32] -``` - -example: - -```rust -fn main() { - let x = [163, 117, 178, 149] // some random bytes - let message_size = 4; - let hash = std::hash::keccak256(x, message_size); -} -``` - - - -## poseidon - -Given an array of Fields, returns a new Field with the Poseidon Hash. Mind that you need to specify -how many inputs are there to your Poseidon function. - -```rust -// example for hash_1, hash_2 accepts an array of length 2, etc -fn hash_1(input: [Field; 1]) -> Field -``` - -example: - -```rust -fn main() -{ - let hash1 = std::hash::poseidon::bn254::hash_2([1, 2]); - assert(hash1 == 0x115cc0f5e7d690413df64c6b9662e9cf2a3617f2743245519e19607a4417189a); -} -``` - -## mimc_bn254 and mimc - -`mimc_bn254` is `mimc`, but with hardcoded parameters for the BN254 curve. You can use it by -providing an array of Fields, and it returns a Field with the hash. You can use the `mimc` method if -you're willing to input your own constants: - -```rust -fn mimc(x: Field, k: Field, constants: [Field; N], exp : Field) -> Field -``` - -otherwise, use the `mimc_bn254` method: - -```rust -fn mimc_bn254(array: [Field; N]) -> Field -``` - -example: - -```rust - -fn main() { - let x = [163, 117, 178, 149] // some random bytes - let hash = std::hash::mimc_bn254(x); -} -``` - -## hash_to_field - -```rust -fn hash_to_field(_input : [Field; N]) -> Field {} -``` - -Calculates the `blake2s` hash of the inputs and returns the hash modulo the field modulus to return -a value which can be represented as a `Field`. 
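A minimal usage sketch for `hash_to_field` (the input values below are arbitrary and only for illustration):

```rust
use dep::std;

fn main() {
    let x = [1, 2, 3]; // some field elements to hash
    let h = std::hash::hash_to_field(x);
    std::println(h);
}
```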
- - diff --git a/docs/versioned_docs/version-v0.7.1/standard_library/cryptographic_primitives/01_scalar.mdx b/docs/versioned_docs/version-v0.7.1/standard_library/cryptographic_primitives/01_scalar.mdx deleted file mode 100644 index 62265cddb1e..00000000000 --- a/docs/versioned_docs/version-v0.7.1/standard_library/cryptographic_primitives/01_scalar.mdx +++ /dev/null @@ -1,33 +0,0 @@ ---- -title: Scalar multiplication -description: - See how you can perform scalar multiplications over a fixed base in Noir -keywords: - [ - cryptographic primitives, - Noir project, - scalar multiplication, - ] ---- - -import BlackBoxInfo from './common/\_blackbox.mdx'; - -## scalar_mul::fixed_base - -Performs scalar multiplication over the embedded curve whose coordinates are defined by the -configured noir field. For the BN254 scalar field, this is BabyJubJub or Grumpkin. - -```rust -fn fixed_base(_input : Field) -> [Field; 2] -``` - -example - -```rust -fn main(x : Field) { - let scal = std::scalar_mul::fixed_base(x); - std::println(scal); -} -``` - - diff --git a/docs/versioned_docs/version-v0.7.1/standard_library/cryptographic_primitives/02_schnorr.mdx b/docs/versioned_docs/version-v0.7.1/standard_library/cryptographic_primitives/02_schnorr.mdx deleted file mode 100644 index 0e219c0e5ff..00000000000 --- a/docs/versioned_docs/version-v0.7.1/standard_library/cryptographic_primitives/02_schnorr.mdx +++ /dev/null @@ -1,37 +0,0 @@ ---- -title: Schnorr Signatures -description: Learn how you can verify Schnorr signatures using Noir -keywords: [cryptographic primitives, Noir project, schnorr, signatures] ---- - -import BlackBoxInfo from './common/_blackbox.mdx'; - -## schnorr::verify_signature - -Verifier for Schnorr signatures over the embedded curve (for BN254 it is Grumpkin). - -```rust -fn verify_signature(_public_key_x: Field, _public_key_y: Field, _signature: [u8; 64], _message: [u8]) -> bool -``` - -where `_signature` can be generated like so using the npm package -[@noir-lang/barretenberg](https://www.npmjs.com/package/@noir-lang/barretenberg) - -```js -const { BarretenbergWasm } = require('@noir-lang/barretenberg/dest/wasm'); -const { Schnorr } = require('@noir-lang/barretenberg/dest/crypto/schnorr'); - -... - -const barretenberg = await BarretenbergWasm.new(); -const schnorr = new Schnorr(barretenberg); -const pubKey = schnorr.computePublicKey(privateKey); -const message = ... -const signature = Array.from( - schnorr.constructSignature(hash, privateKey).toBuffer() -); - -... 
-``` - - diff --git a/docs/versioned_docs/version-v0.7.1/standard_library/cryptographic_primitives/03_ecdsa_secp256k1.mdx b/docs/versioned_docs/version-v0.7.1/standard_library/cryptographic_primitives/03_ecdsa_secp256k1.mdx deleted file mode 100644 index 0b800fdbc5f..00000000000 --- a/docs/versioned_docs/version-v0.7.1/standard_library/cryptographic_primitives/03_ecdsa_secp256k1.mdx +++ /dev/null @@ -1,17 +0,0 @@ ---- -title: ECDSA Verification -description: Learn about the cryptographic primitives regarding ECDSA over the secp256k1 curve -keywords: [cryptographic primitives, Noir project, ecdsa, secp256k1, signatures] ---- - -import BlackBoxInfo from './common/_blackbox.mdx'; - -## ecdsa_secp256k1::verify_signature - -Verifier for ECDSA Secp256k1 signatures - -```rust -fn verify_signature(_public_key_x : [u8; 32], _public_key_y : [u8; 32], _signature: [u8; 64], _message: [u8]) -> bool -``` - - diff --git a/docs/versioned_docs/version-v0.7.1/standard_library/cryptographic_primitives/04_ec_primitives.md b/docs/versioned_docs/version-v0.7.1/standard_library/cryptographic_primitives/04_ec_primitives.md deleted file mode 100644 index 26fb4c09e88..00000000000 --- a/docs/versioned_docs/version-v0.7.1/standard_library/cryptographic_primitives/04_ec_primitives.md +++ /dev/null @@ -1,101 +0,0 @@ ---- -title: Elliptic Curve Primitives -keywords: [cryptographic primitives, Noir project] ---- - -Data structures and methods on them that allow you to carry out computations involving elliptic -curves over the (mathematical) field corresponding to `Field`. For the field currently at our -disposal, applications would involve a curve embedded in BN254, e.g. the -[Baby Jubjub curve](https://eips.ethereum.org/EIPS/eip-2494). - -## Data structures - -### Elliptic curve configurations - -(`std::ec::{tecurve,montcurve,swcurve}::{affine,curvegroup}::Curve`), i.e. the specific elliptic -curve you want to use, which would be specified using any one of the methods -`std::ec::{tecurve,montcurve,swcurve}::{affine,curvegroup}::new` which take the coefficients in the -defining equation together with a generator point as parameters. You can find more detail in the -comments in -[`noir_stdlib/src/ec.nr`](https://github.com/noir-lang/noir/blob/master/noir_stdlib/src/ec.nr), but -the gist of it is that the elliptic curves of interest are usually expressed in one of the standard -forms implemented here (Twisted Edwards, Montgomery and Short Weierstraß), and in addition to that, -you could choose to use `affine` coordinates (Cartesian coordinates - the usual (x,y) - possibly -together with a point at infinity) or `curvegroup` coordinates (some form of projective coordinates -requiring more coordinates but allowing for more efficient implementations of elliptic curve -operations). Conversions between all of these forms are provided, and under the hood these -conversions are done whenever an operation is more efficient in a different representation (or a -mixed coordinate representation is employed). - -### Points - -(`std::ec::{tecurve,montcurve,swcurve}::{affine,curvegroup}::Point`), i.e. points lying on the -elliptic curve. For a curve configuration `c` and a point `p`, it may be checked that `p` -does indeed lie on `c` by calling `c.contains(p1)`. - -## Methods - -(given a choice of curve representation, e.g. 
use `std::ec::tecurve::affine::Curve` and use -`std::ec::tecurve::affine::Point`) - -- The **zero element** is given by `Point::zero()`, and we can verify whether a point `p: Point` is - zero by calling `p.is_zero()`. -- **Equality**: Points `p1: Point` and `p2: Point` may be checked for equality by calling - `p1.eq(p2)`. -- **Addition**: For `c: Curve` and points `p1: Point` and `p2: Point` on the curve, adding these two - points is accomplished by calling `c.add(p1,p2)`. -- **Negation**: For a point `p: Point`, `p.negate()` is its negation. -- **Subtraction**: For `c` and `p1`, `p2` as above, subtracting `p2` from `p1` is accomplished by - calling `c.subtract(p1,p2)`. -- **Scalar multiplication**: For `c` as above, `p: Point` a point on the curve and `n: Field`, - scalar multiplication is given by `c.mul(n,p)`. If instead `n :: [u1; N]`, i.e. `n` is a bit - array, the `bit_mul` method may be used instead: `c.bit_mul(n,p)` -- **Multi-scalar multiplication**: For `c` as above and arrays `n: [Field; N]` and `p: [Point; N]`, - multi-scalar multiplication is given by `c.msm(n,p)`. -- **Coordinate representation conversions**: The `into_group` method converts a point or curve - configuration in the affine representation to one in the CurveGroup representation, and - `into_affine` goes in the other direction. -- **Curve representation conversions**: `tecurve` and `montcurve` curves and points are equivalent - and may be converted between one another by calling `into_montcurve` or `into_tecurve` on their - configurations or points. `swcurve` is more general and a curve c of one of the other two types - may be converted to this representation by calling `c.into_swcurve()`, whereas a point `p` lying - on the curve given by `c` may be mapped to its corresponding `swcurve` point by calling - `c.map_into_swcurve(p)`. -- **Map-to-curve methods**: The Elligator 2 method of mapping a field element `n: Field` into a - `tecurve` or `montcurve` with configuration `c` may be called as `c.elligator2_map(n)`. For all of - the curve configurations, the SWU map-to-curve method may be called as `c.swu_map(z,n)`, where - `z: Field` depends on `Field` and `c` and must be chosen by the user (the conditions it needs to - satisfy are specified in the comments - [here](https://github.com/noir-lang/noir/blob/master/noir_stdlib/src/ec.nr)). - -## Examples - -The -[ec_baby_jubjub test](https://github.com/noir-lang/noir/blob/master/crates/nargo_cli/tests/test_data/ec_baby_jubjub/src/main.nr) -illustrates all of the above primitives on various forms of the Baby Jubjub curve. A couple of more -interesting examples in Noir would be: - -Public-key cryptography: Given an elliptic curve and a 'base point' on it, determine the public key -from the private key. This is a matter of using scalar multiplication. In the case of Baby Jubjub, -for example, this code would do: - -```rust -use dep::std::ec::tecurve::affine::{Curve, Point}; - -fn bjj_pub_key(priv_key: Field) -> Point -{ - - let bjj = Curve::new(168700, 168696, G::new(995203441582195749578291179787384436505546430278305826713579947235728471134,5472060717959818805561601436314318772137091100104008585924551046643952123905)); - - let base_pt = Point::new(5299619240641551281634865583518297030282874472190772894086521144482721001553, 16950150798460657717958625567821834550301663161624707787222815936182638968203); - - bjj.mul(priv_key,base_pt) -} -``` - -This would come in handy in a Merkle proof. 
- -- EdDSA signature verification: This is a matter of combining these primitives with a suitable hash - function. See - [feat(stdlib): EdDSA sig verification noir#1136](https://github.com/noir-lang/noir/pull/1136) for - the case of Baby Jubjub and the Poseidon hash function. diff --git a/docs/versioned_docs/version-v0.7.1/standard_library/cryptographic_primitives/05_eddsa.mdx b/docs/versioned_docs/version-v0.7.1/standard_library/cryptographic_primitives/05_eddsa.mdx deleted file mode 100644 index 8f060ed3316..00000000000 --- a/docs/versioned_docs/version-v0.7.1/standard_library/cryptographic_primitives/05_eddsa.mdx +++ /dev/null @@ -1,17 +0,0 @@ ---- -title: EdDSA Verification -description: Learn about the cryptographic primitives regarding EdDSA -keywords: [cryptographic primitives, Noir project, eddsa, signatures] ---- - -import BlackBoxInfo from './common/_blackbox.mdx'; - -## eddsa::eddsa_poseidon_verify - -Verifier for EdDSA signatures - -```rust -fn eddsa_poseidon_verify(public_key_x : Field, public_key_y : Field, signature_s: Field, signature_r8_x: Field, signature_r8_y: Field, message: Field) -> bool -``` - - diff --git a/docs/versioned_docs/version-v0.7.1/standard_library/cryptographic_primitives/common/_blackbox.mdx b/docs/versioned_docs/version-v0.7.1/standard_library/cryptographic_primitives/common/_blackbox.mdx deleted file mode 100644 index 9fe9b48fbff..00000000000 --- a/docs/versioned_docs/version-v0.7.1/standard_library/cryptographic_primitives/common/_blackbox.mdx +++ /dev/null @@ -1,5 +0,0 @@ -:::info - -This is a black box function. Read [this section](../black_box_fns) to learn more about black box functions in Noir. - -::: \ No newline at end of file diff --git a/docs/versioned_docs/version-v0.7.1/standard_library/field_methods.md b/docs/versioned_docs/version-v0.7.1/standard_library/field_methods.md deleted file mode 100644 index 4d1cdc953e9..00000000000 --- a/docs/versioned_docs/version-v0.7.1/standard_library/field_methods.md +++ /dev/null @@ -1,149 +0,0 @@ ---- -title: Field Methods -description: - Learn about common methods on Noir Field, including to_le_bits, to_le_bytes, to_le_radix, - to_be_radix, pow_32, etc, and see code examples. -keywords: - [ - Noir Field, - to_le_bits, - to_le_bytes, - to_le_radix, - to_be_radix, - pow_32, - Little Endian, - Big Endian, - Vector, - Exponent, - ] ---- - -After declaring a Field, you can use these common methods on it: - -## to_le_bits - -Transforms the field into an array of bits, Little Endian. - -```rust -fn to_le_bits(_x : Field, _bit_size: u32) -> [u1; N] -``` - -example: - -```rust -fn main() { - let field = 2 - let bits = field.to_le_bits(32); -} -``` - -## to_be_bits - -Transforms the field into an array of bits, Big Endian. 
- -```rust -fn to_be_bits(_x : Field, _bit_size: u32) -> [u1; N] -``` - -example: - -```rust -fn main() { - let field = 2 - let bits = field.to_be_bits(32); -} -``` - -## to_le_bytes - -Transforms into an array of bytes, Little Endian - -```rust -fn to_le_bytes(_x : Field, byte_size: u32) -> [u8] -``` - -example: - -```rust -fn main() { - let field = 2 - let bytes = field.to_le_bytes(4); -} -``` - -## to_be_bytes - -Transforms into an array of bytes, Big Endian - -```rust -fn to_be_bytes(_x : Field, byte_size: u32) -> [u8] -``` - -example: - -```rust -fn main() { - let field = 2 - let bytes = field.to_be_bytes(4); -} -``` - -## to_le_radix - -Decomposes into a vector over the specified base, Little Endian - -```rust -fn to_le_radix(_x : Field, _radix: u32, _result_len: u32) -> [u8] -``` - -example: - -```rust -fn main() { - let field = 2 - let radix = field.to_le_radix(256, 4); -} -``` - -## to_be_radix - -Decomposes into a vector over the specified base, Big Endian - -```rust -fn to_be_radix(_x : Field, _radix: u32, _result_len: u32) -> [u8] -``` - -example: - -```rust -fn main() { - let field = 2 - let radix = field.to_be_radix(256, 4); -} -``` - -## pow_32 - -Returns the value to the power of the specified exponent - -```rust -fn pow_32(self, exponent: Field) -> Field -``` - -example: - -```rust -fn main() { - let field = 2 - let pow = field.pow_32(4); - assert(pow == 16); -} -``` - -## sgn0 - -Parity of (prime) Field element, i.e. sgn0(x mod p) = 0 if x ∈ {0, ..., p-1} is even, otherwise sgn0(x mod p) = 1. - -```rust -fn sgn0(self) -> u1 -``` diff --git a/docs/versioned_docs/version-v0.7.1/standard_library/logging.md b/docs/versioned_docs/version-v0.7.1/standard_library/logging.md deleted file mode 100644 index 649d35a3f0b..00000000000 --- a/docs/versioned_docs/version-v0.7.1/standard_library/logging.md +++ /dev/null @@ -1,55 +0,0 @@ ---- -title: Logging -description: - Learn how to use the println statement for debugging in Noir with this tutorial. Understand the - basics of logging in Noir and how to implement it in your code. -keywords: - [ - noir logging, - println statement, - debugging in noir, - noir std library, - logging tutorial, - basic logging in noir, - noir logging implementation, - noir debugging techniques, - rust, - ] ---- - -# Logging - -The standard library provides a familiar `println` statement you can use. Despite being a limited -implementation of rust's `println!` macro, this construct can be useful for debugging. - -The `println` statement only works for fields, integers and arrays (including strings). - -```rust -use dep::std; - -fn main(string: pub str<5>) { - let x = 5; - std::println(x) -} - -``` - -To view the output of the `println` statement you need to set the `--show-output` flag. - -``` -$ nargo prove --help -Create proof for this program. 
The proof is returned as a hex encoded string - -Usage: nargo prove [OPTIONS] [PROOF_NAME] [CIRCUIT_NAME] - -Arguments: - [PROOF_NAME] The name of the proof - [CIRCUIT_NAME] The name of the circuit build files (ACIR, proving and verification keys) - -Options: - -v, --verify Verify proof after proving - -s, --show-ssa Emit debug information for the intermediate SSA IR - -d, --deny-warnings Quit execution when warnings are emitted - --show-output Display output of `println` statements during tests - -h, --help Print help -``` diff --git a/docs/versioned_docs/version-v0.7.1/standard_library/merkle_trees.md b/docs/versioned_docs/version-v0.7.1/standard_library/merkle_trees.md deleted file mode 100644 index fc8909a4795..00000000000 --- a/docs/versioned_docs/version-v0.7.1/standard_library/merkle_trees.md +++ /dev/null @@ -1,59 +0,0 @@ ---- -title: Merkle Trees -description: - Learn about Merkle Trees in Noir with this tutorial. Explore the basics of computing a merkle root using a proof, with examples. -keywords: - [ - Merkle trees in Noir, - Noir programming language, - check membership, - computing root from leaf, - Noir Merkle tree implementation, - Merkle tree tutorial, - Merkle tree code examples, - Noir libraries, - pedersen hash., - ] ---- - -## compute_merkle_root - -Returns the root of the tree from the provided leaf and its hash path, using a [Pedersen hash](cryptographic_primitives/00_hashes.mdx#pedersen). - -```rust -fn compute_merkle_root(leaf : Field, index : Field, hash_path: [Field]) -> Field -``` - -example: - -```rust -/** - // these values are for this example only - index = "0" - priv_key = "0x000000000000000000000000000000000000000000000000000000616c696365" - secret = "0x1929ea3ab8d9106a899386883d9428f8256cfedb3c4f6b66bf4aa4d28a79988f" - note_hash_path = [ - "0x1e61bdae0f027b1b2159e1f9d3f8d00fa668a952dddd822fda80dc745d6f65cc", - "0x0e4223f3925f98934393c74975142bd73079ab0621f4ee133cee050a3c194f1a", - "0x2fd7bb412155bf8693a3bd2a3e7581a679c95c68a052f835dddca85fa1569a40" - ] - */ -fn main(index: Field, priv_key: Field, secret: Field, note_hash_path: [Field; 3]) { - - let pubkey = std::scalar_mul::fixed_base(priv_key); - let pubkey_x = pubkey[0]; - let pubkey_y = pubkey[1]; - let note_commitment = std::hash::pedersen([pubkey_x, pubkey_y, secret]); - - let root = std::merkle::compute_merkle_root(note_commitment[0], index, note_hash_path); - std::println(root); -} -``` - -To check merkle tree membership: - -1. Include a merkle root as a program input. -2. Compute the merkle root of a given leaf, index and hash path. -3. Assert the merkle roots are equal. - -For more info about merkle trees, see the Wikipedia [page](https://en.wikipedia.org/wiki/Merkle_tree). \ No newline at end of file diff --git a/docs/versioned_docs/version-v0.7.1/typescript.md b/docs/versioned_docs/version-v0.7.1/typescript.md deleted file mode 100644 index fae002dfd28..00000000000 --- a/docs/versioned_docs/version-v0.7.1/typescript.md +++ /dev/null @@ -1,262 +0,0 @@ ---- -title: Working with TypeScript -description: - Learn how to interact with Noir programs using TypeScript. Follow this tutorial to compile your - program, specify inputs, initialize a prover & verifier, and prove and verify your program. -keywords: [TypeScript, Noir, tutorial, compile, inputs, prover, verifier, proof] ---- - -Interactions with Noir programs can also be performed in TypeScript, which can come in handy when -writing tests or when working in TypeScript-based projects like [Hardhat](https://hardhat.org/). 
- -This guide is based on the [noir-starter](https://github.com/signorecello/noir-starter) example. -Please refer to it for an example implementation. - -:::note - -You may find unexpected errors working with some frameworks such as `vite`. This is due to the -nature of `wasm` files and the way Noir uses web workers. As we figure it out, we suggest using -[Create React App](https://create-react-app.dev/), or [Next.js](https://nextjs.org/) for a quick -start. - -::: - -## Setup - -We're assuming you're using ES6 for both browser (for example with React), or nodejs. - -Install [Yarn](https://yarnpkg.com/) or [Node.js](https://nodejs.org/en). Init a new project with -`npm init`. Install Noir dependencies in your project by running: - -```bash -npm i @noir-lang/noir_wasm@0.3.2-fa0e9cff github:noir-lang/barretenberg#39a1547875f941ef6640217a42d8f34972425c97 @noir-lang/aztec_backend@0.1.0-0c3b2f2 -``` - -:::note - -While Noir is in rapid development, some packages could interfere with others. For that reason, you -should use these specified versions. Let us know if for some reason you need to use other ones. - -::: - -As for the circuit, we will use the _Standard Noir Example_ and place it in the `src` folder. Feel -free to use any other, as long as you refactor the below examples accordingly. - -This standard example is a program that multiplies input `x` with input `y` and returns the result: - -```rust -// src/main.nr -fn main(x: u32, y: pub u32) -> pub u32 { - let z = x * y; - z -} -``` - -One valid scenario for proving could be `x = 3`, `y = 4` and `return = 12` - -## Imports - -We need some imports, for both the `noir_wasm` library (which will compile the circuit into `wasm` -executables) and `aztec_backend` which is the actual proving backend we will be using. - -We also need to tell the compiler where to find the `.nr` files, so we need to import -`initialiseResolver`. - -```ts -import initNoirWasm, { acir_read_bytes, compile } from '@noir-lang/noir_wasm'; -import initialiseAztecBackend from '@noir-lang/aztec_backend'; -import { initialiseResolver } from '@noir-lang/noir-source-resolver'; -``` - -## Compiling - -We'll go over the code line-by-line later: - -```ts -export const compileCircuit = async () => { - await initNoirWasm(); - - return await fetch(new URL('../src/main.nr', import.meta.url)) - .then(r => r.text()) - .then(code => { - initialiseResolver((id: any) => { - return code; - }); - }) - .then(() => { - try { - const compiled_noir = compile({}); - return compiled_noir; - } catch (e) { - console.log('Error while compiling:', e); - } - }); -}; -``` - -1. First we're calling `initNoirWasm`. This is required on the browser only. -2. We then pass an URL that points to our `main.nr` file, and call `.then` on it so we can get the - actual text of the source code -3. We call `initialiseResolver` returning the source code -4. Finally, we can call the `compile` function - -This function should return us the compiled circuit. - -:::note - -You can use as many files as you need, -[importing them as you would do with Nargo](./modules_packages_crates/dependencies), and you don't -need to set them up in the `src` folder. Just mind the following particularities about -`initialiseResolver`: - -1. The `compile` function expects a `main.nr` file as an entry point. If you need another one, just - pass it as a `entry_point` parameter to `compile`. Check the - [noir starter](https://github.com/signorecello/noir-starter) for an example on multiple files and - a non-default entry point. -2. 
`initialiseResolver` needs to be synchronous -3. Different frameworks use different ways of fetching files. It's beyond the scope of this guide to - explain why and how, but for reference, - [noir starter](https://github.com/signorecello/noir-starter) uses both Next.js and node.js for - testing. - -Quick tip: an easy way to deal with `initialiseResolver` is just to prepare a -`{fileName: "literally_the_code"}` object beforehand - -::: - -## ACIR - -Noir compiles to two properties: - -1. The ACIR, which is the intermediate language used by backends such as Barretenberg -2. The ABI, which tells you which inputs are to be read - -Let's write a little function that gets us both, initializes the backend, and returns the ACIR as -bytes: - -```ts -export const getAcir = async () => { - const { circuit, abi } = await compileCircuit(); - await initialiseAztecBackend(); - - let acir_bytes = new Uint8Array(Buffer.from(circuit, 'hex')); - return acir_read_bytes(acir_bytes); -}; -``` - -Calling `getAcir()` now should return us the ACIR of the circuit, ready to be used in proofs. - -## Initializing Prover & Verifier - -Prior to proving and verifying, the prover and verifier have to first be initialized by calling -`barretenberg`'s `setup_generic_prover_and_verifier` with your Noir program's ACIR: - -```ts -let [prover, verifier] = await setup_generic_prover_and_verifier(acir); -``` - -This is probably a good time to store this prover and verifier into your state like React Context, -Redux, or others. - -## Proving - -The Noir program can then be executed and proved by calling `barretenberg`'s `create_proof` -function: - -```ts -const proof = await create_proof(prover, acir, abi); -``` - -On the browser, this proof can fail as it requires heavy loads to be run on worker threads. Here's a -quick example of a worker: - -```ts -// worker.ts -onmessage = async event => { - try { - await initializeAztecBackend(); - const { acir, input } = event.data; - const [prover, verifier] = await setup_generic_prover_and_verifier(acir); - const proof = await create_proof(prover, acir, input); - postMessage(proof); - } catch (er) { - postMessage(er); - } finally { - close(); - } -}; -``` - -Which would be called like this, for example: - -```ts -// index.ts -const worker = new Worker(new URL('./worker.ts', import.meta.url)); -worker.onmessage = e => { - if (e.data instanceof Error) { - // oh no! - } else { - // yey! - } -}; -worker.postMessage({ acir, input: { x: 3, y: 4 } }); -``` - -## Verifying - -The `proof` obtained can be verified by calling `barretenberg`'s `verify_proof` function: - -```ts -// 1_mul.ts -const verified = await verify_proof(verifier, proof); -``` - -The function should return `true` if the entire process is working as intended, which can be -asserted if you are writing a test script: - -```ts -expect(verified).eq(true); -``` - -## Verifying with Smart Contract - -Alternatively, a verifier smart contract can be generated and used for verifying Noir proofs in -TypeScript as well. - -This could be useful if the Noir program is designed to be decentrally verified and/or make use of -decentralized states and logics that is handled at the smart contract level. 
- -To generate the verifier smart contract using typescript: - -```ts -// generator.ts -import { writeFileSync } from 'fs'; - -const sc = verifier.SmartContract(); -syncWriteFile('../contracts/plonk_vk.sol', sc); - -function syncWriteFile(filename: string, data: any) { - writeFileSync(join(__dirname, filename), data, { - flag: 'w', - }); -} -``` - -You can then verify a Noir proof using the verifier contract, for example using Hardhat: - -```ts -// verifier.ts -import { ethers } from 'hardhat'; -import { Contract, ContractFactory, utils } from 'ethers'; - -let Verifier: ContractFactory; -let verifierContract: Contract; - -before(async () => { - Verifier = await ethers.getContractFactory('TurboVerifier'); - verifierContract = await Verifier.deploy(); -}); - -// Verify proof -const sc_verified = await verifierContract.verify(proof); -``` diff --git a/docs/versioned_docs/version-v0.9.0/examples/merkle-proof.mdx b/docs/versioned_docs/version-v0.9.0/examples/merkle-proof.mdx deleted file mode 100644 index 6430780817c..00000000000 --- a/docs/versioned_docs/version-v0.9.0/examples/merkle-proof.mdx +++ /dev/null @@ -1,48 +0,0 @@ ---- -title: Merkle Proof Membership -description: - Learn how to use merkle membership proof in Noir to prove that a given leaf is a member of a - merkle tree with a specified root, at a given index. -keywords: - [merkle proof, merkle membership proof, Noir, rust, hash function, Pedersen, sha256, merkle tree] ---- - -Let's walk through an example of a merkle membership proof in Noir that proves that a given leaf is -in a merkle tree. - -```rust -use dep::std; - -fn main(message : [Field; 62], index : Field, hashpath : [Field; 40], root : Field) { - let leaf = std::hash::hash_to_field(message); - let merkle_root = std::merkle::compute_merkle_root(leaf, index, hashpath); - assert(merkle_root == root); -} - -``` - -The message is hashed using `hash_to_field`. The specific hash function that is being used is chosen -by the backend. The only requirement is that this hash function can heuristically be used as a -random oracle. If only collision resistance is needed, then one can call `std::hash::pedersen` -instead. - -```rust -let leaf = std::hash::hash_to_field(message); -``` - -The leaf is then passed to a compute_merkle_root function with the root, index and hashpath. The returned root can then be asserted to be the same as the provided root. - -```rust -let merkle_root = std::merkle::compute_merkle_root(leaf, index, hashpath); -assert (merkle_root == root); -``` - -> **Note:** It is possible to re-implement the merkle tree implementation without standard library. -> However, for most usecases, it is enough. In general, the standard library will always opt to be -> as conservative as possible, while striking a balance with efficiency. - -An example, the merkle membership proof, only requires a hash function that has collision -resistance, hence a hash function like Pedersen is allowed, which in most cases is more efficient -than the even more conservative sha256. 
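As a rough sketch of that trade-off (this variant is not part of the original example; it reuses the `std::hash::pedersen` and `std::merkle::compute_merkle_root` calls shown elsewhere in these docs, and `pedersen` is assumed to return a pair of field elements), the same membership check with a Pedersen-hashed leaf could look like:

```rust
use dep::std;

// Sketch only: when collision resistance is enough, hash the message with Pedersen
// instead of hash_to_field, then recompute and check the Merkle root as before.
fn main(message : [Field; 62], index : Field, hashpath : [Field; 40], root : Field) {
    let leaf = std::hash::pedersen(message)[0];
    let merkle_root = std::merkle::compute_merkle_root(leaf, index, hashpath);
    assert(merkle_root == root);
}
```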
- -[View an example on the starter repo](https://github.com/noir-lang/noir-examples/blob/3ea09545cabfa464124ec2f3ea8e60c608abe6df/stealthdrop/circuits/src/main.nr#L20) diff --git a/docs/versioned_docs/version-v0.9.0/getting_started/00_nargo_installation.md b/docs/versioned_docs/version-v0.9.0/getting_started/00_nargo_installation.md deleted file mode 100644 index de30869732d..00000000000 --- a/docs/versioned_docs/version-v0.9.0/getting_started/00_nargo_installation.md +++ /dev/null @@ -1,285 +0,0 @@ ---- -title: Nargo Installation -description: - nargo is a command line tool for interacting with Noir programs (e.g. compiling, proving, - verifying and more). Learn how to install and use Nargo for your projects with this comprehensive - guide. -keywords: [Nargo, command line tool, Noir programs, installation guide, how to use Nargo] ---- - -`nargo` is a command line tool for interacting with Noir programs (e.g. compiling, proving, -verifying and more). - -Alternatively, the interactions can also be performed in [TypeScript](../typescript). - -### UltraPlonk - -Nargo versions <0.5.0 of `aztec_backend` and `aztec_wasm_backend` are based on the TurboPlonk -version of Aztec Backend, which lacks efficient implementations of useful primitives (e.g. Keccak256 in 18k constraints, ECDSA verification in 36k constraints) that the UltraPlonk version offers. - -## Installation - -There are four approaches for installing Nargo: - -- [Option 1: Noirup](#option-1-noirup) -- [Option 2: Binaries](#option-2-binaries) -- [Option 3: Install via Nix](#option-3-install-via-nix) -- [Option 4: Compile from Source](#option-4-compile-from-source) - -Optionally you can also install [Noir VS Code extension] for syntax highlighting. - -### Option 1: Noirup - -If you're on OSX or Linux, the easiest way to start using Noir and Nargo is via noirup. Just open a -terminal and run: - -```bash -curl -L https://raw.githubusercontent.com/noir-lang/noirup/main/install | bash -``` - -Close the terminal, open another one, and run - -```bash -noirup -``` - -Done, you should have the latest version working. You can check with `nargo --version`. - -You can also install nightlies, specific versions -or branches, check out the [noirup repository](https://github.com/noir-lang/noirup) for more -information. - -#### GitHub Actions - -You can use `noirup` with GitHub Actions for CI/CD and automated testing. It is as simple as -installing `noirup` and running tests in your GitHub Action `yml` file. - -See the -[config file](https://github.com/TomAFrench/noir-hashes/blob/master/.github/workflows/noir.yml) in -this repo containing hash functions in Noir for an example. - -#### Nightly versions - -To install the nightly version of Noir (updated daily) run: - -```bash -noirup -n -``` - -### Option 2: Binaries - -See [GitHub Releases](https://github.com/noir-lang/noir/releases) for the latest and previous -platform specific binaries. - -#### Step 1 - -Paste and run the following in the terminal to extract and install the binary: - -> **macOS / Linux:** If you are prompted with `Permission denied` when running commands, prepend -> `sudo` and re-run it. 
- -##### macOS (Apple Silicon) - -```bash -mkdir -p $HOME/.nargo/bin && \ -curl -o $HOME/.nargo/bin/nargo-aarch64-apple-darwin.tar.gz -L https://github.com/noir-lang/noir/releases/download/v0.6.0/nargo-aarch64-apple-darwin.tar.gz && \ -tar -xvf $HOME/.nargo/bin/nargo-aarch64-apple-darwin.tar.gz -C $HOME/.nargo/bin/ && \ -echo '\nexport PATH=$PATH:$HOME/.nargo/bin' >> ~/.zshrc && \ -source ~/.zshrc -``` - -##### macOS (Intel) - -```bash -mkdir -p $HOME/.nargo/bin && \ -curl -o $HOME/.nargo/bin/nargo-x86_64-apple-darwin.tar.gz -L https://github.com/noir-lang/noir/releases/download/v0.6.0/nargo-x86_64-apple-darwin.tar.gz && \ -tar -xvf $HOME/.nargo/bin/nargo-x86_64-apple-darwin.tar.gz -C $HOME/.nargo/bin/ && \ -echo '\nexport PATH=$PATH:$HOME/.nargo/bin' >> ~/.zshrc && \ -source ~/.zshrc -``` - -##### Windows (PowerShell) - -Open PowerShell as Administrator and run: - -```powershell -mkdir -f -p "$env:USERPROFILE\.nargo\bin\"; ` -Invoke-RestMethod -Method Get -Uri https://github.com/noir-lang/noir/releases/download/v0.4.1/nargo-x86_64-pc-windows-msvc.zip -Outfile "$env:USERPROFILE\.nargo\bin\nargo-x86_64-pc-windows-msvc.zip"; ` -Expand-Archive -Path "$env:USERPROFILE\.nargo\bin\nargo-x86_64-pc-windows-msvc.zip" -DestinationPath "$env:USERPROFILE\.nargo\bin\"; ` -$Reg = "Registry::HKLM\System\CurrentControlSet\Control\Session Manager\Environment"; ` -$OldPath = (Get-ItemProperty -Path "$Reg" -Name PATH).Path; ` -$NewPath = $OldPath + ';' + "$env:USERPROFILE\.nargo\bin\"; ` -Set-ItemProperty -Path "$Reg" -Name PATH -Value "$NewPath"; ` -$env:Path = [System.Environment]::GetEnvironmentVariable("Path","Machine") + ";" + [System.Environment]::GetEnvironmentVariable("Path","User") -``` - -##### Linux (Bash) - -```bash -mkdir -p $HOME/.nargo/bin && \ -curl -o $HOME/.nargo/bin/nargo-x86_64-unknown-linux-gnu.tar.gz -L https://github.com/noir-lang/noir/releases/download/v0.6.0/nargo-x86_64-unknown-linux-gnu.tar.gz && \ -tar -xvf $HOME/.nargo/bin/nargo-x86_64-unknown-linux-gnu.tar.gz -C $HOME/.nargo/bin/ && \ -echo -e '\nexport PATH=$PATH:$HOME/.nargo/bin' >> ~/.bashrc && \ -source ~/.bashrc -``` - -#### Step 2 - -Check if the installation was successful by running `nargo --help`. - -> **macOS:** If you are prompted with an OS alert, right-click and open the _nargo_ executable from -> Finder. Close the new terminal that popped up and `nargo` should now be accessible. - -For a successful installation, you should see something similar to the following after running the -command: - -```sh -$ nargo --help - -Noir's package manager - -Usage: nargo <COMMAND> - -Commands: - check Checks the constraint system for errors - codegen-verifier Generates a Solidity verifier smart contract for the program - compile Compile the program and its secret execution trace into ACIR format - new Create a new binary project - execute Executes a circuit to calculate its return value - prove Create proof for this program. The proof is returned as a hex encoded string - verify Given a proof and a program, verify whether the proof is valid - test Run the tests for this program - gates Counts the occurrences of different gates in circuit - help Print this message or the help of the given subcommand(s) -``` - -### Option 3: Install via Nix - -Due to the large number of native dependencies, Noir projects can be installed via [Nix](https://nixos.org/). - -#### Installing Nix - -For the best experience, please follow these instructions to set up Nix: - -1. Install Nix following [their guide](https://nixos.org/download.html) for your operating system.
-2. Create the file `~/.config/nix/nix.conf` with the contents: - -```ini -experimental-features = nix-command -extra-experimental-features = flakes -``` - -#### Install Nargo into your Nix profile - -1. Use `nix profile` to install Nargo - -```sh -nix profile install github:noir-lang/noir -``` - -### Option 4: Compile from Source - -Due to the large number of native dependencies, Noir projects uses [Nix](https://nixos.org/) and [direnv](https://direnv.net/) to streamline the development experience. - -#### Setting up your environment - -For the best experience, please follow these instructions to setup your environment: - -1. Install Nix following [their guide](https://nixos.org/download.html) for your operating system. -2. Create the file `~/.config/nix/nix.conf` with the contents: - -```ini -experimental-features = nix-command -extra-experimental-features = flakes -``` - -3. Install direnv into your Nix profile by running: - -```sh -nix profile install nixpkgs#direnv -``` - -4. Add direnv to your shell following [their guide](https://direnv.net/docs/hook.html). - 1. For bash or zshell, add `eval "$(direnv hook bash)"` or `eval "$(direnv hook zsh)"` to your ~/.bashrc or ~/.zshrc file, respectively. -5. Restart your shell. - -#### Shell & editor experience - -Now that your environment is set up, you can get to work on the project. - -1. Clone the repository, such as: - -```sh -git clone git@github.com:noir-lang/noir -``` - -> Replacing `noir` with whichever repository you want to work on. - -2. Navigate to the directory: - -```sh -cd noir -``` - -> Replacing `noir` with whichever repository you cloned. - -3. You should see a **direnv error** because projects aren't allowed by default. Make sure you've reviewed and trust our `.envrc` file, then you need to run: - -```sh -direnv allow -``` - -4. Now, wait awhile for all the native dependencies to be built. This will take some time and direnv will warn you that it is taking a long time, but we just need to let it run. - -5. Once you are presented with your prompt again, you can start your editor within the project directory (we recommend [VSCode](https://code.visualstudio.com/)): - -```sh -code . -``` - -6. (Recommended) When launching VSCode for the first time, you should be prompted to install our recommended plugins. We highly recommend installing these for the best development experience. - -#### Building and testing - -Assuming you are using `direnv` to populate your environment, building and testing the project can be done -with the typical `cargo build`, `cargo test`, and `cargo clippy` commands. You'll notice that the `cargo` version matches the version we specify in `flake.nix`, which is 1.66.0 at the time of this writing. - -If you want to build the entire project in an isolated sandbox, you can use Nix commands: - -1. `nix build .` (or `nix build . -L` for verbose output) to build the project in a Nix sandbox. -2. `nix flake check` (or `nix flake check -L` for verbose output) to run clippy and tests in a Nix sandbox. - -#### Without `direnv` - -If you have hesitations with using direnv, you can launch a subshell with `nix develop` and then launch your editor from within the subshell. However, if VSCode was already launched in the project directory, the environment won't be updated. - -Advanced: If you aren't using direnv nor launching your editor within the subshell, you can try to install Barretenberg and other global dependencies the package needs. This is an advanced workflow and likely won't receive support! 
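For illustration, a typical session without `direnv` (assuming the flake-enabled Nix setup described above) might look like this:

```sh
# Enter a subshell whose environment is provided by the project's flake
nix develop

# Inside the subshell, the usual cargo workflow applies
cargo build
cargo test

# Leave the subshell when you are done
exit
```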
- -## Uninstalling Nargo - -### Noirup - -If you installed Noir with `noirup`, you can uninstall Noir by removing the files in `~/.nargo`, `~/nargo` and `~/noir_cache`. - -```bash -rm -r ~/.nargo -rm -r ~/nargo -rm -r ~/noir_cache -``` - -### Nix - -If you installed Noir with Nix or from source, you can remove the binary located at `~/.nix-profile/bin/nargo`. - -```bash -rm ~/.nix-profile/bin/nargo -``` - -[git]: https://git-scm.com/book/en/v2/Getting-Started-Installing-Git -[rust]: https://www.rust-lang.org/tools/install -[noir vs code extension]: https://marketplace.visualstudio.com/items?itemName=noir-lang.vscode-noir -[homebrew]: https://brew.sh/ -[cmake]: https://cmake.org/install/ -[llvm]: https://llvm.org/docs/GettingStarted.html -[openmp]: https://openmp.llvm.org/ -[barretenberg]: https://github.com/AztecProtocol/barretenberg diff --git a/docs/versioned_docs/version-v0.9.0/getting_started/01_hello_world.md b/docs/versioned_docs/version-v0.9.0/getting_started/01_hello_world.md deleted file mode 100644 index 0f21ad45569..00000000000 --- a/docs/versioned_docs/version-v0.9.0/getting_started/01_hello_world.md +++ /dev/null @@ -1,147 +0,0 @@ ---- -title: Create A Project -description: - Learn how to create and verify your first Noir program using Nargo, a programming language for - zero-knowledge proofs. -keywords: - [ - Nargo, - Noir, - zero-knowledge proofs, - programming language, - create Noir program, - verify Noir program, - step-by-step guide, - ] ---- - -Now that we have installed Nargo, it is time to make our first hello world program! - -## Create a Project Directory - -Noir code can live anywhere on your computer. Let us create a _projects_ folder in the home -directory to house our Noir programs. - -For Linux, macOS, and Windows PowerShell, create the directory and change directory into it by -running: - -```sh -mkdir ~/projects -cd ~/projects -``` - -For Windows CMD, run: - -```sh -> mkdir "%USERPROFILE%\projects" -> cd /d "%USERPROFILE%\projects" -``` - -## Create Our First Nargo Project - -Now that we are in the projects directory, create a new Nargo project by running: - -```sh -nargo new hello_world -``` - -> **Note:** `hello_world` can be any arbitrary project name, we are simply using `hello_world` for -> demonstration. -> -> In production, the common practice is to name the project folder as `circuits` for better -> identifiability when sitting alongside other folders in the codebase (e.g. `contracts`, `scripts`, -> `test`). - -A `hello_world` folder would be created. Similar to Rust, the folder houses _src/main.nr_ and -_Nargo.toml_ that contains the source code and environmental options of your Noir program -respectively. - -### Intro to Noir Syntax - -Let us take a closer look at _main.nr_. The default _main.nr_ generated should look like this: - -```rust -fn main(x : Field, y : pub Field) { - assert(x != y); -} -``` - -The first line of the program specifies the program's inputs: - -```rust -x : Field, y : pub Field -``` - -Program inputs in Noir are private by default (e.g. `x`), but can be labeled public using the -keyword `pub` (e.g. `y`). To learn more about private and public values, check the -[Data Types](../language_concepts/data_types) section. - -The next line of the program specifies its body: - -```rust -assert(x != y); -``` - -The Noir syntax `assert` can be interpreted as something similar to constraints in other zk-contract languages. - -For more Noir syntax, check the [Language Concepts](../language_concepts/comments) chapter. 
- -## Build In/Output Files - -Change directory into _hello_world_ and build in/output files for your Noir program by running: - -```sh -cd hello_world -nargo check -``` - -Two additional files would be generated in your project directory: - -_Prover.toml_ houses input values, and _Verifier.toml_ houses public values. - -## Prove Our Noir Program - -Now that the project is set up, we can create a proof of correct execution on our Noir program. - -Fill in input values for execution in the _Prover.toml_ file. For example: - -```toml -x = "1" -y = "2" -``` - -Prove the valid execution of your Noir program with your preferred proof name, for example `p`: - -```sh -nargo prove p -``` - -A new folder _proofs_ would then be generated in your project directory, containing the proof file -`p.proof`. - -The _Verifier.toml_ file would also be updated with the public values computed from program -execution (in this case the value of `y`): - -```toml -y = "0x0000000000000000000000000000000000000000000000000000000000000002" -``` - -> **Note:** Values in _Verifier.toml_ are computed as 32-byte hex values. - -## Verify Our Noir Program - -Once a proof is generated, we can verify correct execution of our Noir program by verifying the -proof file. - -Verify your proof of name `p` by running: - -```sh -nargo verify p -``` - -The verification will complete in silence if it is successful. If it fails, it will log the -corresponding error instead. - -Congratulations, you have now created and verified a proof for your very first Noir program! - -In the [next section](breakdown), we will go into more detail on each step performed. diff --git a/docs/versioned_docs/version-v0.9.0/getting_started/02_breakdown.md b/docs/versioned_docs/version-v0.9.0/getting_started/02_breakdown.md deleted file mode 100644 index 7c320cef9c5..00000000000 --- a/docs/versioned_docs/version-v0.9.0/getting_started/02_breakdown.md +++ /dev/null @@ -1,154 +0,0 @@ ---- -title: Project Breakdown -description: - Learn about the anatomy of a Nargo project, including the purpose of the Prover and Verifier TOML - files, and how to prove and verify your program. -keywords: - [Nargo, Nargo project, Prover.toml, Verifier.toml, proof verification, private asset transfer] ---- - -This section breaks down our hello world program in section _1.2_. We elaborate on the project -structure and what the `prove` and `verify` commands did in the previous section. - -## Anatomy of a Nargo Project - -Upon creating a new project with `nargo new` and building the in/output files with `nargo check` -commands, you would get a minimal Nargo project of the following structure: - - - src - - Prover.toml - - Verifier.toml - - Nargo.toml - -The source directory _src_ holds the source code for your Noir program. By default only a _main.nr_ -file will be generated within it. - -_Prover.toml_ is used for specifying the input values for executing and proving the program. You can specify `toml` files with different names by using the `--prover-name` or `-p` flags, see the [Prover](#provertoml) section below. Optionally you may specify expected output values for prove-time checking as well. - -_Verifier.toml_ contains public in/output values computed when executing the Noir program. - -_Nargo.toml_ contains the environmental options of your project. - -_proofs_ and _contract_ directories will not be immediately visible until you create a proof or -verifier contract respectively. 
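To make the file layout above concrete, here is a rough sketch of what a minimal _Nargo.toml_ might contain at this version (the exact field names are an assumption; prefer whatever `nargo new` generates for you):

```toml
# Sketch of a minimal manifest; treat the field names as illustrative, not authoritative.
[package]
authors = [""]
compiler_version = "0.9.0"

[dependencies]
```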
- -### main.nr - -The _main.nr_ file contains a `main` method, this method is the entry point into your Noir program. - -In our sample program, _main.nr_ looks like this: - -```rust -fn main(x : Field, y : Field) { - assert(x != y); -} -``` - -The parameters `x` and `y` can be seen as the API for the program and must be supplied by the -prover. Since neither `x` nor `y` is marked as public, the verifier does not supply any inputs, when -verifying the proof. - -The prover supplies the values for `x` and `y` in the _Prover.toml_ file. - -As for the program body, `assert` ensures the satisfaction of the condition (e.g. `x != y`) is -constrained by the proof of the execution of said program (i.e. if the condition was not met, the -verifier would reject the proof as an invalid proof). - -### Prover.toml - -The _Prover.toml_ file is a file which the prover uses to supply his witness values(both private and -public). - -In our hello world program the _Prover.toml_ file looks like this: - -```toml -x = "1" -y = "2" -``` - -When the command `nargo prove my_proof` is executed, two processes happen: - -1. Noir creates a proof that `x` which holds the value of `1` and `y` which holds the value of `2` - is not equal. This not equal constraint is due to the line `assert(x != y)`. - -2. Noir creates and stores the proof of this statement in the _proofs_ directory and names the proof - file _my_proof_. Opening this file will display the proof in hex format. - -#### Arrays of Structs - -The following code shows how to pass an array of structs to a Noir program to generate a proof. - -```rust -// main.nr -struct Foo { - bar: Field, - baz: Field, -} - -fn main(foos: [Foo; 3]) -> pub Field { - foos[2].bar + foos[2].baz -} -``` - -Prover.toml: - -```toml -[[foos]] # foos[0] -bar = 0 -baz = 0 - -[[foos]] # foos[1] -bar = 0 -baz = 0 - -[[foos]] # foos[2] -bar = 1 -baz = 2 -``` - -#### Custom toml files - -You can specify a `toml` file with a different name to use for proving by using the `--prover-name` or `-p` flags. - -This command looks for proof inputs in the default **Prover.toml** and generates proof `p`: - -```bash -nargo prove p -``` - -This command looks for proof inputs in the custom **OtherProver.toml** and generates proof `pp`: - -```bash -nargo prove -p OtherProver pp -``` - -## Verifying a Proof - -When the command `nargo verify my_proof` is executed, two processes happen: - -1. Noir checks in the _proofs_ directory for a file called _my_proof_ - -2. If that file is found, the proof's validity is checked - -> **Note:** The validity of the proof is linked to the current Noir program; if the program is -> changed and the verifier verifies the proof, it will fail because the proof is not valid for the -> _modified_ Noir program. - -In production, the prover and the verifier are usually two separate entities. A prover would -retrieve the necessary inputs, execute the Noir program, generate a proof and pass it to the -verifier. The verifier would then retrieve the public inputs from usually external sources and -verifies the validity of the proof against it. - -Take a private asset transfer as an example: - -A user on browser as the prover would retrieve private inputs (e.g. the user's private key) and -public inputs (e.g. the user's encrypted balance on-chain), compute the transfer, generate a proof -and submit it to the verifier smart contract. - -The verifier contract would then draw the user's encrypted balance directly from the blockchain and -verify the proof submitted against it. 
If the verification passes, additional functions in the -verifier contract could trigger (e.g. approve the asset transfer). - -Now that you understand the concepts, you'll probably want some editor feedback while you are writing more complex code. - -In the [next section](language_server), we will explain how to utilize the Noir Language Server. diff --git a/docs/versioned_docs/version-v0.9.0/getting_started/03_language_server.md b/docs/versioned_docs/version-v0.9.0/getting_started/03_language_server.md deleted file mode 100644 index e6f5dfc2faa..00000000000 --- a/docs/versioned_docs/version-v0.9.0/getting_started/03_language_server.md +++ /dev/null @@ -1,31 +0,0 @@ ---- -title: Language Server -description: - Learn about the Noir Language Server, how to install the components, and configuration that may be required. -keywords: - [Nargo, Language Server, LSP, VSCode, Visual Studio Code] ---- - -This section helps you install and configure the Noir Language Server. - -The Language Server Protocol (LSP) has two components, the [Server](#language-server) and the [Client](#language-client). Below we describe each in the context of Noir. - -## Language Server - -The Server component is provided by the Nargo command line tool that you installed at the beginning of this guide. -As long as Nargo is installed and you've used it to run other commands in this guide, it should be good to go! - -If you'd like to verify that the `nargo lsp` command is available, you can run `nargo --help` and look for `lsp` in the list of commands. If you see it, you're using a version of Noir with LSP support. - -## Language Client - -The Client component is usually an editor plugin that launches the Server. It communicates LSP messages between the editor and the Server. For example, when you save a file, the Client will alert the Server, so it can try to compile the project and report any errors. - -Currently, Noir provides a Language Client for Visual Studio Code via the [vscode-noir](https://github.com/noir-lang/vscode-noir) extension. You can install it via the [Visual Studio Marketplace](https://marketplace.visualstudio.com/items?itemName=noir-lang.vscode-noir). - -### Configuration - -* __Noir: Enable LSP__ - If checked, the extension will launch the Language Server via `nargo lsp` and communicate with it. -* __Noir: Nargo Flags__ - Additional flags may be specified if you require them to be added when the extension calls `nargo lsp`. -* __Noir: Nargo Path__ - An absolute path to a Nargo binary with the `lsp` command. This may be useful if Nargo is not within the `PATH` of your editor. -* __Noir > Trace: Server__ - Setting this to `"messages"` or `"verbose"` will log LSP messages between the Client and Server. Useful for debugging. diff --git a/docs/versioned_docs/version-v0.9.0/index.md b/docs/versioned_docs/version-v0.9.0/index.md deleted file mode 100644 index e56b24bccd8..00000000000 --- a/docs/versioned_docs/version-v0.9.0/index.md +++ /dev/null @@ -1,103 +0,0 @@ ---- -title: Introducing Noir -description: - Learn about the public alpha release of Noir, a domain specific language heavily influenced by - Rust that compiles to an intermediate language which can be compiled to an arithmetic circuit or a - rank-1 constraint system. 
-keywords: - [ - Noir, - Domain Specific Language, - Rust, - Intermediate Language, - Arithmetic Circuit, - Rank-1 Constraint System, - Ethereum Developers, - Protocol Developers, - Blockchain Developers, - Proving System, - Smart Contract Language, - ] -slug: / ---- - -This version of the book is being released with the public alpha. There will be a lot of features -that are missing in this version, however the syntax and the feel of the language will mostly be -completed. - -## What is Noir? - -Noir is a Domain Specific Language for SNARK proving systems. It has been designed to use any ACIR compatible proving system. - -It's design choices are influenced heavily by Rust and focuses on a simple, familiar syntax. - -## Who is Noir for? - -Noir can be used for a variety of purposes. - -### Solidity Developers - -Noir currently includes a command to create a Solidity contract which verifies your Noir program. This will -be modularised in the future; however, as of the alpha, you can use the [`nargo codegen-verifier`](./nargo/commands#nargo-codegen-verifier) command to create -a verifier contract. - -### Protocol Developers - -As a protocol developer, you may not want to use the Aztec backend due to it not being a fit for -your stack, or maybe you simply want to use a different proving system. Since Noir does not compile -to a specific proof system, it is possible for protocol developers to replace the PLONK-based -proving system with a different proving system altogether. - -### Blockchain developers - -As a blockchain developer, you will be constrained by parameters set by your blockchain (for example, the -proving system and smart contract language has been pre-defined). In order for you to use Noir in -your blockchain, a proving system backend and a smart contract interface -must be implemented for it. - -## What's new about Noir? - -Noir is simple and flexible in its design, as it does not compile immediately to a fixed -NP-complete language. Instead, Noir compiles to an intermediate language (ACIR), which itself can be compiled -to an arithmetic circuit (if choosing to target Aztec's barretenberg backend) or a rank-1 constraint system (if choosing to target an R1CS backend like Arkwork's Marlin backend, or others). - -This in itself brings up a few challenges within the design process, but allows one to decouple the programming language completely from the backend. This is similar in theory to LLVM. - -## Current Features - -Compiler: - -- Module System -- For expressions -- Arrays -- Bit Operations -- Binary operations (<, <=, >, >=, +, -, \*, /, %) [See documentation for an extensive list] -- Unsigned integers -- If statements -- Structures and Tuples -- Generics - -ACIR Supported OPCODES: - -- Sha256 -- Blake2s -- Schnorr signature verification -- MerkleMembership -- Pedersen -- HashToField - -## Libraries - -Noir does not currently have an official package manager. You can find a list of available Noir libraries in the [awesome-noir repo here](https://github.com/noir-lang/awesome-noir#libraries). 
- -Some libraries that are available today include: - -- [Standard Library](https://github.com/noir-lang/noir/tree/master/noir_stdlib) - the Noir Standard Library -- [Ethereum Storage Proof Verification](https://github.com/aragonzkresearch/noir-trie-proofs) - a library that contains the primitives necessary for RLP decoding (in the form of look-up table construction) and Ethereum state and storage proof verification (or verification of any trie proof involving 32-byte long keys) -- [BigInt](https://github.com/shuklaayush/noir-bigint) - a library that provides a custom BigUint56 data type, allowing for computations on large unsigned integers -- [ECrecover](https://github.com/colinnielsen/ecrecover-noir/tree/main) - a library to verify an ECDSA signature and return the source Ethereum address -- [Sparse Merkle Tree Verifier](https://github.com/vocdoni/smtverifier-noir/tree/main) - a library for verification of sparse Merkle trees -- [Signed Int](https://github.com/resurgencelabs/signed_int) - a library for accessing a custom Signed Integer data type, allowing access to negative numbers on Noir -- [Fraction](https://github.com/resurgencelabs/fraction) - a library for accessing fractional number data type in Noir, allowing results that aren't whole numbers - -See the section on [dependencies](./modules_packages_crates/dependencies) for more information. diff --git a/docs/versioned_docs/version-v0.9.0/language_concepts/00_data_types.md b/docs/versioned_docs/version-v0.9.0/language_concepts/00_data_types.md deleted file mode 100644 index abbadca86be..00000000000 --- a/docs/versioned_docs/version-v0.9.0/language_concepts/00_data_types.md +++ /dev/null @@ -1,405 +0,0 @@ ---- -title: Data Types -description: - Get a clear understanding of the two categories of Noir data types - primitive types and compound - types. Learn about their characteristics, differences, and how to use them in your Noir - programming. -keywords: - [ - noir, - data types, - primitive types, - compound types, - private types, - public types, - field type, - integer types, - boolean type, - array type, - tuple type, - struct type, - ] ---- - -Every value in Noir has a type, which determines which operations are valid for it. - -All values in Noir are fundamentally composed of `Field` elements. For a more approachable -developing experience, abstractions are added on top to introduce different data types in Noir. - -Noir has two category of data types: primitive types (e.g. `Field`, integers, `bool`) and compound -types that group primitive types (e.g. arrays, tuples, structs). Each value can either be private or -public. - -## Private & Public Types - -A **private value** is known only to the Prover, while a **public value** is known by both the -Prover and Verifier. Mark values as `private` when the value should only be known to the prover. All -primitive types (including individual fields of compound types) in Noir are private by default, and -can be marked public when certain values are intended to be revealed to the Verifier. - -> **Note:** For public values defined in Noir programs paired with smart contract verifiers, once -> the proofs are verified on-chain the values can be considered known to everyone that has access to -> that blockchain. - -Public data types are treated no differently to private types apart from the fact that their values -will be revealed in proofs generated. Simply changing the value of a public type will not change the -circuit (where the same goes for changing values of private types as well). 
- -_Private values_ are also referred to as _witnesses_ sometimes. - -> **Note:** The terms private and public when applied to a type (e.g. `pub Field`) have a different -> meaning than when applied to a function (e.g. `pub fn foo() {}`). -> -> The former is a visibility modifier for the Prover to interpret if a value should be made known to -> the Verifier, while the latter is a visibility modifier for the compiler to interpret if a -> function should be made accessible to external Noir programs like in other languages. - -### pub Modifier - -All data types in Noir are private by default. Types are explicitly declared as public using the -`pub` modifier: - -```rust -fn main(x : Field, y : pub Field) -> pub Field { - x + y -} -``` - -In this example, `x` is **private** while `y` and `x + y` (the return value) are **public**. Note -that visibility is handled **per variable**, so it is perfectly valid to have one input that is -private and another that is public. - -> **Note:** Public types can only be declared through parameters on `main`. - -## Primitive Types - -A primitive type represents a single value. They can be private or public. - -### Fields - -The field type corresponds to the native field type of the proving backend. - -The size of a Noir field depends on the elliptic curve's finite field for the proving backend -adopted. For example, a field would be a 254-bit integer when paired with the default backend that -spans the Grumpkin curve. - -Fields support integer arithmetic and are often used as the default numeric type in Noir: - -```rust -fn main(x : Field, y : Field) { - let z = x + y; -} -``` - -`x`, `y` and `z` are all private fields in this example. Using the `let` keyword we defined a new -private value `z` constrained to be equal to `x + y`. - -If proving efficiency is of priority, fields should be used as a default for solving problems. -Smaller integer types (e.g. `u64`) incur extra range constraints. - -### Integers - -An integer type is a range constrained field type. The Noir frontend currently supports unsigned, -arbitrary-sized integer types. - -An integer type is specified first with the letter `u`, indicating its unsigned nature, followed by -its length in bits (e.g. `32`). For example, a `u32` variable can store a value in the range of -$\\([0,2^{32}-1]\\)$: - -```rust -fn main(x : Field, y : u32) { - let z = x as u32 + y; -} -``` - -`x`, `y` and `z` are all private values in this example. However, `x` is a field while `y` and `z` -are unsigned 32-bit integers. If `y` or `z` exceeds the range $\\([0,2^{32}-1]\\)$, proofs created -will be rejected by the verifier. - -> **Note:** The default backend supports both even (e.g. `u16`, `u48`) and odd (e.g. `u5`, `u3`) -> sized integer types. - -### Booleans - -The `bool` type in Noir has two possible values: `true` and `false`: - -```rust -fn main() { - let t = true; - let f: bool = false; -} -``` - -> **Note:** When returning a boolean value, it will show up as a value of 1 for `true` and 0 for -> `false` in _Verifier.toml_. - -The boolean type is most commonly used in conditionals like `if` expressions and `assert` -statements. More about conditionals is covered in the [Control Flow](./control_flow) and -[Assert Function](./assert) sections. - -### Strings - -The string type is a fixed length value defined with `str`. - -You can use strings in `assert()` functions or print them with -`std::println()`. 
- -```rust -fn main(message : pub str<11>, hex_as_string : str<4>) { - std::println(message); - assert(message == "hello world"); - assert(hex_as_string == "0x41"); -} -``` - -## Compound Types - -A compound type groups together multiple values into one type. Elements within a compound type can -be private or public. - -### Arrays - -An array is one way of grouping together values into one compound type. Array types can be inferred -or explicitly specified via the syntax `[; ]`: - -```rust -fn main(x : Field, y : Field) { - let my_arr = [x, y]; - let your_arr: [Field; 2] = [x, y]; -} -``` - -Here, both `my_arr` and `your_arr` are instantiated as an array containing two `Field` elements. - -Array elements can be accessed using indexing: - -```rust -fn main() { - let a = [1, 2, 3, 4, 5]; - - let first = a[0]; - let second = a[1]; -} -``` - -All elements in an array must be of the same type (i.e. homogeneous). That is, an array cannot group -a `Field` value and a `u8` value together for example. - -You can write mutable arrays, like: - -```rust -fn main() { - let mut arr = [1, 2, 3, 4, 5]; - assert(arr[0] == 1); - - arr[0] = 42; - assert(arr[0] == 42); -} -``` - -You can instantiate a new array of a fixed size with the same value repeated for each element. The following example instantiates an array of length 32 where each element is of type Field and has the value 0. - -```rust -let array: [Field; 32] = [0; 32]; -``` - -#### Types - -You can create arrays of primitive types or structs. There is not yet support for nested arrays -(arrays of arrays) or arrays of structs that contain arrays. - -### Slices - -:::caution - -This feature is experimental. You should expect it to change in future versions, -cause unexpected behavior, or simply not work at all. - -::: - -A slice is a dynamically-sized view into a sequence of elements. They can be resized at runtime, but because they don't own the data, they cannot be returned from a circuit. You can treat slices as arrays without a constrained size. - -Slices are part of the [noir standard library](../standard_library/slice_methods) so you need to import the respective module in order to work with it. For example: - -```rust -use dep::std::slice; - -fn main() -> pub Field { - let mut slice: [Field] = [0; 2]; - - let mut new_slice = slice.push_back(6); - new_slice.len() -} -``` - -### Vectors - -:::caution - -This feature is experimental. You should expect it to change in future versions, -cause unexpected behavior, or simply not work at all. - -::: - -A vector is a collection type similar to Rust's Vector type. It's convenient way to use slices as mutable arrays. - -Example: - -```rust -use std::collections::vec::Vec; - -let mut vector: Vec = Vec::new(); -for i in 0..5 { - vector.push(i); -} -assert(vector.len() == 5); -``` - -### Tuples - -A tuple collects multiple values like an array, but with the added ability to collect values of -different types: - -```rust -fn main() { - let tup: (u8, u64, Field) = (255, 500, 1000); -} -``` - -One way to access tuple elements is via destructuring using pattern matching: - -```rust -fn main() { - let tup = (1, 2); - - let (one, two) = tup; - - let three = one + two; -} -``` - -Another way to access tuple elements is via direct member access, using a period (`.`) followed by -the index of the element we want to access. 
Index `0` corresponds to the first tuple element, `1` to -the second and so on: - -```rust -fn main() { - let tup = (5, 6, 7, 8); - - let five = tup.0; - let eight = tup.3; -} -``` - -### Structs - -A struct also allows for grouping multiple values of different types. Unlike tuples, we can also -name each field. - -> **Note:** The usage of _field_ here refers to each element of the struct and is unrelated to the -> field type of Noir. - -Defining a struct requires giving it a name and listing each field within as `: ` pairs: - -```rust -struct Animal { - hands: Field, - legs: Field, - eyes: u8, -} -``` - -An instance of a struct can then be created with actual values in `: ` pairs in any -order. Struct fields are accessible using their given names: - -```rust -fn main() { - let legs = 4; - - let dog = Animal { - eyes: 2, - hands: 0, - legs, - }; - - let zero = dog.hands; -} -``` - -Structs can also be destructured in a pattern, binding each field to a new variable: - -```rust -fn main() { - let Animal { hands, legs: feet, eyes } = get_octopus(); - - let ten = hands + feet + eyes as u8; -} - -fn get_octopus() -> Animal { - let octopus = Animal { - hands: 0, - legs: 8, - eyes: 2, - }; - - octopus -} -``` - -The new variables can be bound with names different from the original struct field names, as -showcased in the `legs --> feet` binding in the example above. - -:::note -You can use Structs as inputs to the `main` function, but you can't output them -::: - -### Type Aliases - -A type alias is a new name for an existing type. Type aliases are declared with the keyword `type`: - -```rust -type Id = u8; - -fn main() { - let id: Id = 1; - let zero: u8 = 0; - assert(zero + 1 == id); -} -``` - -Type aliases can also be used with [generics](./06_generics.md): - -```rust -type Id = Size; - -fn main() { - let id: Id = 1; - let zero: u32 = 0; - assert(zero + 1 == id); -} -``` - -### BigInt - -You can acheive BigInt functionality using the [Noir BigInt](https://github.com/shuklaayush/noir-bigint) library. - -## References - -Noir supports first-class references. References are a bit like pointers: they point to a specific address that can be followed to access the data stored at that address. You can use Rust-like syntax to use pointers in Noir: the `&` operator references the variable, the `*` operator dereferences it. - -Example: - -```rust -fn main() { - let mut x = 2; - - // you can reference x as &mut and pass it to multiplyBy2 - multiplyBy2(&mut x); -} - -// you can access &mut here -fn multiplyBy2(x: &mut Field) { - // and dereference it with * - *x = *x * 2; -} -``` diff --git a/docs/versioned_docs/version-v0.9.0/language_concepts/01_functions.md b/docs/versioned_docs/version-v0.9.0/language_concepts/01_functions.md deleted file mode 100644 index 54c618599d2..00000000000 --- a/docs/versioned_docs/version-v0.9.0/language_concepts/01_functions.md +++ /dev/null @@ -1,88 +0,0 @@ ---- -title: Functions -description: - Learn how to declare functions and methods in Noir, a programming language with Rust semantics. - This guide covers parameter declaration, return types, call expressions, and more. -keywords: [Noir, Rust, functions, methods, parameter declaration, return types, call expressions] ---- - -Functions in Noir follow the same semantics of Rust, though Noir does not support early returns. - -To declare a function the `fn` keyword is used. - -```rust -fn foo() {} -``` - -All parameters in a function must have a type and all types are known at compile time. 
The parameter -is pre-pended with a colon and the parameter type. Multiple parameters are separated using a comma. - -```rust -fn foo(x : Field, y : pub Field){} -``` - -The return type of a function can be stated by using the `->` arrow notation. The function below -states that the foo function must return a `Field`. If the function returns no value, then the arrow -is omitted. - -```rust -fn foo(x : Field, y : pub Field) -> Field { - x + y -} -``` - -Note that a `return` keyword is unneeded in this case - the last expression in a function's body is -returned. - -## Call Expressions - -Calling a function in Noir is executed by using the function name and passing in the necessary -arguments. - -Below we show how to call the `foo` function from the `main` function using a call expression: - -```rust -fn main(x : Field, y : Field) { - let z = foo(x); -} - -fn foo(x : Field) -> Field { - x + x -} -``` - -## Methods - -You can define methods in Noir on any struct type in scope. - -```rust -struct MyStruct { - foo: Field, - bar: Field, -} - -impl MyStruct { - fn new(foo: Field) -> MyStruct { - MyStruct { - foo, - bar: 2, - } - } - - fn sum(self) -> Field { - self.foo + self.bar - } -} - -fn main() { - let s = MyStruct::new(40); - assert(s.sum() == 42); -} -``` - -Methods are just syntactic sugar for functions, so if we wanted to we could also call `sum` as -follows: - -```rust -assert(MyStruct::sum(s) == 42); -``` diff --git a/docs/versioned_docs/version-v0.9.0/language_concepts/02_control_flow.md b/docs/versioned_docs/version-v0.9.0/language_concepts/02_control_flow.md deleted file mode 100644 index 691c514d9a8..00000000000 --- a/docs/versioned_docs/version-v0.9.0/language_concepts/02_control_flow.md +++ /dev/null @@ -1,42 +0,0 @@ ---- -title: Control Flow -description: - Learn how to use loops and if expressions in the Noir programming language. Discover the syntax - and examples for for loops and if-else statements. -keywords: [Noir programming language, loops, for loop, if-else statements, Rust syntax] ---- - -## Loops - -Noir has one kind of loop: the `for` loop. `for` loops allow you to repeat a block of code multiple -times. - -The following block of code between the braces is run 10 times. - -```rust -for i in 0..10 { - // do something -}; -``` - -## If Expressions - -Noir supports `if-else` statements. The syntax is most similar to Rust's where it is not required -for the statement's conditional to be surrounded by parentheses. - -```rust -let a = 0; -let mut x: u32 = 0; - -if a == 0 { - if a != 0 { - x = 6; - } else { - x = 2; - } -} else { - x = 5; - assert(x == 5); -} -assert(x == 2); -``` diff --git a/docs/versioned_docs/version-v0.9.0/language_concepts/03_ops.md b/docs/versioned_docs/version-v0.9.0/language_concepts/03_ops.md deleted file mode 100644 index da02b126059..00000000000 --- a/docs/versioned_docs/version-v0.9.0/language_concepts/03_ops.md +++ /dev/null @@ -1,97 +0,0 @@ ---- -title: Logical Operations -description: - Learn about the supported arithmetic and logical operations in the Noir programming language. - Discover how to perform operations on private input types, integers, and booleans. 
-keywords: - [ - Noir programming language, - supported operations, - arithmetic operations, - logical operations, - predicate operators, - bitwise operations, - short-circuiting, - backend, - ] ---- - -# Operations - -## Table of Supported Operations - -| Operation | Description | Requirements | -| :-------- | :------------------------------------------------------------: | -------------------------------------: | -| + | Adds two private input types together | Types must be private input | -| - | Subtracts two private input types together | Types must be private input | -| \* | Multiplies two private input types together | Types must be private input | -| / | Divides two private input types together | Types must be private input | -| ^ | XOR two private input types together | Types must be integer | -| & | AND two private input types together | Types must be integer | -| \| | OR two private input types together | Types must be integer | -| << | Left shift an integer by another integer amount | Types must be integer | -| >> | Right shift an integer by another integer amount | Types must be integer | -| ! | Bitwise not of a value | Type must be integer or boolean | -| < | returns a bool if one value is less than the other | Upper bound must have a known bit size | -| <= | returns a bool if one value is less than or equal to the other | Upper bound must have a known bit size | -| > | returns a bool if one value is more than the other | Upper bound must have a known bit size | -| >= | returns a bool if one value is more than or equal to the other | Upper bound must have a known bit size | -| == | returns a bool if one value is equal to the other | Both types must not be constants | -| != | returns a bool if one value is not equal to the other | Both types must not be constants | - -### Predicate Operators - -`<,<=, !=, == , >, >=` are known as predicate/comparison operations because they compare two values. -This differs from the operations such as `+` where the operands are used in _computation_. - -### Bitwise Operations Example - -```rust -fn main(x : Field) { - let y = x as u32; - let z = y & y; -} -``` - -`z` is implicitly constrained to be the result of `y & y`. The `&` operand is used to denote bitwise -`&`. - -> `x & x` would not compile as `x` is a `Field` and not an integer type. - -### Logical Operators - -Noir has no support for the logical operators `||` and `&&`. This is because encoding the -short-circuiting that these operators require can be inefficient for Noir's backend. Instead you can -use the bitwise operators `|` and `&` which operate indentically for booleans, just without the -short-circuiting. - -```rust -let my_val = 5; - -let mut flag = 1; -if (my_val > 6) | (my_val == 0) { - flag = 0; -} -assert(flag == 1); - -if (my_val != 10) & (my_val < 50) { - flag = 0; -} -assert(flag == 0); -``` - -### Shorthand operators - -Noir shorthand operators for most of the above operators, namely `+=, -=, *=, /=, %=, &=, |=, ^=, <<=`, and `>>=`. These allow for more concise syntax. 
For example: - -```rust -let mut i = 0; -i = i + 1; -``` - -could be written as: - -```rust -let mut i = 0; -i += 1; -``` diff --git a/docs/versioned_docs/version-v0.9.0/language_concepts/04_assert.md b/docs/versioned_docs/version-v0.9.0/language_concepts/04_assert.md deleted file mode 100644 index a25a946123d..00000000000 --- a/docs/versioned_docs/version-v0.9.0/language_concepts/04_assert.md +++ /dev/null @@ -1,34 +0,0 @@ ---- -title: Assert Function -description: - Learn about the assert function in Noir, which can be used to explicitly constrain the predicate or - comparison expression that follows to be true, and what happens if the expression is false at - runtime. -keywords: [Noir programming language, assert statement, predicate expression, comparison expression] ---- - -Noir includes a special `assert` function which will explicitly constrain the predicate/comparison -expression that follows to be true. If this expression is false at runtime, the program will fail to -be proven. - -### Example - -```rust -fn main(x : Field, y : Field) { - assert(x == y); -} -``` - -The above snippet compiles because `==` is a predicate operation. Conversely, the following will not -compile: - -```rust -// INCORRECT - -fn main(x : Field, y : Field) { - assert(x + y); -} -``` - -> The rationale behind this not compiling is due to ambiguity. It is not clear if the above should -> equate to `x + y == 0` or if it should check the truthiness of the result. diff --git a/docs/versioned_docs/version-v0.9.0/language_concepts/05_unconstrained.md b/docs/versioned_docs/version-v0.9.0/language_concepts/05_unconstrained.md deleted file mode 100644 index 6b621eda3eb..00000000000 --- a/docs/versioned_docs/version-v0.9.0/language_concepts/05_unconstrained.md +++ /dev/null @@ -1,96 +0,0 @@ ---- -title: Unconstrained Functions -description: "Learn about what unconstrained functions in Noir are, how to use them and when you'd want to." - -keywords: [Noir programming language, unconstrained, open] ---- - - - -Unconstrained functions are functions which do not constrain any of the included computation and allow for non-determinisitic computation. - -## Why? - -Zero-knowledge (ZK) domain-specific languages (DSL) enable developers to generate ZK proofs from their programs by compiling code down to the constraints of an NP complete language (such as R1CS or PLONKish languages). However, the hard bounds of a constraint system can be very limiting to the functionality of a ZK DSL. - -Enabling a circuit language to perform unconstrained execution is a powerful tool. Said another way, unconstrained execution lets developers generate witnesses from code that does not generate any constraints. Being able to execute logic outside of a circuit is critical for both circuit performance and constructing proofs on information that is external to a circuit. - -Fetching information from somewhere external to a circuit can also be used to enable developers to improve circuit efficiency. - -A ZK DSL does not just prove computation, but proves that some computation was handled correctly. Thus, it is necessary that when we switch from performing some operation directly inside of a circuit to inside of an unconstrained environment that the appropriate constraints are still laid down elsewhere in the circuit. - -## Example - -An in depth example might help drive the point home. This example comes from the excellent [post](https://discord.com/channels/1113924620781883405/1124022445054111926/1128747641853972590) by Tom in the Noir Discord. 
- -Let's look at how we can optimize a function to turn a `u72` into an array of `u8`s. - -```rust -fn main(num: u72) -> pub [u8; 8] { - let mut out: [u8; 8] = [0; 8]; - for i in 0..8 { - out[i] = (num >> (56 - (i * 8)) as u72 & 0xff) as u8; - } - - out -} -``` - -``` -Total ACIR opcodes generated for language PLONKCSat { width: 3 }: 91 -Backend circuit size: 3619 -``` - -A lot of the operations in this function are optimized away by the compiler (all the bit-shifts turn into divisions by constants). However we can save a bunch of gates by casting to u8 a bit earlier. This automatically truncates the bit-shifted value to fit in a u8 which allows us to remove the XOR against 0xff. This saves us ~480 gates in total. - -```rust -fn main(num: u72) -> pub [u8; 8] { - let mut out: [u8; 8] = [0; 8]; - for i in 0..8 { - out[i] = (num >> (56 - (i * 8)) as u8; - } - - out -} -``` - -``` -Total ACIR opcodes generated for language PLONKCSat { width: 3 }: 75 -Backend circuit size: 3143 -``` - -Those are some nice savings already but we can do better. This code is all constrained so we're proving every step of calculating out using num, but we don't actually care about how we calculate this, just that it's correct. This is where brillig comes in. - -It turns out that truncating a u72 into a u8 is hard to do inside a snark, each time we do as u8 we lay down 4 ACIR opcodes which get converted into multiple gates. It's actually much easier to calculate num from out than the other way around. All we need to do is multiply each element of out by a constant and add them all together, both relatively easy operations inside a snark. - -We can then run u72_to_u8 as unconstrained brillig code in order to calculate out, then use that result in our constrained function and assert that if we were to do the reverse calculation we'd get back num. This looks a little like the below: - -```rust -fn main(num: u72) -> pub [u8; 8] { - let out = u72_to_u8(num); - - let mut reconstructed_num: u72 = 0; - for i in 0..8 { - reconstructed_num += (out[i] as u72 << (56 - (8 * i))); - } - assert(num == reconstructed_num); - out -} - -unconstrained fn u72_to_u8(num: u72) -> [u8; 8] { - let mut out: [u8; 8] = [0; 8]; - for i in 0..8 { - out[i] = (num >> (56 - (i * 8))) as u8; - } - out -} -``` - -``` -Total ACIR opcodes generated for language PLONKCSat { width: 3 }: 78 -Backend circuit size: 2902 -``` - -This ends up taking off another ~250 gates from our circuit! We've ended up with more ACIR opcodes than before but they're easier for the backend to prove (resulting in fewer gates). - -Generally we want to use brillig whenever there's something that's easy to verify but hard to compute within the circuit. For example, if you wanted to calculate a square root of a number it'll be a much better idea to calculate this in brillig and then assert that if you square the result you get back your number. diff --git a/docs/versioned_docs/version-v0.9.0/language_concepts/06_generics.md b/docs/versioned_docs/version-v0.9.0/language_concepts/06_generics.md deleted file mode 100644 index 66f2e85e16b..00000000000 --- a/docs/versioned_docs/version-v0.9.0/language_concepts/06_generics.md +++ /dev/null @@ -1,116 +0,0 @@ ---- -title: Generics -description: - Learn how to use Generics in Noir -keywords: [Noir, Rust, generics, functions, structs] ---- - -# Generics - -Generics allow you to use the same functions with multiple different concrete data types. 
You can -read more about the concept of generics in the Rust documentation -[here](https://doc.rust-lang.org/book/ch10-01-syntax.html). - -Here is a trivial example showing the identity function that supports any type. In Rust, it is -common to refer to the most general type as `T`. We follow the same convention in Noir. - -```rust -fn id(x: T) -> T { - x -} -``` - -## In Structs - -Generics are useful for specifying types in structs. For example, we can specify that a field in a -struct will be of a certain generic type. In this case `value` is of type `T`. - -```rust -struct RepeatedValue { - value: T, - count: comptime Field, -} - -impl RepeatedValue { - fn new(value: T) -> Self { - Self { value, count: 1 } - } - - fn increment(mut repeated: Self) -> Self { - repeated.count += 1; - repeated - } - - fn print(self) { - for _i in 0 .. self.count { - dep::std::println(self.value); - } - } -} - -fn main() { - let mut repeated = RepeatedValue::new("Hello!"); - repeated = repeated.increment(); - repeated.print(); -} -``` - -The `print` function will print `Hello!` an arbitrary number of times, twice in this case. - -If we want to be generic over array lengths (which are type-level integers), we can use numeric -generics. Using these looks just like using regular generics, but these generics can resolve to -integers at compile-time, rather than resolving to types. Here's an example of a struct that is -generic over the size of the array it contains internally: - -```rust -struct BigInt { - limbs: [u32; N], -} - -impl BigInt { - // `N` is in scope of all methods in the impl - fn first(first: BigInt, second: BigInt) -> Self { - assert(first.limbs != second.limbs); - first - - fn second(first: BigInt, second: Self) -> Self { - assert(first.limbs != second.limbs); - second - } -} -``` - -## Calling functions on generic parameters - -Unlike Rust, Noir does not have traits, so how can one translate the equivalent of a trait bound in -Rust into Noir? That is, how can we write a function that is generic over some type `T`, while also -requiring there is a function like `eq: fn(T, T) -> bool` that works on the type? - -The answer is that we can translate this by passing in the function manually. Here's an example of -implementing array equality in Noir: - -```rust -fn array_eq(array1: [T; N], array2: [T; N], elem_eq: fn(T, T) -> bool) -> bool { - if array1.len() != array2.len() { - false - } else { - let mut result = true; - for i in 0 .. array1.len() { - result &= elem_eq(array1[i], array2[i]); - } - result - } -} - -fn main() { - assert(array_eq([1, 2, 3], [1, 2, 3], |a, b| a == b)); - - // We can use array_eq even for arrays of structs, as long as we have - // an equality function for these structs we can pass in - let array = [MyStruct::new(), MyStruct::new()]; - assert(array_eq(array, array, MyStruct::eq)); -} -``` - -You can see an example of generics in the tests -[here](https://github.com/noir-lang/noir/blob/master/tooling/nargo_cli/tests/execution_success/generics/src/main.nr). diff --git a/docs/versioned_docs/version-v0.9.0/language_concepts/07_mutability.md b/docs/versioned_docs/version-v0.9.0/language_concepts/07_mutability.md deleted file mode 100644 index 69798c7a276..00000000000 --- a/docs/versioned_docs/version-v0.9.0/language_concepts/07_mutability.md +++ /dev/null @@ -1,120 +0,0 @@ ---- -title: Mutability -description: - Learn about mutable variables, constants, and globals in Noir programming language. Discover how - to declare, modify, and use them in your programs. 
-keywords: [noir programming language, mutability in noir, mutable variables, constants, globals] ---- - -Variables in noir can be declared mutable via the `mut` keyword. Mutable variables can be reassigned -to via an assignment expression. - -```rust -let x = 2; -x = 3; // error: x must be mutable to be assigned to - -let mut y = 3; -let y = 4; // OK -``` - -The `mut` modifier can also apply to patterns: - -```rust -let (a, mut b) = (1, 2); -a = 11; // error: a must be mutable to be assigned to -b = 12; // OK - -let mut (c, d) = (3, 4); -c = 13; // OK -d = 14; // OK - -// etc. -let MyStruct { x: mut y } = MyStruct { x: a } -// y is now in scope -``` - -Note that mutability in noir is local and everything is passed by value, so if a called function -mutates its parameters then the parent function will keep the old value of the parameters. - -```rust -fn main() -> Field { - let x = 3; - helper(x); - x // x is still 3 -} - -fn helper(mut x: i32) { - x = 4; -} -``` - -## Comptime Values - -Comptime values are values that are known at compile-time. This is different to a witness -which changes per proof. If a comptime value that is being used in your program is changed, then your -circuit will also change. - -Comptime is slightly different from Rust's `const`. Namely, it is a bit more flexible in that normal functions can accept comptime parameters. For example, this is used to verify an array index is known at compile-time. Note that the "known at compile-time" here means "known after function inlining is performed while optimizing the program" and not "known during type-checking." - -Below we show how to declare a comptime value: - -```rust -fn main() { - let a: comptime Field = 5; - - // `comptime Field` can also be inferred: - let a = 5; -} -``` - -Comptime variables can be mutuable, but must be known at compile time: - -```rust -fn main(runtime_var: Field) -> pub Field { - let known_at_compile_time: comptime Field = 1; - - // The next line will cause an error - let bad_var: comptime Field = runtime_var; - -} -``` - -As `runtime_var` is a argument to the circuit it cannot be known at compile time and so assigning it to a comptime variable should fail. A circuit's arguments is the only way in which non-comptime variables can enter the circuit (excluding [brillig](./unconstrained) foreign calls). - -## Globals - -Noir also supports global variables. However, they must be compile-time variables. If `comptime` is -not explicitly written in the type annotation the compiler will implicitly specify the declaration -as compile-time. They can then be used like any other compile-time variable inside functions. The -global type can also be inferred by the compiler entirely. Globals can also be used to specify array -annotations for function parameters and can be imported from submodules. - -```rust -global N: Field = 5; // Same as `global N: comptime Field = 5` - -fn main(x : Field, y : [Field; N]) { - let res = x * N; - - assert(res == y[0]); - - let res2 = x * mysubmodule::N; - assert(res != res2); -} - -mod mysubmodule { - use dep::std; - - global N: Field = 10; - - fn my_helper() -> comptime Field { - let x = N; - x - } -} -``` - -## Why only local mutability? - -Witnesses in a proving system are immutable in nature. Noir aims to _closely_ mirror this setting -without applying additional overhead to the user. Modeling a mutable reference is not as -straightforward as on conventional architectures and would incur some possibly unexpected overhead. 
diff --git a/docs/versioned_docs/version-v0.9.0/modules_packages_crates/crates_and_packages.md b/docs/versioned_docs/version-v0.9.0/modules_packages_crates/crates_and_packages.md deleted file mode 100644 index 34f28a71148..00000000000 --- a/docs/versioned_docs/version-v0.9.0/modules_packages_crates/crates_and_packages.md +++ /dev/null @@ -1,35 +0,0 @@ ---- -title: Crates and Packages -description: - Learn how to use Crates and Packages in your Noir project -keywords: [Nargo, dependencies, package management, crates, package] ---- - -## Crates - -A crate is the smallest amount of code that the Noir compiler considers at a time. -Crates can contain modules, and the modules may be defined in other files that get compiled with the crate, as we’ll see in the coming sections. - -### Crate Types - -A Noir crate can come in one of two forms: a binary crate or a library crate. - -_Binary crates_ are programs which you can compile to an ACIR circuit which you can then create proofs against. Each must have a function called `main` that defines the ACIR circuit which is to be proved. - -_Library crates_ don't have a `main` function and they don't compile down to ACIR. Instead they define functionality intended to be shared with multiple projects, and eventually included in a binary crate. - -### Crate Root - -Every crate has a root, which is the source file that the compiler starts, this is also known as the root module. The Noir compiler does not enforce any conditions on the name of the file which is the crate root, however if you are compiling via Nargo the crate root must be called `lib.nr` or `main.nr` for library or binary crates respectively. - -## Packages - -A Nargo _package_ is a collection of one of more crates that provides a set of functionality. A package must include a Nargo.toml file. - -A package _must_ contain either a library or a binary crate, but not both. - -### Differences from Cargo Packages - -One notable difference between Rust's Cargo and Noir's Nargo is that while Cargo allows a package to contain an unlimited number of binary crates and a single library crate, Nargo currently only allows a package to contain a single crate. - -In future this restriction may be lifted to allow a Nargo package to contain both a binary and library crate or multiple binary crates. diff --git a/docs/versioned_docs/version-v0.9.0/modules_packages_crates/dependencies.md b/docs/versioned_docs/version-v0.9.0/modules_packages_crates/dependencies.md deleted file mode 100644 index 2807ad52046..00000000000 --- a/docs/versioned_docs/version-v0.9.0/modules_packages_crates/dependencies.md +++ /dev/null @@ -1,110 +0,0 @@ ---- -title: Managing Dependencies -description: - Learn how to specify and manage dependencies in Nargo, allowing you to upload packages to GitHub - and use them easily in your project. -keywords: [Nargo, dependencies, GitHub, package management, versioning] ---- - -Nargo allows you to upload packages to GitHub and use them as dependencies. - -## Specifying a dependency - -Specifying a dependency requires a tag to a specific commit and the git url to the url containing -the package. - -Currently, there are no requirements on the tag contents. If requirements are added, it would follow -semver 2.0 guidelines. - -> Note: Without a `tag` , there would be no versioning and dependencies would change each time you -> compile your project. 
- -For example, to add the [ecrecover-noir library](https://github.com/colinnielsen/ecrecover-noir) to your project, add it to `Nargo.toml`: - -```toml -# Nargo.toml - -[dependencies] -ecrecover = {tag = "v0.8.0", git = "https://github.com/colinnielsen/ecrecover-noir"} -``` - -## Specifying a local dependency - -You can also specify dependencies that are local to your machine. - -For example, this file structure has a library and binary crate - -``` -├── binary_crate -│   ├── Nargo.toml -│   └── src -│   └── main.nr -└── liba - ├── Nargo.toml - └── src - └── lib.nr -``` - -Inside of the binary crate, you can specify: - -```toml -# Nargo.toml - -[dependencies] -libA = { path = "../liba" } -``` - -## Importing dependencies - -You can import a dependency to a Noir file using the following syntax. For example, to import the -ecrecover-noir library and local liba referenced above: - -```rust -use dep::ecrecover; -use dep::libA; -``` - -You can also import only the specific parts of dependency that you want to use, like so: - -```rust -use dep::std::hash::sha256; -use dep::std::scalar_mul::fixed_base; -``` - -Lastly, as demonstrated in the -[elliptic curve example](../standard_library/cryptographic_primitives/ec_primitives#examples), you -can import multiple items in the same line by enclosing them in curly braces: - -```rust -use dep::std::ec::tecurve::affine::{Curve, Point}; -``` - -## Dependencies of Dependencies - -Note that when you import a dependency, you also get access to all of the dependencies of that package. - -For example, the [phy_vector](https://github.com/resurgencelabs/phy_vector) library imports an [fraction](https://github.com/resurgencelabs/fraction) library. If you're importing the phy_vector library, then you can access the functions in fractions library like so: - -```rust -use dep::phy_vector; - -fn main(x : Field, y : pub Field) { - //... - let f = phy_vector::fraction::toFraction(true, 2, 1); - //... -} -``` - -## Available Libraries - -Noir does not currently have an official package manager. You can find a list of available Noir libraries in the [awesome-noir repo here](https://github.com/noir-lang/awesome-noir#libraries). 
- -Some libraries that are available today include: - -- [Standard Library](https://github.com/noir-lang/noir/tree/master/noir_stdlib) - the Noir Standard Library -- [Ethereum Storage Proof Verification](https://github.com/aragonzkresearch/noir-trie-proofs) - a library that contains the primitives necessary for RLP decoding (in the form of look-up table construction) and Ethereum state and storage proof verification (or verification of any trie proof involving 32-byte long keys) -- [BigInt](https://github.com/shuklaayush/noir-bigint) - a library that provides a custom BigUint56 data type, allowing for computations on large unsigned integers -- [ECrecover](https://github.com/colinnielsen/ecrecover-noir/tree/main) - a library to verify an ECDSA signature and return the source Ethereum address -- [Sparse Merkle Tree Verifier](https://github.com/vocdoni/smtverifier-noir/tree/main) - a library for verification of sparse Merkle trees -- [Signed Int](https://github.com/resurgencelabs/signed_int) - a library for accessing a custom Signed Integer data type, allowing access to negative numbers on Noir -- [Fraction](https://github.com/resurgencelabs/fraction) - a library for accessing fractional number data type in Noir, allowing results that aren't whole numbers diff --git a/docs/versioned_docs/version-v0.9.0/modules_packages_crates/modules.md b/docs/versioned_docs/version-v0.9.0/modules_packages_crates/modules.md deleted file mode 100644 index e429b336511..00000000000 --- a/docs/versioned_docs/version-v0.9.0/modules_packages_crates/modules.md +++ /dev/null @@ -1,104 +0,0 @@ ---- -title: Understanding Modules -description: - Learn how to organize your files using modules in Noir, following the same convention as Rust's - module system. Examples included. -keywords: [Noir, Rust, modules, organizing files, sub-modules] ---- - -# Modules - -Noir's module system follows the same convention as the _newer_ version of Rust's module system. - -## Purpose of Modules - -Modules are used to organise files. Without modules all of your code would need to live in a single -file. In Noir, the compiler does not automatically scan all of your files to detect modules. This -must be done explicitly by the developer. - -## Examples - -### Importing a module in the crate root - -Filename : `src/main.nr` - -```rust -mod foo; - -fn main() { - foo::hello_world(); -} -``` - -Filename : `src/foo.nr` - -```rust -fn from_foo() {} -``` - -In the above snippet, the crate root is the `src/main.nr` file. The compiler sees the module -declaration `mod foo` which prompts it to look for a foo.nr file. - -Visually this module hierarchy looks like the following : - -``` -crate - ├── main - │ - └── foo - └── from_foo - -``` - -### Importing a module throughout the tree -All modules are accessible from the ``crate::`` namespace. - -``` -crate - ├── bar - ├── foo - └── main - -``` -In the above snippet, if ``bar`` would like to use functions in ``foo``, it can do so by ``use crate::foo::function_name``. - -### Sub-modules - -Filename : `src/main.nr` - -```rust -mod foo; - -fn main() { - foo::from_foo(); -} -``` - -Filename : `src/foo.nr` - -```rust -mod bar; -fn from_foo() {} -``` - -Filename : `src/foo/bar.nr` - -```rust -fn from_bar() {} -``` - -In the above snippet, we have added an extra module to the module tree; `bar`. `bar` is a submodule -of `foo` hence we declare bar in `foo.nr` with `mod bar`. 
Since `foo` is not the crate root, the -compiler looks for the file associated with the `bar` module in `src/foo/bar.nr` - -Visually the module hierarchy looks as follows: - -``` -crate - ├── main - │ - └── foo - ├── from_foo - └── bar - └── from_bar -``` diff --git a/docs/versioned_docs/version-v0.9.0/nargo/01_commands.md b/docs/versioned_docs/version-v0.9.0/nargo/01_commands.md deleted file mode 100644 index d550e137258..00000000000 --- a/docs/versioned_docs/version-v0.9.0/nargo/01_commands.md +++ /dev/null @@ -1,139 +0,0 @@ ---- -title: Commands -description: - Noir CLI Commands for Noir Prover and Verifier to create, execute, prove and verify programs, - generate Solidity verifier smart contract and compile into JSON file containing ACIR - representation and ABI of circuit. -keywords: - [ - Nargo, - Noir CLI, - Noir Prover, - Noir Verifier, - generate Solidity verifier, - compile JSON file, - ACIR representation, - ABI of circuit, - TypeScript, - ] ---- - -## General options - -``` -Options: - -s, --show-ssa Emit debug information for the intermediate SSA IR - -d, --deny-warnings Quit execution when warnings are emitted - -h, --help Print help -``` - -## `nargo help [subcommand]` - -Prints the list of available commands or specific information of a subcommand. - -_Arguments_ - -- `` - The subcommand whose help message to display - -## `nargo check` - -Generate the `Prover.toml` and `Verifier.toml` files for specifying prover and verifier in/output -values of the Noir program respectively. - -## `nargo codegen-verifier` - -Generate a Solidity verifier smart contract for the program. - -## `nargo compile ` - -Compile the program into a JSON build artifact file containing the ACIR representation and the ABI -of the circuit. This build artifact can then be used to generate and verify proofs. - -You can also use "build" as an alias for compile. - -For example, `nargo build `. - -_Arguments_ - -- `` - The name of the circuit file - -_Options_ - -- `-c, --contracts` - Compile each contract function used within the program -- `--print-acir` - Displays the ACIR for the compiled circuit - -## `nargo new [path]` - -Creates a new Noir project in a new folder called `` - Name of the package -- `[path]` - The path to save the new project - -## `nargo init` - -Creates a new Noir project in the current directory. - -## `nargo execute [witness_name]` - -Runs the Noir program and prints its return value. - -_Arguments_ - -- `[witness_name]` - The name of the witness - -_Usage_ - -The inputs to the circuit are read from the `Prover.toml` file generated by `nargo check`, which -must be filled in. - -To save the witness to file, run the command with a value for the `witness-name` argument. A -`.tr` file will then be saved in the `build` folder. - -> **Info:** The `.tr` file is the witness file. The witness file can be considered as program inputs -> parsed for your program's ACIR. -> -> This file can be passed along with circuit's ACIR into a TypeScript project for proving and -> verification. See the [TypeScript](../typescript#proving-and-verifying-externally-compiled-files) -> section to learn more. - -## `nargo prove ` - -Creates a proof for the program. - -_Arguments_ - -- `` - The name of the proof - -_Options_ - -- `-v, --verify` - Verify proof after proving - -## `nargo verify ` - -Given a proof and a program, verify whether the proof is valid. 
- -_Arguments_ - -- `` - The proof to verify - -## `nargo test ` - -Nargo will automatically compile and run any functions which have the decorator `#[test]` on them if -you run `nargo test`. - -See an example on the [testing page](./testing). - -_Arguments_ - -- `` - a pattern to indicate to only run tests with names containing the pattern - -## `nargo gates` - -Counts the occurrences of different gates in circuit - -## `nargo lsp` - -Start a long-running Language Server process that communicates over stdin/stdout. -Usually this command is not run by a user, but instead will be run by a Language Client, such as [vscode-noir](https://github.com/noir-lang/vscode-noir). diff --git a/docs/versioned_docs/version-v0.9.0/nargo/02_testing.md b/docs/versioned_docs/version-v0.9.0/nargo/02_testing.md deleted file mode 100644 index ba0bebd658b..00000000000 --- a/docs/versioned_docs/version-v0.9.0/nargo/02_testing.md +++ /dev/null @@ -1,32 +0,0 @@ ---- -title: Testing in Noir -description: Learn how to use Nargo to test your Noir program in a quick and easy way -keywords: [Nargo, testing, Noir, compile, test] ---- - -You can test your Noir programs using Noir circuits. - -Nargo will automatically compile and run any functions which have the decorator `#[test]` on them if -you run `nargo test`. - -For example if you have a program like: - -```rust -fn add(x: u64, y: u64) -> u64 { - x + y -} -#[test] -fn test_add() { - assert(add(2,2) == 4); - assert(add(0,1) == 1); - assert(add(1,0) == 1); -} -``` - -Running `nargo test` will test that the `test_add` function can be executed while satisfying the all -the contraints which allows you to test that add returns the expected values. Test functions can't -have any arguments currently. - -This is much faster compared to testing in Typescript but the only downside is that you can't -explicitly test that a certain set of inputs are invalid. i.e. you can't say that you want -add(2^64-1, 2^64-1) to fail. diff --git a/docs/versioned_docs/version-v0.9.0/nargo/03_solidity_verifier.md b/docs/versioned_docs/version-v0.9.0/nargo/03_solidity_verifier.md deleted file mode 100644 index 9ac60cb0ba7..00000000000 --- a/docs/versioned_docs/version-v0.9.0/nargo/03_solidity_verifier.md +++ /dev/null @@ -1,129 +0,0 @@ ---- -title: Solidity Verifier -description: - Learn how to run the verifier as a smart contract on the blockchain. Compile a Solidity verifier - contract for your Noir program and deploy it on any EVM blockchain acting as a verifier smart - contract. Read more to find out! -keywords: - [ - solidity verifier, - smart contract, - blockchain, - compiler, - plonk_vk.sol, - EVM blockchain, - verifying Noir programs, - proving backend, - Barretenberg, - ] ---- - -For certain applications, it may be desirable to run the verifier as a smart contract instead of on -a local machine. - -Compile a Solidity verifier contract for your Noir program by running: - -```sh -nargo codegen-verifier -``` - -A new `contract` folder would then be generated in your project directory, containing the Solidity -file `plonk_vk.sol`. It can be deployed on any EVM blockchain acting as a verifier smart contract. - -> **Note:** It is possible to compile verifier contracts of Noir programs for other smart contract -> platforms as long as the proving backend supplies an implementation. -> -> Barretenberg, the default proving backend for Nargo, supports compilation of verifier contracts in -> Solidity only for the time being. 
- -## Verify - -To verify a proof using the Solidity verifier contract, call the `verify` function with the -following signature: - -```solidity -function verify(bytes calldata _proof, bytes32[] calldata _publicInputs) external view returns (bool) -``` - -You can see an example of how the `verify` function is called in the example zk voting application [here](https://github.com/noir-lang/noir-examples/blob/33e598c257e2402ea3a6b68dd4c5ad492bce1b0a/foundry-voting/src/zkVote.sol#L35): - -```solidity -function castVote(bytes calldata proof, uint proposalId, uint vote, bytes32 nullifierHash) public returns (bool) { - // ... - bytes32[] memory publicInputs = new bytes32[](4); - publicInputs[0] = merkleRoot; - publicInputs[1] = bytes32(proposalId); - publicInputs[2] = bytes32(vote); - publicInputs[3] = nullifierHash; - require(verifier.verify(proof, publicInputs), "Invalid proof"); -``` - -### Public Inputs - -:::tip - -A circuit doesn't have the concept of a return value. Return values are just syntactic sugar in -Noir. - -Under the hood, the return value is passed as an input to the circuit and is checked at the end of -the circuit program. - -::: - -The verifier contract uses the output (return) value of a Noir program as a public input. So if you -have the following function - -```rust -fn main( - // Public inputs - pubkey_x: pub Field, - pubkey_y: pub Field, - // Private inputs - priv_key: Field, -) -> pub Field -``` - -then `verify` in `plonk_vk.sol` will expect 3 public inputs. Passing two inputs will result in an -error like `Reason: PUBLIC_INPUT_COUNT_INVALID(3, 2)`. - -In this case the 3 inputs to `verify` would be ordered as `[pubkey_x, pubkey_y, return]`. - -#### Struct inputs - -Consider the following program: - -```rust -struct Type1 { - val1: Field, - val2: Field, -} - -struct Nested { - t1: Type1, - is_true: bool, -} - -fn main(x: pub Field, nested: pub Nested, y: pub Field) { - //... -} -``` - -Structs will be flattened so that the array of inputs is 1-dimensional array. The order of these inputs would be flattened to: `[x, nested.t1.val1, nested.t1.val2, nested.is_true, y]` - -## Noir for EVM chains - -You can currently deploy the Solidity verifier contracts to most EVM compatible chains. EVM chains that have been tested and are known to work include: - -- Optimism -- Arbitrum -- Polygon PoS -- Scroll -- Celo - -Other EVM chains should work, but have not been tested directly by our team. If you test any other chains, please open a PR on this page to update the list. See [this doc](https://github.com/noir-lang/noir-starter/tree/main/with-foundry#testing-on-chain) for more info about testing verifier contracts on different EVM chains. - -### Unsupported chains - -Unfortunately not all "EVM" chains are supported. - -**zkSync** and the **Polygon zkEVM** do _not_ currently support proof verification via Solidity verifier contracts. They are missing the bn256 precompile contract that the verifier contract requires. Once these chains support this precompile, they may work. diff --git a/docs/versioned_docs/version-v0.9.0/standard_library/black_box_fns.md b/docs/versioned_docs/version-v0.9.0/standard_library/black_box_fns.md deleted file mode 100644 index c758846b688..00000000000 --- a/docs/versioned_docs/version-v0.9.0/standard_library/black_box_fns.md +++ /dev/null @@ -1,45 +0,0 @@ ---- -title: Black Box Functions -description: Black box functions are functions in Noir that rely on backends implementing support for specialized constraints. 
-keywords: [noir, black box functions] ---- - -Black box functions are functions in Noir that rely on backends implementing support for specialized constraints. This makes certain zk-snark unfriendly computations cheaper than if they were implemented in Noir. - -:::warning - -It is likely that not all backends will support a particular black box function. - -::: - -Because it is not guaranteed that all backends will support black box functions, it is possible that certain Noir programs won't compile against a particular backend if they use an unsupported black box function. It is possible to fallback to less efficient implementations written in Noir/ACIR in some cases. - -Black box functions are specified with the `#[foreign(black_box_fn)]` attribute. For example, the SHA256 function in the Noir [source code](https://github.com/noir-lang/noir/blob/v0.5.1/noir_stdlib/src/hash.nr) looks like: - -```rust -#[foreign(sha256)] -fn sha256(_input : [u8; N]) -> [u8; 32] {} -``` - -## Function list - -Here is a list of the current black box functions that are supported by UltraPlonk: - -- AES -- [SHA256](./cryptographic_primitives/hashes#sha256) -- [Schnorr signature verification](./cryptographic_primitives/schnorr) -- [Blake2s](./cryptographic_primitives/hashes#blake2s) -- [Pedersen](./cryptographic_primitives/hashes#pedersen) -- [HashToField128Security](./cryptographic_primitives/hashes#hash_to_field) -- [ECDSA signature verification](./cryptographic_primitives/ecdsa_sig_verification) -- [Fixed base scalar multiplication](./cryptographic_primitives/scalar) -- [Compute merkle root](./merkle_trees#compute_merkle_root) -- AND -- XOR -- RANGE -- [Keccak256](./cryptographic_primitives/hashes#keccak256) -- [Recursive proof verification](./recursion) - -Most black box functions are included as part of the Noir standard library, however `AND`, `XOR` and `RANGE` are used as part of the Noir language syntax. For instance, using the bitwise operator `&` will invoke the `AND` black box function. To ensure compatibility across backends, the ACVM has fallback implementations of `AND`, `XOR` and `RANGE` defined in its standard library which it can seamlessly fallback to if the backend doesn't support them. - -You can view the black box functions defined in the ACVM code [here](https://github.com/noir-lang/acvm/blob/acir-v0.12.0/acir/src/circuit/black_box_functions.rs). diff --git a/docs/versioned_docs/version-v0.9.0/standard_library/cryptographic_primitives.md b/docs/versioned_docs/version-v0.9.0/standard_library/cryptographic_primitives.md deleted file mode 100644 index 2df4f929474..00000000000 --- a/docs/versioned_docs/version-v0.9.0/standard_library/cryptographic_primitives.md +++ /dev/null @@ -1,14 +0,0 @@ ---- -title: Cryptographic primitives in Noir -description: - Learn about the cryptographic primitives ready to use for any Noir project -keywords: - [ - cryptographic primitives, - Noir project, - ] ---- - -The Noir team is progressively adding new cryptographic primitives to the standard library. Reach out for news or if you would be interested in adding more of these calculations in Noir. - -Some methods are available thanks to the Aztec backend, not being performed using Noir. When using other backends, these methods may or may not be supplied. 
diff --git a/docs/versioned_docs/version-v0.9.0/standard_library/cryptographic_primitives/00_hashes.mdx b/docs/versioned_docs/version-v0.9.0/standard_library/cryptographic_primitives/00_hashes.mdx deleted file mode 100644 index 31a84cdb753..00000000000 --- a/docs/versioned_docs/version-v0.9.0/standard_library/cryptographic_primitives/00_hashes.mdx +++ /dev/null @@ -1,146 +0,0 @@ ---- -title: Hash methods -description: - Learn about the cryptographic primitives ready to use for any Noir project, including sha256, - blake2s, pedersen, mimc_bn254 and mimc -keywords: - [cryptographic primitives, Noir project, sha256, blake2s, pedersen, mimc_bn254, mimc, hash] ---- - -import BlackBoxInfo from './common/_blackbox.mdx'; - -## sha256 - -Given an array of bytes, returns the resulting sha256 hash. - -```rust -fn sha256(_input : [u8]) -> [u8; 32] -``` - -example: - -```rust -fn main() { - let x = [163, 117, 178, 149] // some random bytes - let hash = std::hash::sha256(x); -} -``` - - - -## blake2s - -Given an array of bytes, returns an array with the Blake2 hash - -```rust -fn blake2s(_input : [u8]) -> [u8; 32] -``` - -example: - -```rust -fn main() { - let x = [163, 117, 178, 149] // some random bytes - let hash = std::hash::blake2s(x); -} -``` - - - -## pedersen - -Given an array of Fields, returns the Pedersen hash. - -```rust -fn pedersen(_input : [Field]) -> [Field; 2] -``` - -example: - -```rust -fn main() { - let x = [163, 117, 178, 149] // some random bytes - let hash = std::hash::pedersen(x); -} -``` - - - -## keccak256 - -Given an array of bytes (`u8`), returns the resulting keccak hash as an array of 32 bytes -(`[u8; 32]`). Specify a message_size to hash only the first `message_size` bytes -of the input. - -```rust -fn keccak256(_input : [u8; N], _message_size: u32) -> [u8; 32] -``` - -example: - -```rust -fn main() { - let x = [163, 117, 178, 149] // some random bytes - let message_size = 4; - let hash = std::hash::keccak256(x, message_size); -} -``` - - - -## poseidon - -Given an array of Fields, returns a new Field with the Poseidon Hash. Mind that you need to specify -how many inputs are there to your Poseidon function. - -```rust -// example for hash_1, hash_2 accepts an array of length 2, etc -fn hash_1(input: [Field; 1]) -> Field -``` - -example: - -```rust -fn main() -{ - let hash1 = std::hash::poseidon::bn254::hash_2([1, 2]); - assert(hash1 == 0x115cc0f5e7d690413df64c6b9662e9cf2a3617f2743245519e19607a4417189a); -} -``` - -## mimc_bn254 and mimc - -`mimc_bn254` is `mimc`, but with hardcoded parameters for the BN254 curve. You can use it by -providing an array of Fields, and it returns a Field with the hash. You can use the `mimc` method if -you're willing to input your own constants: - -```rust -fn mimc(x: Field, k: Field, constants: [Field; N], exp : Field) -> Field -``` - -otherwise, use the `mimc_bn254` method: - -```rust -fn mimc_bn254(array: [Field; N]) -> Field -``` - -example: - -```rust - -fn main() { - let x = [163, 117, 178, 149] // some random bytes - let hash = std::hash::mimc_bn254(x); -} -``` - -## hash_to_field - -```rust -fn hash_to_field(_input : [Field; N]) -> Field {} -``` - -Calculates the `blake2s` hash of the inputs and returns the hash modulo the field modulus to return -a value which can be represented as a `Field`. 
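The `hash_to_field` signature above has no usage example; here is a minimal sketch, assuming `hash_to_field` is exposed under `std::hash` like the other hash methods on this page (the exact module path is an assumption and may differ between versions):

```rust
use dep::std;

fn main() {
    // two arbitrary field elements to hash
    let input = [1, 2];

    // blake2s hash of the inputs, reduced modulo the field modulus
    // (assumption: hash_to_field lives under std::hash like sha256, blake2s, etc.)
    let hashed: Field = std::hash::hash_to_field(input);
    std::println(hashed);
}
```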
- - diff --git a/docs/versioned_docs/version-v0.9.0/standard_library/cryptographic_primitives/01_scalar.mdx b/docs/versioned_docs/version-v0.9.0/standard_library/cryptographic_primitives/01_scalar.mdx deleted file mode 100644 index 62265cddb1e..00000000000 --- a/docs/versioned_docs/version-v0.9.0/standard_library/cryptographic_primitives/01_scalar.mdx +++ /dev/null @@ -1,33 +0,0 @@ ---- -title: Scalar multiplication -description: - See how you can perform scalar multiplications over a fixed base in Noir -keywords: - [ - cryptographic primitives, - Noir project, - scalar multiplication, - ] ---- - -import BlackBoxInfo from './common/\_blackbox.mdx'; - -## scalar_mul::fixed_base - -Performs scalar multiplication over the embedded curve whose coordinates are defined by the -configured noir field. For the BN254 scalar field, this is BabyJubJub or Grumpkin. - -```rust -fn fixed_base(_input : Field) -> [Field; 2] -``` - -example - -```rust -fn main(x : Field) { - let scal = std::scalar_mul::fixed_base(x); - std::println(scal); -} -``` - - diff --git a/docs/versioned_docs/version-v0.9.0/standard_library/cryptographic_primitives/02_schnorr.mdx b/docs/versioned_docs/version-v0.9.0/standard_library/cryptographic_primitives/02_schnorr.mdx deleted file mode 100644 index 0e219c0e5ff..00000000000 --- a/docs/versioned_docs/version-v0.9.0/standard_library/cryptographic_primitives/02_schnorr.mdx +++ /dev/null @@ -1,37 +0,0 @@ ---- -title: Schnorr Signatures -description: Learn how you can verify Schnorr signatures using Noir -keywords: [cryptographic primitives, Noir project, schnorr, signatures] ---- - -import BlackBoxInfo from './common/_blackbox.mdx'; - -## schnorr::verify_signature - -Verifier for Schnorr signatures over the embedded curve (for BN254 it is Grumpkin). - -```rust -fn verify_signature(_public_key_x: Field, _public_key_y: Field, _signature: [u8; 64], _message: [u8]) -> bool -``` - -where `_signature` can be generated like so using the npm package -[@noir-lang/barretenberg](https://www.npmjs.com/package/@noir-lang/barretenberg) - -```js -const { BarretenbergWasm } = require('@noir-lang/barretenberg/dest/wasm'); -const { Schnorr } = require('@noir-lang/barretenberg/dest/crypto/schnorr'); - -... - -const barretenberg = await BarretenbergWasm.new(); -const schnorr = new Schnorr(barretenberg); -const pubKey = schnorr.computePublicKey(privateKey); -const message = ... -const signature = Array.from( - schnorr.constructSignature(hash, privateKey).toBuffer() -); - -... -``` - - diff --git a/docs/versioned_docs/version-v0.9.0/standard_library/cryptographic_primitives/03_ecdsa_sig_verification.mdx b/docs/versioned_docs/version-v0.9.0/standard_library/cryptographic_primitives/03_ecdsa_sig_verification.mdx deleted file mode 100644 index 3934a0338d0..00000000000 --- a/docs/versioned_docs/version-v0.9.0/standard_library/cryptographic_primitives/03_ecdsa_sig_verification.mdx +++ /dev/null @@ -1,45 +0,0 @@ ---- -title: ECDSA Signature Verification -description: Learn about the cryptographic primitives regarding ECDSA over the secp256k1 and secp256r1 curves -keywords: [cryptographic primitives, Noir project, ecdsa, secp256k1, secp256r1, signatures] ---- - -import BlackBoxInfo from './common/_blackbox.mdx'; - -Noir supports ECDSA signatures verification over the secp256k1 and secp256r1 curves. 
- -## ecdsa_secp256k1::verify_signature - -Verifier for ECDSA Secp256k1 signatures - -```rust -fn verify_signature(_public_key_x : [u8; 32], _public_key_y : [u8; 32], _signature: [u8; 64], _message: [u8]) -> bool -``` - -example: - -```rust -fn main(hashed_message : [u8;32], pub_key_x : [u8;32], pub_key_y : [u8;32], signature : [u8;64]) { - let valid_signature = std::ecdsa_secp256k1::verify_signature(pub_key_x, pub_key_y, signature, hashed_message); - assert(valid_signature); -} -``` - -## ecdsa_secp256r1::verify_signature - -Verifier for ECDSA Secp256r1 signatures - -```rust -fn verify_signature(_public_key_x : [u8; 32], _public_key_y : [u8; 32], _signature: [u8; 64], _message: [u8]) -> bool -``` - -example: - -```rust -fn main(hashed_message : [u8;32], pub_key_x : [u8;32], pub_key_y : [u8;32], signature : [u8;64]) { - let valid_signature = std::ecdsa_secp256r1::verify_signature(pub_key_x, pub_key_y, signature, hashed_message); - assert(valid_signature); -} -``` - - diff --git a/docs/versioned_docs/version-v0.9.0/standard_library/cryptographic_primitives/05_eddsa.mdx b/docs/versioned_docs/version-v0.9.0/standard_library/cryptographic_primitives/05_eddsa.mdx deleted file mode 100644 index 8f060ed3316..00000000000 --- a/docs/versioned_docs/version-v0.9.0/standard_library/cryptographic_primitives/05_eddsa.mdx +++ /dev/null @@ -1,17 +0,0 @@ ---- -title: EdDSA Verification -description: Learn about the cryptographic primitives regarding EdDSA -keywords: [cryptographic primitives, Noir project, eddsa, signatures] ---- - -import BlackBoxInfo from './common/_blackbox.mdx'; - -## eddsa::eddsa_poseidon_verify - -Verifier for EdDSA signatures - -```rust -fn eddsa_poseidon_verify(public_key_x : Field, public_key_y : Field, signature_s: Field, signature_r8_x: Field, signature_r8_y: Field, message: Field) -> bool -``` - - diff --git a/docs/versioned_docs/version-v0.9.0/standard_library/cryptographic_primitives/common/_blackbox.mdx b/docs/versioned_docs/version-v0.9.0/standard_library/cryptographic_primitives/common/_blackbox.mdx deleted file mode 100644 index 9fe9b48fbff..00000000000 --- a/docs/versioned_docs/version-v0.9.0/standard_library/cryptographic_primitives/common/_blackbox.mdx +++ /dev/null @@ -1,5 +0,0 @@ -:::info - -This is a black box function. Read [this section](../black_box_fns) to learn more about black box functions in Noir. - -::: \ No newline at end of file diff --git a/docs/versioned_docs/version-v0.9.0/standard_library/field_methods.md b/docs/versioned_docs/version-v0.9.0/standard_library/field_methods.md deleted file mode 100644 index 4d1cdc953e9..00000000000 --- a/docs/versioned_docs/version-v0.9.0/standard_library/field_methods.md +++ /dev/null @@ -1,149 +0,0 @@ ---- -title: Field Methods -description: - Learn about common methods on Noir Field, including to_le_bits, to_le_bytes, to_le_radix, - to_be_radix, pow_32, etc, and see code examples. -keywords: - [ - Noir Field, - to_le_bits, - to_le_bytes, - to_le_radix, - to_be_radix, - pow_32, - Little Endian, - Big Endian, - Vector, - Exponent, - ] ---- - -After declaring a Field, you can use these common methods on it: - -## to_le_bits - -Transforms the field into an array of bits, Little Endian. - -```rust -fn to_le_bits(_x : Field, _bit_size: u32) -> [u1; N] -``` - -example: - -```rust -fn main() { - let field = 2 - let bits = field.to_le_bits(32); -} -``` - -## to_be_bits - -Transforms the field into an array of bits, Big Endian. 
- -```rust -fn to_be_bits(_x : Field, _bit_size: u32) -> [u1; N] -``` - -example: - -```rust -fn main() { - let field = 2 - let bits = field.to_be_bits(32); -} -``` - -## to_le_bytes - -Transforms into an array of bytes, Little Endian - -```rust -fn to_le_bytes(_x : Field, byte_size: u32) -> [u8] -``` - -example: - -```rust -fn main() { - let field = 2 - let bytes = field.to_le_bytes(4); -} -``` - -## to_be_bytes - -Transforms into an array of bytes, Big Endian - -```rust -fn to_be_bytes(_x : Field, byte_size: u32) -> [u8] -``` - -example: - -```rust -fn main() { - let field = 2 - let bytes = field.to_be_bytes(4); -} -``` - -## to_le_radix - -Decomposes into a vector over the specified base, Little Endian - -```rust -fn to_le_radix(_x : Field, _radix: u32, _result_len: u32) -> [u8] -``` - -example: - -```rust -fn main() { - let field = 2 - let radix = field.to_le_radix(256, 4); -} -``` - -## to_be_radix - -Decomposes into a vector over the specified base, Big Endian - -```rust -fn to_be_radix(_x : Field, _radix: u32, _result_len: u32) -> [u8] -``` - -example: - -```rust -fn main() { - let field = 2 - let radix = field.to_be_radix(256, 4); -} -``` - -## pow_32 - -Returns the value to the power of the specified exponent - -```rust -fn pow_32(self, exponent: Field) -> Field -``` - -example: - -```rust -fn main() { - let field = 2 - let pow = field.pow_32(4); - assert(pow == 16); -} -``` - -## sgn0 - -Parity of (prime) Field element, i.e. sgn0(x mod p) = 0 if x ∈ {0, ..., p-1} is even, otherwise sgn0(x mod p) = 1. - -```rust -fn sgn0(self) -> u1 -``` diff --git a/docs/versioned_docs/version-v0.9.0/standard_library/logging.md b/docs/versioned_docs/version-v0.9.0/standard_library/logging.md deleted file mode 100644 index 42a84be1992..00000000000 --- a/docs/versioned_docs/version-v0.9.0/standard_library/logging.md +++ /dev/null @@ -1,42 +0,0 @@ ---- -title: Logging -description: - Learn how to use the println statement for debugging in Noir with this tutorial. Understand the - basics of logging in Noir and how to implement it in your code. -keywords: - [ - noir logging, - println statement, - debugging in noir, - noir std library, - logging tutorial, - basic logging in noir, - noir logging implementation, - noir debugging techniques, - rust, - ] ---- - -# Logging - -The standard library provides a familiar `println` statement you can use. Despite being a limited -implementation of rust's `println!` macro, this construct can be useful for debugging. - -The `println` statement is unconstrained, so it works for outputting integers, fields, strings, and even structs or expressions. For example: - -```rust -use dep::std; - -struct Person { - age : Field, - height : Field, -} - -fn main(age : Field, height : Field) { - let person = Person { age : age, height : height }; - std::println(person); - std::println(age + height); - std::println("Hello world!"); -} - -``` diff --git a/docs/versioned_docs/version-v0.9.0/standard_library/merkle_trees.md b/docs/versioned_docs/version-v0.9.0/standard_library/merkle_trees.md deleted file mode 100644 index 57d8c4a9e4f..00000000000 --- a/docs/versioned_docs/version-v0.9.0/standard_library/merkle_trees.md +++ /dev/null @@ -1,58 +0,0 @@ ---- -title: Merkle Trees -description: Learn about Merkle Trees in Noir with this tutorial. Explore the basics of computing a merkle root using a proof, with examples. 
-keywords: - [ - Merkle trees in Noir, - Noir programming language, - check membership, - computing root from leaf, - Noir Merkle tree implementation, - Merkle tree tutorial, - Merkle tree code examples, - Noir libraries, - pedersen hash., - ] ---- - -## compute_merkle_root - -Returns the root of the tree from the provided leaf and its hash path, using a [Pedersen hash](cryptographic_primitives/00_hashes.mdx#pedersen). - -```rust -fn compute_merkle_root(leaf : Field, index : Field, hash_path: [Field]) -> Field -``` - -example: - -```rust -/** - // these values are for this example only - index = "0" - priv_key = "0x000000000000000000000000000000000000000000000000000000616c696365" - secret = "0x1929ea3ab8d9106a899386883d9428f8256cfedb3c4f6b66bf4aa4d28a79988f" - note_hash_path = [ - "0x1e61bdae0f027b1b2159e1f9d3f8d00fa668a952dddd822fda80dc745d6f65cc", - "0x0e4223f3925f98934393c74975142bd73079ab0621f4ee133cee050a3c194f1a", - "0x2fd7bb412155bf8693a3bd2a3e7581a679c95c68a052f835dddca85fa1569a40" - ] - */ -fn main(index: Field, priv_key: Field, secret: Field, note_hash_path: [Field; 3]) { - - let pubkey = std::scalar_mul::fixed_base(priv_key); - let pubkey_x = pubkey[0]; - let pubkey_y = pubkey[1]; - let note_commitment = std::hash::pedersen([pubkey_x, pubkey_y, secret]); - - let root = std::merkle::compute_merkle_root(note_commitment[0], index, note_hash_path); - std::println(root); -} -``` - -To check merkle tree membership: - -1. Include a merkle root as a program input. -2. Compute the merkle root of a given leaf, index and hash path. -3. Assert the merkle roots are equal. - -For more info about merkle trees, see the Wikipedia [page](https://en.wikipedia.org/wiki/Merkle_tree). diff --git a/docs/versioned_docs/version-v0.9.0/standard_library/recursion.md b/docs/versioned_docs/version-v0.9.0/standard_library/recursion.md deleted file mode 100644 index 4705ae6c575..00000000000 --- a/docs/versioned_docs/version-v0.9.0/standard_library/recursion.md +++ /dev/null @@ -1,96 +0,0 @@ ---- -title: Recursive Proofs -description: Learn about how to write recursive proofs in Noir. -keywords: [recursion, recursive proofs, verification_key, aggregation object, verify_proof] ---- - -Noir supports recursively verifying proofs, meaning you verify the proof of a Noir program in another Noir program. This enables creating proofs of arbitrary size by doing step-wise verification of smaller components of a large proof. - -The `verify_proof` function takes a verification key, proof and public inputs for a zk program, as well as a key hash and an input aggregation object. The key hash is used to check the validity of the verification key and the input aggregation object is required by some proving systems. The `verify_proof` function returns an output aggregation object that can then be fed into future iterations of the proof verification if required. - -```rust -#[foreign(verify_proof)] -fn verify_proof(_verification_key : [Field], _proof : [Field], _public_input : Field, _key_hash : Field, _input_aggregation_object : [Field]) -> [Field] {} -``` - -:::info - -This is a black box function. Read [this section](./black_box_fns) to learn more about black box functions in Noir. - -::: - -## Aggregation Object - -The purpose of the input aggregation object is a little less clear though (and the output aggregation object that is returned from the `std::verify_proof` method). Recursive zkSNARK schemes do not necessarily "verify a proof" in the sense that you expect a true or false to be spit out by the verifier. 
Rather an aggregation object is built over the public inputs. In the case of PLONK the recursive aggregation object is two G1 points (expressed as 16 witness values). The final verifier (in our case this is most often the smart contract verifier) has to be aware of this aggregation object to execute a pairing and check the validity of these points (thus completing the recursive verification). - -So for example in this circuit: - -```rust -use dep::std; - -fn main( - verification_key : [Field; 114], - proof : [Field; 94], - public_inputs : [Field; 1], - key_hash : Field, - input_aggregation_object : [Field; 16], - proof_b : [Field; 94], -) -> pub [Field; 16] { - let output_aggregation_object_a = std::verify_proof( - verification_key, - proof, - public_inputs, - key_hash, - input_aggregation_object - ); - - let output_aggregation_object = std::verify_proof( - verification_key, - proof_b, - public_inputs, - key_hash, - output_aggregation_object_a - ); - - let mut output = [0; 16]; - for i in 0..16 { - output[i] = output_aggregation_object[i]; - } - output -} -``` - -In this example we have a circuit, that generates proofs A and B, that is being verified in circuit C. Assuming that the proof being passed in is not already a recursive proof, the `input_aggregation_object` will be all zeros. It will then generate an `output_aggregation_object`. This blob of data then becomes the `input_aggregation_object` of the next recursive aggregation we wish to compute. We can see here as the same public inputs, verification key, and key hash are used that we are verifying two proofs generated from the same circuit in this single circuit. `std::verify_proof` returns a `[Field]` because the size of an aggregation object is proof system dependent--in barretenberg, aggregation objects are two G1 points, while in Halo2, the aggregation object is a list of G1 points that is log the circuit size. So for the final step we convert the slice into an array of size 16 because we are generating proofs using UltraPlonk. - -## Parameters - -### `verification_key` - -The verification key for the zk program that is being verified. - -### `proof` - -The proof for the zk program that is being verified. - -### `public_inputs` - -These represent the public inputs of the proof we are verifying. They should be checked against in the circuit after construction of a new aggregation state. - -### `key_hash` - -A key hash is used to check the validity of the verification key. The circuit implementing this opcode can use this hash to ensure that the key provided to the circuit matches the key produced by the circuit creator. - -### `input_aggregation_object` - -An aggregation object is blob of data that the top-level verifier must run some proof system specific algorithm on to complete verification. The size is proof system specific and will be set by the backend integrating this opcode. The input aggregation object is only not `None` when we are verifying a previous recursive aggregation in the current circuit. If this is the first recursive aggregation there is no input aggregation object. It is left to the backend to determine how to handle when there is no input aggregation object. - -## Return value - -### `output_aggregation_object` - -This is the result of a recursive aggregation and is what will be fed into the next verifier. 
-The next verifier can either perform a final verification (returning true or false) or perform another recursive aggregation where this output aggregation object will be the input aggregation object of the next recursive aggregation. - -## Example - -You can see an example of how to do recursive proofs in [this example recursion demo repo](https://github.com/Savio-Sou/recursion-demo/tree/main). diff --git a/docs/versioned_docs/version-v0.9.0/standard_library/slice_methods.md b/docs/versioned_docs/version-v0.9.0/standard_library/slice_methods.md deleted file mode 100644 index 8b93d8ea427..00000000000 --- a/docs/versioned_docs/version-v0.9.0/standard_library/slice_methods.md +++ /dev/null @@ -1,279 +0,0 @@ ---- -title: Slice Methods -description: - Learn about the commonly used methods available for slices in Noir, including push_back, len, srt, map, fold, reduce, all, and any. -keywords: [rust, slice, methods, push_back, len, sort, fold, reduce, all, any] ---- - -For convenience, the STD provides some ready-to-use, common methods for slices: - -## push_back - -Pushes a new element to the end of the slice, returning a new slice with a length one greater than the original unmodified slice. - -```rust -fn push_back(_self: [T], _elem: T) -> [T] -``` - -example: - -```rust -fn main() -> pub Field { - let mut slice: [Field] = [0; 2]; - - let mut new_slice = slice.push_back(6); - new_slice.len() -} -``` - -View the corresponding test file [here][test-file]. - -## push_front - -Returns a new array with the specified element inserted at index 0. The existing elements indexes are incremented by 1. - -```rust -fn push_front(_self: Self, _elem: T) -> Self -``` - -Example: - -```rust -let mut new_slice: [Field] = []; -new_slice = new_slice.push_front(20); -assert(new_slice[0] == 20); // returns true -``` - -View the corresponding test file [here][test-file]. - -## pop_front - -Returns a tuple of two items, the first element of the array and the rest of the array. - -```rust -fn pop_front(_self: Self) -> (T, Self) -``` - -Example: - -```rust -let (first_elem, rest_of_slice) = slice.pop_front(); -``` - -View the corresponding test file [here][test-file]. - -## pop_back - -Returns a tuple of two items, the beginning of the array with the last element omitted and the last element. - -```rust -fn pop_back(_self: Self) -> (Self, T) -``` - -Example: - -```rust -let (popped_slice, last_elem) = slice.pop_back(); -``` - -View the corresponding test file [here][test-file]. - -## insert - -Inserts an element at a specified index and shifts all following elements by 1. - -```rust -fn insert(_self: Self, _index: Field, _elem: T) -> Self -``` - -Example: - -```rust - new_slice = rest_of_slice.insert(2, 100); -assert(new_slice[2] == 100); -``` - -View the corresponding test file [here][test-file]. - -## remove - -Remove an element at a specified index, shifting all elements after it to the left, returning the altered slice and the removed element. - -```rust -fn remove(_self: Self, _index: Field) -> (Self, T) -``` - -Example: - -```rust -let (remove_slice, removed_elem) = slice.remove(3); -``` - -View the corresponding test file [here]([test-file]. - -## len - -Returns the length of a slice - -```rust -fn len(_slice: [T]) -> comptime Field -``` - -Example: - -```rust -fn main() { - let slic = [42, 42] - assert(slic.len() == 2); -} -``` - -## sort - -Returns a new sorted slice. The original slice remains untouched. Notice that this function will -only work for slices of fields or integers, not for any arbitrary type. 
This is because the sorting -logic the function uses internally is optimized specifically for these values. If you need a sort function to -sort any type, you should use the function `sort_via` described below. - -```rust -fn sort(_slice: [T]) -> [T] -``` - -Example: - -```rust -fn main() { - let slic = [42, 32]; - let sorted = slic.sort(); - assert(sorted == [32, 42]); -} -``` - -## sort_via - -Sorts the slice with a custom comparison function. - -```rust -fn sort_via(mut a: [T], ordering: fn(T, T) -> bool) -> [T] -``` - -Example: - -```rust -fn main() { - let slic = [42, 32]; - let sorted_ascending = slic.sort_via(|a, b| a < b); - assert(sorted_ascending == [32, 42]); // verifies - - let sorted_descending = slic.sort_via(|a, b| a > b); - assert(sorted_descending == [32, 42]); // does not verify -} -``` - -## map - -Applies a function to each element of the slice, returning a new slice containing the mapped elements. - -```rust -fn map(f: fn(T) -> U) -> [U] -``` - -Example: - -```rust -let a = [1, 2, 3]; -let b = a.map(|a| a * 2); // b is now [2, 4, 6] -``` - -## fold - -Applies a function to each element of the slice, returning the final accumulated value. The first -parameter is the initial value. - -```rust -fn fold(mut accumulator: U, f: fn(U, T) -> U) -> U -``` - -This is a left fold, so the given function will be applied to the accumulator and the first element of -the slice, then the second, and so on. For a given call, the expected result would be equivalent to: - -```rust -let a1 = [1]; -let a2 = [1, 2]; -let a3 = [1, 2, 3]; - -let f = |a, b| a - b; -a1.fold(10, f) //=> f(10, 1) -a2.fold(10, f) //=> f(f(10, 1), 2) -a3.fold(10, f) //=> f(f(f(10, 1), 2), 3) -``` - -Example: - -```rust -fn main() { - let slic = [2,2,2,2,2]; - let folded = slic.fold(0, |a, b| a + b); - assert(folded == 10); -} -``` - -## reduce - -Same as fold, but uses the first element as the starting element. - -```rust -fn reduce(f: fn(T, T) -> T) -> T -``` - -Example: - -```rust -fn main() { - let slic = [2,2,2,2,2]; - let reduced = slic.reduce(|a, b| a + b); - assert(reduced == 10); -} -``` - -## all - -Returns true if all the elements satisfy the given predicate. - -```rust -fn all(predicate: fn(T) -> bool) -> bool -``` - -Example: - -```rust -fn main() { - let slic = [2,2,2,2,2]; - let all = slic.all(|a| a == 2); - assert(all); -} -``` - -## any - -Returns true if any of the elements satisfy the given predicate. - -```rust -fn any(predicate: fn(T) -> bool) -> bool -``` - -Example: - -```rust -fn main() { - let slic = [2,2,2,2,5]; - let any = slic.any(|a| a == 5); - assert(any); -} -``` - -[test-file]: https://github.com/noir-lang/noir/blob/f387ec1475129732f72ba294877efdf6857135ac/crates/nargo_cli/tests/test_data_ssa_refactor/slices/src/main.nr - diff --git a/docs/versioned_docs/version-v0.9.0/typescript.md b/docs/versioned_docs/version-v0.9.0/typescript.md deleted file mode 100644 index 8608783784c..00000000000 --- a/docs/versioned_docs/version-v0.9.0/typescript.md +++ /dev/null @@ -1,243 +0,0 @@ ---- -title: Working with TypeScript -description: - Learn how to interact with Noir programs using TypeScript. Follow this tutorial to compile your - program, specify inputs, initialize a prover & verifier, and prove and verify your program. -keywords: [TypeScript, Noir, tutorial, compile, inputs, prover, verifier, proof] ---- - -Interactions with Noir programs can also be performed in TypeScript, which can come in handy when -writing tests or when working in TypeScript-based projects like [Hardhat](https://hardhat.org/).
- -You can check the complete code for this tutorial here: [browser with next.js](https://github.com/signorecello/noir-min-browser-example) and [node.js](https://github.com/signorecello/noir-min-nodejs-example). If you want just a browser boilerplate to start with, check out the [noir-starter](https://github.com/noir-lang/noir-starter) for an example implementation. - -:::note - -You may find unexpected errors working with some frameworks such as `vite`. This is due to the -nature of `wasm` files and the way Noir uses web workers. As we figure it out, we suggest using -[Create React App](https://create-react-app.dev/), or [Next.js](https://nextjs.org/) for a quick -start. - -::: - -## Setup - -Make sure you are using Noir version >= 0.10.1. - -You can check your current version by running `nargo --version`. - -You can install version 0.10.1 with noirup with - -```bash -noirup -v 0.10.1 -``` - -See the [Installation page](./getting_started/nargo_installation) for more info. - -We're assuming you're using ES6 and ESM for both browser (for example with React), or nodejs. Install [Node.js](https://nodejs.org/en). Init a new project with `npm init` and add `"type": "module"` to your `package.json`, to let `node` know we're using the new ESM sytem: - -```json -{ - "type": "module" - // the rest of your package.json -} -``` - -Install Noir dependencies in your project by running: - -```bash -npm i @aztec/bb.js@0.3.6 https://git@github.com/noir-lang/acvm-simulator-wasm.git#b9d9ca9dfc5140839f23998d9466307215607c42 fflate ethers@5.7.2 -``` - -This will install the `acvm-simulator` that will generate our witness, and the proving backend barretenberg `bb.js`. - -We're also installing `ethers` because we're too lazy to write a function that pads public inputs with 32bytes, and `fflate` to help us decompress our circuit bytecode. - -Since we're with typescript and using `nodejs` types, we also recommend to install the `@types/node` package, otherwise your IDE will scream at you. - -```bash -npm i --save-dev @types/node -``` - -:::note - -While Noir is in rapid development, some packages could interfere with others. For that reason, you -should use these specified versions. Let us know if for some reason you need to use other ones. - -::: - -As for the circuit, run `nargo init` to create a new Noir project. - -We will use a Standard Noir Example and place it in the `src` folder. This program simply multiplies input `x` with input `y` and returns the result `z`. The verifier doesn't know the value of `x`: we're proving that we know it without making it public. - -```rust -// src/main.nr -fn main(x: u32, y: pub u32) -> pub u32 { - let z = x * y; - z -} -``` - -One valid scenario for proving could be `x = 3`, `y = 4` and `return = 12` - -## Compiling - -In order to start proving, we need to compile our circuit into the intermediate representation used by our backend. As of today, you have to do that with `nargo`. Just hop to your circuits folder and run `nargo compile`. - -:::info - -At this time, you need to use a nightly version of nargo. Using [noirup](./getting_started/00_nargo_installation.md#option-1-noirup) you can do this simply by running `noirup -n`. - -::: - -You should have a `json` file in `target/` with your circuit's bytecode. The json file is name based on the project name specified in Nargo.toml, so for a project named "test", it will be at `target/test.json`. You can then import that file normally. 
- -```ts -import circuit from '../target/test.json' assert { type: 'json' }; -``` - -## Decompressing the circuit - -The compiled circuit comes compressed. We need to decompress it, that's where `fflate` comes in. - -```ts -import { decompressSync } from 'fflate'; - -const acirBuffer = Buffer.from(circuit.bytecode, 'base64'); -const acirBufferUncompressed = decompressSync(acirBuffer); -``` - -From here, it's highly recommended you store `acirBuffer` and `acirBufferUncompressed` close by, as they will be used for witness generation and proving. - -## Initializing ACVM and BB.JS - -:::note - -This step will eventually be abstracted away as Noir tooling matures. For now, you should be fine just literally copy-pasting most of this into your own code. - -::: - -Before proving, `bb.js` needs to be initialized. We need to import some functions and use them - -```ts -import { Crs, newBarretenbergApiAsync, RawBuffer } from '@aztec/bb.js/dest/node/index.js'; - -const api = await newBarretenbergApiAsync(4); - -const [exact, circuitSize, subgroup] = await api.acirGetCircuitSizes(acirBufferUncompressed); -const subgroupSize = Math.pow(2, Math.ceil(Math.log2(circuitSize))); -const crs = await Crs.new(subgroupSize + 1); -await api.commonInitSlabAllocator(subgroupSize); -await api.srsInitSrs(new RawBuffer(crs.getG1Data()), crs.numPoints, new RawBuffer(crs.getG2Data())); - -const acirComposer = await api.acirNewAcirComposer(subgroupSize); -``` - -We should take two very useful objects from here: `api` and `acirComposer`. Make sure to keep these close by! - -:::info - -On the browser, you also need to init the ACVM. You can do that by importing it and calling it like: - -```ts -import initACVM, { executeCircuit, compressWitness } from '@noir-lang/acvm_js'; - -await initACVM(); -// the rest of your code -``` - -::: - -## Generating witnesses - -Witness generation is what allows us to prove with arbitrary inputs (like user inputs on a form, game, etc). In this example, our input is a simple object with our circuit inputs `x`, `y`, and return `z` (fun fact: the return value in Noir is actually a public input!). We're wrapping it in a function, so it can be conveniently called later on. - -```ts -import { ethers } from 'ethers'; // I'm lazy so I'm using ethers to pad my input -import { executeCircuit, compressWitness } from '@noir-lang/acvm_js'; - -async function generateWitness(input: any, acirBuffer: Buffer): Promise { - const initialWitness = new Map(); - initialWitness.set(1, ethers.utils.hexZeroPad(`0x${input.x.toString(16)}`, 32)); - initialWitness.set(2, ethers.utils.hexZeroPad(`0x${input.y.toString(16)}`, 32)); - - const witnessMap = await executeCircuit(acirBuffer, initialWitness, () => { - throw Error('unexpected oracle'); - }); - - const witnessBuff = compressWitness(witnessMap); - return witnessBuff; -} -``` - -## Proving - -Finally, we're ready to prove with our backend. 
Just like with the witness generation, could be useful to wrap it in its own function: - -```ts -async function generateProof(witness: Uint8Array) { - const proof = await api.acirCreateProof( - acirComposer, - acirBufferUncompressed, - decompressSync(witness), - false, - ); - return proof; -} -``` - -## Verifying - -Our backend should also be ready to verify our proof: - -```ts -async function verifyProof(proof: Uint8Array) { - await api.acirInitProvingKey(acirComposer, acirBufferUncompressed); - const verified = await api.acirVerifyProof(acirComposer, proof, false); - return verified; -} -``` - -## Now for the fun part - -Let's call our functions, and destroy our API! - -```ts -const input = { x: 3, y: 4 }; -const witness = await generateWitness(input, acirBuffer); -console.log('Witness generated!'); -const proof = await generateProof(witness); -console.log('Proof generated!'); -await verifyProof(proof); -console.log('Proof verified!'); -api.destroy(); -``` - -You can use [this](https://gist.github.com/critesjosh/6f3ba19fdc9298b24e90ba4f736247dc) tsconfig.json. You can see the script [here](https://gist.github.com/critesjosh/4aa36e87a0cc3f09feaf1febb4d11348). - -## Verifying with Smart Contract - -Alternatively, a verifier smart contract can be generated and used for verifying Noir proofs in -TypeScript as well. - -This could be useful if the Noir program is designed to be decentrally verified and/or make use of -decentralized states and logics that is handled at the smart contract level. - -This assumes you've already ran `nargo codegen-verifier`, got your smart contract, and deployed it with Hardhat, Foundry, or your tool of choice. You can then verify a Noir proof by simply calling it. - -Currently, `bb.js` appends the public inputs to the proof. However, these inputs need to be fed separately to the verifier contract. A simple solution is to just slice them from the resulting proof, like this: - -```ts -import { ethers } from 'ethers'; // example using ethers v5 -import artifacts from '../artifacts/circuits/contract/plonk_vk.sol/UltraVerifier.json'; // I compiled using Hardhat, so I'm getting my abi from here - -const verifierAddress = '0x123455'; // your verifier address -const provider = new ethers.providers.Web3Provider(window.ethereum); -const signer = this.provider.getSigner(); - -const contract = new ethers.Contract(verifierAddress, artifacts.abi, signer); - -const publicInputs = proof.slice(0, 32); -const slicedProof = proof.slice(32); -await contract.verify(slicedProof, [publicInputs]); -``` diff --git a/docs/versioned_sidebars/version-v..-sidebars.json b/docs/versioned_sidebars/version-v..-sidebars.json new file mode 100644 index 00000000000..b16f79cc176 --- /dev/null +++ b/docs/versioned_sidebars/version-v..-sidebars.json @@ -0,0 +1,83 @@ +{ + "sidebar": [ + { + "type": "doc", + "id": "index" + }, + { + "type": "category", + "label": "Getting Started", + "items": [ + { + "type": "autogenerated", + "dirName": "getting_started" + } + ] + }, + { + "type": "category", + "label": "The Noir Language", + "items": [ + { + "type": "autogenerated", + "dirName": "noir" + } + ] + }, + { + "type": "html", + "value": "
", + "defaultStyle": true + }, + { + "type": "category", + "label": "How To Guides", + "items": [ + { + "type": "autogenerated", + "dirName": "how_to" + } + ] + }, + { + "type": "category", + "label": "Explainers", + "items": [ + { + "type": "autogenerated", + "dirName": "explainers" + } + ] + }, + { + "type": "category", + "label": "Tutorials", + "items": [ + { + "type": "autogenerated", + "dirName": "tutorials" + } + ] + }, + { + "type": "category", + "label": "Reference", + "items": [ + { + "type": "autogenerated", + "dirName": "reference" + } + ] + }, + { + "type": "html", + "value": "
", + "defaultStyle": true + }, + { + "type": "doc", + "id": "migration_notes", + "label": "Migration notes" + } + ] +} diff --git a/docs/versioned_sidebars/version-v0.10.5-sidebars.json b/docs/versioned_sidebars/version-v0.10.5-sidebars.json deleted file mode 100644 index a51ebf87643..00000000000 --- a/docs/versioned_sidebars/version-v0.10.5-sidebars.json +++ /dev/null @@ -1,111 +0,0 @@ -{ - "sidebar": [ - { - "type": "doc", - "id": "index", - "label": "Noir" - }, - { - "type": "category", - "label": "Getting Started", - "items": [ - { - "type": "autogenerated", - "dirName": "getting_started" - } - ] - }, - { - "type": "category", - "label": "Examples", - "items": [ - { - "type": "autogenerated", - "dirName": "examples" - } - ] - }, - { - "type": "category", - "label": "Nargo", - "items": [ - { - "type": "autogenerated", - "dirName": "nargo" - } - ] - }, - { - "type": "category", - "label": "Language Concepts", - "items": [ - { - "type": "category", - "label": "Data Types", - "link": { - "type": "doc", - "id": "language_concepts/data_types" - }, - "items": [ - { - "type": "autogenerated", - "dirName": "language_concepts/data_types" - } - ] - }, - "language_concepts/functions", - "language_concepts/control_flow", - "language_concepts/ops", - "language_concepts/assert", - "language_concepts/unconstrained", - "language_concepts/generics", - "language_concepts/mutability", - "language_concepts/lambdas", - "language_concepts/comments", - "language_concepts/distinct", - "language_concepts/shadowing" - ] - }, - { - "type": "category", - "label": "Noir Standard Library", - "items": [ - { - "type": "category", - "label": "Cryptographic Primitives", - "link": { - "type": "doc", - "id": "standard_library/cryptographic_primitives" - }, - "items": [ - { - "type": "autogenerated", - "dirName": "standard_library/cryptographic_primitives" - } - ] - }, - "standard_library/recursion", - "standard_library/logging", - "standard_library/merkle_trees", - "standard_library/zeroed", - "standard_library/black_box_fns", - "standard_library/options" - ] - }, - { - "type": "category", - "label": "Modules, Packages and Crates", - "items": [ - { - "type": "autogenerated", - "dirName": "modules_packages_crates" - } - ] - }, - { - "type": "doc", - "id": "typescript", - "label": "Working with Typescript" - } - ] -} diff --git a/docs/versioned_sidebars/version-v0.19.0-sidebars.json b/docs/versioned_sidebars/version-v0.19.0-sidebars.json new file mode 100644 index 00000000000..a9ec39925d9 --- /dev/null +++ b/docs/versioned_sidebars/version-v0.19.0-sidebars.json @@ -0,0 +1,141 @@ +{ + "sidebar": [ + { + "type": "doc", + "id": "index", + "label": "Noir" + }, + { + "type": "category", + "label": "Getting Started", + "items": [ + { + "type": "autogenerated", + "dirName": "getting_started" + } + ] + }, + { + "type": "category", + "label": "Examples", + "items": [ + { + "type": "autogenerated", + "dirName": "examples" + } + ] + }, + { + "type": "category", + "label": "Nargo", + "items": [ + { + "type": "autogenerated", + "dirName": "nargo" + } + ] + }, + { + "type": "category", + "label": "Language Concepts", + "items": [ + { + "type": "category", + "label": "Data Types", + "link": { + "type": "doc", + "id": "language_concepts/data_types" + }, + "items": [ + { + "type": "autogenerated", + "dirName": "language_concepts/data_types" + } + ] + }, + "language_concepts/functions", + "language_concepts/control_flow", + "language_concepts/ops", + "language_concepts/assert", + "language_concepts/unconstrained", + 
"language_concepts/generics", + "language_concepts/mutability", + "language_concepts/lambdas", + "language_concepts/comments", + "language_concepts/distinct", + "language_concepts/shadowing" + ] + }, + { + "type": "category", + "label": "Noir Standard Library", + "items": [ + { + "type": "category", + "label": "Cryptographic Primitives", + "link": { + "type": "doc", + "id": "standard_library/cryptographic_primitives" + }, + "items": [ + { + "type": "autogenerated", + "dirName": "standard_library/cryptographic_primitives" + } + ] + }, + "standard_library/recursion", + "standard_library/logging", + "standard_library/merkle_trees", + "standard_library/zeroed", + "standard_library/black_box_fns", + "standard_library/options" + ] + }, + { + "type": "category", + "label": "Modules, Packages and Crates", + "items": [ + { + "type": "autogenerated", + "dirName": "modules_packages_crates" + } + ] + }, + { + "type": "category", + "label": "NoirJS", + "link": { + "type": "doc", + "id": "noir_js/noir_js" + }, + "items": [ + { + "type": "category", + "label": "Guides", + "items": [ + { + "type": "autogenerated", + "dirName": "noir_js/getting_started" + } + ] + }, + { + "type": "category", + "label": "Reference", + "items": [ + { + "type": "autogenerated", + "dirName": "noir_js/reference" + } + ] + } + ] + }, + { + "type": "doc", + "id": "migration_notes", + "label": "Migration notes" + } + ] +} diff --git a/docs/versioned_sidebars/version-v0.19.3-sidebars.json b/docs/versioned_sidebars/version-v0.19.3-sidebars.json new file mode 100644 index 00000000000..6823055c5d3 --- /dev/null +++ b/docs/versioned_sidebars/version-v0.19.3-sidebars.json @@ -0,0 +1,288 @@ +{ + "sidebar": [ + { + "type": "doc", + "id": "index", + "label": "Noir" + }, + { + "type": "category", + "label": "Getting Started", + "items": [ + { + "type": "autogenerated", + "dirName": "getting_started" + } + ] + }, + { + "type": "category", + "label": "Examples", + "items": [ + { + "type": "autogenerated", + "dirName": "examples" + } + ] + }, + { + "type": "category", + "label": "Nargo", + "items": [ + { + "type": "autogenerated", + "dirName": "nargo" + } + ] + }, + { + "type": "category", + "label": "Language Concepts", + "items": [ + { + "type": "category", + "label": "Data Types", + "link": { + "type": "doc", + "id": "language_concepts/data_types" + }, + "items": [ + { + "type": "autogenerated", + "dirName": "language_concepts/data_types" + } + ] + }, + "language_concepts/functions", + "language_concepts/control_flow", + "language_concepts/ops", + "language_concepts/assert", + "language_concepts/unconstrained", + "language_concepts/generics", + "language_concepts/mutability", + "language_concepts/lambdas", + "language_concepts/comments", + "language_concepts/distinct", + "language_concepts/shadowing" + ] + }, + { + "type": "category", + "label": "Noir Standard Library", + "items": [ + { + "type": "category", + "label": "Cryptographic Primitives", + "link": { + "type": "doc", + "id": "standard_library/cryptographic_primitives" + }, + "items": [ + { + "type": "autogenerated", + "dirName": "standard_library/cryptographic_primitives" + } + ] + }, + "standard_library/recursion", + "standard_library/logging", + "standard_library/merkle_trees", + "standard_library/zeroed", + "standard_library/black_box_fns", + "standard_library/options" + ] + }, + { + "type": "category", + "label": "Modules, Packages and Crates", + "items": [ + { + "type": "autogenerated", + "dirName": "modules_packages_crates" + } + ] + }, + { + "type": "category", + "label": 
"NoirJS", + "link": { + "type": "doc", + "id": "noir_js/noir_js" + }, + "items": [ + { + "type": "category", + "label": "Guides", + "items": [ + { + "type": "autogenerated", + "dirName": "noir_js/getting_started" + } + ] + }, + { + "type": "category", + "label": "Reference", + "items": [ + { + "type": "category", + "label": "Noir JS", + "link": { + "type": "doc", + "id": "noir_js/reference/noir_js/index" + }, + "items": [ + { + "type": "category", + "label": "Classes", + "items": [ + { + "type": "doc", + "id": "noir_js/reference/noir_js/classes/Noir", + "label": "Noir" + } + ] + }, + { + "type": "category", + "label": "Type Aliases", + "items": [ + { + "type": "doc", + "id": "noir_js/reference/noir_js/type-aliases/CompiledCircuit", + "label": "CompiledCircuit" + }, + { + "type": "doc", + "id": "noir_js/reference/noir_js/type-aliases/ForeignCallHandler", + "label": "ForeignCallHandler" + }, + { + "type": "doc", + "id": "noir_js/reference/noir_js/type-aliases/ForeignCallInput", + "label": "ForeignCallInput" + }, + { + "type": "doc", + "id": "noir_js/reference/noir_js/type-aliases/ForeignCallOutput", + "label": "ForeignCallOutput" + }, + { + "type": "doc", + "id": "noir_js/reference/noir_js/type-aliases/ProofData", + "label": "ProofData" + }, + { + "type": "doc", + "id": "noir_js/reference/noir_js/type-aliases/WitnessMap", + "label": "WitnessMap" + } + ] + }, + { + "type": "category", + "label": "Functions", + "items": [ + { + "type": "doc", + "id": "noir_js/reference/noir_js/functions/and", + "label": "and" + }, + { + "type": "doc", + "id": "noir_js/reference/noir_js/functions/blake2s256", + "label": "blake2s256" + }, + { + "type": "doc", + "id": "noir_js/reference/noir_js/functions/ecdsa_secp256k1_verify", + "label": "ecdsa_secp256k1_verify" + }, + { + "type": "doc", + "id": "noir_js/reference/noir_js/functions/ecdsa_secp256r1_verify", + "label": "ecdsa_secp256r1_verify" + }, + { + "type": "doc", + "id": "noir_js/reference/noir_js/functions/keccak256", + "label": "keccak256" + }, + { + "type": "doc", + "id": "noir_js/reference/noir_js/functions/sha256", + "label": "sha256" + }, + { + "type": "doc", + "id": "noir_js/reference/noir_js/functions/xor", + "label": "xor" + } + ] + } + ] + }, + { + "type": "category", + "label": "Backend Barretenberg", + "link": { + "type": "doc", + "id": "noir_js/reference/backend_barretenberg/index" + }, + "items": [ + { + "type": "category", + "label": "Classes", + "items": [ + { + "type": "doc", + "id": "noir_js/reference/backend_barretenberg/classes/BarretenbergBackend", + "label": "BarretenbergBackend" + } + ] + }, + { + "type": "category", + "label": "Interfaces", + "items": [ + { + "type": "doc", + "id": "noir_js/reference/backend_barretenberg/interfaces/Backend", + "label": "Backend" + } + ] + }, + { + "type": "category", + "label": "Type Aliases", + "items": [ + { + "type": "doc", + "id": "noir_js/reference/backend_barretenberg/type-aliases/BackendOptions", + "label": "BackendOptions" + }, + { + "type": "doc", + "id": "noir_js/reference/backend_barretenberg/type-aliases/CompiledCircuit", + "label": "CompiledCircuit" + }, + { + "type": "doc", + "id": "noir_js/reference/backend_barretenberg/type-aliases/ProofData", + "label": "ProofData" + } + ] + } + ] + } + ] + } + ] + }, + { + "type": "doc", + "id": "migration_notes", + "label": "Migration notes" + } + ] +} diff --git a/docs/versioned_sidebars/version-v0.19.4-sidebars.json b/docs/versioned_sidebars/version-v0.19.4-sidebars.json new file mode 100644 index 00000000000..a1675eca18d --- /dev/null +++ 
b/docs/versioned_sidebars/version-v0.19.4-sidebars.json @@ -0,0 +1,293 @@ +{ + "sidebar": [ + { + "type": "doc", + "id": "index", + "label": "Noir" + }, + { + "type": "category", + "label": "Getting Started", + "items": [ + { + "type": "autogenerated", + "dirName": "getting_started" + } + ] + }, + { + "type": "category", + "label": "Examples", + "items": [ + { + "type": "autogenerated", + "dirName": "examples" + } + ] + }, + { + "type": "category", + "label": "Nargo", + "items": [ + { + "type": "autogenerated", + "dirName": "nargo" + } + ] + }, + { + "type": "category", + "label": "Language Concepts", + "items": [ + { + "type": "category", + "label": "Data Types", + "link": { + "type": "doc", + "id": "language_concepts/data_types" + }, + "items": [ + { + "type": "autogenerated", + "dirName": "language_concepts/data_types" + } + ] + }, + "language_concepts/functions", + "language_concepts/control_flow", + "language_concepts/ops", + "language_concepts/assert", + "language_concepts/unconstrained", + "language_concepts/generics", + "language_concepts/mutability", + "language_concepts/lambdas", + "language_concepts/comments", + "language_concepts/distinct", + "language_concepts/shadowing" + ] + }, + { + "type": "category", + "label": "Noir Standard Library", + "items": [ + { + "type": "category", + "label": "Cryptographic Primitives", + "link": { + "type": "doc", + "id": "standard_library/cryptographic_primitives" + }, + "items": [ + { + "type": "autogenerated", + "dirName": "standard_library/cryptographic_primitives" + } + ] + }, + "standard_library/recursion", + "standard_library/logging", + "standard_library/merkle_trees", + "standard_library/zeroed", + "standard_library/black_box_fns", + "standard_library/options" + ] + }, + { + "type": "category", + "label": "Modules, Packages and Crates", + "items": [ + { + "type": "autogenerated", + "dirName": "modules_packages_crates" + } + ] + }, + { + "type": "category", + "label": "NoirJS", + "link": { + "type": "doc", + "id": "noir_js/noir_js" + }, + "items": [ + { + "type": "category", + "label": "Guides", + "items": [ + { + "type": "autogenerated", + "dirName": "noir_js/getting_started" + } + ] + }, + { + "type": "category", + "label": "Reference", + "items": [ + { + "type": "category", + "label": "Noir JS", + "link": { + "type": "doc", + "id": "noir_js/reference/noir_js/index" + }, + "items": [ + { + "type": "category", + "label": "Classes", + "items": [ + { + "type": "doc", + "id": "noir_js/reference/noir_js/classes/Noir", + "label": "Noir" + } + ] + }, + { + "type": "category", + "label": "Type Aliases", + "items": [ + { + "type": "doc", + "id": "noir_js/reference/noir_js/type-aliases/CompiledCircuit", + "label": "CompiledCircuit" + }, + { + "type": "doc", + "id": "noir_js/reference/noir_js/type-aliases/ForeignCallHandler", + "label": "ForeignCallHandler" + }, + { + "type": "doc", + "id": "noir_js/reference/noir_js/type-aliases/ForeignCallInput", + "label": "ForeignCallInput" + }, + { + "type": "doc", + "id": "noir_js/reference/noir_js/type-aliases/ForeignCallOutput", + "label": "ForeignCallOutput" + }, + { + "type": "doc", + "id": "noir_js/reference/noir_js/type-aliases/InputMap", + "label": "InputMap" + }, + { + "type": "doc", + "id": "noir_js/reference/noir_js/type-aliases/ProofData", + "label": "ProofData" + }, + { + "type": "doc", + "id": "noir_js/reference/noir_js/type-aliases/WitnessMap", + "label": "WitnessMap" + } + ] + }, + { + "type": "category", + "label": "Functions", + "items": [ + { + "type": "doc", + "id": 
"noir_js/reference/noir_js/functions/and", + "label": "and" + }, + { + "type": "doc", + "id": "noir_js/reference/noir_js/functions/blake2s256", + "label": "blake2s256" + }, + { + "type": "doc", + "id": "noir_js/reference/noir_js/functions/ecdsa_secp256k1_verify", + "label": "ecdsa_secp256k1_verify" + }, + { + "type": "doc", + "id": "noir_js/reference/noir_js/functions/ecdsa_secp256r1_verify", + "label": "ecdsa_secp256r1_verify" + }, + { + "type": "doc", + "id": "noir_js/reference/noir_js/functions/keccak256", + "label": "keccak256" + }, + { + "type": "doc", + "id": "noir_js/reference/noir_js/functions/sha256", + "label": "sha256" + }, + { + "type": "doc", + "id": "noir_js/reference/noir_js/functions/xor", + "label": "xor" + } + ] + } + ] + }, + { + "type": "category", + "label": "Backend Barretenberg", + "link": { + "type": "doc", + "id": "noir_js/reference/backend_barretenberg/index" + }, + "items": [ + { + "type": "category", + "label": "Classes", + "items": [ + { + "type": "doc", + "id": "noir_js/reference/backend_barretenberg/classes/BarretenbergBackend", + "label": "BarretenbergBackend" + } + ] + }, + { + "type": "category", + "label": "Interfaces", + "items": [ + { + "type": "doc", + "id": "noir_js/reference/backend_barretenberg/interfaces/Backend", + "label": "Backend" + } + ] + }, + { + "type": "category", + "label": "Type Aliases", + "items": [ + { + "type": "doc", + "id": "noir_js/reference/backend_barretenberg/type-aliases/BackendOptions", + "label": "BackendOptions" + }, + { + "type": "doc", + "id": "noir_js/reference/backend_barretenberg/type-aliases/CompiledCircuit", + "label": "CompiledCircuit" + }, + { + "type": "doc", + "id": "noir_js/reference/backend_barretenberg/type-aliases/ProofData", + "label": "ProofData" + } + ] + } + ] + } + ] + } + ] + }, + { + "type": "doc", + "id": "migration_notes", + "label": "Migration notes" + } + ] +} diff --git a/docs/versioned_sidebars/version-v0.6.0-sidebars.json b/docs/versioned_sidebars/version-v0.6.0-sidebars.json deleted file mode 100644 index 7323ae1c504..00000000000 --- a/docs/versioned_sidebars/version-v0.6.0-sidebars.json +++ /dev/null @@ -1,90 +0,0 @@ -{ - "sidebar": [ - { - "type": "doc", - "id": "index", - "label": "Noir" - }, - { - "type": "category", - "label": "Getting Started", - "items": [ - { - "type": "autogenerated", - "dirName": "getting_started" - } - ] - }, - { - "type": "category", - "label": "Examples", - "items": [ - { - "type": "autogenerated", - "dirName": "examples" - } - ] - }, - { - "type": "category", - "label": "Nargo", - "items": [ - { - "type": "autogenerated", - "dirName": "nargo" - } - ] - }, - { - "type": "category", - "label": "Language Concepts", - "items": [ - { - "type": "autogenerated", - "dirName": "language_concepts" - } - ] - }, - { - "type": "category", - "label": "Noir Standard Library", - "items": [ - { - "type": "category", - "label": "Cryptographic Primitives", - "link": { - "type": "doc", - "id": "standard_library/cryptographic_primitives" - }, - "items": [ - { - "type": "autogenerated", - "dirName": "standard_library/cryptographic_primitives" - } - ] - }, - "standard_library/array_methods", - "standard_library/field_methods", - "standard_library/logging", - "standard_library/merkle_trees", - "standard_library/zeroed", - "standard_library/black_box_fns" - ] - }, - { - "type": "category", - "label": "Modules, Packages and Crates", - "items": [ - { - "type": "autogenerated", - "dirName": "modules_packages_crates" - } - ] - }, - { - "type": "doc", - "id": "typescript", - "label": "Working 
with Typescript" - } - ] -} diff --git a/docs/versioned_sidebars/version-v0.7.1-sidebars.json b/docs/versioned_sidebars/version-v0.7.1-sidebars.json deleted file mode 100644 index 7323ae1c504..00000000000 --- a/docs/versioned_sidebars/version-v0.7.1-sidebars.json +++ /dev/null @@ -1,90 +0,0 @@ -{ - "sidebar": [ - { - "type": "doc", - "id": "index", - "label": "Noir" - }, - { - "type": "category", - "label": "Getting Started", - "items": [ - { - "type": "autogenerated", - "dirName": "getting_started" - } - ] - }, - { - "type": "category", - "label": "Examples", - "items": [ - { - "type": "autogenerated", - "dirName": "examples" - } - ] - }, - { - "type": "category", - "label": "Nargo", - "items": [ - { - "type": "autogenerated", - "dirName": "nargo" - } - ] - }, - { - "type": "category", - "label": "Language Concepts", - "items": [ - { - "type": "autogenerated", - "dirName": "language_concepts" - } - ] - }, - { - "type": "category", - "label": "Noir Standard Library", - "items": [ - { - "type": "category", - "label": "Cryptographic Primitives", - "link": { - "type": "doc", - "id": "standard_library/cryptographic_primitives" - }, - "items": [ - { - "type": "autogenerated", - "dirName": "standard_library/cryptographic_primitives" - } - ] - }, - "standard_library/array_methods", - "standard_library/field_methods", - "standard_library/logging", - "standard_library/merkle_trees", - "standard_library/zeroed", - "standard_library/black_box_fns" - ] - }, - { - "type": "category", - "label": "Modules, Packages and Crates", - "items": [ - { - "type": "autogenerated", - "dirName": "modules_packages_crates" - } - ] - }, - { - "type": "doc", - "id": "typescript", - "label": "Working with Typescript" - } - ] -} diff --git a/docs/versioned_sidebars/version-v0.9.0-sidebars.json b/docs/versioned_sidebars/version-v0.9.0-sidebars.json deleted file mode 100644 index 190363917e0..00000000000 --- a/docs/versioned_sidebars/version-v0.9.0-sidebars.json +++ /dev/null @@ -1,91 +0,0 @@ -{ - "sidebar": [ - { - "type": "doc", - "id": "index", - "label": "Noir" - }, - { - "type": "category", - "label": "Getting Started", - "items": [ - { - "type": "autogenerated", - "dirName": "getting_started" - } - ] - }, - { - "type": "category", - "label": "Examples", - "items": [ - { - "type": "autogenerated", - "dirName": "examples" - } - ] - }, - { - "type": "category", - "label": "Nargo", - "items": [ - { - "type": "autogenerated", - "dirName": "nargo" - } - ] - }, - { - "type": "category", - "label": "Language Concepts", - "items": [ - { - "type": "autogenerated", - "dirName": "language_concepts" - } - ] - }, - { - "type": "category", - "label": "Noir Standard Library", - "items": [ - { - "type": "category", - "label": "Cryptographic Primitives", - "link": { - "type": "doc", - "id": "standard_library/cryptographic_primitives" - }, - "items": [ - { - "type": "autogenerated", - "dirName": "standard_library/cryptographic_primitives" - } - ] - }, - "standard_library/slice_methods", - "standard_library/field_methods", - "standard_library/recursion", - "standard_library/logging", - "standard_library/merkle_trees", - "standard_library/zeroed", - "standard_library/black_box_fns" - ] - }, - { - "type": "category", - "label": "Modules, Packages and Crates", - "items": [ - { - "type": "autogenerated", - "dirName": "modules_packages_crates" - } - ] - }, - { - "type": "doc", - "id": "typescript", - "label": "Working with Typescript" - } - ] -} diff --git a/docs/versions.json b/docs/versions.json deleted file mode 100644 index 
ebd565cb0ae..00000000000 --- a/docs/versions.json +++ /dev/null @@ -1,5 +0,0 @@ -[ - "v0.19.2", - "v0.17.0", - "v0.10.5" -] diff --git a/flake.nix b/flake.nix index c23137ceff9..8f4541d198a 100644 --- a/flake.nix +++ b/flake.nix @@ -73,7 +73,7 @@ # Configuration shared between builds config = { # x-release-please-start-version - version = "0.19.2"; + version = "0.22.0"; # x-release-please-end src = pkgs.lib.cleanSourceWith { @@ -118,9 +118,6 @@ native-cargo-artifacts = craneLib.buildDepsOnly (nativeConfig // { pname = "nargo"; }); - noir-wasm-cargo-artifacts = craneLib.buildDepsOnly (wasmConfig // { - pname = "noir_wasm"; - }); noirc-abi-wasm-cargo-artifacts = craneLib.buildDepsOnly (wasmConfig // { pname = "noirc_abi_wasm"; }); @@ -142,25 +139,6 @@ doCheck = false; }); - noir_wasm = craneLib.buildPackage (wasmConfig // { - pname = "noir_wasm"; - - inherit GIT_COMMIT GIT_DIRTY; - - cargoArtifacts = noir-wasm-cargo-artifacts; - - buildPhaseCargoCommand = '' - bash compiler/wasm/buildPhaseCargoCommand.sh release - ''; - - installPhase = '' - bash compiler/wasm/installPhase.sh - ''; - - # We don't want to run tests because they don't work in the Nix sandbox - doCheck = false; - }); - noirc_abi_wasm = craneLib.buildPackage (wasmConfig // rec { pname = "noirc_abi_wasm"; @@ -248,19 +226,17 @@ # Nix flakes cannot build more than one derivation in one command (see https://github.com/NixOS/nix/issues/5591) # so we use `symlinkJoin` to build everything as the "all" package. - all = pkgs.symlinkJoin { name = "all"; paths = [ nargo noir_wasm noirc_abi_wasm noir_lsp_wasm acvm_js ]; }; - all_wasm = pkgs.symlinkJoin { name = "all_wasm"; paths = [ noir_wasm noirc_abi_wasm acvm_js ]; }; + all = pkgs.symlinkJoin { name = "all"; paths = [ nargo noirc_abi_wasm noir_lsp_wasm acvm_js ]; }; + all_wasm = pkgs.symlinkJoin { name = "all_wasm"; paths = [ noirc_abi_wasm acvm_js ]; }; # We also export individual packages to enable `nix build .#nargo -L`, etc. 
inherit nargo; - inherit noir_wasm; inherit noirc_abi_wasm; inherit noir_lsp_wasm; inherit acvm_js; # We expose the `*-cargo-artifacts` derivations so we can cache our cargo dependencies in CI inherit native-cargo-artifacts; - inherit noir-wasm-cargo-artifacts; inherit noirc-abi-wasm-cargo-artifacts; inherit noir-lsp-wasm-cargo-artifacts; inherit acvm-js-cargo-artifacts; @@ -271,7 +247,6 @@ devShells.default = pkgs.mkShell (environment // { inputsFrom = [ nargo - noir_wasm noirc_abi_wasm acvm_js ]; diff --git a/noir_stdlib/src/cmp.nr b/noir_stdlib/src/cmp.nr new file mode 100644 index 00000000000..11127494c18 --- /dev/null +++ b/noir_stdlib/src/cmp.nr @@ -0,0 +1,310 @@ +trait Eq { + fn eq(self, other: Self) -> bool; +} + +impl Eq for Field { fn eq(self, other: Field) -> bool { self == other } } + +impl Eq for u1 { fn eq(self, other: u1) -> bool { self == other } } +impl Eq for u8 { fn eq(self, other: u8) -> bool { self == other } } +impl Eq for u16 { fn eq(self, other: u16) -> bool { self == other } } +impl Eq for u32 { fn eq(self, other: u32) -> bool { self == other } } +impl Eq for u64 { fn eq(self, other: u64) -> bool { self == other } } + +impl Eq for i8 { fn eq(self, other: i8) -> bool { self == other } } +impl Eq for i16 { fn eq(self, other: i16) -> bool { self == other } } +impl Eq for i32 { fn eq(self, other: i32) -> bool { self == other } } +impl Eq for i64 { fn eq(self, other: i64) -> bool { self == other } } + +impl Eq for () { fn eq(_self: Self, _other: ()) -> bool { true } } +impl Eq for bool { fn eq(self, other: bool) -> bool { self == other } } + +impl Eq for [T; N] where T: Eq { + fn eq(self, other: [T; N]) -> bool { + let mut result = true; + for i in 0 .. self.len() { + result &= self[i].eq(other[i]); + } + result + } +} + +impl Eq for str { + fn eq(self, other: str) -> bool { + let self_bytes = self.as_bytes(); + let other_bytes = other.as_bytes(); + self_bytes == other_bytes + } +} + +impl Eq for (A, B) where A: Eq, B: Eq { + fn eq(self, other: (A, B)) -> bool { + self.0.eq(other.0) & self.1.eq(other.1) + } +} + +impl Eq for (A, B, C) where A: Eq, B: Eq, C: Eq { + fn eq(self, other: (A, B, C)) -> bool { + self.0.eq(other.0) & self.1.eq(other.1) & self.2.eq(other.2) + } +} + +impl Eq for (A, B, C, D) where A: Eq, B: Eq, C: Eq, D: Eq { + fn eq(self, other: (A, B, C, D)) -> bool { + self.0.eq(other.0) & self.1.eq(other.1) & self.2.eq(other.2) & self.3.eq(other.3) + } +} + +impl Eq for (A, B, C, D, E) where A: Eq, B: Eq, C: Eq, D: Eq, E: Eq { + fn eq(self, other: (A, B, C, D, E)) -> bool { + self.0.eq(other.0) & self.1.eq(other.1) & self.2.eq(other.2) & self.3.eq(other.3) & self.4.eq(other.4) + } +} + +impl Eq for Ordering { + fn eq(self, other: Ordering) -> bool { + self.result == other.result + } +} + + +// Noir doesn't have enums yet so we emulate (Lt | Eq | Gt) with a struct +// that has 3 public functions for constructing the struct. +struct Ordering { + result: Field, +} + +impl Ordering { + // Implementation note: 0, 1, and 2 for Lt, Eq, and Gt are built + // into the compiler, do not change these without also updating + // the compiler itself! 
+ pub fn less() -> Ordering { + Ordering { result: 0 } + } + + pub fn equal() -> Ordering { + Ordering { result: 1 } + } + + pub fn greater() -> Ordering { + Ordering { result: 2 } + } +} + + +trait Ord { + fn cmp(self, other: Self) -> Ordering; +} + +// Note: Field deliberately does not implement Ord + +impl Ord for u8 { + fn cmp(self, other: u8) -> Ordering { + if self < other { + Ordering::less() + } else if self > other { + Ordering::greater() + } else { + Ordering::equal() + } + } +} + +impl Ord for u16 { + fn cmp(self, other: u16) -> Ordering { + if self < other { + Ordering::less() + } else if self > other { + Ordering::greater() + } else { + Ordering::equal() + } + } +} + +impl Ord for u32 { + fn cmp(self, other: u32) -> Ordering { + if self < other { + Ordering::less() + } else if self > other { + Ordering::greater() + } else { + Ordering::equal() + } + } +} + +impl Ord for u64 { + fn cmp(self, other: u64) -> Ordering { + if self < other { + Ordering::less() + } else if self > other { + Ordering::greater() + } else { + Ordering::equal() + } + } +} + +impl Ord for i8 { + fn cmp(self, other: i8) -> Ordering { + if self < other { + Ordering::less() + } else if self > other { + Ordering::greater() + } else { + Ordering::equal() + } + } +} + +impl Ord for i16 { + fn cmp(self, other: i16) -> Ordering { + if self < other { + Ordering::less() + } else if self > other { + Ordering::greater() + } else { + Ordering::equal() + } + } +} + +impl Ord for i32 { + fn cmp(self, other: i32) -> Ordering { + if self < other { + Ordering::less() + } else if self > other { + Ordering::greater() + } else { + Ordering::equal() + } + } +} + +impl Ord for i64 { + fn cmp(self, other: i64) -> Ordering { + if self < other { + Ordering::less() + } else if self > other { + Ordering::greater() + } else { + Ordering::equal() + } + } +} + +impl Ord for () { + fn cmp(_self: Self, _other: ()) -> Ordering { + Ordering::equal() + } +} + +impl Ord for bool { + fn cmp(self, other: bool) -> Ordering { + if self { + if other { + Ordering::equal() + } else { + Ordering::greater() + } + } else { + if other { + Ordering::less() + } else { + Ordering::equal() + } + } + } +} + +impl Ord for [T; N] where T: Ord { + // The first non-equal element of both arrays determines + // the ordering for the whole array. + fn cmp(self, other: [T; N]) -> Ordering { + let mut result = Ordering::equal(); + for i in 0 .. 
self.len() { + if result == Ordering::equal() { + let result_i = self[i].cmp(other[i]); + + if result_i == Ordering::less() { + result = result_i; + } else if result_i == Ordering::greater() { + result = result_i; + } + } + } + result + } +} + +impl Ord for (A, B) where A: Ord, B: Ord { + fn cmp(self, other: (A, B)) -> Ordering { + let result = self.0.cmp(other.0); + + if result != Ordering::equal() { + result + } else { + self.1.cmp(other.1) + } + } +} + +impl Ord for (A, B, C) where A: Ord, B: Ord, C: Ord { + fn cmp(self, other: (A, B, C)) -> Ordering { + let mut result = self.0.cmp(other.0); + + if result == Ordering::equal() { + result = self.1.cmp(other.1); + } + + if result == Ordering::equal() { + result = self.2.cmp(other.2); + } + + result + } +} + +impl Ord for (A, B, C, D) where A: Ord, B: Ord, C: Ord, D: Ord { + fn cmp(self, other: (A, B, C, D)) -> Ordering { + let mut result = self.0.cmp(other.0); + + if result == Ordering::equal() { + result = self.1.cmp(other.1); + } + + if result == Ordering::equal() { + result = self.2.cmp(other.2); + } + + if result == Ordering::equal() { + result = self.3.cmp(other.3); + } + + result + } +} + +impl Ord for (A, B, C, D, E) where A: Ord, B: Ord, C: Ord, D: Ord, E: Ord { + fn cmp(self, other: (A, B, C, D, E)) -> Ordering { + let mut result = self.0.cmp(other.0); + + if result == Ordering::equal() { + result = self.1.cmp(other.1); + } + + if result == Ordering::equal() { + result = self.2.cmp(other.2); + } + + if result == Ordering::equal() { + result = self.3.cmp(other.3); + } + + if result == Ordering::equal() { + result = self.4.cmp(other.4); + } + + result + } +} diff --git a/noir_stdlib/src/default.nr b/noir_stdlib/src/default.nr new file mode 100644 index 00000000000..232be74489c --- /dev/null +++ b/noir_stdlib/src/default.nr @@ -0,0 +1,48 @@ +trait Default { + fn default() -> Self; +} + +impl Default for Field { fn default() -> Field { 0 } } + +impl Default for u8 { fn default() -> u8 { 0 } } +impl Default for u16 { fn default() -> u16 { 0 } } +impl Default for u32 { fn default() -> u32 { 0 } } +impl Default for u64 { fn default() -> u64 { 0 } } + +impl Default for i8 { fn default() -> i8 { 0 } } +impl Default for i16 { fn default() -> i16 { 0 } } +impl Default for i32 { fn default() -> i32 { 0 } } +impl Default for i64 { fn default() -> i64 { 0 } } + +impl Default for () { fn default() -> () { () } } +impl Default for bool { fn default() -> bool { false } } + +impl Default for [T; N] where T: Default { + fn default() -> [T; N] { + [T::default(); N] + } +} + +impl Default for (A, B) where A: Default, B: Default { + fn default() -> (A, B) { + (A::default(), B::default()) + } +} + +impl Default for (A, B, C) where A: Default, B: Default, C: Default { + fn default() -> (A, B, C) { + (A::default(), B::default(), C::default()) + } +} + +impl Default for (A, B, C, D) where A: Default, B: Default, C: Default, D: Default { + fn default() -> (A, B, C, D) { + (A::default(), B::default(), C::default(), D::default()) + } +} + +impl Default for (A, B, C, D, E) where A: Default, B: Default, C: Default, D: Default, E: Default { + fn default() -> (A, B, C, D, E) { + (A::default(), B::default(), C::default(), D::default(), E::default()) + } +} diff --git a/noir_stdlib/src/ec/consts/te.nr b/noir_stdlib/src/ec/consts/te.nr index 469493a35ee..e25f373593a 100644 --- a/noir_stdlib/src/ec/consts/te.nr +++ b/noir_stdlib/src/ec/consts/te.nr @@ -12,14 +12,20 @@ struct BabyJubjub { pub fn baby_jubjub() -> BabyJubjub { BabyJubjub { // Baby Jubjub (ERC-2494) 
parameters in affine representation - curve: TECurve::new(168700, + curve: TECurve::new( + 168700, 168696, // G - TEPoint::new(995203441582195749578291179787384436505546430278305826713579947235728471134, - 5472060717959818805561601436314318772137091100104008585924551046643952123905)), + TEPoint::new( + 995203441582195749578291179787384436505546430278305826713579947235728471134, + 5472060717959818805561601436314318772137091100104008585924551046643952123905 + ) + ), // [8]G precalculated - base8: TEPoint::new(5299619240641551281634865583518297030282874472190772894086521144482721001553, - 16950150798460657717958625567821834550301663161624707787222815936182638968203), + base8: TEPoint::new( + 5299619240641551281634865583518297030282874472190772894086521144482721001553, + 16950150798460657717958625567821834550301663161624707787222815936182638968203 + ), // The size of the group formed from multiplying the base field by 8. suborder: 2736030358979909402780800718157159386076813972158567259200215660948447373041 } diff --git a/noir_stdlib/src/ec/montcurve.nr b/noir_stdlib/src/ec/montcurve.nr index 82d22837b46..83a17bae322 100644 --- a/noir_stdlib/src/ec/montcurve.nr +++ b/noir_stdlib/src/ec/montcurve.nr @@ -12,6 +12,8 @@ mod affine { use crate::ec::safe_inverse; use crate::ec::sqrt; use crate::ec::ZETA; + use crate::cmp::Eq; + // Curve specification struct Curve { // Montgomery Curve configuration (ky^2 = x^3 + j*x^2 + x) j: Field, @@ -32,11 +34,6 @@ mod affine { Self {x, y, infty: false} } - // Check for equality - fn eq(self, p: Self) -> bool { - (self.infty & p.infty) | (!self.infty & !p.infty & (self.x == p.x) & (self.y == p.y)) - } - // Check if zero pub fn is_zero(self) -> bool { self.infty @@ -76,6 +73,12 @@ mod affine { } } + impl Eq for Point { + fn eq(self, p: Self) -> bool { + (self.infty & p.infty) | (!self.infty & !p.infty & (self.x == p.x) & (self.y == p.y)) + } + } + impl Curve { // Curve constructor pub fn new(j: Field, k: Field, gen: Point) -> Self { @@ -219,6 +222,7 @@ mod curvegroup { use crate::ec::swcurve::curvegroup::Point as SWPoint; use crate::ec::tecurve::curvegroup::Curve as TECurve; use crate::ec::tecurve::curvegroup::Point as TEPoint; + use crate::cmp::Eq; struct Curve { // Montgomery Curve configuration (ky^2 z = x*(x^2 + j*x*z + z*z)) j: Field, @@ -239,11 +243,6 @@ mod curvegroup { Self {x, y, z} } - // Check for equality - fn eq(self, p: Self) -> bool { - (self.z == p.z) | (((self.x * self.z) == (p.x * p.z)) & ((self.y * self.z) == (p.y * p.z))) - } - // Check if zero pub fn is_zero(self) -> bool { self.z == 0 @@ -277,6 +276,12 @@ mod curvegroup { } } + impl Eq for Point { + fn eq(self, p: Self) -> bool { + (self.z == p.z) | (((self.x * self.z) == (p.x * p.z)) & ((self.y * self.z) == (p.y * p.z))) + } + } + impl Curve { // Curve constructor pub fn new(j: Field, k: Field, gen: Point) -> Self { diff --git a/noir_stdlib/src/ec/swcurve.nr b/noir_stdlib/src/ec/swcurve.nr index e9b6f661843..e64f5a7be02 100644 --- a/noir_stdlib/src/ec/swcurve.nr +++ b/noir_stdlib/src/ec/swcurve.nr @@ -7,6 +7,8 @@ mod affine { use crate::ec::safe_inverse; use crate::ec::is_square; use crate::ec::sqrt; + use crate::cmp::Eq; + // Curve specification struct Curve { // Short Weierstraß curve // Coefficients in defining equation y^2 = x^3 + ax + b @@ -28,15 +30,6 @@ mod affine { Self {x, y, infty: false} } - // Check for equality - fn eq(self, p: Point) -> bool { - let Self {x: x1, y: y1, infty: inf1} = self; - let Self {x: x2, y: y2, infty: inf2} = p; - - (inf1 & inf2) - | (!inf1 & !inf2 & (x1 == 
x2) & (y1 == y2)) - } - // Check if zero pub fn is_zero(self) -> bool { self.eq(Point::zero()) @@ -65,6 +58,16 @@ mod affine { } } + impl Eq for Point { + fn eq(self, p: Self) -> bool { + let Self {x: x1, y: y1, infty: inf1} = self; + let Self {x: x2, y: y2, infty: inf2} = p; + + (inf1 & inf2) + | (!inf1 & !inf2 & (x1 == x2) & (y1 == y2)) + } + } + impl Curve { // Curve constructor pub fn new(a: Field, b: Field, gen: Point) -> Curve { @@ -182,6 +185,8 @@ mod curvegroup { // Points are represented by three-dimensional Jacobian coordinates. // See for details. use crate::ec::swcurve::affine; + use crate::cmp::Eq; + // Curve specification struct Curve { // Short Weierstraß curve // Coefficients in defining equation y^2 = x^3 + axz^4 + bz^6 @@ -203,14 +208,6 @@ mod curvegroup { Self {x, y, z} } - // Check for equality - fn eq(self, p: Point) -> bool { - let Self {x: x1, y: y1, z: z1} = self; - let Self {x: x2, y: y2, z: z2} = p; - - ((z1 == 0) & (z2 == 0)) | ((z1 != 0) & (z2 != 0) & (x1*z2*z2 == x2*z1*z1) & (y1*z2*z2*z2 == y2*z1*z1*z1)) - } - // Check if zero pub fn is_zero(self) -> bool { self.eq(Point::zero()) @@ -240,6 +237,15 @@ mod curvegroup { } } + impl Eq for Point { + fn eq(self, p: Self) -> bool { + let Self {x: x1, y: y1, z: z1} = self; + let Self {x: x2, y: y2, z: z2} = p; + + ((z1 == 0) & (z2 == 0)) | ((z1 != 0) & (z2 != 0) & (x1*z2*z2 == x2*z1*z1) & (y1*z2*z2*z2 == y2*z1*z1*z1)) + } + } + impl Curve { // Curve constructor pub fn new(a: Field, b: Field, gen: Point) -> Curve { diff --git a/noir_stdlib/src/ec/tecurve.nr b/noir_stdlib/src/ec/tecurve.nr index 849b45ff012..5333ece4c4a 100644 --- a/noir_stdlib/src/ec/tecurve.nr +++ b/noir_stdlib/src/ec/tecurve.nr @@ -9,6 +9,8 @@ mod affine { use crate::ec::montcurve::affine::Point as MPoint; use crate::ec::swcurve::affine::Curve as SWCurve; use crate::ec::swcurve::affine::Point as SWPoint; + use crate::cmp::Eq; + // Curve specification struct Curve { // Twisted Edwards curve // Coefficients in defining equation ax^2 + y^2 = 1 + dx^2y^2 @@ -29,14 +31,6 @@ mod affine { Self { x, y } } - // Check for equality - fn eq(self, p: Point) -> bool { - let Self {x: x1, y: y1} = self; - let Self {x: x2, y: y2} = p; - - (x1 == x2) & (y1 == y2) - } - // Check if zero pub fn is_zero(self) -> bool { self.eq(Point::zero()) @@ -74,6 +68,15 @@ mod affine { } } + impl Eq for Point { + fn eq(self, p: Self) -> bool { + let Self {x: x1, y: y1} = self; + let Self {x: x2, y: y2} = p; + + (x1 == x2) & (y1 == y2) + } + } + impl Curve { // Curve constructor pub fn new(a: Field, d: Field, gen: Point) -> Curve { @@ -198,6 +201,8 @@ mod curvegroup { use crate::ec::montcurve::curvegroup::Point as MPoint; use crate::ec::swcurve::curvegroup::Curve as SWCurve; use crate::ec::swcurve::curvegroup::Point as SWPoint; + use crate::cmp::Eq; + // Curve specification struct Curve { // Twisted Edwards curve // Coefficients in defining equation a(x^2 + y^2)z^2 = z^4 + dx^2y^2 @@ -220,14 +225,6 @@ mod curvegroup { Self {x, y, t, z} } - // Check for equality - fn eq(self, p: Point) -> bool { - let Self {x: x1, y: y1, t: _t1, z: z1} = self; - let Self {x: x2, y: y2, t: _t2, z:z2} = p; - - (x1*z2 == x2*z1) & (y1*z2 == y2*z1) - } - // Check if zero pub fn is_zero(self) -> bool { let Self {x, y, t, z} = self; @@ -259,6 +256,15 @@ mod curvegroup { } } + impl Eq for Point { + fn eq(self, p: Self) -> bool { + let Self {x: x1, y: y1, t: _t1, z: z1} = self; + let Self {x: x2, y: y2, t: _t2, z:z2} = p; + + (x1*z2 == x2*z1) & (y1*z2 == y2*z1) + } + } + impl Curve { // Curve constructor pub fn 
new(a: Field, d: Field, gen: Point) -> Curve { diff --git a/noir_stdlib/src/ecdsa_secp256k1.nr b/noir_stdlib/src/ecdsa_secp256k1.nr index 61687f557fe..b1f2b12c76b 100644 --- a/noir_stdlib/src/ecdsa_secp256k1.nr +++ b/noir_stdlib/src/ecdsa_secp256k1.nr @@ -1,2 +1,7 @@ #[foreign(ecdsa_secp256k1)] -pub fn verify_signature(_public_key_x: [u8; 32], _public_key_y: [u8; 32], _signature: [u8; 64], _message_hash: [u8; N]) -> bool {} +pub fn verify_signature( + _public_key_x: [u8; 32], + _public_key_y: [u8; 32], + _signature: [u8; 64], + _message_hash: [u8; N] +) -> bool {} diff --git a/noir_stdlib/src/ecdsa_secp256r1.nr b/noir_stdlib/src/ecdsa_secp256r1.nr index 7af37407edd..6c3cf4d7945 100644 --- a/noir_stdlib/src/ecdsa_secp256r1.nr +++ b/noir_stdlib/src/ecdsa_secp256r1.nr @@ -1,2 +1,7 @@ #[foreign(ecdsa_secp256r1)] -pub fn verify_signature(_public_key_x: [u8; 32], _public_key_y: [u8; 32], _signature: [u8; 64], _message_hash: [u8; N]) -> bool {} +pub fn verify_signature( + _public_key_x: [u8; 32], + _public_key_y: [u8; 32], + _signature: [u8; 64], + _message_hash: [u8; N] +) -> bool {} diff --git a/noir_stdlib/src/field.nr b/noir_stdlib/src/field.nr index b4cb9b64e3c..df00b3eb653 100644 --- a/noir_stdlib/src/field.nr +++ b/noir_stdlib/src/field.nr @@ -15,6 +15,15 @@ impl Field { #[builtin(to_be_bits)] fn __to_be_bits(_self: Self, _bit_size: u32) -> [u1] {} + #[builtin(apply_range_constraint)] + fn __assert_max_bit_size(_self: Self, _bit_size: u32) {} + + pub fn assert_max_bit_size(self: Self, bit_size: u32) { + crate::assert_constant(bit_size); + assert(bit_size < modulus_num_bits() as u32); + self.__assert_max_bit_size(bit_size); + } + pub fn to_le_bytes(self: Self, byte_size: u32) -> [u8] { self.to_le_radix(256, byte_size) } diff --git a/noir_stdlib/src/hash.nr b/noir_stdlib/src/hash.nr index 157d6518367..5933209d9bc 100644 --- a/noir_stdlib/src/hash.nr +++ b/noir_stdlib/src/hash.nr @@ -7,6 +7,9 @@ pub fn sha256(_input: [u8; N]) -> [u8; 32] {} #[foreign(blake2s)] pub fn blake2s(_input: [u8; N]) -> [u8; 32] {} +#[foreign(blake3)] +pub fn blake3(_input: [u8; N]) -> [u8; 32] {} + struct PedersenPoint { x : Field, y : Field, @@ -16,7 +19,7 @@ pub fn pedersen_commitment(input: [Field; N]) -> PedersenPoint { pedersen_commitment_with_separator(input, 0) } -#[foreign(pedersen)] +#[foreign(pedersen_commitment)] pub fn __pedersen_commitment_with_separator(_input: [Field; N], _separator: u32) -> [Field; 2] {} pub fn pedersen_commitment_with_separator(input: [Field; N], separator: u32) -> PedersenPoint { diff --git a/noir_stdlib/src/hash/poseidon.nr b/noir_stdlib/src/hash/poseidon.nr index b3cb32cd357..3f4de73c0db 100644 --- a/noir_stdlib/src/hash/poseidon.nr +++ b/noir_stdlib/src/hash/poseidon.nr @@ -10,7 +10,14 @@ struct PoseidonConfig { mds: [Field; N] // MDS Matrix in row-major order } -pub fn config(t: Field, rf: u8, rp: u8, alpha: Field, ark: [Field; M], mds: [Field; N]) -> PoseidonConfig { +pub fn config( + t: Field, + rf: u8, + rp: u8, + alpha: Field, + ark: [Field; M], + mds: [Field; N] +) -> PoseidonConfig { // Input checks let mul = crate::wrapping_mul(t as u8, (rf + rp)); assert(mul == ark.len() as u8); diff --git a/noir_stdlib/src/lib.nr b/noir_stdlib/src/lib.nr index 916e71cb490..23a7c71ff45 100644 --- a/noir_stdlib/src/lib.nr +++ b/noir_stdlib/src/lib.nr @@ -19,23 +19,28 @@ mod compat; mod option; mod string; mod test; +mod cmp; +mod ops; +mod default; +mod prelude; +mod uint128; + // Oracle calls are required to be wrapped in an unconstrained function -// Thus, the only argument to the 
`println` oracle is expected to always be an ident -#[oracle(println)] -unconstrained fn println_oracle(_input: T) {} +// Thus, the only argument to the `println` oracle is expected to always be an ident +#[oracle(print)] +unconstrained fn print_oracle(_with_newline: bool, _input: T) {} + +unconstrained pub fn print(input: T) { + print_oracle(false, input); +} unconstrained pub fn println(input: T) { - println_oracle(input); + print_oracle(true, input); } #[foreign(recursive_aggregation)] -pub fn verify_proof( - _verification_key: [Field], - _proof: [Field], - _public_inputs: [Field], - _key_hash: Field, - _input_aggregation_object: [Field; N] -) -> [Field; N] {} +pub fn verify_proof(_verification_key: [Field], _proof: [Field], _public_inputs: [Field], _key_hash: Field) {} + // Asserts that the given value is known at compile-time. // Useful for debugging for-loop bounds. #[builtin(assert_constant)] @@ -61,7 +66,3 @@ pub fn wrapping_sub(x: T, y: T) -> T { pub fn wrapping_mul(x: T, y: T) -> T { crate::from_field(crate::as_field(x) * crate::as_field(y)) } -/// Shift-left x by y bits -/// If the result overflow the bitsize; it does not fail and returns 0 instead -#[builtin(wrapping_shift_left)] -pub fn wrapping_shift_left(_x: T, _y: T) -> T {} diff --git a/noir_stdlib/src/ops.nr b/noir_stdlib/src/ops.nr new file mode 100644 index 00000000000..3078ac11296 --- /dev/null +++ b/noir_stdlib/src/ops.nr @@ -0,0 +1,156 @@ + +trait Add { + fn add(self, other: Self) -> Self; +} + +impl Add for Field { fn add(self, other: Field) -> Field { self + other } } + +impl Add for u8 { fn add(self, other: u8) -> u8 { self + other } } +impl Add for u16 { fn add(self, other: u16) -> u16 { self + other } } +impl Add for u32 { fn add(self, other: u32) -> u32 { self + other } } +impl Add for u64 { fn add(self, other: u64) -> u64 { self + other } } + +impl Add for i8 { fn add(self, other: i8) -> i8 { self + other } } +impl Add for i16 { fn add(self, other: i16) -> i16 { self + other } } +impl Add for i32 { fn add(self, other: i32) -> i32 { self + other } } +impl Add for i64 { fn add(self, other: i64) -> i64 { self + other } } + +trait Sub { + fn sub(self, other: Self) -> Self; +} + +impl Sub for Field { fn sub(self, other: Field) -> Field { self - other } } + +impl Sub for u8 { fn sub(self, other: u8) -> u8 { self - other } } +impl Sub for u16 { fn sub(self, other: u16) -> u16 { self - other } } +impl Sub for u32 { fn sub(self, other: u32) -> u32 { self - other } } +impl Sub for u64 { fn sub(self, other: u64) -> u64 { self - other } } + +impl Sub for i8 { fn sub(self, other: i8) -> i8 { self - other } } +impl Sub for i16 { fn sub(self, other: i16) -> i16 { self - other } } +impl Sub for i32 { fn sub(self, other: i32) -> i32 { self - other } } +impl Sub for i64 { fn sub(self, other: i64) -> i64 { self - other } } + +trait Mul { + fn mul(self, other: Self) -> Self; +} + +impl Mul for Field { fn mul(self, other: Field) -> Field { self * other } } + +impl Mul for u8 { fn mul(self, other: u8) -> u8 { self * other } } +impl Mul for u16 { fn mul(self, other: u16) -> u16 { self * other } } +impl Mul for u32 { fn mul(self, other: u32) -> u32 { self * other } } +impl Mul for u64 { fn mul(self, other: u64) -> u64 { self * other } } + +impl Mul for i8 { fn mul(self, other: i8) -> i8 { self * other } } +impl Mul for i16 { fn mul(self, other: i16) -> i16 { self * other } } +impl Mul for i32 { fn mul(self, other: i32) -> i32 { self * other } } +impl Mul for i64 { fn mul(self, other: i64) -> i64 { self * other } } + +trait Div { + fn 
div(self, other: Self) -> Self; +} + +impl Div for Field { fn div(self, other: Field) -> Field { self / other } } + +impl Div for u8 { fn div(self, other: u8) -> u8 { self / other } } +impl Div for u16 { fn div(self, other: u16) -> u16 { self / other } } +impl Div for u32 { fn div(self, other: u32) -> u32 { self / other } } +impl Div for u64 { fn div(self, other: u64) -> u64 { self / other } } + +impl Div for i8 { fn div(self, other: i8) -> i8 { self / other } } +impl Div for i16 { fn div(self, other: i16) -> i16 { self / other } } +impl Div for i32 { fn div(self, other: i32) -> i32 { self / other } } +impl Div for i64 { fn div(self, other: i64) -> i64 { self / other } } + +trait Rem { + fn rem(self, other: Self) -> Self; +} + +impl Rem for u8 { fn rem(self, other: u8) -> u8 { self % other } } +impl Rem for u16 { fn rem(self, other: u16) -> u16 { self % other } } +impl Rem for u32 { fn rem(self, other: u32) -> u32 { self % other } } +impl Rem for u64 { fn rem(self, other: u64) -> u64 { self % other } } + +impl Rem for i8 { fn rem(self, other: i8) -> i8 { self % other } } +impl Rem for i16 { fn rem(self, other: i16) -> i16 { self % other } } +impl Rem for i32 { fn rem(self, other: i32) -> i32 { self % other } } +impl Rem for i64 { fn rem(self, other: i64) -> i64 { self % other } } + +trait BitOr { + fn bitor(self, other: Self) -> Self; +} + +impl BitOr for bool { fn bitor(self, other: bool) -> bool { self | other } } + +impl BitOr for u8 { fn bitor(self, other: u8) -> u8 { self | other } } +impl BitOr for u16 { fn bitor(self, other: u16) -> u16 { self | other } } +impl BitOr for u32 { fn bitor(self, other: u32) -> u32 { self | other } } +impl BitOr for u64 { fn bitor(self, other: u64) -> u64 { self | other } } + +impl BitOr for i8 { fn bitor(self, other: i8) -> i8 { self | other } } +impl BitOr for i16 { fn bitor(self, other: i16) -> i16 { self | other } } +impl BitOr for i32 { fn bitor(self, other: i32) -> i32 { self | other } } +impl BitOr for i64 { fn bitor(self, other: i64) -> i64 { self | other } } + +trait BitAnd { + fn bitand(self, other: Self) -> Self; +} + +impl BitAnd for bool { fn bitand(self, other: bool) -> bool { self & other } } + +impl BitAnd for u8 { fn bitand(self, other: u8) -> u8 { self & other } } +impl BitAnd for u16 { fn bitand(self, other: u16) -> u16 { self & other } } +impl BitAnd for u32 { fn bitand(self, other: u32) -> u32 { self & other } } +impl BitAnd for u64 { fn bitand(self, other: u64) -> u64 { self & other } } + +impl BitAnd for i8 { fn bitand(self, other: i8) -> i8 { self & other } } +impl BitAnd for i16 { fn bitand(self, other: i16) -> i16 { self & other } } +impl BitAnd for i32 { fn bitand(self, other: i32) -> i32 { self & other } } +impl BitAnd for i64 { fn bitand(self, other: i64) -> i64 { self & other } } + +trait BitXor { + fn bitxor(self, other: Self) -> Self; +} + +impl BitXor for bool { fn bitxor(self, other: bool) -> bool { self ^ other } } + +impl BitXor for u8 { fn bitxor(self, other: u8) -> u8 { self ^ other } } +impl BitXor for u16 { fn bitxor(self, other: u16) -> u16 { self ^ other } } +impl BitXor for u32 { fn bitxor(self, other: u32) -> u32 { self ^ other } } +impl BitXor for u64 { fn bitxor(self, other: u64) -> u64 { self ^ other } } + +impl BitXor for i8 { fn bitxor(self, other: i8) -> i8 { self ^ other } } +impl BitXor for i16 { fn bitxor(self, other: i16) -> i16 { self ^ other } } +impl BitXor for i32 { fn bitxor(self, other: i32) -> i32 { self ^ other } } +impl BitXor for i64 { fn bitxor(self, other: i64) -> i64 { self ^ other } } + 
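
The arithmetic and bitwise traits above are what operator desugaring hooks into. As a rough illustrative sketch (not part of the patch, and assuming the traits are importable from `dep::std::ops` and that `+`/`*` are rewritten into `add`/`mul` calls once an impl is in scope), a user-defined type could opt into the operators like this:

```rust
// Illustrative sketch only. Assumes the traits live at dep::std::ops and that
// `+` / `*` desugar to `add` / `mul` when a matching impl exists.
use dep::std::ops::{Add, Mul};

struct Wrapped {
    inner: Field,
}

impl Add for Wrapped {
    fn add(self, other: Self) -> Self {
        Wrapped { inner: self.inner + other.inner }
    }
}

impl Mul for Wrapped {
    fn mul(self, other: Self) -> Self {
        Wrapped { inner: self.inner * other.inner }
    }
}

fn main() {
    let a = Wrapped { inner: 2 };
    let b = Wrapped { inner: 3 };
    // `*` binds tighter than `+`, so inner = 2 + (3 * 4)
    let c = a + b * Wrapped { inner: 4 };
    assert(c.inner == 14);
}
```
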
+trait Shl { + fn shl(self, other: Self) -> Self; +} + +impl Shl for u8 { fn shl(self, other: u8) -> u8 { self << other } } +impl Shl for u16 { fn shl(self, other: u16) -> u16 { self << other } } +impl Shl for u32 { fn shl(self, other: u32) -> u32 { self << other } } +impl Shl for u64 { fn shl(self, other: u64) -> u64 { self << other } } + +// Bit shifting is not currently supported for signed integer types +// impl Shl for i8 { fn shl(self, other: i8) -> i8 { self << other } } +// impl Shl for i16 { fn shl(self, other: i16) -> i16 { self << other } } +// impl Shl for i32 { fn shl(self, other: i32) -> i32 { self << other } } +// impl Shl for i64 { fn shl(self, other: i64) -> i64 { self << other } } + +trait Shr { + fn shr(self, other: Self) -> Self; +} + +impl Shr for u8 { fn shr(self, other: u8) -> u8 { self >> other } } +impl Shr for u16 { fn shr(self, other: u16) -> u16 { self >> other } } +impl Shr for u32 { fn shr(self, other: u32) -> u32 { self >> other } } +impl Shr for u64 { fn shr(self, other: u64) -> u64 { self >> other } } + +// Bit shifting is not currently supported for signed integer types +// impl Shr for i8 { fn shr(self, other: i8) -> i8 { self >> other } } +// impl Shr for i16 { fn shr(self, other: i16) -> i16 { self >> other } } +// impl Shr for i32 { fn shr(self, other: i32) -> i32 { self >> other } } +// impl Shr for i64 { fn shr(self, other: i64) -> i64 { self >> other } } diff --git a/noir_stdlib/src/prelude.nr b/noir_stdlib/src/prelude.nr new file mode 100644 index 00000000000..b57ff460371 --- /dev/null +++ b/noir_stdlib/src/prelude.nr @@ -0,0 +1,6 @@ +use crate::collections::vec::Vec; +use crate::option::Option; +use crate::{print, println, assert_constant}; +use crate::uint128::U128; +use crate::cmp::{Eq, Ord}; +use crate::default::Default; diff --git a/noir_stdlib/src/schnorr.nr b/noir_stdlib/src/schnorr.nr index c78eae37243..5ed95096f97 100644 --- a/noir_stdlib/src/schnorr.nr +++ b/noir_stdlib/src/schnorr.nr @@ -1,2 +1,7 @@ #[foreign(schnorr_verify)] -pub fn verify_signature(_public_key_x: Field, _public_key_y: Field, _signature: [u8; 64], _message: [u8; N]) -> bool {} +pub fn verify_signature( + _public_key_x: Field, + _public_key_y: Field, + _signature: [u8; 64], + _message: [u8; N] +) -> bool {} diff --git a/noir_stdlib/src/sha256.nr b/noir_stdlib/src/sha256.nr index 4adb93f3848..39e39b8cb6e 100644 --- a/noir_stdlib/src/sha256.nr +++ b/noir_stdlib/src/sha256.nr @@ -6,7 +6,7 @@ fn rotr32(a: u32, b: u32) -> u32 // 32-bit right rotation { // None of the bits overlap between `(a >> b)` and `(a << (32 - b))` // Addition is then equivalent to OR, with fewer constraints. 
- (a >> b) + (crate::wrapping_shift_left(a, 32 - b)) + (a >> b) + (a << (32 - b)) } fn ch(x: u32, y: u32, z: u32) -> u32 { @@ -66,9 +66,13 @@ fn sha_c(msg: [u32; 16], hash: [u32; 8]) -> [u32; 8] { let mut out_h: [u32; 8] = hash; let w = sha_w(msg); for j in 0..64 { - let t1 = crate::wrapping_add(crate::wrapping_add(crate::wrapping_add(out_h[7], bigma1(out_h[4])), - ch(out_h[4], out_h[5], out_h[6])), - crate::wrapping_add(K[j], w[j])); + let t1 = crate::wrapping_add( + crate::wrapping_add( + crate::wrapping_add(out_h[7], bigma1(out_h[4])), + ch(out_h[4], out_h[5], out_h[6]) + ), + crate::wrapping_add(K[j], w[j]) + ); let t2 = crate::wrapping_add(bigma0(out_h[0]), maj(out_h[0], out_h[1], out_h[2])); out_h[7] = out_h[6]; out_h[6] = out_h[5]; diff --git a/noir_stdlib/src/sha512.nr b/noir_stdlib/src/sha512.nr index ad2926aea81..155ba593bba 100644 --- a/noir_stdlib/src/sha512.nr +++ b/noir_stdlib/src/sha512.nr @@ -6,7 +6,7 @@ fn rotr64(a: u64, b: u64) -> u64 // 64-bit right rotation { // None of the bits overlap between `(a >> b)` and `(a << (64 - b))` // Addition is then equivalent to OR, with fewer constraints. - (a >> b) + (crate::wrapping_shift_left(a, 64 - b)) + (a >> b) + (a << (64 - b)) } fn sha_ch(x: u64, y: u64, z: u64) -> u64 { diff --git a/noir_stdlib/src/slice.nr b/noir_stdlib/src/slice.nr index ca06e2aae44..a5a9a38ed53 100644 --- a/noir_stdlib/src/slice.nr +++ b/noir_stdlib/src/slice.nr @@ -21,28 +21,16 @@ impl [T] { #[builtin(slice_pop_front)] pub fn pop_front(_self: Self) -> (T, Self) { } - pub fn insert(self, _index: Field, _elem: T) -> Self { - // TODO(#2462): Slice insert with a dynamic index - crate::assert_constant(_index); - self.__slice_insert(_index, _elem) - } - /// Insert an element at a specified index, shifting all elements /// after it to the right #[builtin(slice_insert)] - fn __slice_insert(_self: Self, _index: Field, _elem: T) -> Self { } - - pub fn remove(self, _index: Field) -> (Self, T) { - // TODO(#2462): Slice remove with a dynamic index - crate::assert_constant(_index); - self.__slice_remove(_index) - } + pub fn insert(_self: Self, _index: Field, _elem: T) -> Self { } /// Remove an element at a specified index, shifting all elements /// after it to the left, returning the altered slice and /// the removed element #[builtin(slice_remove)] - fn __slice_remove(_self: Self, _index: Field) -> (Self, T) { } + pub fn remove(_self: Self, _index: Field) -> (Self, T) { } // Append each element of the `other` slice to the end of `self`. // This returns a new slice and leaves both input slices unchanged. 
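
The new `uint128.nr` module that follows represents a 128-bit unsigned integer as two 64-bit limbs (`lo` and `hi`) stored in `Field`s, with the arithmetic, comparison and bitwise traits implemented on top of them. A hypothetical usage sketch, assuming `U128` is reachable through the new prelude shown below and that `==`/`<` desugar through the `Eq`/`Ord` impls (the module itself relies on this in its `Div` and `Shl` implementations):

```rust
// Illustrative sketch only (not part of the patch). Constructors and operators
// are the ones defined in uint128.nr; availability via the prelude is assumed.
fn main() {
    let max_lo = U128::from_u64s_le(0xffffffffffffffff, 0); // lo = 2^64 - 1, hi = 0
    let one = U128::from_u64s_le(1, 0);

    let sum = max_lo + one;                  // addition carries into the high limb
    assert(sum == U128::from_u64s_le(0, 1));
    assert(sum - one == max_lo);             // subtraction borrows back out again

    let x = U128::from_hex("0xff");          // parsed into lo = 255, hi = 0
    assert(x * U128::from_u64s_le(2, 0) == U128::from_u64s_le(510, 0));
}
```
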
diff --git a/noir_stdlib/src/uint128.nr b/noir_stdlib/src/uint128.nr new file mode 100644 index 00000000000..4a58b3868be --- /dev/null +++ b/noir_stdlib/src/uint128.nr @@ -0,0 +1,292 @@ +use crate::ops::{Add, Sub, Mul, Div, Rem, BitOr, BitAnd, BitXor, Shl, Shr}; +use crate::cmp::{Eq, Ord, Ordering}; + +global pow64 : Field = 18446744073709551616; //2^64; + +struct U128 { + lo: Field, + hi: Field, +} + +impl U128 { + + pub fn from_u64s_le(lo: u64, hi: u64) -> U128 { + // in order to handle multiplication, we need to represent the product of two u64 without overflow + assert(crate::field::modulus_num_bits() as u32 > 128); + U128 { + lo: lo as Field, + hi: hi as Field, + } + } + + pub fn from_u64s_be(hi: u64, lo: u64) -> U128 { + U128::from_u64s_le(lo,hi) + } + + pub fn from_le_bytes(bytes: [u8; 16]) -> U128 { + let mut lo = 0; + let mut base = 1; + for i in 0..8 { + lo += (bytes[i] as Field)*base; + base *= 256; + } + let mut hi = 0; + base = 1; + for i in 8..16 { + hi += (bytes[i] as Field)*base; + base *= 256; + } + U128 { + lo, + hi, + } + } + + pub fn to_le_bytes(self: Self) -> [u8; 16] { + let lo = self.lo.to_le_bytes(8); + let hi = self.hi.to_le_bytes(8); + let mut bytes = [0;16]; + for i in 0..8 { + bytes[i] = lo[i]; + bytes[i+8] = hi[i]; + } + bytes + } + + pub fn from_hex(hex: str) -> U128 { + let N = N as u32; + let bytes = hex.as_bytes(); + // string must starts with "0x" + assert((bytes[0] == 48) & (bytes[1] == 120), "Invalid hexadecimal string"); + assert(N < 35, "Input does not fit into a U128"); + + let mut lo = 0; + let mut hi = 0; + let mut base = 1; + if N <= 18 { + for i in 0..N-2 { + lo += U128::decode_ascii(bytes[N-i-1])*base; + base = base*16; + } + } else { + for i in 0..16 { + lo += U128::decode_ascii(bytes[N-i-1])*base; + base = base*16; + } + base = 1; + for i in 17..N-1 { + hi += U128::decode_ascii(bytes[N-i])*base; + base = base*16; + } + } + U128 { + lo: lo as Field, + hi: hi as Field, + } + } + + fn decode_ascii(ascii: u8) -> Field { + if ascii < 58 { + ascii - 48 + } else { + if ascii < 71 { + ascii - 55 + } else { + ascii - 87 + } + + } as Field + } + + unconstrained fn unconstrained_div(self: Self, b: U128) -> (U128, U128) { + if self < b { + (U128::from_u64s_le(0, 0), self) + } else { + //TODO check if this can overflow? 
+ let (q,r) = self.unconstrained_div(b * U128::from_u64s_le(2,0)); + let q_mul_2 = q * U128::from_u64s_le(2,0); + if r < b { + (q_mul_2, r) + } else { + (q_mul_2 + U128::from_u64s_le(1,0), r - b) + } + + } + } + + pub fn from_integer(i: T) -> U128 { + let f = crate::as_field(i); + let lo = f as u64 as Field; + let hi = (f-lo) / pow64; + U128 { + lo, + hi, + } + } + + pub fn to_integer(self) -> T { + crate::from_field(self.lo+self.hi*pow64) + } + + fn wrapping_mul(self: Self, b: U128) -> U128 { + let low = self.lo*b.lo; + let lo = low as u64 as Field; + let carry = (low - lo) / pow64; + let high = if crate::field::modulus_num_bits() as u32 > 196 { + (self.lo+self.hi)*(b.lo+b.hi) - low + carry + } else { + self.lo*b.hi + self.hi*b.lo + carry + }; + let hi = high as u64 as Field; + U128 { + lo, + hi, + } + } +} + +impl Add for U128 { + pub fn add(self: Self, b: U128) -> U128 { + let low = self.lo + b.lo; + let lo = low as u64 as Field; + let carry = (low - lo) / pow64; + let high = self.hi + b.hi + carry; + let hi = high as u64 as Field; + assert(hi == high, "attempt to add with overflow"); + U128 { + lo, + hi, + } + } +} + +impl Sub for U128 { + pub fn sub(self: Self, b: U128) -> U128 { + let low = pow64 + self.lo - b.lo; + let lo = low as u64 as Field; + let borrow = (low == lo) as Field; + let high = self.hi - b.hi - borrow; + let hi = high as u64 as Field; + assert(hi == high, "attempt to subtract with overflow"); + U128 { + lo, + hi, + } + } +} + +impl Mul for U128 { + pub fn mul(self: Self, b: U128) -> U128 { + assert(self.hi*b.hi == 0, "attempt to multiply with overflow"); + let low = self.lo*b.lo; + let lo = low as u64 as Field; + let carry = (low - lo) / pow64; + let high = if crate::field::modulus_num_bits() as u32 > 196 { + (self.lo+self.hi)*(b.lo+b.hi) - low + carry + } else { + self.lo*b.hi + self.hi*b.lo + carry + }; + let hi = high as u64 as Field; + assert(hi == high, "attempt to multiply with overflow"); + U128 { + lo, + hi, + } + } +} + +impl Div for U128 { + pub fn div(self: Self, b: U128) -> U128 { + let (q,r) = self.unconstrained_div(b); + let a = b * q + r; + assert_eq(self, a); + assert(r < b); + q + } +} + +impl Rem for U128 { + pub fn rem(self: Self, b: U128) -> U128 { + let (q,r) = self.unconstrained_div(b); + let a = b * q + r; + assert_eq(self, a); + assert(r < b); + r + } +} + +impl Eq for U128 { + pub fn eq(self: Self, b: U128) -> bool { + (self.lo == b.lo) & (self.hi == b.hi) + } +} + +impl Ord for U128 { + fn cmp(self, other: Self) -> Ordering { + let hi_ordering = (self.hi as u64).cmp((other.hi as u64)); + let lo_ordering = (self.lo as u64).cmp((other.lo as u64)); + + if hi_ordering == Ordering::equal() { + lo_ordering + } else { + hi_ordering + } + } +} + +impl BitOr for U128 { + fn bitor(self, other: U128) -> U128 { + U128 { + lo: ((self.lo as u64) | (other.lo as u64)) as Field, + hi: ((self.hi as u64) | (other.hi as u64))as Field + } + } +} + +impl BitAnd for U128 { + fn bitand(self, other: U128) -> U128 { + U128 { + lo: ((self.lo as u64) & (other.lo as u64)) as Field, + hi: ((self.hi as u64) & (other.hi as u64)) as Field + } + } +} + +impl BitXor for U128 { + fn bitxor(self, other: U128) -> U128 { + U128 { + lo: ((self.lo as u64) ^ (other.lo as u64)) as Field, + hi: ((self.hi as u64) ^ (other.hi as u64)) as Field + } + } +} + +impl Shl for U128 { + fn shl(self, other: U128) -> U128 { + assert(other < U128::from_u64s_le(128,0), "attempt to shift left with overflow"); + let exp_bits = other.lo.to_be_bits(7); + + let mut r: Field = 2; + let mut y: Field = 1; 
+ for i in 1..8 { + y = (exp_bits[7-i] as Field) * (r * y) + (1 - exp_bits[7-i] as Field) * y; + r *= r; + } + self.wrapping_mul(U128::from_integer(y)) + } +} + +impl Shr for U128 { + fn shr(self, other: U128) -> U128 { + assert(other < U128::from_u64s_le(128,0), "attempt to shift right with overflow"); + let exp_bits = other.lo.to_be_bits(7); + + let mut r: Field = 2; + let mut y: Field = 1; + for i in 1..8 { + y = (exp_bits[7-i] as Field) * (r * y) + (1 - exp_bits[7-i] as Field) * y; + r *= r; + } + self / U128::from_integer(y) + } +} \ No newline at end of file diff --git a/package.json b/package.json index 0e86b100b7c..789ba9dec55 100644 --- a/package.json +++ b/package.json @@ -3,7 +3,6 @@ "private": true, "workspaces": [ "compiler/wasm", - "compiler/source-resolver", "compiler/integration-tests", "tooling/noir_js_types", "tooling/noirc_abi_wasm", @@ -19,18 +18,18 @@ "test": "yarn workspaces foreach run test", "test:integration": "yarn workspace integration-tests test", "clean:workspaces": "yarn workspaces foreach --exclude @noir-lang/root run clean", - "clean:root": "rm -rf ./result ./target", + "clean:root": "rm -rf ./result ./target ./packages", "clean": "yarn clean:workspaces && yarn clean:root", "lint": "yarn workspaces foreach --verbose run lint", + "spellcheck": "cspell '**/*.{md,rs}' -c ./cspell.json", "install:acvm_js": "yarn workspace @noir-lang/acvm_js run install:from:nix", "install:noir_wasm": "yarn workspace @noir-lang/noir_wasm run install:from:nix", "install:noirc_abi_wasm": "yarn workspace @noir-lang/noirc_abi run install:from:nix", "install:from:nix": "yarn install:acvm_js && yarn install:noir_wasm && yarn install:noirc_abi_wasm", "build:types": "yarn workspace @noir-lang/types run build", - "build:source-resolver": "yarn workspace @noir-lang/source-resolver run build", "build:backend_barretenberg": "yarn workspace @noir-lang/backend_barretenberg run build", "build:noir_js": "yarn workspace @noir-lang/noir_js run build", - "build:js:only": "yarn build:types && yarn build:source-resolver && yarn build:backend_barretenberg && yarn build:noir_js", + "build:js:only": "yarn build:types && yarn build:backend_barretenberg && yarn build:noir_js", "prepare:publish": "yarn clean && yarn install:from:nix && yarn build:js:only", "nightly:version": "yarn workspaces foreach run nightly:version", "publish:all": "yarn install && yarn workspaces foreach run publish" @@ -39,6 +38,7 @@ "@typescript-eslint/eslint-plugin": "^6.7.3", "@typescript-eslint/parser": "^6.7.3", "chai": "^4.3.7", + "cspell": "^8.3.2", "eslint": "^8.50.0", "eslint-plugin-prettier": "^5.0.0", "mocha": "^10.2.0", diff --git a/release-please-config.json b/release-please-config.json index afc0bfd420d..e73993ca974 100644 --- a/release-please-config.json +++ b/release-please-config.json @@ -14,11 +14,6 @@ "extra-files": [ "Cargo.toml", "flake.nix", - { - "type": "json", - "path": "compiler/source-resolver/package.json", - "jsonpath": "$.version" - }, { "type": "json", "path": "compiler/wasm/package.json", @@ -48,6 +43,11 @@ "type": "json", "path": "tooling/noirc_abi_wasm/package.json", "jsonpath": "$.version" + }, + { + "type": "json", + "path": "docs/docs/package.json", + "jsonpath": "$.version" } ] }, @@ -65,8 +65,11 @@ "blackbox_solver/Cargo.toml", "brillig/Cargo.toml", "brillig_vm/Cargo.toml", - "stdlib/Cargo.toml", - "flake.nix", + { + "type": "json", + "path": "acvm_js/package.json", + "jsonpath": "$.version" + }, { "type": "json", "path": "acvm_js/package.json", @@ -77,5 +80,6 @@ }, "plugins": [ "sentence-case" 
- ] -} + ], + "bootstrap-sha": "690cfc0468de0b9aee53ccfe832c71c16e61e5fc" +} \ No newline at end of file diff --git a/release-tests/test/6_array.test.js b/release-tests/test/6_array.test.js index 530b7f85bf4..43d4a389264 100644 --- a/release-tests/test/6_array.test.js +++ b/release-tests/test/6_array.test.js @@ -19,27 +19,27 @@ test("promise resolved", async () => { promiseResolved = true; }); -test("nargo builds ../tooling/nargo_cli/tests/execution_success/6_array sucessfully", async () => { +test("nargo builds ../test_programs/execution_success/6_array sucessfully", async () => { await within(async () => { - cd("../tooling/nargo_cli/tests/execution_success/6_array"); + cd("../test_programs/execution_success/6_array"); const command = `${NARGO_BIN} check`; await $`${command}`.nothrow(); }); }); -test("nargo creates proof ../tooling/nargo_cli/tests/execution_success/6_array sucessfully", async () => { +test("nargo creates proof ../test_programs/execution_success/6_array sucessfully", async () => { await within(async () => { - cd("../tooling/nargo_cli/tests/execution_success/6_array"); + cd("../test_programs/execution_success/6_array"); const command = `${NARGO_BIN} prove 6_array`; await $`${command}`.nothrow(); }); }); -test("nargo verifies proof ../tooling/nargo_cli/tests/execution_success/6_array sucessfully", async () => { +test("nargo verifies proof ../test_programs/execution_success/6_array sucessfully", async () => { await within(async () => { - cd("../tooling/nargo_cli/tests/execution_success/6_array"); + cd("../test_programs/execution_success/6_array"); const command = `${NARGO_BIN} verify 6_array`; await $`${command}`.nothrow(); diff --git a/release-tests/test/utils/nargo.js b/release-tests/test/utils/nargo.js index 537cdfc8be5..6fcaccbcde7 100644 --- a/release-tests/test/utils/nargo.js +++ b/release-tests/test/utils/nargo.js @@ -1,3 +1,3 @@ -import { default as path } from "node:path"; +import { default as path } from "path"; export const NARGO_BIN = process.env.NARGO_BIN ? path.resolve(process.env.NARGO_BIN) : "nargo"; diff --git a/scripts/bootstrap_native.sh b/scripts/bootstrap_native.sh new file mode 100755 index 00000000000..3e0e2ed853a --- /dev/null +++ b/scripts/bootstrap_native.sh @@ -0,0 +1,20 @@ +#!/usr/bin/env bash +set -eu + +cd $(dirname "$0")/.. + +# If this project has been subrepod into another project, set build data manually. +export SOURCE_DATE_EPOCH=$(date +%s) +export GIT_DIRTY=false +if [ -f ".gitrepo" ]; then + export GIT_COMMIT=$(awk '/commit =/ {print $3}' .gitrepo) +else + export GIT_COMMIT=$(git rev-parse --verify HEAD) +fi + +# Build native. +if [ -n "${DEBUG:-}" ]; then + cargo build +else + cargo build --release +fi diff --git a/scripts/bootstrap_packages.sh b/scripts/bootstrap_packages.sh new file mode 100755 index 00000000000..df4ee5b3aed --- /dev/null +++ b/scripts/bootstrap_packages.sh @@ -0,0 +1,31 @@ +#!/usr/bin/env bash +set -eu + +cd $(dirname "$0")/.. + +./scripts/install_wasm-bindgen.sh + +# If this project has been subrepod into another project, set build data manually. +export SOURCE_DATE_EPOCH=$(date +%s) +export GIT_DIRTY=false +if [ -f ".gitrepo" ]; then + export GIT_COMMIT=$(awk '/commit =/ {print $3}' .gitrepo) +else + export GIT_COMMIT=$(git rev-parse --verify HEAD) +fi + +yarn +yarn build + +# We create a folder called packages, that contains each package as it would be published to npm, named correctly. +# These can be useful for testing, or portaling into other projects. 
+yarn workspaces foreach pack + +rm -rf packages && mkdir -p packages +tar zxfv acvm-repo/acvm_js/package.tgz -C packages && mv packages/package packages/acvm_js +tar zxfv compiler/wasm/package.tgz -C packages && mv packages/package packages/noir_wasm +tar zxfv tooling/noir_codegen/package.tgz -C packages && mv packages/package packages/noir_codegen +tar zxfv tooling/noir_js/package.tgz -C packages && mv packages/package packages/noir_js +tar zxfv tooling/noir_js_backend_barretenberg/package.tgz -C packages && mv packages/package packages/backend_barretenberg +tar zxfv tooling/noir_js_types/package.tgz -C packages && mv packages/package packages/types +tar zxfv tooling/noirc_abi_wasm/package.tgz -C packages && mv packages/package packages/noirc_abi diff --git a/scripts/install_wasm-bindgen.sh b/scripts/install_wasm-bindgen.sh new file mode 100755 index 00000000000..c6e85bac50b --- /dev/null +++ b/scripts/install_wasm-bindgen.sh @@ -0,0 +1,10 @@ +#!/usr/bin/env bash +set -eu + +cd $(dirname "$0")/.. + +# Install wasm-bindgen-cli. +if [ "$(wasm-bindgen --version | cut -d' ' -f2)" != "0.2.86" ]; then + echo "Building wasm-bindgen..." + RUSTFLAGS="-Ctarget-feature=-crt-static" cargo install -f wasm-bindgen-cli --version 0.2.86 +fi diff --git a/scripts/nargo_compile_noir_js_assert_lt.sh b/scripts/nargo_compile_noir_js_assert_lt.sh new file mode 100755 index 00000000000..636ae59b996 --- /dev/null +++ b/scripts/nargo_compile_noir_js_assert_lt.sh @@ -0,0 +1,4 @@ +#!/bin/bash + +cd ./tooling/noir_js/test/noir_compiled_examples/assert_lt +nargo compile \ No newline at end of file diff --git a/scripts/nargo_compile_wasm_fixtures.sh b/scripts/nargo_compile_wasm_fixtures.sh new file mode 100755 index 00000000000..95bb698c8a2 --- /dev/null +++ b/scripts/nargo_compile_wasm_fixtures.sh @@ -0,0 +1,7 @@ +#!/bin/bash + +fixtures_dir="./compiler/wasm/test/fixtures" + +nargo compile --program-dir=$fixtures_dir/noir-contract +nargo compile --program-dir=$fixtures_dir/simple +nargo compile --program-dir=$fixtures_dir/with-deps diff --git a/scripts/test_js_packages.sh b/scripts/test_js_packages.sh new file mode 100755 index 00000000000..a5ec5b92a70 --- /dev/null +++ b/scripts/test_js_packages.sh @@ -0,0 +1,26 @@ +#!/bin/bash +set -eu + +cd $(dirname "$0")/.. + +./scripts/install_wasm-bindgen.sh + +# If this project has been subrepod into another project, set build data manually. +export SOURCE_DATE_EPOCH=$(date +%s) +export GIT_DIRTY=false +if [ -f ".gitrepo" ]; then + export GIT_COMMIT=$(awk '/commit =/ {print $3}' .gitrepo) +else + export GIT_COMMIT=$(git rev-parse --verify HEAD) +fi + +cargo build --release +export PATH="${PATH}:/usr/src/noir/target/release/" + +yarn +yarn build +npx playwright install +npx playwright install-deps + +./scripts/test.sh +yarn test \ No newline at end of file diff --git a/scripts/test_native.sh b/scripts/test_native.sh new file mode 100755 index 00000000000..bc1c47ecf12 --- /dev/null +++ b/scripts/test_native.sh @@ -0,0 +1,15 @@ +#!/bin/bash +set -eu + +cd $(dirname "$0")/.. + +# If this project has been subrepod into another project, set build data manually. 
+export SOURCE_DATE_EPOCH=$(date +%s) +export GIT_DIRTY=false +if [ -f ".gitrepo" ]; then + export GIT_COMMIT=$(awk '/commit =/ {print $3}' .gitrepo) +else + export GIT_COMMIT=$(git rev-parse --verify HEAD) +fi + +cargo test --workspace --locked --release \ No newline at end of file diff --git a/scripts/update-acvm-workspace-versions.sh b/scripts/update-acvm-workspace-versions.sh new file mode 100755 index 00000000000..3478333f05a --- /dev/null +++ b/scripts/update-acvm-workspace-versions.sh @@ -0,0 +1,40 @@ +#!/bin/bash + +# Path to the top-level Cargo.toml +TOP_LEVEL_CARGO_TOML="Cargo.toml" + +# Temporary file to store paths +TEMP_PATHS_FILE="temp_paths.txt" + +# Extract local dependencies paths that start with "acvm-repo" +grep 'path = "acvm-repo' "$TOP_LEVEL_CARGO_TOML" | sed -E 's/.*path = "([^"]+)".*/\1/' > "$TEMP_PATHS_FILE" + +# Read each path and update the version in the top level workspace Cargo.toml file +# to match the version in the component's Cargo.toml +while IFS= read -r component_path; do + # Extract the component name + # This is acir_field for example or stdlib + component_name=$(basename "$component_path") + + # Extract the version from the component's Cargo.toml + # This is acvm-repo/acir_field for example + component_version=$(grep '^version =' "$component_path/Cargo.toml" | sed -E 's/version = "([^"]+)"/\1/') + + if [ -z "$component_version" ]; then + echo "Error: Unable to extract version for $component_name from $component_path/Cargo.toml" + continue + fi + + # Update the version in the top-level Cargo.toml + sed -i.bak -E "s|($component_name[[:space:]]*=[[:space:]]*\{[[:space:]]*version[[:space:]]*=[[:space:]]*\")([^\"]+)(\".*)|\1$component_version\3|" "$TOP_LEVEL_CARGO_TOML" + + if [ $? -ne 0 ]; then + echo "Error: Unable to update version for $component_name in $TOP_LEVEL_CARGO_TOML" + else + echo "Version for $component_name updated successfully to $component_version in $TOP_LEVEL_CARGO_TOML" + fi +done < "$TEMP_PATHS_FILE" + +# Remove temporary file and backup file +rm "$TEMP_PATHS_FILE" +rm "${TOP_LEVEL_CARGO_TOML}.bak" diff --git a/test_programs/.gitignore b/test_programs/.gitignore new file mode 100644 index 00000000000..01a3426160c --- /dev/null +++ b/test_programs/.gitignore @@ -0,0 +1 @@ +acir_artifacts diff --git a/tooling/nargo_cli/tests/README.md b/test_programs/README.md similarity index 100% rename from tooling/nargo_cli/tests/README.md rename to test_programs/README.md diff --git a/tooling/nargo_cli/tests/compile_failure/assert_constant_fail/Nargo.toml b/test_programs/compile_failure/assert_constant_fail/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/compile_failure/assert_constant_fail/Nargo.toml rename to test_programs/compile_failure/assert_constant_fail/Nargo.toml diff --git a/tooling/nargo_cli/tests/compile_failure/assert_constant_fail/src/main.nr b/test_programs/compile_failure/assert_constant_fail/src/main.nr similarity index 100% rename from tooling/nargo_cli/tests/compile_failure/assert_constant_fail/src/main.nr rename to test_programs/compile_failure/assert_constant_fail/src/main.nr diff --git a/tooling/nargo_cli/tests/compile_failure/assert_eq_struct/Nargo.toml b/test_programs/compile_failure/assert_eq_struct/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/compile_failure/assert_eq_struct/Nargo.toml rename to test_programs/compile_failure/assert_eq_struct/Nargo.toml diff --git a/tooling/nargo_cli/tests/compile_failure/assert_eq_struct/src/main.nr 
b/test_programs/compile_failure/assert_eq_struct/src/main.nr similarity index 100% rename from tooling/nargo_cli/tests/compile_failure/assert_eq_struct/src/main.nr rename to test_programs/compile_failure/assert_eq_struct/src/main.nr diff --git a/tooling/nargo_cli/tests/compile_failure/brillig_assert_fail/Nargo.toml b/test_programs/compile_failure/brillig_assert_fail/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/compile_failure/brillig_assert_fail/Nargo.toml rename to test_programs/compile_failure/brillig_assert_fail/Nargo.toml diff --git a/tooling/nargo_cli/tests/compile_failure/brillig_assert_fail/Prover.toml b/test_programs/compile_failure/brillig_assert_fail/Prover.toml similarity index 100% rename from tooling/nargo_cli/tests/compile_failure/brillig_assert_fail/Prover.toml rename to test_programs/compile_failure/brillig_assert_fail/Prover.toml diff --git a/tooling/nargo_cli/tests/compile_failure/brillig_assert_fail/src/main.nr b/test_programs/compile_failure/brillig_assert_fail/src/main.nr similarity index 100% rename from tooling/nargo_cli/tests/compile_failure/brillig_assert_fail/src/main.nr rename to test_programs/compile_failure/brillig_assert_fail/src/main.nr diff --git a/tooling/nargo_cli/tests/compile_failure/constrain_typo/Nargo.toml b/test_programs/compile_failure/constrain_typo/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/compile_failure/constrain_typo/Nargo.toml rename to test_programs/compile_failure/constrain_typo/Nargo.toml diff --git a/tooling/nargo_cli/tests/compile_failure/constrain_typo/src/main.nr b/test_programs/compile_failure/constrain_typo/src/main.nr similarity index 100% rename from tooling/nargo_cli/tests/compile_failure/constrain_typo/src/main.nr rename to test_programs/compile_failure/constrain_typo/src/main.nr diff --git a/tooling/nargo_cli/tests/compile_failure/custom_entry_not_found/Nargo.toml b/test_programs/compile_failure/custom_entry_not_found/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/compile_failure/custom_entry_not_found/Nargo.toml rename to test_programs/compile_failure/custom_entry_not_found/Nargo.toml diff --git a/tooling/nargo_cli/tests/compile_failure/custom_entry_not_found/Prover.toml b/test_programs/compile_failure/custom_entry_not_found/Prover.toml similarity index 100% rename from tooling/nargo_cli/tests/compile_failure/custom_entry_not_found/Prover.toml rename to test_programs/compile_failure/custom_entry_not_found/Prover.toml diff --git a/tooling/nargo_cli/tests/compile_failure/custom_entry_not_found/src/main.nr b/test_programs/compile_failure/custom_entry_not_found/src/main.nr similarity index 100% rename from tooling/nargo_cli/tests/compile_failure/custom_entry_not_found/src/main.nr rename to test_programs/compile_failure/custom_entry_not_found/src/main.nr diff --git a/test_programs/compile_failure/cyclic_dep/Nargo.toml b/test_programs/compile_failure/cyclic_dep/Nargo.toml new file mode 100644 index 00000000000..6a5a9b7db73 --- /dev/null +++ b/test_programs/compile_failure/cyclic_dep/Nargo.toml @@ -0,0 +1,7 @@ +[package] +name = "cyclic_dep" +type = "bin" +authors = [""] + +[dependencies] +dep1 = { path= "./dep1"} \ No newline at end of file diff --git a/tooling/nargo_cli/tests/compile_failure/div_by_zero_constants/Prover.toml b/test_programs/compile_failure/cyclic_dep/Prover.toml similarity index 100% rename from tooling/nargo_cli/tests/compile_failure/div_by_zero_constants/Prover.toml rename to test_programs/compile_failure/cyclic_dep/Prover.toml diff --git 
a/test_programs/compile_failure/cyclic_dep/dep1/Nargo.toml b/test_programs/compile_failure/cyclic_dep/dep1/Nargo.toml new file mode 100644 index 00000000000..4782bbd5cda --- /dev/null +++ b/test_programs/compile_failure/cyclic_dep/dep1/Nargo.toml @@ -0,0 +1,7 @@ +[package] +name = "dep1" +type = "lib" +authors = [""] + +[dependencies] +dep1 = { path= "../dep2"} \ No newline at end of file diff --git a/test_programs/compile_failure/cyclic_dep/dep1/src/lib.nr b/test_programs/compile_failure/cyclic_dep/dep1/src/lib.nr new file mode 100644 index 00000000000..02b68c56bd2 --- /dev/null +++ b/test_programs/compile_failure/cyclic_dep/dep1/src/lib.nr @@ -0,0 +1,3 @@ +fn bar() { + +} \ No newline at end of file diff --git a/test_programs/compile_failure/cyclic_dep/dep2/Nargo.toml b/test_programs/compile_failure/cyclic_dep/dep2/Nargo.toml new file mode 100644 index 00000000000..5e2a0f304b0 --- /dev/null +++ b/test_programs/compile_failure/cyclic_dep/dep2/Nargo.toml @@ -0,0 +1,7 @@ +[package] +name = "dep2" +type = "lib" +authors = [""] + +[dependencies] +dep1 = { path= "../dep1"} \ No newline at end of file diff --git a/test_programs/compile_failure/cyclic_dep/dep2/src/lib.nr b/test_programs/compile_failure/cyclic_dep/dep2/src/lib.nr new file mode 100644 index 00000000000..298a0f3c7ca --- /dev/null +++ b/test_programs/compile_failure/cyclic_dep/dep2/src/lib.nr @@ -0,0 +1,3 @@ +fn foo() { + +} \ No newline at end of file diff --git a/test_programs/compile_failure/cyclic_dep/src/main.nr b/test_programs/compile_failure/cyclic_dep/src/main.nr new file mode 100644 index 00000000000..c55ca748334 --- /dev/null +++ b/test_programs/compile_failure/cyclic_dep/src/main.nr @@ -0,0 +1,7 @@ +use dep1::foo; +use dep2::bar; + +fn main() { + dep1::foo(); + dep2::bar(); +} diff --git a/tooling/nargo_cli/tests/compile_failure/dep_impl_primitive/Nargo.toml b/test_programs/compile_failure/dep_impl_primitive/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/compile_failure/dep_impl_primitive/Nargo.toml rename to test_programs/compile_failure/dep_impl_primitive/Nargo.toml diff --git a/tooling/nargo_cli/tests/compile_failure/dep_impl_primitive/Prover.toml b/test_programs/compile_failure/dep_impl_primitive/Prover.toml similarity index 100% rename from tooling/nargo_cli/tests/compile_failure/dep_impl_primitive/Prover.toml rename to test_programs/compile_failure/dep_impl_primitive/Prover.toml diff --git a/tooling/nargo_cli/tests/compile_failure/dep_impl_primitive/src/main.nr b/test_programs/compile_failure/dep_impl_primitive/src/main.nr similarity index 100% rename from tooling/nargo_cli/tests/compile_failure/dep_impl_primitive/src/main.nr rename to test_programs/compile_failure/dep_impl_primitive/src/main.nr diff --git a/tooling/nargo_cli/tests/compile_failure/depend_on_bin/Nargo.toml b/test_programs/compile_failure/depend_on_bin/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/compile_failure/depend_on_bin/Nargo.toml rename to test_programs/compile_failure/depend_on_bin/Nargo.toml diff --git a/tooling/nargo_cli/tests/compile_failure/depend_on_bin/Prover.toml b/test_programs/compile_failure/depend_on_bin/Prover.toml similarity index 100% rename from tooling/nargo_cli/tests/compile_failure/depend_on_bin/Prover.toml rename to test_programs/compile_failure/depend_on_bin/Prover.toml diff --git a/tooling/nargo_cli/tests/compile_failure/depend_on_bin/src/main.nr b/test_programs/compile_failure/depend_on_bin/src/main.nr similarity index 100% rename from 
tooling/nargo_cli/tests/compile_failure/depend_on_bin/src/main.nr rename to test_programs/compile_failure/depend_on_bin/src/main.nr diff --git a/tooling/nargo_cli/tests/compile_failure/div_by_zero_constants/Nargo.toml b/test_programs/compile_failure/div_by_zero_constants/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/compile_failure/div_by_zero_constants/Nargo.toml rename to test_programs/compile_failure/div_by_zero_constants/Nargo.toml diff --git a/tooling/nargo_cli/tests/compile_failure/div_by_zero_modulo/Prover.toml b/test_programs/compile_failure/div_by_zero_constants/Prover.toml similarity index 100% rename from tooling/nargo_cli/tests/compile_failure/div_by_zero_modulo/Prover.toml rename to test_programs/compile_failure/div_by_zero_constants/Prover.toml diff --git a/tooling/nargo_cli/tests/compile_failure/div_by_zero_constants/src/main.nr b/test_programs/compile_failure/div_by_zero_constants/src/main.nr similarity index 100% rename from tooling/nargo_cli/tests/compile_failure/div_by_zero_constants/src/main.nr rename to test_programs/compile_failure/div_by_zero_constants/src/main.nr diff --git a/tooling/nargo_cli/tests/compile_failure/div_by_zero_modulo/Nargo.toml b/test_programs/compile_failure/div_by_zero_modulo/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/compile_failure/div_by_zero_modulo/Nargo.toml rename to test_programs/compile_failure/div_by_zero_modulo/Nargo.toml diff --git a/tooling/nargo_cli/tests/compile_failure/dup_trait_implementation_4/Prover.toml b/test_programs/compile_failure/div_by_zero_modulo/Prover.toml similarity index 100% rename from tooling/nargo_cli/tests/compile_failure/dup_trait_implementation_4/Prover.toml rename to test_programs/compile_failure/div_by_zero_modulo/Prover.toml diff --git a/tooling/nargo_cli/tests/compile_failure/div_by_zero_modulo/src/main.nr b/test_programs/compile_failure/div_by_zero_modulo/src/main.nr similarity index 100% rename from tooling/nargo_cli/tests/compile_failure/div_by_zero_modulo/src/main.nr rename to test_programs/compile_failure/div_by_zero_modulo/src/main.nr diff --git a/tooling/nargo_cli/tests/compile_failure/div_by_zero_numerator_witness/Nargo.toml b/test_programs/compile_failure/div_by_zero_numerator_witness/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/compile_failure/div_by_zero_numerator_witness/Nargo.toml rename to test_programs/compile_failure/div_by_zero_numerator_witness/Nargo.toml diff --git a/tooling/nargo_cli/tests/compile_failure/div_by_zero_numerator_witness/Prover.toml b/test_programs/compile_failure/div_by_zero_numerator_witness/Prover.toml similarity index 100% rename from tooling/nargo_cli/tests/compile_failure/div_by_zero_numerator_witness/Prover.toml rename to test_programs/compile_failure/div_by_zero_numerator_witness/Prover.toml diff --git a/tooling/nargo_cli/tests/compile_failure/div_by_zero_numerator_witness/src/main.nr b/test_programs/compile_failure/div_by_zero_numerator_witness/src/main.nr similarity index 100% rename from tooling/nargo_cli/tests/compile_failure/div_by_zero_numerator_witness/src/main.nr rename to test_programs/compile_failure/div_by_zero_numerator_witness/src/main.nr diff --git a/tooling/nargo_cli/tests/compile_failure/div_by_zero_witness/Nargo.toml b/test_programs/compile_failure/div_by_zero_witness/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/compile_failure/div_by_zero_witness/Nargo.toml rename to test_programs/compile_failure/div_by_zero_witness/Nargo.toml diff --git 
a/tooling/nargo_cli/tests/compile_failure/div_by_zero_witness/Prover.toml b/test_programs/compile_failure/div_by_zero_witness/Prover.toml similarity index 100% rename from tooling/nargo_cli/tests/compile_failure/div_by_zero_witness/Prover.toml rename to test_programs/compile_failure/div_by_zero_witness/Prover.toml diff --git a/tooling/nargo_cli/tests/compile_failure/div_by_zero_witness/src/main.nr b/test_programs/compile_failure/div_by_zero_witness/src/main.nr similarity index 100% rename from tooling/nargo_cli/tests/compile_failure/div_by_zero_witness/src/main.nr rename to test_programs/compile_failure/div_by_zero_witness/src/main.nr diff --git a/tooling/nargo_cli/tests/compile_failure/dup_trait_implementation_4/Nargo.toml b/test_programs/compile_failure/dup_trait_implementation_4/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/compile_failure/dup_trait_implementation_4/Nargo.toml rename to test_programs/compile_failure/dup_trait_implementation_4/Nargo.toml diff --git a/tooling/nargo_cli/tests/compile_failure/dup_trait_implementation_5/Prover.toml b/test_programs/compile_failure/dup_trait_implementation_4/Prover.toml similarity index 100% rename from tooling/nargo_cli/tests/compile_failure/dup_trait_implementation_5/Prover.toml rename to test_programs/compile_failure/dup_trait_implementation_4/Prover.toml diff --git a/tooling/nargo_cli/tests/compile_failure/dup_trait_implementation_4/src/main.nr b/test_programs/compile_failure/dup_trait_implementation_4/src/main.nr similarity index 100% rename from tooling/nargo_cli/tests/compile_failure/dup_trait_implementation_4/src/main.nr rename to test_programs/compile_failure/dup_trait_implementation_4/src/main.nr diff --git a/tooling/nargo_cli/tests/compile_failure/dup_trait_implementation_4/src/module1.nr b/test_programs/compile_failure/dup_trait_implementation_4/src/module1.nr similarity index 100% rename from tooling/nargo_cli/tests/compile_failure/dup_trait_implementation_4/src/module1.nr rename to test_programs/compile_failure/dup_trait_implementation_4/src/module1.nr diff --git a/tooling/nargo_cli/tests/compile_failure/dup_trait_implementation_4/src/module2.nr b/test_programs/compile_failure/dup_trait_implementation_4/src/module2.nr similarity index 100% rename from tooling/nargo_cli/tests/compile_failure/dup_trait_implementation_4/src/module2.nr rename to test_programs/compile_failure/dup_trait_implementation_4/src/module2.nr diff --git a/tooling/nargo_cli/tests/compile_failure/dup_trait_implementation_4/src/module3.nr b/test_programs/compile_failure/dup_trait_implementation_4/src/module3.nr similarity index 100% rename from tooling/nargo_cli/tests/compile_failure/dup_trait_implementation_4/src/module3.nr rename to test_programs/compile_failure/dup_trait_implementation_4/src/module3.nr diff --git a/tooling/nargo_cli/tests/compile_failure/dup_trait_implementation_5/Nargo.toml b/test_programs/compile_failure/dup_trait_implementation_5/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/compile_failure/dup_trait_implementation_5/Nargo.toml rename to test_programs/compile_failure/dup_trait_implementation_5/Nargo.toml diff --git a/tooling/nargo_cli/tests/compile_failure/dup_trait_items_1/Prover.toml b/test_programs/compile_failure/dup_trait_implementation_5/Prover.toml similarity index 100% rename from tooling/nargo_cli/tests/compile_failure/dup_trait_items_1/Prover.toml rename to test_programs/compile_failure/dup_trait_implementation_5/Prover.toml diff --git 
a/tooling/nargo_cli/tests/compile_failure/dup_trait_implementation_5/src/main.nr b/test_programs/compile_failure/dup_trait_implementation_5/src/main.nr similarity index 100% rename from tooling/nargo_cli/tests/compile_failure/dup_trait_implementation_5/src/main.nr rename to test_programs/compile_failure/dup_trait_implementation_5/src/main.nr diff --git a/tooling/nargo_cli/tests/compile_failure/dup_trait_implementation_5/src/module1.nr b/test_programs/compile_failure/dup_trait_implementation_5/src/module1.nr similarity index 100% rename from tooling/nargo_cli/tests/compile_failure/dup_trait_implementation_5/src/module1.nr rename to test_programs/compile_failure/dup_trait_implementation_5/src/module1.nr diff --git a/tooling/nargo_cli/tests/compile_failure/dup_trait_implementation_5/src/module2.nr b/test_programs/compile_failure/dup_trait_implementation_5/src/module2.nr similarity index 100% rename from tooling/nargo_cli/tests/compile_failure/dup_trait_implementation_5/src/module2.nr rename to test_programs/compile_failure/dup_trait_implementation_5/src/module2.nr diff --git a/tooling/nargo_cli/tests/compile_failure/dup_trait_implementation_5/src/module3.nr b/test_programs/compile_failure/dup_trait_implementation_5/src/module3.nr similarity index 100% rename from tooling/nargo_cli/tests/compile_failure/dup_trait_implementation_5/src/module3.nr rename to test_programs/compile_failure/dup_trait_implementation_5/src/module3.nr diff --git a/tooling/nargo_cli/tests/compile_failure/dup_trait_implementation_5/src/module4.nr b/test_programs/compile_failure/dup_trait_implementation_5/src/module4.nr similarity index 100% rename from tooling/nargo_cli/tests/compile_failure/dup_trait_implementation_5/src/module4.nr rename to test_programs/compile_failure/dup_trait_implementation_5/src/module4.nr diff --git a/tooling/nargo_cli/tests/compile_failure/dup_trait_items_1/Nargo.toml b/test_programs/compile_failure/dup_trait_items_1/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/compile_failure/dup_trait_items_1/Nargo.toml rename to test_programs/compile_failure/dup_trait_items_1/Nargo.toml diff --git a/tooling/nargo_cli/tests/compile_failure/dup_trait_items_2/Prover.toml b/test_programs/compile_failure/dup_trait_items_1/Prover.toml similarity index 100% rename from tooling/nargo_cli/tests/compile_failure/dup_trait_items_2/Prover.toml rename to test_programs/compile_failure/dup_trait_items_1/Prover.toml diff --git a/tooling/nargo_cli/tests/compile_failure/dup_trait_items_1/src/main.nr b/test_programs/compile_failure/dup_trait_items_1/src/main.nr similarity index 100% rename from tooling/nargo_cli/tests/compile_failure/dup_trait_items_1/src/main.nr rename to test_programs/compile_failure/dup_trait_items_1/src/main.nr diff --git a/tooling/nargo_cli/tests/compile_failure/dup_trait_items_2/Nargo.toml b/test_programs/compile_failure/dup_trait_items_2/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/compile_failure/dup_trait_items_2/Nargo.toml rename to test_programs/compile_failure/dup_trait_items_2/Nargo.toml diff --git a/tooling/nargo_cli/tests/compile_failure/dup_trait_items_3/Prover.toml b/test_programs/compile_failure/dup_trait_items_2/Prover.toml similarity index 100% rename from tooling/nargo_cli/tests/compile_failure/dup_trait_items_3/Prover.toml rename to test_programs/compile_failure/dup_trait_items_2/Prover.toml diff --git a/tooling/nargo_cli/tests/compile_failure/dup_trait_items_2/src/main.nr b/test_programs/compile_failure/dup_trait_items_2/src/main.nr similarity 
index 100% rename from tooling/nargo_cli/tests/compile_failure/dup_trait_items_2/src/main.nr rename to test_programs/compile_failure/dup_trait_items_2/src/main.nr diff --git a/tooling/nargo_cli/tests/compile_failure/dup_trait_items_3/Nargo.toml b/test_programs/compile_failure/dup_trait_items_3/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/compile_failure/dup_trait_items_3/Nargo.toml rename to test_programs/compile_failure/dup_trait_items_3/Nargo.toml diff --git a/tooling/nargo_cli/tests/compile_failure/dup_trait_items_4/Prover.toml b/test_programs/compile_failure/dup_trait_items_3/Prover.toml similarity index 100% rename from tooling/nargo_cli/tests/compile_failure/dup_trait_items_4/Prover.toml rename to test_programs/compile_failure/dup_trait_items_3/Prover.toml diff --git a/tooling/nargo_cli/tests/compile_failure/dup_trait_items_3/src/main.nr b/test_programs/compile_failure/dup_trait_items_3/src/main.nr similarity index 100% rename from tooling/nargo_cli/tests/compile_failure/dup_trait_items_3/src/main.nr rename to test_programs/compile_failure/dup_trait_items_3/src/main.nr diff --git a/tooling/nargo_cli/tests/compile_failure/dup_trait_items_4/Nargo.toml b/test_programs/compile_failure/dup_trait_items_4/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/compile_failure/dup_trait_items_4/Nargo.toml rename to test_programs/compile_failure/dup_trait_items_4/Nargo.toml diff --git a/tooling/nargo_cli/tests/compile_failure/dup_trait_items_5/Prover.toml b/test_programs/compile_failure/dup_trait_items_4/Prover.toml similarity index 100% rename from tooling/nargo_cli/tests/compile_failure/dup_trait_items_5/Prover.toml rename to test_programs/compile_failure/dup_trait_items_4/Prover.toml diff --git a/tooling/nargo_cli/tests/compile_failure/dup_trait_items_4/src/main.nr b/test_programs/compile_failure/dup_trait_items_4/src/main.nr similarity index 100% rename from tooling/nargo_cli/tests/compile_failure/dup_trait_items_4/src/main.nr rename to test_programs/compile_failure/dup_trait_items_4/src/main.nr diff --git a/tooling/nargo_cli/tests/compile_failure/dup_trait_items_5/Nargo.toml b/test_programs/compile_failure/dup_trait_items_5/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/compile_failure/dup_trait_items_5/Nargo.toml rename to test_programs/compile_failure/dup_trait_items_5/Nargo.toml diff --git a/tooling/nargo_cli/tests/compile_failure/dup_trait_items_6/Prover.toml b/test_programs/compile_failure/dup_trait_items_5/Prover.toml similarity index 100% rename from tooling/nargo_cli/tests/compile_failure/dup_trait_items_6/Prover.toml rename to test_programs/compile_failure/dup_trait_items_5/Prover.toml diff --git a/tooling/nargo_cli/tests/compile_failure/dup_trait_items_5/src/main.nr b/test_programs/compile_failure/dup_trait_items_5/src/main.nr similarity index 100% rename from tooling/nargo_cli/tests/compile_failure/dup_trait_items_5/src/main.nr rename to test_programs/compile_failure/dup_trait_items_5/src/main.nr diff --git a/tooling/nargo_cli/tests/compile_failure/dup_trait_items_6/Nargo.toml b/test_programs/compile_failure/dup_trait_items_6/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/compile_failure/dup_trait_items_6/Nargo.toml rename to test_programs/compile_failure/dup_trait_items_6/Nargo.toml diff --git a/tooling/nargo_cli/tests/compile_success_empty/brillig_field_binary_operations/Prover.toml b/test_programs/compile_failure/dup_trait_items_6/Prover.toml similarity index 100% rename from 
tooling/nargo_cli/tests/compile_success_empty/brillig_field_binary_operations/Prover.toml rename to test_programs/compile_failure/dup_trait_items_6/Prover.toml diff --git a/tooling/nargo_cli/tests/compile_failure/dup_trait_items_6/src/main.nr b/test_programs/compile_failure/dup_trait_items_6/src/main.nr similarity index 100% rename from tooling/nargo_cli/tests/compile_failure/dup_trait_items_6/src/main.nr rename to test_programs/compile_failure/dup_trait_items_6/src/main.nr diff --git a/tooling/nargo_cli/tests/compile_failure/duplicate_declaration/Nargo.toml b/test_programs/compile_failure/duplicate_declaration/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/compile_failure/duplicate_declaration/Nargo.toml rename to test_programs/compile_failure/duplicate_declaration/Nargo.toml diff --git a/tooling/nargo_cli/tests/compile_failure/duplicate_declaration/src/main.nr b/test_programs/compile_failure/duplicate_declaration/src/main.nr similarity index 100% rename from tooling/nargo_cli/tests/compile_failure/duplicate_declaration/src/main.nr rename to test_programs/compile_failure/duplicate_declaration/src/main.nr diff --git a/tooling/nargo_cli/tests/compile_failure/dyn_index_fail_nested_array/Nargo.toml b/test_programs/compile_failure/dyn_index_fail_nested_array/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/compile_failure/dyn_index_fail_nested_array/Nargo.toml rename to test_programs/compile_failure/dyn_index_fail_nested_array/Nargo.toml diff --git a/tooling/nargo_cli/tests/compile_failure/dyn_index_fail_nested_array/Prover.toml b/test_programs/compile_failure/dyn_index_fail_nested_array/Prover.toml similarity index 100% rename from tooling/nargo_cli/tests/compile_failure/dyn_index_fail_nested_array/Prover.toml rename to test_programs/compile_failure/dyn_index_fail_nested_array/Prover.toml diff --git a/tooling/nargo_cli/tests/compile_failure/dyn_index_fail_nested_array/src/main.nr b/test_programs/compile_failure/dyn_index_fail_nested_array/src/main.nr similarity index 100% rename from tooling/nargo_cli/tests/compile_failure/dyn_index_fail_nested_array/src/main.nr rename to test_programs/compile_failure/dyn_index_fail_nested_array/src/main.nr diff --git a/tooling/nargo_cli/tests/compile_failure/dynamic_index_failure/Nargo.toml b/test_programs/compile_failure/dynamic_index_failure/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/compile_failure/dynamic_index_failure/Nargo.toml rename to test_programs/compile_failure/dynamic_index_failure/Nargo.toml diff --git a/tooling/nargo_cli/tests/compile_failure/dynamic_index_failure/Prover.toml b/test_programs/compile_failure/dynamic_index_failure/Prover.toml similarity index 100% rename from tooling/nargo_cli/tests/compile_failure/dynamic_index_failure/Prover.toml rename to test_programs/compile_failure/dynamic_index_failure/Prover.toml diff --git a/tooling/nargo_cli/tests/compile_failure/dynamic_index_failure/src/main.nr b/test_programs/compile_failure/dynamic_index_failure/src/main.nr similarity index 100% rename from tooling/nargo_cli/tests/compile_failure/dynamic_index_failure/src/main.nr rename to test_programs/compile_failure/dynamic_index_failure/src/main.nr diff --git a/tooling/nargo_cli/tests/compile_failure/field_modulo/Nargo.toml b/test_programs/compile_failure/field_modulo/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/compile_failure/field_modulo/Nargo.toml rename to test_programs/compile_failure/field_modulo/Nargo.toml diff --git 
a/tooling/nargo_cli/tests/compile_failure/field_modulo/src/main.nr b/test_programs/compile_failure/field_modulo/src/main.nr similarity index 100% rename from tooling/nargo_cli/tests/compile_failure/field_modulo/src/main.nr rename to test_programs/compile_failure/field_modulo/src/main.nr diff --git a/tooling/nargo_cli/tests/compile_failure/integer_literal_overflow/Nargo.toml b/test_programs/compile_failure/integer_literal_overflow/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/compile_failure/integer_literal_overflow/Nargo.toml rename to test_programs/compile_failure/integer_literal_overflow/Nargo.toml diff --git a/tooling/nargo_cli/tests/compile_failure/integer_literal_overflow/src/main.nr b/test_programs/compile_failure/integer_literal_overflow/src/main.nr similarity index 100% rename from tooling/nargo_cli/tests/compile_failure/integer_literal_overflow/src/main.nr rename to test_programs/compile_failure/integer_literal_overflow/src/main.nr diff --git a/tooling/nargo_cli/tests/compile_failure/invalid_dependency_name/Nargo.toml b/test_programs/compile_failure/invalid_dependency_name/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/compile_failure/invalid_dependency_name/Nargo.toml rename to test_programs/compile_failure/invalid_dependency_name/Nargo.toml diff --git a/tooling/nargo_cli/tests/compile_failure/invalid_dependency_name/src/main.nr b/test_programs/compile_failure/invalid_dependency_name/src/main.nr similarity index 100% rename from tooling/nargo_cli/tests/compile_failure/invalid_dependency_name/src/main.nr rename to test_programs/compile_failure/invalid_dependency_name/src/main.nr diff --git a/tooling/nargo_cli/tests/compile_failure/multiple_contracts/Nargo.toml b/test_programs/compile_failure/multiple_contracts/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/compile_failure/multiple_contracts/Nargo.toml rename to test_programs/compile_failure/multiple_contracts/Nargo.toml diff --git a/tooling/nargo_cli/tests/compile_failure/multiple_contracts/src/main.nr b/test_programs/compile_failure/multiple_contracts/src/main.nr similarity index 100% rename from tooling/nargo_cli/tests/compile_failure/multiple_contracts/src/main.nr rename to test_programs/compile_failure/multiple_contracts/src/main.nr diff --git a/tooling/nargo_cli/tests/compile_failure/multiple_primary_attributes_fail/Nargo.toml b/test_programs/compile_failure/multiple_primary_attributes_fail/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/compile_failure/multiple_primary_attributes_fail/Nargo.toml rename to test_programs/compile_failure/multiple_primary_attributes_fail/Nargo.toml diff --git a/tooling/nargo_cli/tests/compile_failure/multiple_primary_attributes_fail/src/main.nr b/test_programs/compile_failure/multiple_primary_attributes_fail/src/main.nr similarity index 100% rename from tooling/nargo_cli/tests/compile_failure/multiple_primary_attributes_fail/src/main.nr rename to test_programs/compile_failure/multiple_primary_attributes_fail/src/main.nr diff --git a/tooling/nargo_cli/tests/compile_failure/mutability_regression_2911/Nargo.toml b/test_programs/compile_failure/mutability_regression_2911/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/compile_failure/mutability_regression_2911/Nargo.toml rename to test_programs/compile_failure/mutability_regression_2911/Nargo.toml diff --git a/tooling/nargo_cli/tests/compile_failure/mutability_regression_2911/src/main.nr 
b/test_programs/compile_failure/mutability_regression_2911/src/main.nr similarity index 100% rename from tooling/nargo_cli/tests/compile_failure/mutability_regression_2911/src/main.nr rename to test_programs/compile_failure/mutability_regression_2911/src/main.nr diff --git a/test_programs/compile_failure/negate_unsigned/Nargo.toml b/test_programs/compile_failure/negate_unsigned/Nargo.toml new file mode 100644 index 00000000000..0b5486024c2 --- /dev/null +++ b/test_programs/compile_failure/negate_unsigned/Nargo.toml @@ -0,0 +1,6 @@ +[package] +name = "negate_unsigned" +type = "bin" +authors = [""] + +[dependencies] \ No newline at end of file diff --git a/tooling/nargo_cli/tests/compile_success_empty/brillig_integer_binary_operations/Prover.toml b/test_programs/compile_failure/negate_unsigned/Prover.toml similarity index 100% rename from tooling/nargo_cli/tests/compile_success_empty/brillig_integer_binary_operations/Prover.toml rename to test_programs/compile_failure/negate_unsigned/Prover.toml diff --git a/test_programs/compile_failure/negate_unsigned/src/main.nr b/test_programs/compile_failure/negate_unsigned/src/main.nr new file mode 100644 index 00000000000..db5f9b0820f --- /dev/null +++ b/test_programs/compile_failure/negate_unsigned/src/main.nr @@ -0,0 +1,6 @@ +use dep::std; + +fn main() { + let var = -1 as u8; + std::println(var); +} diff --git a/tooling/nargo_cli/tests/compile_failure/no_impl_from_function/Nargo.toml b/test_programs/compile_failure/no_impl_from_function/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/compile_failure/no_impl_from_function/Nargo.toml rename to test_programs/compile_failure/no_impl_from_function/Nargo.toml diff --git a/tooling/nargo_cli/tests/compile_failure/no_impl_from_function/src/main.nr b/test_programs/compile_failure/no_impl_from_function/src/main.nr similarity index 100% rename from tooling/nargo_cli/tests/compile_failure/no_impl_from_function/src/main.nr rename to test_programs/compile_failure/no_impl_from_function/src/main.nr diff --git a/tooling/nargo_cli/tests/compile_failure/no_nested_impl/Nargo.toml b/test_programs/compile_failure/no_nested_impl/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/compile_failure/no_nested_impl/Nargo.toml rename to test_programs/compile_failure/no_nested_impl/Nargo.toml diff --git a/test_programs/compile_failure/no_nested_impl/src/main.nr b/test_programs/compile_failure/no_nested_impl/src/main.nr new file mode 100644 index 00000000000..1f1056fb6d9 --- /dev/null +++ b/test_programs/compile_failure/no_nested_impl/src/main.nr @@ -0,0 +1,21 @@ +fn main() { + let a: [[[[Field; 2]; 2]; 2]; 2] = [[[[1, 2], [3, 4]], [[5, 6], [7, 8]]], [[[1, 2], [3, 4]], [[5, 6], [7, 8]]]]; + assert(a.my_eq(a)); +} + +trait MyEq { + fn my_eq(self, other: Self) -> bool; +} + +impl MyEq for [T; 2] where T: MyEq { + fn my_eq(self, other: Self) -> bool { + self[0].my_eq(other[0]) + & self[0].my_eq(other[0]) + } +} +// Impl for u32 but not Field +impl MyEq for u32 { + fn my_eq(self, other: Self) -> bool { + self == other + } +} diff --git a/tooling/nargo_cli/tests/compile_failure/orphaned_trait_impl/Nargo.toml b/test_programs/compile_failure/orphaned_trait_impl/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/compile_failure/orphaned_trait_impl/Nargo.toml rename to test_programs/compile_failure/orphaned_trait_impl/Nargo.toml diff --git a/compiler/integration-tests/circuits/main/Prover.toml b/test_programs/compile_failure/orphaned_trait_impl/Prover.toml similarity index 100% rename 
from compiler/integration-tests/circuits/main/Prover.toml rename to test_programs/compile_failure/orphaned_trait_impl/Prover.toml diff --git a/tooling/nargo_cli/tests/compile_failure/orphaned_trait_impl/crate1/Nargo.toml b/test_programs/compile_failure/orphaned_trait_impl/crate1/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/compile_failure/orphaned_trait_impl/crate1/Nargo.toml rename to test_programs/compile_failure/orphaned_trait_impl/crate1/Nargo.toml diff --git a/tooling/nargo_cli/tests/compile_failure/orphaned_trait_impl/crate1/src/lib.nr b/test_programs/compile_failure/orphaned_trait_impl/crate1/src/lib.nr similarity index 100% rename from tooling/nargo_cli/tests/compile_failure/orphaned_trait_impl/crate1/src/lib.nr rename to test_programs/compile_failure/orphaned_trait_impl/crate1/src/lib.nr diff --git a/tooling/nargo_cli/tests/compile_failure/orphaned_trait_impl/crate2/Nargo.toml b/test_programs/compile_failure/orphaned_trait_impl/crate2/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/compile_failure/orphaned_trait_impl/crate2/Nargo.toml rename to test_programs/compile_failure/orphaned_trait_impl/crate2/Nargo.toml diff --git a/tooling/nargo_cli/tests/compile_failure/orphaned_trait_impl/crate2/src/lib.nr b/test_programs/compile_failure/orphaned_trait_impl/crate2/src/lib.nr similarity index 100% rename from tooling/nargo_cli/tests/compile_failure/orphaned_trait_impl/crate2/src/lib.nr rename to test_programs/compile_failure/orphaned_trait_impl/crate2/src/lib.nr diff --git a/tooling/nargo_cli/tests/compile_failure/orphaned_trait_impl/src/main.nr b/test_programs/compile_failure/orphaned_trait_impl/src/main.nr similarity index 100% rename from tooling/nargo_cli/tests/compile_failure/orphaned_trait_impl/src/main.nr rename to test_programs/compile_failure/orphaned_trait_impl/src/main.nr diff --git a/tooling/nargo_cli/tests/compile_failure/overflowing_assignment/Nargo.toml b/test_programs/compile_failure/overflowing_assignment/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/compile_failure/overflowing_assignment/Nargo.toml rename to test_programs/compile_failure/overflowing_assignment/Nargo.toml diff --git a/tooling/nargo_cli/tests/compile_failure/overflowing_assignment/src/main.nr b/test_programs/compile_failure/overflowing_assignment/src/main.nr similarity index 100% rename from tooling/nargo_cli/tests/compile_failure/overflowing_assignment/src/main.nr rename to test_programs/compile_failure/overflowing_assignment/src/main.nr diff --git a/tooling/nargo_cli/tests/compile_failure/overlapping_generic_impls/Nargo.toml b/test_programs/compile_failure/overlapping_generic_impls/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/compile_failure/overlapping_generic_impls/Nargo.toml rename to test_programs/compile_failure/overlapping_generic_impls/Nargo.toml diff --git a/tooling/nargo_cli/tests/compile_failure/overlapping_generic_impls/src/main.nr b/test_programs/compile_failure/overlapping_generic_impls/src/main.nr similarity index 100% rename from tooling/nargo_cli/tests/compile_failure/overlapping_generic_impls/src/main.nr rename to test_programs/compile_failure/overlapping_generic_impls/src/main.nr diff --git a/tooling/nargo_cli/tests/compile_failure/package_name_empty/Nargo.toml b/test_programs/compile_failure/package_name_empty/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/compile_failure/package_name_empty/Nargo.toml rename to test_programs/compile_failure/package_name_empty/Nargo.toml 
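The negate_unsigned program added above is expected to be rejected because unary minus is not defined for unsigned integer types. As a point of contrast, a minimal illustrative sketch, assuming the usual Noir semantics where Field negation wraps modulo the field prime (this Field-typed variant is an assumption and not one of the tests in this patch):

fn main() {
    // -1 is a valid Field value: the field prime minus one.
    let var: Field = -1;
    // Adding 1 wraps back around to zero.
    assert(var + 1 == 0);
}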
diff --git a/tooling/nargo_cli/tests/compile_failure/package_name_empty/src/main.nr b/test_programs/compile_failure/package_name_empty/src/main.nr similarity index 100% rename from tooling/nargo_cli/tests/compile_failure/package_name_empty/src/main.nr rename to test_programs/compile_failure/package_name_empty/src/main.nr diff --git a/tooling/nargo_cli/tests/compile_failure/package_name_hyphen/Nargo.toml b/test_programs/compile_failure/package_name_hyphen/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/compile_failure/package_name_hyphen/Nargo.toml rename to test_programs/compile_failure/package_name_hyphen/Nargo.toml diff --git a/tooling/nargo_cli/tests/compile_failure/package_name_hyphen/src/main.nr b/test_programs/compile_failure/package_name_hyphen/src/main.nr similarity index 100% rename from tooling/nargo_cli/tests/compile_failure/package_name_hyphen/src/main.nr rename to test_programs/compile_failure/package_name_hyphen/src/main.nr diff --git a/tooling/nargo_cli/tests/compile_failure/primary_attribute_struct/Nargo.toml b/test_programs/compile_failure/primary_attribute_struct/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/compile_failure/primary_attribute_struct/Nargo.toml rename to test_programs/compile_failure/primary_attribute_struct/Nargo.toml diff --git a/tooling/nargo_cli/tests/compile_failure/primary_attribute_struct/src/main.nr b/test_programs/compile_failure/primary_attribute_struct/src/main.nr similarity index 100% rename from tooling/nargo_cli/tests/compile_failure/primary_attribute_struct/src/main.nr rename to test_programs/compile_failure/primary_attribute_struct/src/main.nr diff --git a/tooling/nargo_cli/tests/compile_failure/radix_non_constant_length/Nargo.toml b/test_programs/compile_failure/radix_non_constant_length/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/compile_failure/radix_non_constant_length/Nargo.toml rename to test_programs/compile_failure/radix_non_constant_length/Nargo.toml diff --git a/tooling/nargo_cli/tests/compile_failure/radix_non_constant_length/Prover.toml b/test_programs/compile_failure/radix_non_constant_length/Prover.toml similarity index 100% rename from tooling/nargo_cli/tests/compile_failure/radix_non_constant_length/Prover.toml rename to test_programs/compile_failure/radix_non_constant_length/Prover.toml diff --git a/tooling/nargo_cli/tests/compile_failure/radix_non_constant_length/src/main.nr b/test_programs/compile_failure/radix_non_constant_length/src/main.nr similarity index 100% rename from tooling/nargo_cli/tests/compile_failure/radix_non_constant_length/src/main.nr rename to test_programs/compile_failure/radix_non_constant_length/src/main.nr diff --git a/test_programs/compile_failure/raw_string_huge/Nargo.toml b/test_programs/compile_failure/raw_string_huge/Nargo.toml new file mode 100644 index 00000000000..ecef0e2a07c --- /dev/null +++ b/test_programs/compile_failure/raw_string_huge/Nargo.toml @@ -0,0 +1,5 @@ +[package] +name = "raw_string_huge" +type = "bin" +authors = [""] +[dependencies] \ No newline at end of file diff --git a/test_programs/compile_failure/raw_string_huge/src/main.nr b/test_programs/compile_failure/raw_string_huge/src/main.nr new file mode 100644 index 00000000000..7bca9942e7a --- /dev/null +++ b/test_programs/compile_failure/raw_string_huge/src/main.nr @@ -0,0 +1,4 @@ +fn main() { + // Fails because of too many hashes for raw string (256+ hashes) + let _a = 
r##############################################################################################################################################################################################################################################################################"hello"##############################################################################################################################################################################################################################################################################; +} diff --git a/tooling/nargo_cli/tests/compile_failure/slice_access_failure/Nargo.toml b/test_programs/compile_failure/slice_access_failure/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/compile_failure/slice_access_failure/Nargo.toml rename to test_programs/compile_failure/slice_access_failure/Nargo.toml diff --git a/tooling/nargo_cli/tests/compile_failure/slice_access_failure/Prover.toml b/test_programs/compile_failure/slice_access_failure/Prover.toml similarity index 100% rename from tooling/nargo_cli/tests/compile_failure/slice_access_failure/Prover.toml rename to test_programs/compile_failure/slice_access_failure/Prover.toml diff --git a/tooling/nargo_cli/tests/compile_failure/slice_access_failure/src/main.nr b/test_programs/compile_failure/slice_access_failure/src/main.nr similarity index 100% rename from tooling/nargo_cli/tests/compile_failure/slice_access_failure/src/main.nr rename to test_programs/compile_failure/slice_access_failure/src/main.nr diff --git a/tooling/nargo_cli/tests/compile_failure/slice_insert_failure/Nargo.toml b/test_programs/compile_failure/slice_insert_failure/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/compile_failure/slice_insert_failure/Nargo.toml rename to test_programs/compile_failure/slice_insert_failure/Nargo.toml diff --git a/tooling/nargo_cli/tests/compile_failure/slice_insert_failure/Prover.toml b/test_programs/compile_failure/slice_insert_failure/Prover.toml similarity index 100% rename from tooling/nargo_cli/tests/compile_failure/slice_insert_failure/Prover.toml rename to test_programs/compile_failure/slice_insert_failure/Prover.toml diff --git a/tooling/nargo_cli/tests/compile_failure/slice_insert_failure/src/main.nr b/test_programs/compile_failure/slice_insert_failure/src/main.nr similarity index 100% rename from tooling/nargo_cli/tests/compile_failure/slice_insert_failure/src/main.nr rename to test_programs/compile_failure/slice_insert_failure/src/main.nr diff --git a/tooling/nargo_cli/tests/compile_failure/slice_remove_failure/Nargo.toml b/test_programs/compile_failure/slice_remove_failure/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/compile_failure/slice_remove_failure/Nargo.toml rename to test_programs/compile_failure/slice_remove_failure/Nargo.toml diff --git a/tooling/nargo_cli/tests/compile_failure/slice_remove_failure/Prover.toml b/test_programs/compile_failure/slice_remove_failure/Prover.toml similarity index 100% rename from tooling/nargo_cli/tests/compile_failure/slice_remove_failure/Prover.toml rename to test_programs/compile_failure/slice_remove_failure/Prover.toml diff --git a/tooling/nargo_cli/tests/compile_failure/slice_remove_failure/src/main.nr b/test_programs/compile_failure/slice_remove_failure/src/main.nr similarity index 100% rename from tooling/nargo_cli/tests/compile_failure/slice_remove_failure/src/main.nr rename to test_programs/compile_failure/slice_remove_failure/src/main.nr diff --git 
a/tooling/nargo_cli/tests/compile_failure/trait_incorrect_generic_count/Nargo.toml b/test_programs/compile_failure/trait_incorrect_generic_count/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/compile_failure/trait_incorrect_generic_count/Nargo.toml rename to test_programs/compile_failure/trait_incorrect_generic_count/Nargo.toml diff --git a/tooling/nargo_cli/tests/compile_failure/trait_incorrect_generic_count/src/main.nr b/test_programs/compile_failure/trait_incorrect_generic_count/src/main.nr similarity index 100% rename from tooling/nargo_cli/tests/compile_failure/trait_incorrect_generic_count/src/main.nr rename to test_programs/compile_failure/trait_incorrect_generic_count/src/main.nr diff --git a/tooling/nargo_cli/tests/compile_failure/workspace_fail/Nargo.toml b/test_programs/compile_failure/workspace_fail/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/compile_failure/workspace_fail/Nargo.toml rename to test_programs/compile_failure/workspace_fail/Nargo.toml diff --git a/tooling/nargo_cli/tests/compile_failure/workspace_fail/crates/a/Nargo.toml b/test_programs/compile_failure/workspace_fail/crates/a/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/compile_failure/workspace_fail/crates/a/Nargo.toml rename to test_programs/compile_failure/workspace_fail/crates/a/Nargo.toml diff --git a/tooling/nargo_cli/tests/compile_failure/workspace_fail/crates/a/Prover.toml b/test_programs/compile_failure/workspace_fail/crates/a/Prover.toml similarity index 100% rename from tooling/nargo_cli/tests/compile_failure/workspace_fail/crates/a/Prover.toml rename to test_programs/compile_failure/workspace_fail/crates/a/Prover.toml diff --git a/tooling/nargo_cli/tests/compile_failure/workspace_fail/crates/a/src/main.nr b/test_programs/compile_failure/workspace_fail/crates/a/src/main.nr similarity index 100% rename from tooling/nargo_cli/tests/compile_failure/workspace_fail/crates/a/src/main.nr rename to test_programs/compile_failure/workspace_fail/crates/a/src/main.nr diff --git a/tooling/nargo_cli/tests/compile_failure/workspace_fail/crates/b/Nargo.toml b/test_programs/compile_failure/workspace_fail/crates/b/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/compile_failure/workspace_fail/crates/b/Nargo.toml rename to test_programs/compile_failure/workspace_fail/crates/b/Nargo.toml diff --git a/tooling/nargo_cli/tests/compile_failure/workspace_fail/crates/b/Prover.toml b/test_programs/compile_failure/workspace_fail/crates/b/Prover.toml similarity index 100% rename from tooling/nargo_cli/tests/compile_failure/workspace_fail/crates/b/Prover.toml rename to test_programs/compile_failure/workspace_fail/crates/b/Prover.toml diff --git a/compiler/integration-tests/circuits/main/src/main.nr b/test_programs/compile_failure/workspace_fail/crates/b/src/main.nr similarity index 100% rename from compiler/integration-tests/circuits/main/src/main.nr rename to test_programs/compile_failure/workspace_fail/crates/b/src/main.nr diff --git a/tooling/nargo_cli/tests/compile_failure/workspace_missing_toml/Nargo.toml b/test_programs/compile_failure/workspace_missing_toml/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/compile_failure/workspace_missing_toml/Nargo.toml rename to test_programs/compile_failure/workspace_missing_toml/Nargo.toml diff --git a/tooling/nargo_cli/tests/compile_failure/workspace_missing_toml/crates/a/Prover.toml b/test_programs/compile_failure/workspace_missing_toml/crates/a/Prover.toml similarity index 
100% rename from tooling/nargo_cli/tests/compile_failure/workspace_missing_toml/crates/a/Prover.toml rename to test_programs/compile_failure/workspace_missing_toml/crates/a/Prover.toml diff --git a/tooling/nargo_cli/tests/execution_success/workspace/crates/a/src/main.nr b/test_programs/compile_failure/workspace_missing_toml/crates/a/src/main.nr similarity index 100% rename from tooling/nargo_cli/tests/execution_success/workspace/crates/a/src/main.nr rename to test_programs/compile_failure/workspace_missing_toml/crates/a/src/main.nr diff --git a/tooling/nargo_cli/tests/compile_failure/workspace_missing_toml/crates/b/Nargo.toml b/test_programs/compile_failure/workspace_missing_toml/crates/b/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/compile_failure/workspace_missing_toml/crates/b/Nargo.toml rename to test_programs/compile_failure/workspace_missing_toml/crates/b/Nargo.toml diff --git a/tooling/nargo_cli/tests/compile_failure/workspace_missing_toml/crates/b/Prover.toml b/test_programs/compile_failure/workspace_missing_toml/crates/b/Prover.toml similarity index 100% rename from tooling/nargo_cli/tests/compile_failure/workspace_missing_toml/crates/b/Prover.toml rename to test_programs/compile_failure/workspace_missing_toml/crates/b/Prover.toml diff --git a/tooling/nargo_cli/tests/compile_failure/workspace_fail/crates/b/src/main.nr b/test_programs/compile_failure/workspace_missing_toml/crates/b/src/main.nr similarity index 100% rename from tooling/nargo_cli/tests/compile_failure/workspace_fail/crates/b/src/main.nr rename to test_programs/compile_failure/workspace_missing_toml/crates/b/src/main.nr diff --git a/tooling/nargo_cli/tests/compile_success_contract/contract_with_impl/Nargo.toml b/test_programs/compile_success_contract/contract_with_impl/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/compile_success_contract/contract_with_impl/Nargo.toml rename to test_programs/compile_success_contract/contract_with_impl/Nargo.toml diff --git a/tooling/nargo_cli/tests/compile_success_contract/contract_with_impl/src/main.nr b/test_programs/compile_success_contract/contract_with_impl/src/main.nr similarity index 100% rename from tooling/nargo_cli/tests/compile_success_contract/contract_with_impl/src/main.nr rename to test_programs/compile_success_contract/contract_with_impl/src/main.nr diff --git a/tooling/nargo_cli/tests/compile_success_contract/non_entry_point_method/Nargo.toml b/test_programs/compile_success_contract/non_entry_point_method/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/compile_success_contract/non_entry_point_method/Nargo.toml rename to test_programs/compile_success_contract/non_entry_point_method/Nargo.toml diff --git a/tooling/nargo_cli/tests/compile_success_contract/non_entry_point_method/src/main.nr b/test_programs/compile_success_contract/non_entry_point_method/src/main.nr similarity index 100% rename from tooling/nargo_cli/tests/compile_success_contract/non_entry_point_method/src/main.nr rename to test_programs/compile_success_contract/non_entry_point_method/src/main.nr diff --git a/tooling/nargo_cli/tests/compile_success_contract/simple_contract/Nargo.toml b/test_programs/compile_success_contract/simple_contract/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/compile_success_contract/simple_contract/Nargo.toml rename to test_programs/compile_success_contract/simple_contract/Nargo.toml diff --git a/test_programs/compile_success_contract/simple_contract/src/main.nr 
b/test_programs/compile_success_contract/simple_contract/src/main.nr new file mode 100644 index 00000000000..fea0ae08c22 --- /dev/null +++ b/test_programs/compile_success_contract/simple_contract/src/main.nr @@ -0,0 +1,19 @@ +contract Foo { + fn double(x: Field) -> pub Field { + x * 2 + } + fn triple(x: Field) -> pub Field { + x * 3 + } + internal fn quadruple(x: Field) -> pub Field { + x * 4 + } + open internal fn skibbidy(x: Field) -> pub Field { + x * 5 + } + // Regression for issue #3344 + #[contract_library_method] + fn foo(x : u8) -> u8 { + x + } +} diff --git a/tooling/nargo_cli/tests/compile_success_empty/attributes_multiple/Nargo.toml b/test_programs/compile_success_empty/attributes_multiple/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/compile_success_empty/attributes_multiple/Nargo.toml rename to test_programs/compile_success_empty/attributes_multiple/Nargo.toml diff --git a/tooling/nargo_cli/tests/compile_success_empty/attributes_multiple/src/main.nr b/test_programs/compile_success_empty/attributes_multiple/src/main.nr similarity index 100% rename from tooling/nargo_cli/tests/compile_success_empty/attributes_multiple/src/main.nr rename to test_programs/compile_success_empty/attributes_multiple/src/main.nr diff --git a/tooling/nargo_cli/tests/compile_success_empty/attributes_struct/Nargo.toml b/test_programs/compile_success_empty/attributes_struct/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/compile_success_empty/attributes_struct/Nargo.toml rename to test_programs/compile_success_empty/attributes_struct/Nargo.toml diff --git a/tooling/nargo_cli/tests/compile_success_empty/attributes_struct/src/main.nr b/test_programs/compile_success_empty/attributes_struct/src/main.nr similarity index 100% rename from tooling/nargo_cli/tests/compile_success_empty/attributes_struct/src/main.nr rename to test_programs/compile_success_empty/attributes_struct/src/main.nr diff --git a/tooling/nargo_cli/tests/compile_success_empty/auto_deref/Nargo.toml b/test_programs/compile_success_empty/auto_deref/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/compile_success_empty/auto_deref/Nargo.toml rename to test_programs/compile_success_empty/auto_deref/Nargo.toml diff --git a/tooling/nargo_cli/tests/compile_success_empty/auto_deref/src/main.nr b/test_programs/compile_success_empty/auto_deref/src/main.nr similarity index 100% rename from tooling/nargo_cli/tests/compile_success_empty/auto_deref/src/main.nr rename to test_programs/compile_success_empty/auto_deref/src/main.nr diff --git a/tooling/nargo_cli/tests/compile_success_empty/brillig_cast/Nargo.toml b/test_programs/compile_success_empty/brillig_cast/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/compile_success_empty/brillig_cast/Nargo.toml rename to test_programs/compile_success_empty/brillig_cast/Nargo.toml diff --git a/tooling/nargo_cli/tests/compile_success_empty/brillig_cast/src/main.nr b/test_programs/compile_success_empty/brillig_cast/src/main.nr similarity index 100% rename from tooling/nargo_cli/tests/compile_success_empty/brillig_cast/src/main.nr rename to test_programs/compile_success_empty/brillig_cast/src/main.nr diff --git a/tooling/nargo_cli/tests/compile_success_empty/brillig_field_binary_operations/Nargo.toml b/test_programs/compile_success_empty/brillig_field_binary_operations/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/compile_success_empty/brillig_field_binary_operations/Nargo.toml rename to 
test_programs/compile_success_empty/brillig_field_binary_operations/Nargo.toml diff --git a/tooling/nargo_cli/tests/compile_success_empty/brillig_modulo/Prover.toml b/test_programs/compile_success_empty/brillig_field_binary_operations/Prover.toml similarity index 100% rename from tooling/nargo_cli/tests/compile_success_empty/brillig_modulo/Prover.toml rename to test_programs/compile_success_empty/brillig_field_binary_operations/Prover.toml diff --git a/tooling/nargo_cli/tests/compile_success_empty/brillig_field_binary_operations/src/main.nr b/test_programs/compile_success_empty/brillig_field_binary_operations/src/main.nr similarity index 100% rename from tooling/nargo_cli/tests/compile_success_empty/brillig_field_binary_operations/src/main.nr rename to test_programs/compile_success_empty/brillig_field_binary_operations/src/main.nr diff --git a/tooling/nargo_cli/tests/compile_success_empty/brillig_integer_binary_operations/Nargo.toml b/test_programs/compile_success_empty/brillig_integer_binary_operations/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/compile_success_empty/brillig_integer_binary_operations/Nargo.toml rename to test_programs/compile_success_empty/brillig_integer_binary_operations/Nargo.toml diff --git a/tooling/nargo_cli/tests/compile_success_empty/conditional_regression_579/Prover.toml b/test_programs/compile_success_empty/brillig_integer_binary_operations/Prover.toml similarity index 100% rename from tooling/nargo_cli/tests/compile_success_empty/conditional_regression_579/Prover.toml rename to test_programs/compile_success_empty/brillig_integer_binary_operations/Prover.toml diff --git a/tooling/nargo_cli/tests/compile_success_empty/brillig_integer_binary_operations/src/main.nr b/test_programs/compile_success_empty/brillig_integer_binary_operations/src/main.nr similarity index 100% rename from tooling/nargo_cli/tests/compile_success_empty/brillig_integer_binary_operations/src/main.nr rename to test_programs/compile_success_empty/brillig_integer_binary_operations/src/main.nr diff --git a/tooling/nargo_cli/tests/compile_success_empty/brillig_modulo/Nargo.toml b/test_programs/compile_success_empty/brillig_modulo/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/compile_success_empty/brillig_modulo/Nargo.toml rename to test_programs/compile_success_empty/brillig_modulo/Nargo.toml diff --git a/tooling/nargo_cli/tests/compile_success_empty/conditional_regression_to_bits/Prover.toml b/test_programs/compile_success_empty/brillig_modulo/Prover.toml similarity index 100% rename from tooling/nargo_cli/tests/compile_success_empty/conditional_regression_to_bits/Prover.toml rename to test_programs/compile_success_empty/brillig_modulo/Prover.toml diff --git a/tooling/nargo_cli/tests/compile_success_empty/brillig_modulo/src/main.nr b/test_programs/compile_success_empty/brillig_modulo/src/main.nr similarity index 100% rename from tooling/nargo_cli/tests/compile_success_empty/brillig_modulo/src/main.nr rename to test_programs/compile_success_empty/brillig_modulo/src/main.nr diff --git a/tooling/nargo_cli/tests/compile_success_empty/closure_explicit_types/Nargo.toml b/test_programs/compile_success_empty/closure_explicit_types/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/compile_success_empty/closure_explicit_types/Nargo.toml rename to test_programs/compile_success_empty/closure_explicit_types/Nargo.toml diff --git a/tooling/nargo_cli/tests/compile_success_empty/closure_explicit_types/src/main.nr 
b/test_programs/compile_success_empty/closure_explicit_types/src/main.nr similarity index 100% rename from tooling/nargo_cli/tests/compile_success_empty/closure_explicit_types/src/main.nr rename to test_programs/compile_success_empty/closure_explicit_types/src/main.nr diff --git a/tooling/nargo_cli/tests/compile_success_empty/comptime_recursion_regression/Nargo.toml b/test_programs/compile_success_empty/comptime_recursion_regression/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/compile_success_empty/comptime_recursion_regression/Nargo.toml rename to test_programs/compile_success_empty/comptime_recursion_regression/Nargo.toml diff --git a/tooling/nargo_cli/tests/compile_success_empty/comptime_recursion_regression/Prover.toml b/test_programs/compile_success_empty/comptime_recursion_regression/Prover.toml similarity index 100% rename from tooling/nargo_cli/tests/compile_success_empty/comptime_recursion_regression/Prover.toml rename to test_programs/compile_success_empty/comptime_recursion_regression/Prover.toml diff --git a/tooling/nargo_cli/tests/compile_success_empty/comptime_recursion_regression/src/main.nr b/test_programs/compile_success_empty/comptime_recursion_regression/src/main.nr similarity index 100% rename from tooling/nargo_cli/tests/compile_success_empty/comptime_recursion_regression/src/main.nr rename to test_programs/compile_success_empty/comptime_recursion_regression/src/main.nr diff --git a/tooling/nargo_cli/tests/compile_success_empty/comptime_sort/Nargo.toml b/test_programs/compile_success_empty/comptime_sort/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/compile_success_empty/comptime_sort/Nargo.toml rename to test_programs/compile_success_empty/comptime_sort/Nargo.toml diff --git a/tooling/nargo_cli/tests/compile_success_empty/comptime_sort/src/main.nr b/test_programs/compile_success_empty/comptime_sort/src/main.nr similarity index 100% rename from tooling/nargo_cli/tests/compile_success_empty/comptime_sort/src/main.nr rename to test_programs/compile_success_empty/comptime_sort/src/main.nr diff --git a/tooling/nargo_cli/tests/compile_success_empty/conditional_regression_547/Nargo.toml b/test_programs/compile_success_empty/conditional_regression_547/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/compile_success_empty/conditional_regression_547/Nargo.toml rename to test_programs/compile_success_empty/conditional_regression_547/Nargo.toml diff --git a/tooling/nargo_cli/tests/compile_success_empty/conditional_regression_547/Prover.toml b/test_programs/compile_success_empty/conditional_regression_547/Prover.toml similarity index 100% rename from tooling/nargo_cli/tests/compile_success_empty/conditional_regression_547/Prover.toml rename to test_programs/compile_success_empty/conditional_regression_547/Prover.toml diff --git a/tooling/nargo_cli/tests/compile_success_empty/conditional_regression_547/src/main.nr b/test_programs/compile_success_empty/conditional_regression_547/src/main.nr similarity index 100% rename from tooling/nargo_cli/tests/compile_success_empty/conditional_regression_547/src/main.nr rename to test_programs/compile_success_empty/conditional_regression_547/src/main.nr diff --git a/tooling/nargo_cli/tests/compile_success_empty/conditional_regression_579/Nargo.toml b/test_programs/compile_success_empty/conditional_regression_579/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/compile_success_empty/conditional_regression_579/Nargo.toml rename to 
test_programs/compile_success_empty/conditional_regression_579/Nargo.toml diff --git a/tooling/nargo_cli/tests/compile_success_empty/instruction_deduplication/Prover.toml b/test_programs/compile_success_empty/conditional_regression_579/Prover.toml similarity index 100% rename from tooling/nargo_cli/tests/compile_success_empty/instruction_deduplication/Prover.toml rename to test_programs/compile_success_empty/conditional_regression_579/Prover.toml diff --git a/tooling/nargo_cli/tests/compile_success_empty/conditional_regression_579/src/main.nr b/test_programs/compile_success_empty/conditional_regression_579/src/main.nr similarity index 100% rename from tooling/nargo_cli/tests/compile_success_empty/conditional_regression_579/src/main.nr rename to test_programs/compile_success_empty/conditional_regression_579/src/main.nr diff --git a/tooling/nargo_cli/tests/compile_success_empty/conditional_regression_to_bits/Nargo.toml b/test_programs/compile_success_empty/conditional_regression_to_bits/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/compile_success_empty/conditional_regression_to_bits/Nargo.toml rename to test_programs/compile_success_empty/conditional_regression_to_bits/Nargo.toml diff --git a/tooling/nargo_cli/tests/compile_success_empty/let_stmt/Prover.toml b/test_programs/compile_success_empty/conditional_regression_to_bits/Prover.toml similarity index 100% rename from tooling/nargo_cli/tests/compile_success_empty/let_stmt/Prover.toml rename to test_programs/compile_success_empty/conditional_regression_to_bits/Prover.toml diff --git a/tooling/nargo_cli/tests/compile_success_empty/conditional_regression_to_bits/src/main.nr b/test_programs/compile_success_empty/conditional_regression_to_bits/src/main.nr similarity index 100% rename from tooling/nargo_cli/tests/compile_success_empty/conditional_regression_to_bits/src/main.nr rename to test_programs/compile_success_empty/conditional_regression_to_bits/src/main.nr diff --git a/tooling/nargo_cli/tests/compile_success_empty/ec_baby_jubjub/Nargo.toml b/test_programs/compile_success_empty/ec_baby_jubjub/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/compile_success_empty/ec_baby_jubjub/Nargo.toml rename to test_programs/compile_success_empty/ec_baby_jubjub/Nargo.toml diff --git a/test_programs/compile_success_empty/ec_baby_jubjub/src/main.nr b/test_programs/compile_success_empty/ec_baby_jubjub/src/main.nr new file mode 100644 index 00000000000..becd3c8927a --- /dev/null +++ b/test_programs/compile_success_empty/ec_baby_jubjub/src/main.nr @@ -0,0 +1,218 @@ +// Tests may be checked against https://github.com/cfrg/draft-irtf-cfrg-hash-to-curve/tree/main/poc +use dep::std::ec::tecurve::affine::Curve as AffineCurve; +use dep::std::ec::tecurve::affine::Point as Gaffine; +use dep::std::ec::tecurve::curvegroup::Curve; +use dep::std::ec::tecurve::curvegroup::Point as G; + +use dep::std::ec::swcurve::affine::Point as SWGaffine; +use dep::std::ec::swcurve::curvegroup::Point as SWG; + +use dep::std::ec::montcurve::affine::Point as MGaffine; +use dep::std::ec::montcurve::curvegroup::Point as MG; + +fn main() { + // This test only makes sense if Field is the right prime field. 
+ if 21888242871839275222246405745257275088548364400416034343698204186575808495617 == 0 { + // Define Baby Jubjub (ERC-2494) parameters in affine representation + let bjj_affine = AffineCurve::new( + 168700, + 168696, + Gaffine::new( + 995203441582195749578291179787384436505546430278305826713579947235728471134, + 5472060717959818805561601436314318772137091100104008585924551046643952123905 + ) + ); + // Test addition + let p1_affine = Gaffine::new( + 17777552123799933955779906779655732241715742912184938656739573121738514868268, + 2626589144620713026669568689430873010625803728049924121243784502389097019475 + ); + let p2_affine = Gaffine::new( + 16540640123574156134436876038791482806971768689494387082833631921987005038935, + 20819045374670962167435360035096875258406992893633759881276124905556507972311 + ); + + let p3_affine = bjj_affine.add(p1_affine, p2_affine); + assert( + p3_affine.eq( + Gaffine::new( + 7916061937171219682591368294088513039687205273691143098332585753343424131937, + 14035240266687799601661095864649209771790948434046947201833777492504781204499 + ) + ) + ); + // Test scalar multiplication + let p4_affine = bjj_affine.mul(2, p1_affine); + assert( + p4_affine.eq( + Gaffine::new( + 6890855772600357754907169075114257697580319025794532037257385534741338397365, + 4338620300185947561074059802482547481416142213883829469920100239455078257889 + ) + ) + ); + assert(p4_affine.eq(bjj_affine.bit_mul([0, 1], p1_affine))); + // Test subtraction + let p5_affine = bjj_affine.subtract(p3_affine, p3_affine); + assert(p5_affine.eq(Gaffine::zero())); + // Check that these points are on the curve + assert( + bjj_affine.contains(bjj_affine.gen) + & bjj_affine.contains(p1_affine) + & bjj_affine.contains(p2_affine) + & bjj_affine.contains(p3_affine) + & bjj_affine.contains(p4_affine) + & bjj_affine.contains(p5_affine) + ); + // Test CurveGroup equivalents + let bjj = bjj_affine.into_group(); // Baby Jubjub + let p1 = p1_affine.into_group(); + let p2 = p2_affine.into_group(); + let p3 = p3_affine.into_group(); + let p4 = p4_affine.into_group(); + let p5 = p5_affine.into_group(); + // Test addition + assert(p3.eq(bjj.add(p1, p2))); + // Test scalar multiplication + assert(p4.eq(bjj.mul(2, p1))); + assert(p4.eq(bjj.bit_mul([0, 1], p1))); + // Test subtraction + assert(G::zero().eq(bjj.subtract(p3, p3))); + assert(p5.eq(G::zero())); + // Check that these points are on the curve + assert( + bjj.contains(bjj.gen) + & bjj.contains(p1) + & bjj.contains(p2) + & bjj.contains(p3) + & bjj.contains(p4) + & bjj.contains(p5) + ); + // Test SWCurve equivalents of the above + // First the affine representation + let bjj_swcurve_affine = bjj_affine.into_swcurve(); + + let p1_swcurve_affine = bjj_affine.map_into_swcurve(p1_affine); + let p2_swcurve_affine = bjj_affine.map_into_swcurve(p2_affine); + let p3_swcurve_affine = bjj_affine.map_into_swcurve(p3_affine); + let p4_swcurve_affine = bjj_affine.map_into_swcurve(p4_affine); + let p5_swcurve_affine = bjj_affine.map_into_swcurve(p5_affine); + // Addition + assert(p3_swcurve_affine.eq(bjj_swcurve_affine.add(p1_swcurve_affine, p2_swcurve_affine))); + // Doubling + assert(p4_swcurve_affine.eq(bjj_swcurve_affine.mul(2, p1_swcurve_affine))); + assert(p4_swcurve_affine.eq(bjj_swcurve_affine.bit_mul([0, 1], p1_swcurve_affine))); + // Subtraction + assert(SWGaffine::zero().eq(bjj_swcurve_affine.subtract(p3_swcurve_affine, p3_swcurve_affine))); + assert(p5_swcurve_affine.eq(SWGaffine::zero())); + // Check that these points are on the curve + assert( + 
bjj_swcurve_affine.contains(bjj_swcurve_affine.gen) + & bjj_swcurve_affine.contains(p1_swcurve_affine) + & bjj_swcurve_affine.contains(p2_swcurve_affine) + & bjj_swcurve_affine.contains(p3_swcurve_affine) + & bjj_swcurve_affine.contains(p4_swcurve_affine) + & bjj_swcurve_affine.contains(p5_swcurve_affine) + ); + // Then the CurveGroup representation + let bjj_swcurve = bjj.into_swcurve(); + + let p1_swcurve = bjj.map_into_swcurve(p1); + let p2_swcurve = bjj.map_into_swcurve(p2); + let p3_swcurve = bjj.map_into_swcurve(p3); + let p4_swcurve = bjj.map_into_swcurve(p4); + let p5_swcurve = bjj.map_into_swcurve(p5); + // Addition + assert(p3_swcurve.eq(bjj_swcurve.add(p1_swcurve, p2_swcurve))); + // Doubling + assert(p4_swcurve.eq(bjj_swcurve.mul(2, p1_swcurve))); + assert(p4_swcurve.eq(bjj_swcurve.bit_mul([0, 1], p1_swcurve))); + // Subtraction + assert(SWG::zero().eq(bjj_swcurve.subtract(p3_swcurve, p3_swcurve))); + assert(p5_swcurve.eq(SWG::zero())); + // Check that these points are on the curve + assert( + bjj_swcurve.contains(bjj_swcurve.gen) + & bjj_swcurve.contains(p1_swcurve) + & bjj_swcurve.contains(p2_swcurve) + & bjj_swcurve.contains(p3_swcurve) + & bjj_swcurve.contains(p4_swcurve) + & bjj_swcurve.contains(p5_swcurve) + ); + // Test MontCurve conversions + // First the affine representation + let bjj_montcurve_affine = bjj_affine.into_montcurve(); + + let p1_montcurve_affine = p1_affine.into_montcurve(); + let p2_montcurve_affine = p2_affine.into_montcurve(); + let p3_montcurve_affine = p3_affine.into_montcurve(); + let p4_montcurve_affine = p4_affine.into_montcurve(); + let p5_montcurve_affine = p5_affine.into_montcurve(); + // Addition + assert(p3_montcurve_affine.eq(bjj_montcurve_affine.add(p1_montcurve_affine, p2_montcurve_affine))); + // Doubling + assert(p4_montcurve_affine.eq(bjj_montcurve_affine.mul(2, p1_montcurve_affine))); + assert(p4_montcurve_affine.eq(bjj_montcurve_affine.bit_mul([0, 1], p1_montcurve_affine))); + // Subtraction + assert(MGaffine::zero().eq(bjj_montcurve_affine.subtract(p3_montcurve_affine, p3_montcurve_affine))); + assert(p5_montcurve_affine.eq(MGaffine::zero())); + // Check that these points are on the curve + assert( + bjj_montcurve_affine.contains(bjj_montcurve_affine.gen) + & bjj_montcurve_affine.contains(p1_montcurve_affine) + & bjj_montcurve_affine.contains(p2_montcurve_affine) + & bjj_montcurve_affine.contains(p3_montcurve_affine) + & bjj_montcurve_affine.contains(p4_montcurve_affine) + & bjj_montcurve_affine.contains(p5_montcurve_affine) + ); + // Then the CurveGroup representation + let bjj_montcurve = bjj.into_montcurve(); + + let p1_montcurve = p1_montcurve_affine.into_group(); + let p2_montcurve = p2_montcurve_affine.into_group(); + let p3_montcurve = p3_montcurve_affine.into_group(); + let p4_montcurve = p4_montcurve_affine.into_group(); + let p5_montcurve = p5_montcurve_affine.into_group(); + // Addition + assert(p3_montcurve.eq(bjj_montcurve.add(p1_montcurve, p2_montcurve))); + // Doubling + assert(p4_montcurve.eq(bjj_montcurve.mul(2, p1_montcurve))); + assert(p4_montcurve.eq(bjj_montcurve.bit_mul([0, 1], p1_montcurve))); + // Subtraction + assert(MG::zero().eq(bjj_montcurve.subtract(p3_montcurve, p3_montcurve))); + assert(p5_montcurve.eq(MG::zero())); + // Check that these points are on the curve + assert( + bjj_montcurve.contains(bjj_montcurve.gen) + & bjj_montcurve.contains(p1_montcurve) + & bjj_montcurve.contains(p2_montcurve) + & bjj_montcurve.contains(p3_montcurve) + & bjj_montcurve.contains(p4_montcurve) + & 
bjj_montcurve.contains(p5_montcurve) + ); + // Elligator 2 map-to-curve + let ell2_pt_map = bjj_affine.elligator2_map(27); + + assert( + ell2_pt_map.eq( + MGaffine::new( + 7972459279704486422145701269802978968072470631857513331988813812334797879121, + 8142420778878030219043334189293412482212146646099536952861607542822144507872 + ).into_tecurve() + ) + ); + // SWU map-to-curve + let swu_pt_map = bjj_affine.swu_map(5, 27); + + assert( + swu_pt_map.eq( + bjj_affine.map_from_swcurve( + SWGaffine::new( + 2162719247815120009132293839392097468339661471129795280520343931405114293888, + 5341392251743377373758788728206293080122949448990104760111875914082289313973 + ) + ) + ) + ); + } +} diff --git a/tooling/nargo_cli/tests/compile_success_empty/generators/Nargo.toml b/test_programs/compile_success_empty/generators/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/compile_success_empty/generators/Nargo.toml rename to test_programs/compile_success_empty/generators/Nargo.toml diff --git a/tooling/nargo_cli/tests/compile_success_empty/generators/src/main.nr b/test_programs/compile_success_empty/generators/src/main.nr similarity index 100% rename from tooling/nargo_cli/tests/compile_success_empty/generators/src/main.nr rename to test_programs/compile_success_empty/generators/src/main.nr diff --git a/tooling/nargo_cli/tests/compile_success_empty/higher_order_fn_selector/Nargo.toml b/test_programs/compile_success_empty/higher_order_fn_selector/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/compile_success_empty/higher_order_fn_selector/Nargo.toml rename to test_programs/compile_success_empty/higher_order_fn_selector/Nargo.toml diff --git a/tooling/nargo_cli/tests/compile_success_empty/higher_order_fn_selector/src/main.nr b/test_programs/compile_success_empty/higher_order_fn_selector/src/main.nr similarity index 100% rename from tooling/nargo_cli/tests/compile_success_empty/higher_order_fn_selector/src/main.nr rename to test_programs/compile_success_empty/higher_order_fn_selector/src/main.nr diff --git a/tooling/nargo_cli/tests/compile_success_empty/impl_with_where_clause/Nargo.toml b/test_programs/compile_success_empty/impl_with_where_clause/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/compile_success_empty/impl_with_where_clause/Nargo.toml rename to test_programs/compile_success_empty/impl_with_where_clause/Nargo.toml diff --git a/test_programs/compile_success_empty/impl_with_where_clause/src/main.nr b/test_programs/compile_success_empty/impl_with_where_clause/src/main.nr new file mode 100644 index 00000000000..780512f04dc --- /dev/null +++ b/test_programs/compile_success_empty/impl_with_where_clause/src/main.nr @@ -0,0 +1,27 @@ +fn main() { + let array: [Field; 3] = [1, 2, 3]; + assert(array.my_eq(array)); + // Ensure this still works if we have to infer the type of the integer literals + let array = [1, 2, 3]; + assert(array.my_eq(array)); +} + +trait MyEq { + fn my_eq(self, other: Self) -> bool; +} + +impl<T> MyEq for [T; 3] where T: MyEq { + fn my_eq(self, other: Self) -> bool { + let mut ret = true; + for i in 0 ..
self.len() { + ret &= self[i].my_eq(other[i]); + } + ret + } +} + +impl MyEq for Field { + fn my_eq(self, other: Field) -> bool { + self == other + } +} diff --git a/tooling/nargo_cli/tests/compile_success_empty/inner_outer_cl/Nargo.toml b/test_programs/compile_success_empty/inner_outer_cl/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/compile_success_empty/inner_outer_cl/Nargo.toml rename to test_programs/compile_success_empty/inner_outer_cl/Nargo.toml diff --git a/tooling/nargo_cli/tests/compile_success_empty/inner_outer_cl/src/main.nr b/test_programs/compile_success_empty/inner_outer_cl/src/main.nr similarity index 100% rename from tooling/nargo_cli/tests/compile_success_empty/inner_outer_cl/src/main.nr rename to test_programs/compile_success_empty/inner_outer_cl/src/main.nr diff --git a/tooling/nargo_cli/tests/compile_success_empty/instruction_deduplication/Nargo.toml b/test_programs/compile_success_empty/instruction_deduplication/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/compile_success_empty/instruction_deduplication/Nargo.toml rename to test_programs/compile_success_empty/instruction_deduplication/Nargo.toml diff --git a/tooling/nargo_cli/tests/compile_success_empty/numeric_generics/Prover.toml b/test_programs/compile_success_empty/instruction_deduplication/Prover.toml similarity index 100% rename from tooling/nargo_cli/tests/compile_success_empty/numeric_generics/Prover.toml rename to test_programs/compile_success_empty/instruction_deduplication/Prover.toml diff --git a/tooling/nargo_cli/tests/compile_success_empty/instruction_deduplication/src/main.nr b/test_programs/compile_success_empty/instruction_deduplication/src/main.nr similarity index 100% rename from tooling/nargo_cli/tests/compile_success_empty/instruction_deduplication/src/main.nr rename to test_programs/compile_success_empty/instruction_deduplication/src/main.nr diff --git a/tooling/nargo_cli/tests/compile_success_empty/intrinsic_die/Nargo.toml b/test_programs/compile_success_empty/intrinsic_die/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/compile_success_empty/intrinsic_die/Nargo.toml rename to test_programs/compile_success_empty/intrinsic_die/Nargo.toml diff --git a/tooling/nargo_cli/tests/compile_success_empty/intrinsic_die/src/main.nr b/test_programs/compile_success_empty/intrinsic_die/src/main.nr similarity index 100% rename from tooling/nargo_cli/tests/compile_success_empty/intrinsic_die/src/main.nr rename to test_programs/compile_success_empty/intrinsic_die/src/main.nr diff --git a/tooling/nargo_cli/tests/compile_success_empty/let_stmt/Nargo.toml b/test_programs/compile_success_empty/let_stmt/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/compile_success_empty/let_stmt/Nargo.toml rename to test_programs/compile_success_empty/let_stmt/Nargo.toml diff --git a/tooling/nargo_cli/tests/compile_success_empty/references_aliasing/Prover.toml b/test_programs/compile_success_empty/let_stmt/Prover.toml similarity index 100% rename from tooling/nargo_cli/tests/compile_success_empty/references_aliasing/Prover.toml rename to test_programs/compile_success_empty/let_stmt/Prover.toml diff --git a/tooling/nargo_cli/tests/compile_success_empty/let_stmt/src/main.nr b/test_programs/compile_success_empty/let_stmt/src/main.nr similarity index 100% rename from tooling/nargo_cli/tests/compile_success_empty/let_stmt/src/main.nr rename to test_programs/compile_success_empty/let_stmt/src/main.nr diff --git 
a/tooling/nargo_cli/tests/compile_success_empty/main_return/Nargo.toml b/test_programs/compile_success_empty/main_return/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/compile_success_empty/main_return/Nargo.toml rename to test_programs/compile_success_empty/main_return/Nargo.toml diff --git a/tooling/nargo_cli/tests/compile_success_empty/main_return/Prover.toml b/test_programs/compile_success_empty/main_return/Prover.toml similarity index 100% rename from tooling/nargo_cli/tests/compile_success_empty/main_return/Prover.toml rename to test_programs/compile_success_empty/main_return/Prover.toml diff --git a/tooling/nargo_cli/tests/compile_success_empty/main_return/src/main.nr b/test_programs/compile_success_empty/main_return/src/main.nr similarity index 100% rename from tooling/nargo_cli/tests/compile_success_empty/main_return/src/main.nr rename to test_programs/compile_success_empty/main_return/src/main.nr diff --git a/test_programs/compile_success_empty/method_call_regression/Nargo.toml b/test_programs/compile_success_empty/method_call_regression/Nargo.toml new file mode 100644 index 00000000000..92c9b942008 --- /dev/null +++ b/test_programs/compile_success_empty/method_call_regression/Nargo.toml @@ -0,0 +1,7 @@ +[package] +name = "short" +type = "bin" +authors = [""] +compiler_version = ">=0.19.4" + +[dependencies] \ No newline at end of file diff --git a/test_programs/compile_success_empty/method_call_regression/src/main.nr b/test_programs/compile_success_empty/method_call_regression/src/main.nr new file mode 100644 index 00000000000..8bb7ebcac45 --- /dev/null +++ b/test_programs/compile_success_empty/method_call_regression/src/main.nr @@ -0,0 +1,25 @@ +use dep::std; + +fn main() { + // s: Struct + let s = Struct { b: () }; + // Regression for #3089 + s.foo(); +} + +struct Struct<B> { b: B } + +// Before the fix, this candidate is searched first, binding ? to `u8` permanently. +impl Struct<u8> { + fn foo(self) {} +} + +// Then this candidate would be searched next but would not be a valid +// candidate since `Struct<u8>` != `Struct<u32>`. +// +// With the fix, the type of `s` correctly no longer changes until a +// method is actually selected. So this candidate is now valid since +// `Struct<?>` unifies with `Struct<u32>` with `? = u32`.
+impl Struct<u32> { + fn foo(self) {} +} diff --git a/tooling/nargo_cli/tests/compile_success_empty/numeric_generics/Nargo.toml b/test_programs/compile_success_empty/numeric_generics/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/compile_success_empty/numeric_generics/Nargo.toml rename to test_programs/compile_success_empty/numeric_generics/Nargo.toml diff --git a/tooling/nargo_cli/tests/compile_success_empty/trait_allowed_item_name_matches/Prover.toml b/test_programs/compile_success_empty/numeric_generics/Prover.toml similarity index 100% rename from tooling/nargo_cli/tests/compile_success_empty/trait_allowed_item_name_matches/Prover.toml rename to test_programs/compile_success_empty/numeric_generics/Prover.toml diff --git a/tooling/nargo_cli/tests/compile_success_empty/numeric_generics/src/main.nr b/test_programs/compile_success_empty/numeric_generics/src/main.nr similarity index 100% rename from tooling/nargo_cli/tests/compile_success_empty/numeric_generics/src/main.nr rename to test_programs/compile_success_empty/numeric_generics/src/main.nr diff --git a/tooling/nargo_cli/tests/compile_success_empty/option/Nargo.toml b/test_programs/compile_success_empty/option/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/compile_success_empty/option/Nargo.toml rename to test_programs/compile_success_empty/option/Nargo.toml diff --git a/tooling/nargo_cli/tests/compile_success_empty/option/src/main.nr b/test_programs/compile_success_empty/option/src/main.nr similarity index 100% rename from tooling/nargo_cli/tests/compile_success_empty/option/src/main.nr rename to test_programs/compile_success_empty/option/src/main.nr diff --git a/test_programs/compile_success_empty/raw_string/Nargo.toml b/test_programs/compile_success_empty/raw_string/Nargo.toml new file mode 100644 index 00000000000..81147e65f34 --- /dev/null +++ b/test_programs/compile_success_empty/raw_string/Nargo.toml @@ -0,0 +1,6 @@ +[package] +name = "raw_string" +type = "bin" +authors = [""] + +[dependencies] diff --git a/test_programs/compile_success_empty/raw_string/src/main.nr b/test_programs/compile_success_empty/raw_string/src/main.nr new file mode 100644 index 00000000000..ad8dfe82ae5 --- /dev/null +++ b/test_programs/compile_success_empty/raw_string/src/main.nr @@ -0,0 +1,13 @@ +global D = r#####"Hello "world""#####; + +fn main() { + let a = "Hello \"world\""; + let b = r#"Hello "world""#; + let c = r##"Hello "world""##; + assert(a == b); + assert(b == c); + assert(c == D); + let x = r#"Hello World"#; + let y = r"Hello World"; + assert(x == y); +} diff --git a/test_programs/compile_success_empty/reexports/Nargo.toml b/test_programs/compile_success_empty/reexports/Nargo.toml new file mode 100644 index 00000000000..4a87f28fd89 --- /dev/null +++ b/test_programs/compile_success_empty/reexports/Nargo.toml @@ -0,0 +1,7 @@ +[package] +name = "reexports" +type = "bin" +authors = [""] + +[dependencies] +reexporting_lib = { path = "../../test_libraries/reexporting_lib" } diff --git a/test_programs/compile_success_empty/reexports/src/main.nr b/test_programs/compile_success_empty/reexports/src/main.nr new file mode 100644 index 00000000000..bb94b21b221 --- /dev/null +++ b/test_programs/compile_success_empty/reexports/src/main.nr @@ -0,0 +1,8 @@ +use dep::reexporting_lib::{FooStruct, MyStruct, lib}; + +fn main() { + let x: FooStruct = MyStruct { + inner: 0 + }; + assert(lib::is_struct_zero(x)); +} diff --git a/tooling/nargo_cli/tests/compile_success_empty/references_aliasing/Nargo.toml
b/test_programs/compile_success_empty/references_aliasing/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/compile_success_empty/references_aliasing/Nargo.toml rename to test_programs/compile_success_empty/references_aliasing/Nargo.toml diff --git a/tooling/nargo_cli/tests/compile_success_empty/trait_associated_member_names_clashes/Prover.toml b/test_programs/compile_success_empty/references_aliasing/Prover.toml similarity index 100% rename from tooling/nargo_cli/tests/compile_success_empty/trait_associated_member_names_clashes/Prover.toml rename to test_programs/compile_success_empty/references_aliasing/Prover.toml diff --git a/tooling/nargo_cli/tests/compile_success_empty/references_aliasing/src/main.nr b/test_programs/compile_success_empty/references_aliasing/src/main.nr similarity index 100% rename from tooling/nargo_cli/tests/compile_success_empty/references_aliasing/src/main.nr rename to test_programs/compile_success_empty/references_aliasing/src/main.nr diff --git a/tooling/nargo_cli/tests/compile_success_empty/regression_2099/Nargo.toml b/test_programs/compile_success_empty/regression_2099/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/compile_success_empty/regression_2099/Nargo.toml rename to test_programs/compile_success_empty/regression_2099/Nargo.toml diff --git a/test_programs/compile_success_empty/regression_2099/src/main.nr b/test_programs/compile_success_empty/regression_2099/src/main.nr new file mode 100644 index 00000000000..f92373ce63a --- /dev/null +++ b/test_programs/compile_success_empty/regression_2099/src/main.nr @@ -0,0 +1,42 @@ +use dep::std::ec::tecurve::affine::Curve as AffineCurve; +use dep::std::ec::tecurve::affine::Point as Gaffine; +use dep::std::ec::tecurve::curvegroup::Curve; +use dep::std::ec::tecurve::curvegroup::Point as G; + +use dep::std::ec::swcurve::affine::Point as SWGaffine; +use dep::std::ec::swcurve::curvegroup::Point as SWG; + +use dep::std::ec::montcurve::affine::Point as MGaffine; +use dep::std::ec::montcurve::curvegroup::Point as MG; + +fn main() { + // Define Baby Jubjub (ERC-2494) parameters in affine representation + let bjj_affine = AffineCurve::new( + 168700, + 168696, + Gaffine::new( + 995203441582195749578291179787384436505546430278305826713579947235728471134, + 5472060717959818805561601436314318772137091100104008585924551046643952123905 + ) + ); + // Test addition + let p1_affine = Gaffine::new( + 17777552123799933955779906779655732241715742912184938656739573121738514868268, + 2626589144620713026669568689430873010625803728049924121243784502389097019475 + ); + let p2_affine = Gaffine::new( + 16540640123574156134436876038791482806971768689494387082833631921987005038935, + 20819045374670962167435360035096875258406992893633759881276124905556507972311 + ); + let _p3_affine = bjj_affine.add(p1_affine, p2_affine); + // Test SWCurve equivalents of the above + // First the affine representation + let bjj_swcurve_affine = bjj_affine.into_swcurve(); + + let p1_swcurve_affine = bjj_affine.map_into_swcurve(p1_affine); + let p2_swcurve_affine = bjj_affine.map_into_swcurve(p2_affine); + + let _p3_swcurve_affine_from_add = bjj_swcurve_affine.add(p1_swcurve_affine, p2_swcurve_affine); + // Check that these points are on the curve + assert(bjj_swcurve_affine.contains(p1_swcurve_affine)); +} diff --git a/test_programs/compile_success_empty/regression_3635/Nargo.toml b/test_programs/compile_success_empty/regression_3635/Nargo.toml new file mode 100644 index 00000000000..4f34121439e --- /dev/null +++ 
b/test_programs/compile_success_empty/regression_3635/Nargo.toml @@ -0,0 +1,7 @@ +[package] +name = "regression_3635" +version = "0.1.0" +type = "bin" +authors = [""] + +[dependencies] diff --git a/test_programs/compile_success_empty/regression_3635/src/main.nr b/test_programs/compile_success_empty/regression_3635/src/main.nr new file mode 100644 index 00000000000..97a04f9d93f --- /dev/null +++ b/test_programs/compile_success_empty/regression_3635/src/main.nr @@ -0,0 +1,8 @@ +use dep::std; + +fn main() { + let x: u8 = 0x61; + let y: u8 = "a".as_bytes()[0]; + assert_eq(x, y); + assert_eq(x >> 1, y >> 1); +} diff --git a/test_programs/compile_success_empty/regression_3964/Nargo.toml b/test_programs/compile_success_empty/regression_3964/Nargo.toml new file mode 100644 index 00000000000..a3fd040bcc2 --- /dev/null +++ b/test_programs/compile_success_empty/regression_3964/Nargo.toml @@ -0,0 +1,7 @@ +[package] +name = "regression_3964" +type = "bin" +authors = [""] +compiler_version = ">=0.20.0" + +[dependencies] diff --git a/test_programs/compile_success_empty/regression_3964/src/main.nr b/test_programs/compile_success_empty/regression_3964/src/main.nr new file mode 100644 index 00000000000..0600a4281a0 --- /dev/null +++ b/test_programs/compile_success_empty/regression_3964/src/main.nr @@ -0,0 +1,5 @@ +fn main() { + let one: u8 = 1; + let p = ((one, 2), (3, 4)); + assert(p == p); +} diff --git a/tooling/nargo_cli/tests/compile_success_empty/ret_fn_ret_cl/Nargo.toml b/test_programs/compile_success_empty/ret_fn_ret_cl/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/compile_success_empty/ret_fn_ret_cl/Nargo.toml rename to test_programs/compile_success_empty/ret_fn_ret_cl/Nargo.toml diff --git a/tooling/nargo_cli/tests/compile_success_empty/ret_fn_ret_cl/Prover.toml b/test_programs/compile_success_empty/ret_fn_ret_cl/Prover.toml similarity index 100% rename from tooling/nargo_cli/tests/compile_success_empty/ret_fn_ret_cl/Prover.toml rename to test_programs/compile_success_empty/ret_fn_ret_cl/Prover.toml diff --git a/tooling/nargo_cli/tests/compile_success_empty/ret_fn_ret_cl/src/main.nr b/test_programs/compile_success_empty/ret_fn_ret_cl/src/main.nr similarity index 100% rename from tooling/nargo_cli/tests/compile_success_empty/ret_fn_ret_cl/src/main.nr rename to test_programs/compile_success_empty/ret_fn_ret_cl/src/main.nr diff --git a/tooling/nargo_cli/tests/compile_success_empty/simple_array_param/Nargo.toml b/test_programs/compile_success_empty/simple_array_param/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/compile_success_empty/simple_array_param/Nargo.toml rename to test_programs/compile_success_empty/simple_array_param/Nargo.toml diff --git a/tooling/nargo_cli/tests/compile_success_empty/simple_array_param/Prover.toml b/test_programs/compile_success_empty/simple_array_param/Prover.toml similarity index 100% rename from tooling/nargo_cli/tests/compile_success_empty/simple_array_param/Prover.toml rename to test_programs/compile_success_empty/simple_array_param/Prover.toml diff --git a/tooling/nargo_cli/tests/compile_success_empty/simple_array_param/src/main.nr b/test_programs/compile_success_empty/simple_array_param/src/main.nr similarity index 100% rename from tooling/nargo_cli/tests/compile_success_empty/simple_array_param/src/main.nr rename to test_programs/compile_success_empty/simple_array_param/src/main.nr diff --git a/tooling/nargo_cli/tests/compile_success_empty/simple_program_no_body/Nargo.toml 
b/test_programs/compile_success_empty/simple_program_no_body/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/compile_success_empty/simple_program_no_body/Nargo.toml rename to test_programs/compile_success_empty/simple_program_no_body/Nargo.toml diff --git a/tooling/nargo_cli/tests/compile_success_empty/simple_program_no_body/Prover.toml b/test_programs/compile_success_empty/simple_program_no_body/Prover.toml similarity index 100% rename from tooling/nargo_cli/tests/compile_success_empty/simple_program_no_body/Prover.toml rename to test_programs/compile_success_empty/simple_program_no_body/Prover.toml diff --git a/tooling/nargo_cli/tests/compile_success_empty/simple_program_no_body/src/main.nr b/test_programs/compile_success_empty/simple_program_no_body/src/main.nr similarity index 100% rename from tooling/nargo_cli/tests/compile_success_empty/simple_program_no_body/src/main.nr rename to test_programs/compile_success_empty/simple_program_no_body/src/main.nr diff --git a/tooling/nargo_cli/tests/compile_success_empty/simple_range/Nargo.toml b/test_programs/compile_success_empty/simple_range/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/compile_success_empty/simple_range/Nargo.toml rename to test_programs/compile_success_empty/simple_range/Nargo.toml diff --git a/tooling/nargo_cli/tests/compile_success_empty/simple_range/Prover.toml b/test_programs/compile_success_empty/simple_range/Prover.toml similarity index 100% rename from tooling/nargo_cli/tests/compile_success_empty/simple_range/Prover.toml rename to test_programs/compile_success_empty/simple_range/Prover.toml diff --git a/tooling/nargo_cli/tests/compile_success_empty/simple_range/src/main.nr b/test_programs/compile_success_empty/simple_range/src/main.nr similarity index 100% rename from tooling/nargo_cli/tests/compile_success_empty/simple_range/src/main.nr rename to test_programs/compile_success_empty/simple_range/src/main.nr diff --git a/tooling/nargo_cli/tests/compile_success_empty/specialization/Nargo.toml b/test_programs/compile_success_empty/specialization/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/compile_success_empty/specialization/Nargo.toml rename to test_programs/compile_success_empty/specialization/Nargo.toml diff --git a/tooling/nargo_cli/tests/compile_success_empty/specialization/src/main.nr b/test_programs/compile_success_empty/specialization/src/main.nr similarity index 100% rename from tooling/nargo_cli/tests/compile_success_empty/specialization/src/main.nr rename to test_programs/compile_success_empty/specialization/src/main.nr diff --git a/tooling/nargo_cli/tests/compile_success_empty/str_as_bytes/Nargo.toml b/test_programs/compile_success_empty/str_as_bytes/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/compile_success_empty/str_as_bytes/Nargo.toml rename to test_programs/compile_success_empty/str_as_bytes/Nargo.toml diff --git a/tooling/nargo_cli/tests/compile_success_empty/str_as_bytes/src/main.nr b/test_programs/compile_success_empty/str_as_bytes/src/main.nr similarity index 100% rename from tooling/nargo_cli/tests/compile_success_empty/str_as_bytes/src/main.nr rename to test_programs/compile_success_empty/str_as_bytes/src/main.nr diff --git a/tooling/nargo_cli/tests/compile_success_empty/to_bits/Nargo.toml b/test_programs/compile_success_empty/to_bits/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/compile_success_empty/to_bits/Nargo.toml rename to 
test_programs/compile_success_empty/to_bits/Nargo.toml diff --git a/tooling/nargo_cli/tests/compile_success_empty/to_bits/src/main.nr b/test_programs/compile_success_empty/to_bits/src/main.nr similarity index 100% rename from tooling/nargo_cli/tests/compile_success_empty/to_bits/src/main.nr rename to test_programs/compile_success_empty/to_bits/src/main.nr diff --git a/tooling/nargo_cli/tests/compile_success_empty/trait_allowed_item_name_matches/Nargo.toml b/test_programs/compile_success_empty/trait_allowed_item_name_matches/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/compile_success_empty/trait_allowed_item_name_matches/Nargo.toml rename to test_programs/compile_success_empty/trait_allowed_item_name_matches/Nargo.toml diff --git a/tooling/nargo_cli/tests/compile_success_empty/trait_function_calls/Prover.toml b/test_programs/compile_success_empty/trait_allowed_item_name_matches/Prover.toml similarity index 100% rename from tooling/nargo_cli/tests/compile_success_empty/trait_function_calls/Prover.toml rename to test_programs/compile_success_empty/trait_allowed_item_name_matches/Prover.toml diff --git a/tooling/nargo_cli/tests/compile_success_empty/trait_allowed_item_name_matches/src/main.nr b/test_programs/compile_success_empty/trait_allowed_item_name_matches/src/main.nr similarity index 100% rename from tooling/nargo_cli/tests/compile_success_empty/trait_allowed_item_name_matches/src/main.nr rename to test_programs/compile_success_empty/trait_allowed_item_name_matches/src/main.nr diff --git a/tooling/nargo_cli/tests/compile_success_empty/trait_associated_member_names_clashes/Nargo.toml b/test_programs/compile_success_empty/trait_associated_member_names_clashes/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/compile_success_empty/trait_associated_member_names_clashes/Nargo.toml rename to test_programs/compile_success_empty/trait_associated_member_names_clashes/Nargo.toml diff --git a/tooling/nargo_cli/tests/compile_success_empty/trait_multi_module_test/Prover.toml b/test_programs/compile_success_empty/trait_associated_member_names_clashes/Prover.toml similarity index 100% rename from tooling/nargo_cli/tests/compile_success_empty/trait_multi_module_test/Prover.toml rename to test_programs/compile_success_empty/trait_associated_member_names_clashes/Prover.toml diff --git a/tooling/nargo_cli/tests/compile_success_empty/trait_associated_member_names_clashes/src/main.nr b/test_programs/compile_success_empty/trait_associated_member_names_clashes/src/main.nr similarity index 100% rename from tooling/nargo_cli/tests/compile_success_empty/trait_associated_member_names_clashes/src/main.nr rename to test_programs/compile_success_empty/trait_associated_member_names_clashes/src/main.nr diff --git a/tooling/nargo_cli/tests/compile_success_empty/trait_default_implementation/Nargo.toml b/test_programs/compile_success_empty/trait_default_implementation/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/compile_success_empty/trait_default_implementation/Nargo.toml rename to test_programs/compile_success_empty/trait_default_implementation/Nargo.toml diff --git a/tooling/nargo_cli/tests/compile_success_empty/trait_default_implementation/Prover.toml b/test_programs/compile_success_empty/trait_default_implementation/Prover.toml similarity index 100% rename from tooling/nargo_cli/tests/compile_success_empty/trait_default_implementation/Prover.toml rename to test_programs/compile_success_empty/trait_default_implementation/Prover.toml diff --git 
a/test_programs/compile_success_empty/trait_default_implementation/src/main.nr b/test_programs/compile_success_empty/trait_default_implementation/src/main.nr new file mode 100644 index 00000000000..2f5bff8c40c --- /dev/null +++ b/test_programs/compile_success_empty/trait_default_implementation/src/main.nr @@ -0,0 +1,25 @@ +use dep::std; + +trait MyDefault { + fn my_default(x: Field, y: Field) -> Self; + + fn method2(x: Field) -> Field { + x + } +} + +struct Foo { + bar: Field, + array: [Field; 2], +} + +impl MyDefault for Foo { + fn my_default(x: Field,y: Field) -> Self { + Self { bar: x, array: [x,y] } + } +} + +fn main(x: Field) { + let first = Foo::method2(x); + assert(first == x); +} diff --git a/tooling/nargo_cli/tests/compile_success_empty/trait_function_calls/Nargo.toml b/test_programs/compile_success_empty/trait_function_calls/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/compile_success_empty/trait_function_calls/Nargo.toml rename to test_programs/compile_success_empty/trait_function_calls/Nargo.toml diff --git a/tooling/nargo_cli/tests/noir_test_failure/should_fail_mismatch/Prover.toml b/test_programs/compile_success_empty/trait_function_calls/Prover.toml similarity index 100% rename from tooling/nargo_cli/tests/noir_test_failure/should_fail_mismatch/Prover.toml rename to test_programs/compile_success_empty/trait_function_calls/Prover.toml diff --git a/tooling/nargo_cli/tests/compile_success_empty/trait_function_calls/src/main.nr b/test_programs/compile_success_empty/trait_function_calls/src/main.nr similarity index 100% rename from tooling/nargo_cli/tests/compile_success_empty/trait_function_calls/src/main.nr rename to test_programs/compile_success_empty/trait_function_calls/src/main.nr diff --git a/tooling/nargo_cli/tests/compile_success_empty/trait_generics/Nargo.toml b/test_programs/compile_success_empty/trait_generics/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/compile_success_empty/trait_generics/Nargo.toml rename to test_programs/compile_success_empty/trait_generics/Nargo.toml diff --git a/tooling/nargo_cli/tests/compile_success_empty/trait_generics/src/main.nr b/test_programs/compile_success_empty/trait_generics/src/main.nr similarity index 100% rename from tooling/nargo_cli/tests/compile_success_empty/trait_generics/src/main.nr rename to test_programs/compile_success_empty/trait_generics/src/main.nr diff --git a/tooling/nargo_cli/tests/compile_success_empty/trait_multi_module_test/Nargo.toml b/test_programs/compile_success_empty/trait_multi_module_test/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/compile_success_empty/trait_multi_module_test/Nargo.toml rename to test_programs/compile_success_empty/trait_multi_module_test/Nargo.toml diff --git a/tooling/nargo_cli/tests/noir_test_success/should_fail_with_matches/Prover.toml b/test_programs/compile_success_empty/trait_multi_module_test/Prover.toml similarity index 100% rename from tooling/nargo_cli/tests/noir_test_success/should_fail_with_matches/Prover.toml rename to test_programs/compile_success_empty/trait_multi_module_test/Prover.toml diff --git a/tooling/nargo_cli/tests/compile_success_empty/trait_multi_module_test/src/main.nr b/test_programs/compile_success_empty/trait_multi_module_test/src/main.nr similarity index 100% rename from tooling/nargo_cli/tests/compile_success_empty/trait_multi_module_test/src/main.nr rename to test_programs/compile_success_empty/trait_multi_module_test/src/main.nr diff --git 
a/tooling/nargo_cli/tests/compile_success_empty/trait_multi_module_test/src/module1.nr b/test_programs/compile_success_empty/trait_multi_module_test/src/module1.nr similarity index 100% rename from tooling/nargo_cli/tests/compile_success_empty/trait_multi_module_test/src/module1.nr rename to test_programs/compile_success_empty/trait_multi_module_test/src/module1.nr diff --git a/tooling/nargo_cli/tests/compile_success_empty/trait_multi_module_test/src/module2.nr b/test_programs/compile_success_empty/trait_multi_module_test/src/module2.nr similarity index 100% rename from tooling/nargo_cli/tests/compile_success_empty/trait_multi_module_test/src/module2.nr rename to test_programs/compile_success_empty/trait_multi_module_test/src/module2.nr diff --git a/tooling/nargo_cli/tests/compile_success_empty/trait_multi_module_test/src/module3.nr b/test_programs/compile_success_empty/trait_multi_module_test/src/module3.nr similarity index 100% rename from tooling/nargo_cli/tests/compile_success_empty/trait_multi_module_test/src/module3.nr rename to test_programs/compile_success_empty/trait_multi_module_test/src/module3.nr diff --git a/tooling/nargo_cli/tests/compile_success_empty/trait_multi_module_test/src/module4.nr b/test_programs/compile_success_empty/trait_multi_module_test/src/module4.nr similarity index 100% rename from tooling/nargo_cli/tests/compile_success_empty/trait_multi_module_test/src/module4.nr rename to test_programs/compile_success_empty/trait_multi_module_test/src/module4.nr diff --git a/tooling/nargo_cli/tests/compile_success_empty/trait_multi_module_test/src/module5.nr b/test_programs/compile_success_empty/trait_multi_module_test/src/module5.nr similarity index 100% rename from tooling/nargo_cli/tests/compile_success_empty/trait_multi_module_test/src/module5.nr rename to test_programs/compile_success_empty/trait_multi_module_test/src/module5.nr diff --git a/tooling/nargo_cli/tests/compile_success_empty/trait_multi_module_test/src/module6.nr b/test_programs/compile_success_empty/trait_multi_module_test/src/module6.nr similarity index 100% rename from tooling/nargo_cli/tests/compile_success_empty/trait_multi_module_test/src/module6.nr rename to test_programs/compile_success_empty/trait_multi_module_test/src/module6.nr diff --git a/tooling/nargo_cli/tests/compile_success_empty/trait_override_implementation/Nargo.toml b/test_programs/compile_success_empty/trait_override_implementation/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/compile_success_empty/trait_override_implementation/Nargo.toml rename to test_programs/compile_success_empty/trait_override_implementation/Nargo.toml diff --git a/tooling/nargo_cli/tests/compile_success_empty/trait_override_implementation/Prover.toml b/test_programs/compile_success_empty/trait_override_implementation/Prover.toml similarity index 100% rename from tooling/nargo_cli/tests/compile_success_empty/trait_override_implementation/Prover.toml rename to test_programs/compile_success_empty/trait_override_implementation/Prover.toml diff --git a/test_programs/compile_success_empty/trait_override_implementation/src/main.nr b/test_programs/compile_success_empty/trait_override_implementation/src/main.nr new file mode 100644 index 00000000000..85528291870 --- /dev/null +++ b/test_programs/compile_success_empty/trait_override_implementation/src/main.nr @@ -0,0 +1,71 @@ +use dep::std; + +trait MyDefault { + fn my_default(x: Field, y: Field) -> Self; + + fn method2(x: Field) -> Field { + x + } +} + +struct Foo { + bar: Field, + array: [Field; 
2], +} + +impl MyDefault for Foo { + fn my_default(x: Field,y: Field) -> Self { + Self { bar: x, array: [x,y] } + } + + fn method2(x: Field) -> Field { + x * 3 + } +} + +trait F { + fn f1(self) -> Field; + fn f2(_self: Self) -> Field { 2 } + fn f3(_self: Self) -> Field { 3 } + fn f4(_self: Self) -> Field { 4 } + fn f5(_self: Self) -> Field { 5 } +} + +struct Bar {} + +impl F for Bar { + fn f5(_self: Self) -> Field { 50 } + fn f1(_self: Self) -> Field { 10 } + fn f3(_self: Self) -> Field { 30 } +} +// Impls on mutable references are temporarily disabled +// impl F for &mut Bar { +// fn f1(self) -> Field { 101 } +// fn f5(self) -> Field { 505 } +// } +fn main(x: Field) { + let first = Foo::method2(x); + assert(first == 3 * x); + + let bar = Bar {}; + + assert(bar.f1() == 10, "1"); + assert(bar.f2() == 2, "2"); + assert(bar.f3() == 30, "3"); + assert(bar.f4() == 4, "4"); + assert(bar.f5() == 50, "5"); + + let mut bar_mut = Bar {}; + // Impls on mutable references are temporarily disabled + // assert_eq((&mut bar_mut).f1(), 101); + // assert((&mut bar_mut).f2() == 2, "7"); + // assert((&mut bar_mut).f3() == 3, "8"); + // assert((&mut bar_mut).f4() == 4, "9"); + // assert((&mut bar_mut).f5() == 505, "10"); + assert(bar_mut.f1() == 10, "10"); + assert(bar_mut.f2() == 2, "12"); + assert(bar_mut.f3() == 30, "13"); + assert(bar_mut.f4() == 4, "14"); + assert(bar_mut.f5() == 50, "15"); +} + diff --git a/tooling/nargo_cli/tests/compile_success_empty/trait_static_methods/Nargo.toml b/test_programs/compile_success_empty/trait_static_methods/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/compile_success_empty/trait_static_methods/Nargo.toml rename to test_programs/compile_success_empty/trait_static_methods/Nargo.toml diff --git a/test_programs/compile_success_empty/trait_static_methods/src/main.nr b/test_programs/compile_success_empty/trait_static_methods/src/main.nr new file mode 100644 index 00000000000..838e47ee82e --- /dev/null +++ b/test_programs/compile_success_empty/trait_static_methods/src/main.nr @@ -0,0 +1,64 @@ +trait ATrait { + fn asd() -> Self; + + fn static_method() -> Field { + Self::static_method_2() + } + + fn static_method_2() -> Field { + 100 + } +} + +struct Foo { + x: Field +} +impl ATrait for Foo { + fn asd() -> Self { + // This should pass as Self should be bound to Foo while typechecking this + Foo{x: 100} + } +} + +struct Bar { + x: Field +} +impl ATrait for Bar { + // The trait method is declared as returning `Self` + // but explicitly specifying the type in the impl should work + fn asd() -> Bar { + Bar{x: 100} + } + + fn static_method_2() -> Field { + 200 + } +} + +fn main() { + assert(Foo::static_method() == 100); + assert(Bar::static_method() == 200); + + // Regression for #3773 + let zero: Field = MyDefault::my_default(); + assert(zero == 0); +} + +trait MyDefault { + fn my_default() -> Self; +} + +// This impl is first, so if the type directed search picks the first impl +// instead of the correct Field impl below, we'll get a panic in SSA from +// a type mismatch. 
+impl MyDefault for (Field, Field) { + fn my_default() -> (Field, Field) { + (0, 0) + } +} + +impl MyDefault for Field { + fn my_default() -> Field { + 0 + } +} diff --git a/tooling/nargo_cli/tests/compile_success_empty/trait_where_clause/Nargo.toml b/test_programs/compile_success_empty/trait_where_clause/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/compile_success_empty/trait_where_clause/Nargo.toml rename to test_programs/compile_success_empty/trait_where_clause/Nargo.toml diff --git a/tooling/nargo_cli/tests/compile_success_empty/trait_where_clause/src/main.nr b/test_programs/compile_success_empty/trait_where_clause/src/main.nr similarity index 100% rename from tooling/nargo_cli/tests/compile_success_empty/trait_where_clause/src/main.nr rename to test_programs/compile_success_empty/trait_where_clause/src/main.nr diff --git a/tooling/nargo_cli/tests/compile_success_empty/trait_where_clause/src/the_trait.nr b/test_programs/compile_success_empty/trait_where_clause/src/the_trait.nr similarity index 100% rename from tooling/nargo_cli/tests/compile_success_empty/trait_where_clause/src/the_trait.nr rename to test_programs/compile_success_empty/trait_where_clause/src/the_trait.nr diff --git a/tooling/nargo_cli/tests/compile_success_empty/traits/Nargo.toml b/test_programs/compile_success_empty/traits/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/compile_success_empty/traits/Nargo.toml rename to test_programs/compile_success_empty/traits/Nargo.toml diff --git a/tooling/nargo_cli/tests/compile_success_empty/traits/Prover.toml b/test_programs/compile_success_empty/traits/Prover.toml similarity index 100% rename from tooling/nargo_cli/tests/compile_success_empty/traits/Prover.toml rename to test_programs/compile_success_empty/traits/Prover.toml diff --git a/test_programs/compile_success_empty/traits/src/main.nr b/test_programs/compile_success_empty/traits/src/main.nr new file mode 100644 index 00000000000..ed804559fed --- /dev/null +++ b/test_programs/compile_success_empty/traits/src/main.nr @@ -0,0 +1,21 @@ +use dep::std; + +trait MyDefault { + fn my_default(x: Field, y: Field) -> Self; +} + +struct Foo { + bar: Field, + array: [Field; 2], +} + +impl MyDefault for Foo { + fn my_default(x: Field,y: Field) -> Self { + Self { bar: x, array: [x,y] } + } +} + +fn main(x: Field, y: Field) { + let first = Foo::my_default(x, y); + assert(first.bar == x); +} diff --git a/tooling/nargo_cli/tests/compile_success_empty/unary_operators/Nargo.toml b/test_programs/compile_success_empty/unary_operators/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/compile_success_empty/unary_operators/Nargo.toml rename to test_programs/compile_success_empty/unary_operators/Nargo.toml diff --git a/tooling/nargo_cli/tests/compile_success_empty/unary_operators/src/main.nr b/test_programs/compile_success_empty/unary_operators/src/main.nr similarity index 100% rename from tooling/nargo_cli/tests/compile_success_empty/unary_operators/src/main.nr rename to test_programs/compile_success_empty/unary_operators/src/main.nr diff --git a/tooling/nargo_cli/tests/compile_success_empty/unit/Nargo.toml b/test_programs/compile_success_empty/unit/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/compile_success_empty/unit/Nargo.toml rename to test_programs/compile_success_empty/unit/Nargo.toml diff --git a/tooling/nargo_cli/tests/compile_success_empty/unit/src/main.nr b/test_programs/compile_success_empty/unit/src/main.nr similarity index 100% rename from 
tooling/nargo_cli/tests/compile_success_empty/unit/src/main.nr rename to test_programs/compile_success_empty/unit/src/main.nr diff --git a/tooling/nargo_cli/tests/compile_success_empty/unused_variables/Nargo.toml b/test_programs/compile_success_empty/unused_variables/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/compile_success_empty/unused_variables/Nargo.toml rename to test_programs/compile_success_empty/unused_variables/Nargo.toml diff --git a/tooling/nargo_cli/tests/compile_success_empty/unused_variables/src/main.nr b/test_programs/compile_success_empty/unused_variables/src/main.nr similarity index 100% rename from tooling/nargo_cli/tests/compile_success_empty/unused_variables/src/main.nr rename to test_programs/compile_success_empty/unused_variables/src/main.nr diff --git a/tooling/nargo_cli/tests/compile_success_empty/vectors/Nargo.toml b/test_programs/compile_success_empty/vectors/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/compile_success_empty/vectors/Nargo.toml rename to test_programs/compile_success_empty/vectors/Nargo.toml diff --git a/tooling/nargo_cli/tests/compile_success_empty/vectors/Prover.toml b/test_programs/compile_success_empty/vectors/Prover.toml similarity index 100% rename from tooling/nargo_cli/tests/compile_success_empty/vectors/Prover.toml rename to test_programs/compile_success_empty/vectors/Prover.toml diff --git a/tooling/nargo_cli/tests/compile_success_empty/vectors/src/main.nr b/test_programs/compile_success_empty/vectors/src/main.nr similarity index 100% rename from tooling/nargo_cli/tests/compile_success_empty/vectors/src/main.nr rename to test_programs/compile_success_empty/vectors/src/main.nr diff --git a/tooling/nargo_cli/tests/compile_success_empty/workspace_reexport_bug/Nargo.toml b/test_programs/compile_success_empty/workspace_reexport_bug/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/compile_success_empty/workspace_reexport_bug/Nargo.toml rename to test_programs/compile_success_empty/workspace_reexport_bug/Nargo.toml diff --git a/tooling/nargo_cli/tests/compile_success_empty/workspace_reexport_bug/binary/Nargo.toml b/test_programs/compile_success_empty/workspace_reexport_bug/binary/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/compile_success_empty/workspace_reexport_bug/binary/Nargo.toml rename to test_programs/compile_success_empty/workspace_reexport_bug/binary/Nargo.toml diff --git a/tooling/nargo_cli/tests/compile_success_empty/workspace_reexport_bug/binary/src/main.nr b/test_programs/compile_success_empty/workspace_reexport_bug/binary/src/main.nr similarity index 100% rename from tooling/nargo_cli/tests/compile_success_empty/workspace_reexport_bug/binary/src/main.nr rename to test_programs/compile_success_empty/workspace_reexport_bug/binary/src/main.nr diff --git a/tooling/nargo_cli/tests/compile_success_empty/workspace_reexport_bug/library/Nargo.toml b/test_programs/compile_success_empty/workspace_reexport_bug/library/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/compile_success_empty/workspace_reexport_bug/library/Nargo.toml rename to test_programs/compile_success_empty/workspace_reexport_bug/library/Nargo.toml diff --git a/tooling/nargo_cli/tests/compile_success_empty/workspace_reexport_bug/library/src/lib.nr b/test_programs/compile_success_empty/workspace_reexport_bug/library/src/lib.nr similarity index 100% rename from tooling/nargo_cli/tests/compile_success_empty/workspace_reexport_bug/library/src/lib.nr rename to 
test_programs/compile_success_empty/workspace_reexport_bug/library/src/lib.nr diff --git a/tooling/nargo_cli/tests/compile_success_empty/workspace_reexport_bug/library2/Nargo.toml b/test_programs/compile_success_empty/workspace_reexport_bug/library2/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/compile_success_empty/workspace_reexport_bug/library2/Nargo.toml rename to test_programs/compile_success_empty/workspace_reexport_bug/library2/Nargo.toml diff --git a/tooling/nargo_cli/tests/compile_success_empty/workspace_reexport_bug/library2/src/lib.nr b/test_programs/compile_success_empty/workspace_reexport_bug/library2/src/lib.nr similarity index 100% rename from tooling/nargo_cli/tests/compile_success_empty/workspace_reexport_bug/library2/src/lib.nr rename to test_programs/compile_success_empty/workspace_reexport_bug/library2/src/lib.nr diff --git a/tooling/nargo_cli/tests/execution_success/1327_concrete_in_generic/Nargo.toml b/test_programs/execution_success/1327_concrete_in_generic/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/1327_concrete_in_generic/Nargo.toml rename to test_programs/execution_success/1327_concrete_in_generic/Nargo.toml diff --git a/tooling/nargo_cli/tests/execution_success/1327_concrete_in_generic/Prover.toml b/test_programs/execution_success/1327_concrete_in_generic/Prover.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/1327_concrete_in_generic/Prover.toml rename to test_programs/execution_success/1327_concrete_in_generic/Prover.toml diff --git a/tooling/nargo_cli/tests/execution_success/1327_concrete_in_generic/src/main.nr b/test_programs/execution_success/1327_concrete_in_generic/src/main.nr similarity index 100% rename from tooling/nargo_cli/tests/execution_success/1327_concrete_in_generic/src/main.nr rename to test_programs/execution_success/1327_concrete_in_generic/src/main.nr diff --git a/test_programs/execution_success/1_mul/Nargo.toml b/test_programs/execution_success/1_mul/Nargo.toml new file mode 100644 index 00000000000..94b36157cca --- /dev/null +++ b/test_programs/execution_success/1_mul/Nargo.toml @@ -0,0 +1,7 @@ +[package] +name = "1_mul" +version = "0.1.0" +type = "bin" +authors = [""] + +[dependencies] diff --git a/tooling/nargo_cli/tests/execution_success/1_mul/Prover.toml b/test_programs/execution_success/1_mul/Prover.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/1_mul/Prover.toml rename to test_programs/execution_success/1_mul/Prover.toml diff --git a/tooling/nargo_cli/tests/execution_success/1_mul/src/main.nr b/test_programs/execution_success/1_mul/src/main.nr similarity index 100% rename from tooling/nargo_cli/tests/execution_success/1_mul/src/main.nr rename to test_programs/execution_success/1_mul/src/main.nr diff --git a/tooling/nargo_cli/tests/execution_success/2_div/Nargo.toml b/test_programs/execution_success/2_div/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/2_div/Nargo.toml rename to test_programs/execution_success/2_div/Nargo.toml diff --git a/tooling/nargo_cli/tests/execution_success/2_div/Prover.toml b/test_programs/execution_success/2_div/Prover.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/2_div/Prover.toml rename to test_programs/execution_success/2_div/Prover.toml diff --git a/tooling/nargo_cli/tests/execution_success/2_div/src/main.nr b/test_programs/execution_success/2_div/src/main.nr similarity index 100% rename from 
tooling/nargo_cli/tests/execution_success/2_div/src/main.nr rename to test_programs/execution_success/2_div/src/main.nr diff --git a/tooling/nargo_cli/tests/execution_success/3_add/Nargo.toml b/test_programs/execution_success/3_add/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/3_add/Nargo.toml rename to test_programs/execution_success/3_add/Nargo.toml diff --git a/tooling/nargo_cli/tests/execution_success/3_add/Prover.toml b/test_programs/execution_success/3_add/Prover.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/3_add/Prover.toml rename to test_programs/execution_success/3_add/Prover.toml diff --git a/tooling/nargo_cli/tests/execution_success/3_add/src/main.nr b/test_programs/execution_success/3_add/src/main.nr similarity index 100% rename from tooling/nargo_cli/tests/execution_success/3_add/src/main.nr rename to test_programs/execution_success/3_add/src/main.nr diff --git a/tooling/nargo_cli/tests/execution_success/4_sub/Nargo.toml b/test_programs/execution_success/4_sub/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/4_sub/Nargo.toml rename to test_programs/execution_success/4_sub/Nargo.toml diff --git a/tooling/nargo_cli/tests/execution_success/4_sub/Prover.toml b/test_programs/execution_success/4_sub/Prover.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/4_sub/Prover.toml rename to test_programs/execution_success/4_sub/Prover.toml diff --git a/tooling/nargo_cli/tests/execution_success/4_sub/src/main.nr b/test_programs/execution_success/4_sub/src/main.nr similarity index 100% rename from tooling/nargo_cli/tests/execution_success/4_sub/src/main.nr rename to test_programs/execution_success/4_sub/src/main.nr diff --git a/tooling/nargo_cli/tests/execution_success/5_over/Nargo.toml b/test_programs/execution_success/5_over/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/5_over/Nargo.toml rename to test_programs/execution_success/5_over/Nargo.toml diff --git a/tooling/nargo_cli/tests/execution_success/5_over/Prover.toml b/test_programs/execution_success/5_over/Prover.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/5_over/Prover.toml rename to test_programs/execution_success/5_over/Prover.toml diff --git a/tooling/nargo_cli/tests/execution_success/5_over/src/main.nr b/test_programs/execution_success/5_over/src/main.nr similarity index 100% rename from tooling/nargo_cli/tests/execution_success/5_over/src/main.nr rename to test_programs/execution_success/5_over/src/main.nr diff --git a/tooling/nargo_cli/tests/execution_success/6/Nargo.toml b/test_programs/execution_success/6/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/6/Nargo.toml rename to test_programs/execution_success/6/Nargo.toml diff --git a/tooling/nargo_cli/tests/execution_success/6/Prover.toml b/test_programs/execution_success/6/Prover.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/6/Prover.toml rename to test_programs/execution_success/6/Prover.toml diff --git a/tooling/nargo_cli/tests/execution_success/6/src/main.nr b/test_programs/execution_success/6/src/main.nr similarity index 100% rename from tooling/nargo_cli/tests/execution_success/6/src/main.nr rename to test_programs/execution_success/6/src/main.nr diff --git a/tooling/nargo_cli/tests/execution_success/6_array/Nargo.toml b/test_programs/execution_success/6_array/Nargo.toml similarity 
index 100% rename from tooling/nargo_cli/tests/execution_success/6_array/Nargo.toml rename to test_programs/execution_success/6_array/Nargo.toml diff --git a/tooling/nargo_cli/tests/execution_success/6_array/Prover.toml b/test_programs/execution_success/6_array/Prover.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/6_array/Prover.toml rename to test_programs/execution_success/6_array/Prover.toml diff --git a/tooling/nargo_cli/tests/execution_success/6_array/src/main.nr b/test_programs/execution_success/6_array/src/main.nr similarity index 100% rename from tooling/nargo_cli/tests/execution_success/6_array/src/main.nr rename to test_programs/execution_success/6_array/src/main.nr diff --git a/tooling/nargo_cli/tests/execution_success/7/Nargo.toml b/test_programs/execution_success/7/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/7/Nargo.toml rename to test_programs/execution_success/7/Nargo.toml diff --git a/tooling/nargo_cli/tests/execution_success/7/Prover.toml b/test_programs/execution_success/7/Prover.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/7/Prover.toml rename to test_programs/execution_success/7/Prover.toml diff --git a/tooling/nargo_cli/tests/execution_success/7/src/main.nr b/test_programs/execution_success/7/src/main.nr similarity index 100% rename from tooling/nargo_cli/tests/execution_success/7/src/main.nr rename to test_programs/execution_success/7/src/main.nr diff --git a/tooling/nargo_cli/tests/execution_success/7_function/Nargo.toml b/test_programs/execution_success/7_function/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/7_function/Nargo.toml rename to test_programs/execution_success/7_function/Nargo.toml diff --git a/tooling/nargo_cli/tests/execution_success/7_function/Prover.toml b/test_programs/execution_success/7_function/Prover.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/7_function/Prover.toml rename to test_programs/execution_success/7_function/Prover.toml diff --git a/tooling/nargo_cli/tests/execution_success/7_function/src/main.nr b/test_programs/execution_success/7_function/src/main.nr similarity index 100% rename from tooling/nargo_cli/tests/execution_success/7_function/src/main.nr rename to test_programs/execution_success/7_function/src/main.nr diff --git a/tooling/nargo_cli/tests/execution_success/arithmetic_binary_operations/Nargo.toml b/test_programs/execution_success/arithmetic_binary_operations/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/arithmetic_binary_operations/Nargo.toml rename to test_programs/execution_success/arithmetic_binary_operations/Nargo.toml diff --git a/tooling/nargo_cli/tests/execution_success/arithmetic_binary_operations/Prover.toml b/test_programs/execution_success/arithmetic_binary_operations/Prover.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/arithmetic_binary_operations/Prover.toml rename to test_programs/execution_success/arithmetic_binary_operations/Prover.toml diff --git a/tooling/nargo_cli/tests/execution_success/arithmetic_binary_operations/src/main.nr b/test_programs/execution_success/arithmetic_binary_operations/src/main.nr similarity index 100% rename from tooling/nargo_cli/tests/execution_success/arithmetic_binary_operations/src/main.nr rename to test_programs/execution_success/arithmetic_binary_operations/src/main.nr diff --git 
a/tooling/nargo_cli/tests/execution_success/array_dynamic/Nargo.toml b/test_programs/execution_success/array_dynamic/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/array_dynamic/Nargo.toml rename to test_programs/execution_success/array_dynamic/Nargo.toml diff --git a/tooling/nargo_cli/tests/execution_success/array_dynamic/Prover.toml b/test_programs/execution_success/array_dynamic/Prover.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/array_dynamic/Prover.toml rename to test_programs/execution_success/array_dynamic/Prover.toml diff --git a/test_programs/execution_success/array_dynamic/src/main.nr b/test_programs/execution_success/array_dynamic/src/main.nr new file mode 100644 index 00000000000..dde7bacc455 --- /dev/null +++ b/test_programs/execution_success/array_dynamic/src/main.nr @@ -0,0 +1,38 @@ +fn main( + x: [u32; 5], + mut z: u32, + t: u32, + index: [Field;5], + index2: [Field;5], + offset: Field, + sublen: Field +) { + let idx = (z - 5 * t - 5) as Field; + //dynamic array test + dyn_array(x, idx, idx - 3); + //regression for issue 1283 + let mut s = 0; + let x3 = [246, 159, 32, 176, 8]; + for i in 0..5 { + s += x3[index[i]]; + } + assert(s != 0); + + if 3 < (sublen as u32) { + assert(index[offset + 3] == index2[3]); + } +} + +fn dyn_array(mut x: [u32; 5], y: Field, z: Field) { + assert(x[y] == 111); + assert(x[z] == 101); + x[z] = 0; + assert(x[y] == 111); + assert(x[1] == 0); + if y as u32 < 10 { + x[y] = x[y] - 2; + } else { + x[y] = 0; + } + assert(x[4] == 109); +} diff --git a/tooling/nargo_cli/tests/execution_success/array_eq/Nargo.toml b/test_programs/execution_success/array_eq/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/array_eq/Nargo.toml rename to test_programs/execution_success/array_eq/Nargo.toml diff --git a/tooling/nargo_cli/tests/execution_success/array_eq/Prover.toml b/test_programs/execution_success/array_eq/Prover.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/array_eq/Prover.toml rename to test_programs/execution_success/array_eq/Prover.toml diff --git a/tooling/nargo_cli/tests/execution_success/array_eq/src/main.nr b/test_programs/execution_success/array_eq/src/main.nr similarity index 100% rename from tooling/nargo_cli/tests/execution_success/array_eq/src/main.nr rename to test_programs/execution_success/array_eq/src/main.nr diff --git a/tooling/nargo_cli/tests/execution_success/array_len/Nargo.toml b/test_programs/execution_success/array_len/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/array_len/Nargo.toml rename to test_programs/execution_success/array_len/Nargo.toml diff --git a/tooling/nargo_cli/tests/execution_success/array_len/Prover.toml b/test_programs/execution_success/array_len/Prover.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/array_len/Prover.toml rename to test_programs/execution_success/array_len/Prover.toml diff --git a/tooling/nargo_cli/tests/execution_success/array_len/src/main.nr b/test_programs/execution_success/array_len/src/main.nr similarity index 100% rename from tooling/nargo_cli/tests/execution_success/array_len/src/main.nr rename to test_programs/execution_success/array_len/src/main.nr diff --git a/tooling/nargo_cli/tests/execution_success/array_neq/Nargo.toml b/test_programs/execution_success/array_neq/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/array_neq/Nargo.toml 
rename to test_programs/execution_success/array_neq/Nargo.toml diff --git a/tooling/nargo_cli/tests/execution_success/array_neq/Prover.toml b/test_programs/execution_success/array_neq/Prover.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/array_neq/Prover.toml rename to test_programs/execution_success/array_neq/Prover.toml diff --git a/tooling/nargo_cli/tests/execution_success/array_neq/src/main.nr b/test_programs/execution_success/array_neq/src/main.nr similarity index 100% rename from tooling/nargo_cli/tests/execution_success/array_neq/src/main.nr rename to test_programs/execution_success/array_neq/src/main.nr diff --git a/tooling/nargo_cli/tests/execution_success/array_sort/Nargo.toml b/test_programs/execution_success/array_sort/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/array_sort/Nargo.toml rename to test_programs/execution_success/array_sort/Nargo.toml diff --git a/tooling/nargo_cli/tests/execution_success/array_sort/Prover.toml b/test_programs/execution_success/array_sort/Prover.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/array_sort/Prover.toml rename to test_programs/execution_success/array_sort/Prover.toml diff --git a/tooling/nargo_cli/tests/execution_success/array_sort/src/main.nr b/test_programs/execution_success/array_sort/src/main.nr similarity index 100% rename from tooling/nargo_cli/tests/execution_success/array_sort/src/main.nr rename to test_programs/execution_success/array_sort/src/main.nr diff --git a/tooling/nargo_cli/tests/execution_success/assert/Nargo.toml b/test_programs/execution_success/assert/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/assert/Nargo.toml rename to test_programs/execution_success/assert/Nargo.toml diff --git a/tooling/nargo_cli/tests/execution_success/assert/Prover.toml b/test_programs/execution_success/assert/Prover.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/assert/Prover.toml rename to test_programs/execution_success/assert/Prover.toml diff --git a/tooling/nargo_cli/tests/execution_success/assert/src/main.nr b/test_programs/execution_success/assert/src/main.nr similarity index 100% rename from tooling/nargo_cli/tests/execution_success/assert/src/main.nr rename to test_programs/execution_success/assert/src/main.nr diff --git a/tooling/nargo_cli/tests/execution_success/assert_statement/Nargo.toml b/test_programs/execution_success/assert_statement/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/assert_statement/Nargo.toml rename to test_programs/execution_success/assert_statement/Nargo.toml diff --git a/tooling/nargo_cli/tests/execution_success/assert_statement/Prover.toml b/test_programs/execution_success/assert_statement/Prover.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/assert_statement/Prover.toml rename to test_programs/execution_success/assert_statement/Prover.toml diff --git a/tooling/nargo_cli/tests/execution_success/assert_statement/src/main.nr b/test_programs/execution_success/assert_statement/src/main.nr similarity index 100% rename from tooling/nargo_cli/tests/execution_success/assert_statement/src/main.nr rename to test_programs/execution_success/assert_statement/src/main.nr diff --git a/tooling/nargo_cli/tests/execution_success/assign_ex/Nargo.toml b/test_programs/execution_success/assign_ex/Nargo.toml similarity index 100% rename from 
tooling/nargo_cli/tests/execution_success/assign_ex/Nargo.toml rename to test_programs/execution_success/assign_ex/Nargo.toml diff --git a/tooling/nargo_cli/tests/execution_success/assign_ex/Prover.toml b/test_programs/execution_success/assign_ex/Prover.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/assign_ex/Prover.toml rename to test_programs/execution_success/assign_ex/Prover.toml diff --git a/tooling/nargo_cli/tests/execution_success/assign_ex/src/main.nr b/test_programs/execution_success/assign_ex/src/main.nr similarity index 100% rename from tooling/nargo_cli/tests/execution_success/assign_ex/src/main.nr rename to test_programs/execution_success/assign_ex/src/main.nr diff --git a/tooling/nargo_cli/tests/execution_success/bit_and/Nargo.toml b/test_programs/execution_success/bit_and/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/bit_and/Nargo.toml rename to test_programs/execution_success/bit_and/Nargo.toml diff --git a/tooling/nargo_cli/tests/execution_success/bit_and/Prover.toml b/test_programs/execution_success/bit_and/Prover.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/bit_and/Prover.toml rename to test_programs/execution_success/bit_and/Prover.toml diff --git a/tooling/nargo_cli/tests/execution_success/bit_and/src/main.nr b/test_programs/execution_success/bit_and/src/main.nr similarity index 100% rename from tooling/nargo_cli/tests/execution_success/bit_and/src/main.nr rename to test_programs/execution_success/bit_and/src/main.nr diff --git a/tooling/nargo_cli/tests/execution_success/bit_shifts_comptime/Nargo.toml b/test_programs/execution_success/bit_shifts_comptime/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/bit_shifts_comptime/Nargo.toml rename to test_programs/execution_success/bit_shifts_comptime/Nargo.toml diff --git a/tooling/nargo_cli/tests/execution_success/bit_shifts_comptime/Prover.toml b/test_programs/execution_success/bit_shifts_comptime/Prover.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/bit_shifts_comptime/Prover.toml rename to test_programs/execution_success/bit_shifts_comptime/Prover.toml diff --git a/test_programs/execution_success/bit_shifts_comptime/src/main.nr b/test_programs/execution_success/bit_shifts_comptime/src/main.nr new file mode 100644 index 00000000000..9bb1028173d --- /dev/null +++ b/test_programs/execution_success/bit_shifts_comptime/src/main.nr @@ -0,0 +1,26 @@ +fn main(x: u64) { + let two: u64 = 2; + let three: u64 = 3; + // shifts on constant values + assert(two << 2 == 8); + assert((two << 3) / 8 == two); + assert((three >> 1) == 1); + // shifts on runtime values + assert(x << 1 == 128); + assert(x >> 2 == 16); + + regression_2250(); + + //regression for 3481 + assert(x << 63 == 0); + + assert_eq((1 as u56) << (32 as u56), 0x0100000000); +} + +fn regression_2250() { + let a: u1 = 1 >> 1; + assert(a == 0); + + let b: u32 = 1 >> 32; + assert(b == 0); +} diff --git a/tooling/nargo_cli/tests/execution_success/bit_shifts_runtime/Nargo.toml b/test_programs/execution_success/bit_shifts_runtime/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/bit_shifts_runtime/Nargo.toml rename to test_programs/execution_success/bit_shifts_runtime/Nargo.toml diff --git a/tooling/nargo_cli/tests/execution_success/bit_shifts_runtime/Prover.toml b/test_programs/execution_success/bit_shifts_runtime/Prover.toml similarity index 100% rename from 
tooling/nargo_cli/tests/execution_success/bit_shifts_runtime/Prover.toml rename to test_programs/execution_success/bit_shifts_runtime/Prover.toml diff --git a/test_programs/execution_success/bit_shifts_runtime/src/main.nr b/test_programs/execution_success/bit_shifts_runtime/src/main.nr new file mode 100644 index 00000000000..33d68765598 --- /dev/null +++ b/test_programs/execution_success/bit_shifts_runtime/src/main.nr @@ -0,0 +1,19 @@ +fn main(x: u64, y: u64) { + // runtime shifts on compile-time known values + assert(64 << y == 128); + assert(64 >> y == 32); + // runtime shifts on runtime values + assert(x << y == 128); + assert(x >> y == 32); + + // Bit-shift with signed integers + let mut a :i8 = y as i8; + let mut b: i8 = x as i8; + assert(b << 1 == -128); + assert(b >> 2 == 16); + assert(b >> a == 32); + a = -a; + assert(a << 7 == -128); + assert(a << -a == -2); + +} diff --git a/test_programs/execution_success/blake3/Nargo.toml b/test_programs/execution_success/blake3/Nargo.toml new file mode 100644 index 00000000000..29f6ad5f11c --- /dev/null +++ b/test_programs/execution_success/blake3/Nargo.toml @@ -0,0 +1,7 @@ +[package] +name = "blake3" +type = "bin" +authors = [""] +compiler_version = ">=0.22.0" + +[dependencies] \ No newline at end of file diff --git a/test_programs/execution_success/blake3/Prover.toml b/test_programs/execution_success/blake3/Prover.toml new file mode 100644 index 00000000000..c807701479b --- /dev/null +++ b/test_programs/execution_success/blake3/Prover.toml @@ -0,0 +1,37 @@ +# hello as bytes +# https://connor4312.github.io/blake3/index.html +x = [104, 101, 108, 108, 111] +result = [ + 0xea, + 0x8f, + 0x16, + 0x3d, + 0xb3, + 0x86, + 0x82, + 0x92, + 0x5e, + 0x44, + 0x91, + 0xc5, + 0xe5, + 0x8d, + 0x4b, + 0xb3, + 0x50, + 0x6e, + 0xf8, + 0xc1, + 0x4e, + 0xb7, + 0x8a, + 0x86, + 0xe9, + 0x08, + 0xc5, + 0x62, + 0x4a, + 0x67, + 0x20, + 0x0f, +] diff --git a/test_programs/execution_success/blake3/src/main.nr b/test_programs/execution_success/blake3/src/main.nr new file mode 100644 index 00000000000..3bfea6c5f95 --- /dev/null +++ b/test_programs/execution_success/blake3/src/main.nr @@ -0,0 +1,6 @@ +use dep::std; + +fn main(x: [u8; 5], result: [u8; 32]) { + let digest = std::hash::blake3(x); + assert(digest == result); +} diff --git a/tooling/nargo_cli/tests/execution_success/bool_not/Nargo.toml b/test_programs/execution_success/bool_not/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/bool_not/Nargo.toml rename to test_programs/execution_success/bool_not/Nargo.toml diff --git a/tooling/nargo_cli/tests/execution_success/bool_not/Prover.toml b/test_programs/execution_success/bool_not/Prover.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/bool_not/Prover.toml rename to test_programs/execution_success/bool_not/Prover.toml diff --git a/tooling/nargo_cli/tests/execution_success/bool_not/src/main.nr b/test_programs/execution_success/bool_not/src/main.nr similarity index 100% rename from tooling/nargo_cli/tests/execution_success/bool_not/src/main.nr rename to test_programs/execution_success/bool_not/src/main.nr diff --git a/tooling/nargo_cli/tests/execution_success/bool_or/Nargo.toml b/test_programs/execution_success/bool_or/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/bool_or/Nargo.toml rename to test_programs/execution_success/bool_or/Nargo.toml diff --git a/tooling/nargo_cli/tests/execution_success/bool_or/Prover.toml 
b/test_programs/execution_success/bool_or/Prover.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/bool_or/Prover.toml rename to test_programs/execution_success/bool_or/Prover.toml diff --git a/tooling/nargo_cli/tests/execution_success/bool_or/src/main.nr b/test_programs/execution_success/bool_or/src/main.nr similarity index 100% rename from tooling/nargo_cli/tests/execution_success/bool_or/src/main.nr rename to test_programs/execution_success/bool_or/src/main.nr diff --git a/tooling/nargo_cli/tests/execution_success/brillig_acir_as_brillig/Nargo.toml b/test_programs/execution_success/brillig_acir_as_brillig/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/brillig_acir_as_brillig/Nargo.toml rename to test_programs/execution_success/brillig_acir_as_brillig/Nargo.toml diff --git a/tooling/nargo_cli/tests/execution_success/brillig_acir_as_brillig/Prover.toml b/test_programs/execution_success/brillig_acir_as_brillig/Prover.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/brillig_acir_as_brillig/Prover.toml rename to test_programs/execution_success/brillig_acir_as_brillig/Prover.toml diff --git a/tooling/nargo_cli/tests/execution_success/brillig_acir_as_brillig/src/main.nr b/test_programs/execution_success/brillig_acir_as_brillig/src/main.nr similarity index 100% rename from tooling/nargo_cli/tests/execution_success/brillig_acir_as_brillig/src/main.nr rename to test_programs/execution_success/brillig_acir_as_brillig/src/main.nr diff --git a/test_programs/execution_success/brillig_array_eq/Nargo.toml b/test_programs/execution_success/brillig_array_eq/Nargo.toml new file mode 100644 index 00000000000..62ce392f96b --- /dev/null +++ b/test_programs/execution_success/brillig_array_eq/Nargo.toml @@ -0,0 +1,6 @@ +[package] +name = "brillig_array_eq" +type = "bin" +authors = [""] + +[dependencies] diff --git a/test_programs/execution_success/brillig_array_eq/Prover.toml b/test_programs/execution_success/brillig_array_eq/Prover.toml new file mode 100644 index 00000000000..ecfed7de213 --- /dev/null +++ b/test_programs/execution_success/brillig_array_eq/Prover.toml @@ -0,0 +1,2 @@ +a = [77,75,108,209,54,16,50,202,155,210,174,185,217,0,170,77,69,217,234,216,10,201,66,51,116,196,81,167,37,77,7,102] +b = [77,75,108,209,54,16,50,202,155,210,174,185,217,0,170,77,69,217,234,216,10,201,66,51,116,196,81,167,37,77,7,102] diff --git a/test_programs/execution_success/brillig_array_eq/src/main.nr b/test_programs/execution_success/brillig_array_eq/src/main.nr new file mode 100644 index 00000000000..90f631dbed8 --- /dev/null +++ b/test_programs/execution_success/brillig_array_eq/src/main.nr @@ -0,0 +1,4 @@ +// Simple example of checking whether two arrays are equal +unconstrained fn main(a: [Field; 32], b: [Field; 32]) { + assert(a == b); +} diff --git a/tooling/nargo_cli/tests/execution_success/brillig_arrays/Nargo.toml b/test_programs/execution_success/brillig_arrays/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/brillig_arrays/Nargo.toml rename to test_programs/execution_success/brillig_arrays/Nargo.toml diff --git a/tooling/nargo_cli/tests/execution_success/brillig_arrays/Prover.toml b/test_programs/execution_success/brillig_arrays/Prover.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/brillig_arrays/Prover.toml rename to test_programs/execution_success/brillig_arrays/Prover.toml diff --git
a/tooling/nargo_cli/tests/execution_success/brillig_arrays/src/main.nr b/test_programs/execution_success/brillig_arrays/src/main.nr similarity index 100% rename from tooling/nargo_cli/tests/execution_success/brillig_arrays/src/main.nr rename to test_programs/execution_success/brillig_arrays/src/main.nr diff --git a/tooling/nargo_cli/tests/execution_success/brillig_assert/Nargo.toml b/test_programs/execution_success/brillig_assert/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/brillig_assert/Nargo.toml rename to test_programs/execution_success/brillig_assert/Nargo.toml diff --git a/tooling/nargo_cli/tests/execution_success/brillig_assert/Prover.toml b/test_programs/execution_success/brillig_assert/Prover.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/brillig_assert/Prover.toml rename to test_programs/execution_success/brillig_assert/Prover.toml diff --git a/tooling/nargo_cli/tests/execution_success/brillig_assert/src/main.nr b/test_programs/execution_success/brillig_assert/src/main.nr similarity index 100% rename from tooling/nargo_cli/tests/execution_success/brillig_assert/src/main.nr rename to test_programs/execution_success/brillig_assert/src/main.nr diff --git a/tooling/nargo_cli/tests/execution_success/brillig_blake2s/Nargo.toml b/test_programs/execution_success/brillig_blake2s/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/brillig_blake2s/Nargo.toml rename to test_programs/execution_success/brillig_blake2s/Nargo.toml diff --git a/tooling/nargo_cli/tests/execution_success/brillig_blake2s/Prover.toml b/test_programs/execution_success/brillig_blake2s/Prover.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/brillig_blake2s/Prover.toml rename to test_programs/execution_success/brillig_blake2s/Prover.toml diff --git a/tooling/nargo_cli/tests/execution_success/brillig_blake2s/src/main.nr b/test_programs/execution_success/brillig_blake2s/src/main.nr similarity index 100% rename from tooling/nargo_cli/tests/execution_success/brillig_blake2s/src/main.nr rename to test_programs/execution_success/brillig_blake2s/src/main.nr diff --git a/tooling/nargo_cli/tests/execution_success/brillig_calls/Nargo.toml b/test_programs/execution_success/brillig_calls/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/brillig_calls/Nargo.toml rename to test_programs/execution_success/brillig_calls/Nargo.toml diff --git a/tooling/nargo_cli/tests/execution_success/brillig_calls/Prover.toml b/test_programs/execution_success/brillig_calls/Prover.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/brillig_calls/Prover.toml rename to test_programs/execution_success/brillig_calls/Prover.toml diff --git a/tooling/nargo_cli/tests/execution_success/brillig_calls/src/main.nr b/test_programs/execution_success/brillig_calls/src/main.nr similarity index 100% rename from tooling/nargo_cli/tests/execution_success/brillig_calls/src/main.nr rename to test_programs/execution_success/brillig_calls/src/main.nr diff --git a/tooling/nargo_cli/tests/execution_success/brillig_calls_array/Nargo.toml b/test_programs/execution_success/brillig_calls_array/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/brillig_calls_array/Nargo.toml rename to test_programs/execution_success/brillig_calls_array/Nargo.toml diff --git a/tooling/nargo_cli/tests/execution_success/brillig_calls_array/Prover.toml 
b/test_programs/execution_success/brillig_calls_array/Prover.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/brillig_calls_array/Prover.toml rename to test_programs/execution_success/brillig_calls_array/Prover.toml diff --git a/tooling/nargo_cli/tests/execution_success/brillig_calls_array/src/main.nr b/test_programs/execution_success/brillig_calls_array/src/main.nr similarity index 100% rename from tooling/nargo_cli/tests/execution_success/brillig_calls_array/src/main.nr rename to test_programs/execution_success/brillig_calls_array/src/main.nr diff --git a/tooling/nargo_cli/tests/execution_success/brillig_calls_conditionals/Nargo.toml b/test_programs/execution_success/brillig_calls_conditionals/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/brillig_calls_conditionals/Nargo.toml rename to test_programs/execution_success/brillig_calls_conditionals/Nargo.toml diff --git a/tooling/nargo_cli/tests/execution_success/brillig_calls_conditionals/Prover.toml b/test_programs/execution_success/brillig_calls_conditionals/Prover.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/brillig_calls_conditionals/Prover.toml rename to test_programs/execution_success/brillig_calls_conditionals/Prover.toml diff --git a/tooling/nargo_cli/tests/execution_success/brillig_calls_conditionals/src/main.nr b/test_programs/execution_success/brillig_calls_conditionals/src/main.nr similarity index 100% rename from tooling/nargo_cli/tests/execution_success/brillig_calls_conditionals/src/main.nr rename to test_programs/execution_success/brillig_calls_conditionals/src/main.nr diff --git a/tooling/nargo_cli/tests/execution_success/brillig_conditional/Nargo.toml b/test_programs/execution_success/brillig_conditional/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/brillig_conditional/Nargo.toml rename to test_programs/execution_success/brillig_conditional/Nargo.toml diff --git a/tooling/nargo_cli/tests/execution_success/brillig_conditional/Prover.toml b/test_programs/execution_success/brillig_conditional/Prover.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/brillig_conditional/Prover.toml rename to test_programs/execution_success/brillig_conditional/Prover.toml diff --git a/tooling/nargo_cli/tests/execution_success/brillig_conditional/src/main.nr b/test_programs/execution_success/brillig_conditional/src/main.nr similarity index 100% rename from tooling/nargo_cli/tests/execution_success/brillig_conditional/src/main.nr rename to test_programs/execution_success/brillig_conditional/src/main.nr diff --git a/test_programs/execution_success/brillig_cow/Nargo.toml b/test_programs/execution_success/brillig_cow/Nargo.toml new file mode 100644 index 00000000000..d191eb53ddf --- /dev/null +++ b/test_programs/execution_success/brillig_cow/Nargo.toml @@ -0,0 +1,6 @@ +[package] +name = "brillig_cow" +type = "bin" +authors = [""] + +[dependencies] diff --git a/test_programs/execution_success/brillig_cow/Prover.toml b/test_programs/execution_success/brillig_cow/Prover.toml new file mode 100644 index 00000000000..6533d218b15 --- /dev/null +++ b/test_programs/execution_success/brillig_cow/Prover.toml @@ -0,0 +1,7 @@ +original = [0, 1, 2, 3, 4] +index = 2 + +[expected_result] +original = [0, 1, 2, 3, 4] +modified_once = [0, 1, 27, 3, 4] +modified_twice = [0, 1, 27, 27, 4] diff --git a/test_programs/execution_success/brillig_cow/src/main.nr 
b/test_programs/execution_success/brillig_cow/src/main.nr new file mode 100644 index 00000000000..7d847e085fe --- /dev/null +++ b/test_programs/execution_success/brillig_cow/src/main.nr @@ -0,0 +1,54 @@ +// Tests the copy on write optimization for arrays. We look for cases where we are modifying an array in place when we shouldn't. + +global ARRAY_SIZE = 5; + +struct ExecutionResult { + original: [Field; ARRAY_SIZE], + modified_once: [Field; ARRAY_SIZE], + modified_twice: [Field; ARRAY_SIZE], +} + +impl ExecutionResult { + fn is_equal(self, other: ExecutionResult) -> bool { + (self.original == other.original) & + (self.modified_once == other.modified_once) & + (self.modified_twice == other.modified_twice) + } +} + +fn modify_in_inlined_constrained(original: [Field; ARRAY_SIZE], index: u64) -> ExecutionResult { + let mut modified = original; + + modified[index] = 27; + + let modified_once = modified; + + modified[index+1] = 27; + + ExecutionResult { + original, + modified_once, + modified_twice: modified + } +} + +unconstrained fn modify_in_unconstrained(original: [Field; ARRAY_SIZE], index: u64) -> ExecutionResult { + let mut modified = original; + + modified[index] = 27; + + let modified_once = modified; + + modified[index+1] = 27; + + ExecutionResult { + original, + modified_once, + modified_twice: modified + } +} + +unconstrained fn main(original: [Field; ARRAY_SIZE], index: u64, expected_result: ExecutionResult) { + assert(expected_result.is_equal(modify_in_unconstrained(original, index))); + assert(expected_result.is_equal(modify_in_inlined_constrained(original, index))); +} diff --git a/test_programs/execution_success/brillig_ecdsa_secp256k1/Nargo.toml b/test_programs/execution_success/brillig_ecdsa_secp256k1/Nargo.toml new file mode 100644 index 00000000000..495a49f2247 --- /dev/null +++ b/test_programs/execution_success/brillig_ecdsa_secp256k1/Nargo.toml @@ -0,0 +1,6 @@ +[package] +name = "brillig_ecdsa_secp256k1" +type = "bin" +authors = [""] + +[dependencies] diff --git a/tooling/nargo_cli/tests/execution_success/brillig_ecdsa/Prover.toml b/test_programs/execution_success/brillig_ecdsa_secp256k1/Prover.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/brillig_ecdsa/Prover.toml rename to test_programs/execution_success/brillig_ecdsa_secp256k1/Prover.toml diff --git a/test_programs/execution_success/brillig_ecdsa_secp256k1/src/main.nr b/test_programs/execution_success/brillig_ecdsa_secp256k1/src/main.nr new file mode 100644 index 00000000000..5d84d885567 --- /dev/null +++ b/test_programs/execution_success/brillig_ecdsa_secp256k1/src/main.nr @@ -0,0 +1,16 @@ +use dep::std; +// Tests a very simple program. 
+// +// The feature being tested is ecdsa in brillig +fn main(hashed_message: [u8; 32], pub_key_x: [u8; 32], pub_key_y: [u8; 32], signature: [u8; 64]) { + assert(ecdsa(hashed_message, pub_key_x, pub_key_y, signature)); +} + +unconstrained fn ecdsa( + hashed_message: [u8; 32], + pub_key_x: [u8; 32], + pub_key_y: [u8; 32], + signature: [u8; 64] +) -> bool { + std::ecdsa_secp256k1::verify_signature(pub_key_x, pub_key_y, signature, hashed_message) +} diff --git a/test_programs/execution_success/brillig_ecdsa_secp256r1/Nargo.toml b/test_programs/execution_success/brillig_ecdsa_secp256r1/Nargo.toml new file mode 100644 index 00000000000..0a71e782104 --- /dev/null +++ b/test_programs/execution_success/brillig_ecdsa_secp256r1/Nargo.toml @@ -0,0 +1,6 @@ +[package] +name = "brillig_ecdsa_secp256r1" +type = "bin" +authors = [""] + +[dependencies] diff --git a/tooling/nargo_cli/tests/execution_success/ecdsa_secp256r1/Prover.toml b/test_programs/execution_success/brillig_ecdsa_secp256r1/Prover.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/ecdsa_secp256r1/Prover.toml rename to test_programs/execution_success/brillig_ecdsa_secp256r1/Prover.toml diff --git a/test_programs/execution_success/brillig_ecdsa_secp256r1/src/main.nr b/test_programs/execution_success/brillig_ecdsa_secp256r1/src/main.nr new file mode 100644 index 00000000000..9da07f531aa --- /dev/null +++ b/test_programs/execution_success/brillig_ecdsa_secp256r1/src/main.nr @@ -0,0 +1,16 @@ +use dep::std; +// Tests a very simple program. +// +// The feature being tested is ecdsa in brillig +fn main(hashed_message: [u8; 32], pub_key_x: [u8; 32], pub_key_y: [u8; 32], signature: [u8; 64]) { + assert(ecdsa(hashed_message, pub_key_x, pub_key_y, signature)); +} + +unconstrained fn ecdsa( + hashed_message: [u8; 32], + pub_key_x: [u8; 32], + pub_key_y: [u8; 32], + signature: [u8; 64] +) -> bool { + std::ecdsa_secp256r1::verify_signature(pub_key_x, pub_key_y, signature, hashed_message) +} diff --git a/tooling/nargo_cli/tests/execution_success/brillig_fns_as_values/Nargo.toml b/test_programs/execution_success/brillig_fns_as_values/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/brillig_fns_as_values/Nargo.toml rename to test_programs/execution_success/brillig_fns_as_values/Nargo.toml diff --git a/tooling/nargo_cli/tests/execution_success/brillig_fns_as_values/Prover.toml b/test_programs/execution_success/brillig_fns_as_values/Prover.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/brillig_fns_as_values/Prover.toml rename to test_programs/execution_success/brillig_fns_as_values/Prover.toml diff --git a/tooling/nargo_cli/tests/execution_success/brillig_fns_as_values/src/main.nr b/test_programs/execution_success/brillig_fns_as_values/src/main.nr similarity index 100% rename from tooling/nargo_cli/tests/execution_success/brillig_fns_as_values/src/main.nr rename to test_programs/execution_success/brillig_fns_as_values/src/main.nr diff --git a/tooling/nargo_cli/tests/execution_success/brillig_hash_to_field/Nargo.toml b/test_programs/execution_success/brillig_hash_to_field/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/brillig_hash_to_field/Nargo.toml rename to test_programs/execution_success/brillig_hash_to_field/Nargo.toml diff --git a/tooling/nargo_cli/tests/execution_success/brillig_hash_to_field/Prover.toml b/test_programs/execution_success/brillig_hash_to_field/Prover.toml similarity index 100% rename from
tooling/nargo_cli/tests/execution_success/brillig_hash_to_field/Prover.toml rename to test_programs/execution_success/brillig_hash_to_field/Prover.toml diff --git a/tooling/nargo_cli/tests/execution_success/brillig_hash_to_field/src/main.nr b/test_programs/execution_success/brillig_hash_to_field/src/main.nr similarity index 100% rename from tooling/nargo_cli/tests/execution_success/brillig_hash_to_field/src/main.nr rename to test_programs/execution_success/brillig_hash_to_field/src/main.nr diff --git a/tooling/nargo_cli/tests/execution_success/brillig_identity_function/Nargo.toml b/test_programs/execution_success/brillig_identity_function/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/brillig_identity_function/Nargo.toml rename to test_programs/execution_success/brillig_identity_function/Nargo.toml diff --git a/tooling/nargo_cli/tests/execution_success/brillig_identity_function/Prover.toml b/test_programs/execution_success/brillig_identity_function/Prover.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/brillig_identity_function/Prover.toml rename to test_programs/execution_success/brillig_identity_function/Prover.toml diff --git a/tooling/nargo_cli/tests/execution_success/brillig_identity_function/src/main.nr b/test_programs/execution_success/brillig_identity_function/src/main.nr similarity index 100% rename from tooling/nargo_cli/tests/execution_success/brillig_identity_function/src/main.nr rename to test_programs/execution_success/brillig_identity_function/src/main.nr diff --git a/tooling/nargo_cli/tests/execution_success/brillig_keccak/Nargo.toml b/test_programs/execution_success/brillig_keccak/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/brillig_keccak/Nargo.toml rename to test_programs/execution_success/brillig_keccak/Nargo.toml diff --git a/tooling/nargo_cli/tests/execution_success/brillig_keccak/Prover.toml b/test_programs/execution_success/brillig_keccak/Prover.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/brillig_keccak/Prover.toml rename to test_programs/execution_success/brillig_keccak/Prover.toml diff --git a/tooling/nargo_cli/tests/execution_success/brillig_keccak/src/main.nr b/test_programs/execution_success/brillig_keccak/src/main.nr similarity index 100% rename from tooling/nargo_cli/tests/execution_success/brillig_keccak/src/main.nr rename to test_programs/execution_success/brillig_keccak/src/main.nr diff --git a/tooling/nargo_cli/tests/execution_success/brillig_loop/Nargo.toml b/test_programs/execution_success/brillig_loop/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/brillig_loop/Nargo.toml rename to test_programs/execution_success/brillig_loop/Nargo.toml diff --git a/tooling/nargo_cli/tests/execution_success/brillig_loop/Prover.toml b/test_programs/execution_success/brillig_loop/Prover.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/brillig_loop/Prover.toml rename to test_programs/execution_success/brillig_loop/Prover.toml diff --git a/tooling/nargo_cli/tests/execution_success/brillig_loop/src/main.nr b/test_programs/execution_success/brillig_loop/src/main.nr similarity index 100% rename from tooling/nargo_cli/tests/execution_success/brillig_loop/src/main.nr rename to test_programs/execution_success/brillig_loop/src/main.nr diff --git a/tooling/nargo_cli/tests/execution_success/brillig_nested_arrays/Nargo.toml 
b/test_programs/execution_success/brillig_nested_arrays/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/brillig_nested_arrays/Nargo.toml rename to test_programs/execution_success/brillig_nested_arrays/Nargo.toml diff --git a/tooling/nargo_cli/tests/execution_success/brillig_nested_arrays/Prover.toml b/test_programs/execution_success/brillig_nested_arrays/Prover.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/brillig_nested_arrays/Prover.toml rename to test_programs/execution_success/brillig_nested_arrays/Prover.toml diff --git a/tooling/nargo_cli/tests/execution_success/brillig_nested_arrays/src/main.nr b/test_programs/execution_success/brillig_nested_arrays/src/main.nr similarity index 100% rename from tooling/nargo_cli/tests/execution_success/brillig_nested_arrays/src/main.nr rename to test_programs/execution_success/brillig_nested_arrays/src/main.nr diff --git a/tooling/nargo_cli/tests/execution_success/brillig_nested_slices/Nargo.toml b/test_programs/execution_success/brillig_nested_slices/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/brillig_nested_slices/Nargo.toml rename to test_programs/execution_success/brillig_nested_slices/Nargo.toml diff --git a/tooling/nargo_cli/tests/execution_success/brillig_nested_slices/Prover.toml b/test_programs/execution_success/brillig_nested_slices/Prover.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/brillig_nested_slices/Prover.toml rename to test_programs/execution_success/brillig_nested_slices/Prover.toml diff --git a/tooling/nargo_cli/tests/execution_success/brillig_nested_slices/src/main.nr b/test_programs/execution_success/brillig_nested_slices/src/main.nr similarity index 100% rename from tooling/nargo_cli/tests/execution_success/brillig_nested_slices/src/main.nr rename to test_programs/execution_success/brillig_nested_slices/src/main.nr diff --git a/tooling/nargo_cli/tests/execution_success/brillig_not/Nargo.toml b/test_programs/execution_success/brillig_not/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/brillig_not/Nargo.toml rename to test_programs/execution_success/brillig_not/Nargo.toml diff --git a/tooling/nargo_cli/tests/execution_success/brillig_not/Prover.toml b/test_programs/execution_success/brillig_not/Prover.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/brillig_not/Prover.toml rename to test_programs/execution_success/brillig_not/Prover.toml diff --git a/tooling/nargo_cli/tests/execution_success/brillig_not/src/main.nr b/test_programs/execution_success/brillig_not/src/main.nr similarity index 100% rename from tooling/nargo_cli/tests/execution_success/brillig_not/src/main.nr rename to test_programs/execution_success/brillig_not/src/main.nr diff --git a/tooling/nargo_cli/tests/execution_success/brillig_oracle/Nargo.toml b/test_programs/execution_success/brillig_oracle/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/brillig_oracle/Nargo.toml rename to test_programs/execution_success/brillig_oracle/Nargo.toml diff --git a/tooling/nargo_cli/tests/execution_success/brillig_oracle/Prover.toml b/test_programs/execution_success/brillig_oracle/Prover.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/brillig_oracle/Prover.toml rename to test_programs/execution_success/brillig_oracle/Prover.toml diff --git 
a/test_programs/execution_success/brillig_oracle/src/main.nr b/test_programs/execution_success/brillig_oracle/src/main.nr new file mode 100644 index 00000000000..490b7b605e3 --- /dev/null +++ b/test_programs/execution_success/brillig_oracle/src/main.nr @@ -0,0 +1,42 @@ +use dep::std::slice; +use dep::std::test::OracleMock; + +// Tests oracle usage in brillig/unconstrained functions +fn main(x: Field) { + let size = 20; + // TODO: Add a method along the lines of `(0..size).to_array()`. + let mut mock_oracle_response = [0; 20]; + // TODO: Add an `array.reverse()` method. + let mut reversed_mock_oracle_response = [0; 20]; + for i in 0..size { + mock_oracle_response[i] = i; + reversed_mock_oracle_response[19 - i] = i; + } + + // TODO: this method of returning a slice feels hacky. + let _ = OracleMock::mock("get_number_sequence").with_params(size).returns((20, mock_oracle_response)); + let _ = OracleMock::mock("get_reverse_number_sequence").with_params(size).returns((20, reversed_mock_oracle_response)); + + get_number_sequence_wrapper(size); +} + +// Define oracle functions which we have mocked above +#[oracle(get_number_sequence)] +unconstrained fn get_number_sequence(_size: Field) -> [Field] {} + +#[oracle(get_reverse_number_sequence)] +unconstrained fn get_reverse_number_sequence(_size: Field) -> [Field] {} + +unconstrained fn get_number_sequence_wrapper(size: Field) { + let slice = get_number_sequence(size); + for i in 0..20 as u32 { + assert(slice[i] == i as Field); + } + + let reversed_slice = get_reverse_number_sequence(size); + // Regression test that we have not overwritten memory + for i in 0..20 as u32 { + assert(slice[i] == reversed_slice[19 - i]); + } +} + diff --git a/tooling/nargo_cli/tests/execution_success/brillig_pedersen/Nargo.toml b/test_programs/execution_success/brillig_pedersen/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/brillig_pedersen/Nargo.toml rename to test_programs/execution_success/brillig_pedersen/Nargo.toml diff --git a/tooling/nargo_cli/tests/execution_success/brillig_pedersen/Prover.toml b/test_programs/execution_success/brillig_pedersen/Prover.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/brillig_pedersen/Prover.toml rename to test_programs/execution_success/brillig_pedersen/Prover.toml diff --git a/tooling/nargo_cli/tests/execution_success/brillig_pedersen/src/main.nr b/test_programs/execution_success/brillig_pedersen/src/main.nr similarity index 100% rename from tooling/nargo_cli/tests/execution_success/brillig_pedersen/src/main.nr rename to test_programs/execution_success/brillig_pedersen/src/main.nr diff --git a/tooling/nargo_cli/tests/execution_success/brillig_recursion/Nargo.toml b/test_programs/execution_success/brillig_recursion/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/brillig_recursion/Nargo.toml rename to test_programs/execution_success/brillig_recursion/Nargo.toml diff --git a/tooling/nargo_cli/tests/execution_success/brillig_recursion/Prover.toml b/test_programs/execution_success/brillig_recursion/Prover.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/brillig_recursion/Prover.toml rename to test_programs/execution_success/brillig_recursion/Prover.toml diff --git a/tooling/nargo_cli/tests/execution_success/brillig_recursion/src/main.nr b/test_programs/execution_success/brillig_recursion/src/main.nr similarity index 100% rename from 
tooling/nargo_cli/tests/execution_success/brillig_recursion/src/main.nr rename to test_programs/execution_success/brillig_recursion/src/main.nr diff --git a/tooling/nargo_cli/tests/execution_success/brillig_references/Nargo.toml b/test_programs/execution_success/brillig_references/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/brillig_references/Nargo.toml rename to test_programs/execution_success/brillig_references/Nargo.toml diff --git a/tooling/nargo_cli/tests/execution_success/brillig_references/Prover.toml b/test_programs/execution_success/brillig_references/Prover.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/brillig_references/Prover.toml rename to test_programs/execution_success/brillig_references/Prover.toml diff --git a/tooling/nargo_cli/tests/execution_success/brillig_references/src/main.nr b/test_programs/execution_success/brillig_references/src/main.nr similarity index 100% rename from tooling/nargo_cli/tests/execution_success/brillig_references/src/main.nr rename to test_programs/execution_success/brillig_references/src/main.nr diff --git a/tooling/nargo_cli/tests/execution_success/brillig_scalar_mul/Nargo.toml b/test_programs/execution_success/brillig_scalar_mul/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/brillig_scalar_mul/Nargo.toml rename to test_programs/execution_success/brillig_scalar_mul/Nargo.toml diff --git a/tooling/nargo_cli/tests/execution_success/brillig_scalar_mul/Prover.toml b/test_programs/execution_success/brillig_scalar_mul/Prover.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/brillig_scalar_mul/Prover.toml rename to test_programs/execution_success/brillig_scalar_mul/Prover.toml diff --git a/test_programs/execution_success/brillig_scalar_mul/src/main.nr b/test_programs/execution_success/brillig_scalar_mul/src/main.nr new file mode 100644 index 00000000000..ab2f79eb815 --- /dev/null +++ b/test_programs/execution_success/brillig_scalar_mul/src/main.nr @@ -0,0 +1,23 @@ +use dep::std; + +unconstrained fn main( + a: Field, + a_pub_x: pub Field, + a_pub_y: pub Field, + b: Field, + b_pub_x: pub Field, + b_pub_y: pub Field +) { + let mut priv_key = a; + let mut pub_x: Field = a_pub_x; + let mut pub_y: Field = a_pub_y; + if a != 1 { + // Change `a` in Prover.toml to test input `b` + priv_key = b; + pub_x = b_pub_x; + pub_y = b_pub_y; + } + let res = std::scalar_mul::fixed_base_embedded_curve(priv_key, 0); + assert(res[0] == pub_x); + assert(res[1] == pub_y); +} diff --git a/tooling/nargo_cli/tests/execution_success/brillig_schnorr/Nargo.toml b/test_programs/execution_success/brillig_schnorr/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/brillig_schnorr/Nargo.toml rename to test_programs/execution_success/brillig_schnorr/Nargo.toml diff --git a/tooling/nargo_cli/tests/execution_success/brillig_schnorr/Prover.toml b/test_programs/execution_success/brillig_schnorr/Prover.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/brillig_schnorr/Prover.toml rename to test_programs/execution_success/brillig_schnorr/Prover.toml diff --git a/test_programs/execution_success/brillig_schnorr/src/main.nr b/test_programs/execution_success/brillig_schnorr/src/main.nr new file mode 100644 index 00000000000..4cc79ae7e07 --- /dev/null +++ b/test_programs/execution_success/brillig_schnorr/src/main.nr @@ -0,0 +1,25 @@ +use dep::std; +// Note: If main has any unsized types, 
then the verifier will never be able +// to figure out the circuit instance +unconstrained fn main( + message: [u8; 10], + message_field: Field, + pub_key_x: Field, + pub_key_y: Field, + signature: [u8; 64] +) { + // Regression for issue #2421 + // We want to make sure that we can accurately verify a signature whose message is a slice vs. an array + let message_field_bytes = message_field.to_be_bytes(10); + for i in 0..10 { + assert(message[i] == message_field_bytes[i]); + } + // Is there ever a situation where someone would want + // to ensure that a signature was invalid? + // Check that passing a slice as the message is valid + let valid_signature = std::schnorr::verify_signature(pub_key_x, pub_key_y, signature, message_field_bytes); + assert(valid_signature); + // Check that passing an array as the message is valid + let valid_signature = std::schnorr::verify_signature(pub_key_x, pub_key_y, signature, message); + assert(valid_signature); +} diff --git a/test_programs/execution_success/brillig_set_slice_of_slice/Nargo.toml b/test_programs/execution_success/brillig_set_slice_of_slice/Nargo.toml new file mode 100644 index 00000000000..071254266f4 --- /dev/null +++ b/test_programs/execution_success/brillig_set_slice_of_slice/Nargo.toml @@ -0,0 +1,7 @@ +[package] +name = "brillig_set_slice_of_slice" +type = "bin" +authors = [""] +compiler_version = ">=0.19.4" + +[dependencies] \ No newline at end of file diff --git a/test_programs/execution_success/brillig_set_slice_of_slice/src/main.nr b/test_programs/execution_success/brillig_set_slice_of_slice/src/main.nr new file mode 100644 index 00000000000..c0e9c7d172f --- /dev/null +++ b/test_programs/execution_success/brillig_set_slice_of_slice/src/main.nr @@ -0,0 +1,51 @@ +struct Property +{ + key : [u8], + value : [u8], +} + +struct JSON +{ + doc : [Property] +} + +unconstrained fn slice_eq(self: [u8], other: [u8]) -> bool { + let mut equal = true; + for i in 0..self.len() { + if self[i] != other[i] { + equal = false; + } + } + equal +} + +// This test acts a regression for issue #3476 +unconstrained fn main() { + let mut json = JSON { doc: [] }; + let mut prop = Property { key: [], value:[] }; + + let other_prop = Property { key: [0, 1, 2], value:[10] }; + json.doc = json.doc.push_back(other_prop); + + for i in 0..3 { + prop.key = prop.key.push_back(i as u8); + } + prop.value = prop.value.push_back(5); + + // add property to json or replace existing + let len : Field = json.doc.len(); + let mut found = false; + for i in 0..len + { + if (!found) + { + if (slice_eq(prop.key, json.doc[i].key)) + { + json.doc[i].value = prop.value; + found = true; + } + } + } + assert(found == true); + assert(json.doc[0].value[0] == 5); +} \ No newline at end of file diff --git a/tooling/nargo_cli/tests/execution_success/brillig_sha256/Nargo.toml b/test_programs/execution_success/brillig_sha256/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/brillig_sha256/Nargo.toml rename to test_programs/execution_success/brillig_sha256/Nargo.toml diff --git a/tooling/nargo_cli/tests/execution_success/brillig_sha256/Prover.toml b/test_programs/execution_success/brillig_sha256/Prover.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/brillig_sha256/Prover.toml rename to test_programs/execution_success/brillig_sha256/Prover.toml diff --git a/tooling/nargo_cli/tests/execution_success/brillig_sha256/src/main.nr b/test_programs/execution_success/brillig_sha256/src/main.nr similarity index 100% rename from 
tooling/nargo_cli/tests/execution_success/brillig_sha256/src/main.nr rename to test_programs/execution_success/brillig_sha256/src/main.nr diff --git a/tooling/nargo_cli/tests/execution_success/brillig_slices/Nargo.toml b/test_programs/execution_success/brillig_slices/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/brillig_slices/Nargo.toml rename to test_programs/execution_success/brillig_slices/Nargo.toml diff --git a/tooling/nargo_cli/tests/execution_success/brillig_slices/Prover.toml b/test_programs/execution_success/brillig_slices/Prover.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/brillig_slices/Prover.toml rename to test_programs/execution_success/brillig_slices/Prover.toml diff --git a/tooling/nargo_cli/tests/execution_success/brillig_slices/src/main.nr b/test_programs/execution_success/brillig_slices/src/main.nr similarity index 100% rename from tooling/nargo_cli/tests/execution_success/brillig_slices/src/main.nr rename to test_programs/execution_success/brillig_slices/src/main.nr diff --git a/tooling/nargo_cli/tests/execution_success/brillig_to_be_bytes/Nargo.toml b/test_programs/execution_success/brillig_to_be_bytes/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/brillig_to_be_bytes/Nargo.toml rename to test_programs/execution_success/brillig_to_be_bytes/Nargo.toml diff --git a/tooling/nargo_cli/tests/execution_success/brillig_to_be_bytes/Prover.toml b/test_programs/execution_success/brillig_to_be_bytes/Prover.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/brillig_to_be_bytes/Prover.toml rename to test_programs/execution_success/brillig_to_be_bytes/Prover.toml diff --git a/tooling/nargo_cli/tests/execution_success/brillig_to_be_bytes/src/main.nr b/test_programs/execution_success/brillig_to_be_bytes/src/main.nr similarity index 100% rename from tooling/nargo_cli/tests/execution_success/brillig_to_be_bytes/src/main.nr rename to test_programs/execution_success/brillig_to_be_bytes/src/main.nr diff --git a/tooling/nargo_cli/tests/compile_success_empty/brillig_to_bits/Nargo.toml b/test_programs/execution_success/brillig_to_bits/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/compile_success_empty/brillig_to_bits/Nargo.toml rename to test_programs/execution_success/brillig_to_bits/Nargo.toml diff --git a/tooling/nargo_cli/tests/compile_success_empty/brillig_to_bits/src/main.nr b/test_programs/execution_success/brillig_to_bits/src/main.nr similarity index 100% rename from tooling/nargo_cli/tests/compile_success_empty/brillig_to_bits/src/main.nr rename to test_programs/execution_success/brillig_to_bits/src/main.nr diff --git a/tooling/nargo_cli/tests/execution_success/brillig_to_bytes_integration/Nargo.toml b/test_programs/execution_success/brillig_to_bytes_integration/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/brillig_to_bytes_integration/Nargo.toml rename to test_programs/execution_success/brillig_to_bytes_integration/Nargo.toml diff --git a/tooling/nargo_cli/tests/execution_success/brillig_to_bytes_integration/Prover.toml b/test_programs/execution_success/brillig_to_bytes_integration/Prover.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/brillig_to_bytes_integration/Prover.toml rename to test_programs/execution_success/brillig_to_bytes_integration/Prover.toml diff --git 
a/tooling/nargo_cli/tests/execution_success/brillig_to_bytes_integration/src/main.nr b/test_programs/execution_success/brillig_to_bytes_integration/src/main.nr similarity index 100% rename from tooling/nargo_cli/tests/execution_success/brillig_to_bytes_integration/src/main.nr rename to test_programs/execution_success/brillig_to_bytes_integration/src/main.nr diff --git a/tooling/nargo_cli/tests/execution_success/brillig_to_le_bytes/Nargo.toml b/test_programs/execution_success/brillig_to_le_bytes/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/brillig_to_le_bytes/Nargo.toml rename to test_programs/execution_success/brillig_to_le_bytes/Nargo.toml diff --git a/tooling/nargo_cli/tests/execution_success/brillig_to_le_bytes/Prover.toml b/test_programs/execution_success/brillig_to_le_bytes/Prover.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/brillig_to_le_bytes/Prover.toml rename to test_programs/execution_success/brillig_to_le_bytes/Prover.toml diff --git a/tooling/nargo_cli/tests/execution_success/brillig_to_le_bytes/src/main.nr b/test_programs/execution_success/brillig_to_le_bytes/src/main.nr similarity index 100% rename from tooling/nargo_cli/tests/execution_success/brillig_to_le_bytes/src/main.nr rename to test_programs/execution_success/brillig_to_le_bytes/src/main.nr diff --git a/tooling/nargo_cli/tests/execution_success/brillig_top_level/Nargo.toml b/test_programs/execution_success/brillig_top_level/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/brillig_top_level/Nargo.toml rename to test_programs/execution_success/brillig_top_level/Nargo.toml diff --git a/tooling/nargo_cli/tests/execution_success/brillig_top_level/Prover.toml b/test_programs/execution_success/brillig_top_level/Prover.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/brillig_top_level/Prover.toml rename to test_programs/execution_success/brillig_top_level/Prover.toml diff --git a/tooling/nargo_cli/tests/execution_success/brillig_top_level/src/main.nr b/test_programs/execution_success/brillig_top_level/src/main.nr similarity index 100% rename from tooling/nargo_cli/tests/execution_success/brillig_top_level/src/main.nr rename to test_programs/execution_success/brillig_top_level/src/main.nr diff --git a/tooling/nargo_cli/tests/execution_success/brillig_unitialised_arrays/Nargo.toml b/test_programs/execution_success/brillig_unitialised_arrays/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/brillig_unitialised_arrays/Nargo.toml rename to test_programs/execution_success/brillig_unitialised_arrays/Nargo.toml diff --git a/tooling/nargo_cli/tests/execution_success/brillig_unitialised_arrays/Prover.toml b/test_programs/execution_success/brillig_unitialised_arrays/Prover.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/brillig_unitialised_arrays/Prover.toml rename to test_programs/execution_success/brillig_unitialised_arrays/Prover.toml diff --git a/tooling/nargo_cli/tests/execution_success/brillig_unitialised_arrays/src/main.nr b/test_programs/execution_success/brillig_unitialised_arrays/src/main.nr similarity index 100% rename from tooling/nargo_cli/tests/execution_success/brillig_unitialised_arrays/src/main.nr rename to test_programs/execution_success/brillig_unitialised_arrays/src/main.nr diff --git a/tooling/nargo_cli/tests/execution_success/cast_bool/Nargo.toml b/test_programs/execution_success/cast_bool/Nargo.toml 
similarity index 100% rename from tooling/nargo_cli/tests/execution_success/cast_bool/Nargo.toml rename to test_programs/execution_success/cast_bool/Nargo.toml diff --git a/tooling/nargo_cli/tests/execution_success/cast_bool/Prover.toml b/test_programs/execution_success/cast_bool/Prover.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/cast_bool/Prover.toml rename to test_programs/execution_success/cast_bool/Prover.toml diff --git a/tooling/nargo_cli/tests/execution_success/cast_bool/src/main.nr b/test_programs/execution_success/cast_bool/src/main.nr similarity index 100% rename from tooling/nargo_cli/tests/execution_success/cast_bool/src/main.nr rename to test_programs/execution_success/cast_bool/src/main.nr diff --git a/tooling/nargo_cli/tests/execution_success/closures_mut_ref/Nargo.toml b/test_programs/execution_success/closures_mut_ref/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/closures_mut_ref/Nargo.toml rename to test_programs/execution_success/closures_mut_ref/Nargo.toml diff --git a/tooling/nargo_cli/tests/execution_success/closures_mut_ref/Prover.toml b/test_programs/execution_success/closures_mut_ref/Prover.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/closures_mut_ref/Prover.toml rename to test_programs/execution_success/closures_mut_ref/Prover.toml diff --git a/tooling/nargo_cli/tests/execution_success/closures_mut_ref/src/main.nr b/test_programs/execution_success/closures_mut_ref/src/main.nr similarity index 100% rename from tooling/nargo_cli/tests/execution_success/closures_mut_ref/src/main.nr rename to test_programs/execution_success/closures_mut_ref/src/main.nr diff --git a/tooling/nargo_cli/tests/execution_success/conditional_1/Nargo.toml b/test_programs/execution_success/conditional_1/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/conditional_1/Nargo.toml rename to test_programs/execution_success/conditional_1/Nargo.toml diff --git a/tooling/nargo_cli/tests/execution_success/conditional_1/Prover.toml b/test_programs/execution_success/conditional_1/Prover.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/conditional_1/Prover.toml rename to test_programs/execution_success/conditional_1/Prover.toml diff --git a/tooling/nargo_cli/tests/execution_success/conditional_1/src/main.nr b/test_programs/execution_success/conditional_1/src/main.nr similarity index 100% rename from tooling/nargo_cli/tests/execution_success/conditional_1/src/main.nr rename to test_programs/execution_success/conditional_1/src/main.nr diff --git a/tooling/nargo_cli/tests/execution_success/conditional_2/Nargo.toml b/test_programs/execution_success/conditional_2/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/conditional_2/Nargo.toml rename to test_programs/execution_success/conditional_2/Nargo.toml diff --git a/tooling/nargo_cli/tests/execution_success/conditional_2/Prover.toml b/test_programs/execution_success/conditional_2/Prover.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/conditional_2/Prover.toml rename to test_programs/execution_success/conditional_2/Prover.toml diff --git a/tooling/nargo_cli/tests/execution_success/conditional_2/src/main.nr b/test_programs/execution_success/conditional_2/src/main.nr similarity index 100% rename from tooling/nargo_cli/tests/execution_success/conditional_2/src/main.nr rename to 
test_programs/execution_success/conditional_2/src/main.nr diff --git a/tooling/nargo_cli/tests/execution_success/conditional_regression_421/Nargo.toml b/test_programs/execution_success/conditional_regression_421/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/conditional_regression_421/Nargo.toml rename to test_programs/execution_success/conditional_regression_421/Nargo.toml diff --git a/tooling/nargo_cli/tests/execution_success/conditional_regression_421/Prover.toml b/test_programs/execution_success/conditional_regression_421/Prover.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/conditional_regression_421/Prover.toml rename to test_programs/execution_success/conditional_regression_421/Prover.toml diff --git a/tooling/nargo_cli/tests/execution_success/conditional_regression_421/src/main.nr b/test_programs/execution_success/conditional_regression_421/src/main.nr similarity index 100% rename from tooling/nargo_cli/tests/execution_success/conditional_regression_421/src/main.nr rename to test_programs/execution_success/conditional_regression_421/src/main.nr diff --git a/tooling/nargo_cli/tests/execution_success/conditional_regression_661/Nargo.toml b/test_programs/execution_success/conditional_regression_661/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/conditional_regression_661/Nargo.toml rename to test_programs/execution_success/conditional_regression_661/Nargo.toml diff --git a/tooling/nargo_cli/tests/execution_success/conditional_regression_661/Prover.toml b/test_programs/execution_success/conditional_regression_661/Prover.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/conditional_regression_661/Prover.toml rename to test_programs/execution_success/conditional_regression_661/Prover.toml diff --git a/tooling/nargo_cli/tests/execution_success/conditional_regression_661/src/main.nr b/test_programs/execution_success/conditional_regression_661/src/main.nr similarity index 100% rename from tooling/nargo_cli/tests/execution_success/conditional_regression_661/src/main.nr rename to test_programs/execution_success/conditional_regression_661/src/main.nr diff --git a/tooling/nargo_cli/tests/execution_success/conditional_regression_short_circuit/Nargo.toml b/test_programs/execution_success/conditional_regression_short_circuit/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/conditional_regression_short_circuit/Nargo.toml rename to test_programs/execution_success/conditional_regression_short_circuit/Nargo.toml diff --git a/tooling/nargo_cli/tests/execution_success/conditional_regression_short_circuit/Prover.toml b/test_programs/execution_success/conditional_regression_short_circuit/Prover.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/conditional_regression_short_circuit/Prover.toml rename to test_programs/execution_success/conditional_regression_short_circuit/Prover.toml diff --git a/tooling/nargo_cli/tests/execution_success/conditional_regression_short_circuit/src/main.nr b/test_programs/execution_success/conditional_regression_short_circuit/src/main.nr similarity index 100% rename from tooling/nargo_cli/tests/execution_success/conditional_regression_short_circuit/src/main.nr rename to test_programs/execution_success/conditional_regression_short_circuit/src/main.nr diff --git a/test_programs/execution_success/conditional_regression_underflow/Nargo.toml 
b/test_programs/execution_success/conditional_regression_underflow/Nargo.toml new file mode 100644 index 00000000000..75c4fb43b2f --- /dev/null +++ b/test_programs/execution_success/conditional_regression_underflow/Nargo.toml @@ -0,0 +1,6 @@ +[package] +name = "conditional_regression_underflow" +type = "bin" +authors = [""] + +[dependencies] diff --git a/tooling/nargo_cli/tests/execution_success/conditional_regression_underflow/Prover.toml b/test_programs/execution_success/conditional_regression_underflow/Prover.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/conditional_regression_underflow/Prover.toml rename to test_programs/execution_success/conditional_regression_underflow/Prover.toml diff --git a/test_programs/execution_success/conditional_regression_underflow/src/main.nr b/test_programs/execution_success/conditional_regression_underflow/src/main.nr new file mode 100644 index 00000000000..a101af32505 --- /dev/null +++ b/test_programs/execution_success/conditional_regression_underflow/src/main.nr @@ -0,0 +1,15 @@ +// Regression test for https://github.com/noir-lang/noir/issues/3493 +fn main(x: u4) { + if x == 10 { + x + 15; + } + if x == 9 { + x << 3; + } + if x == 8 { + x * 3; + } + if x == 7 { + x - 8; + } +} diff --git a/tooling/nargo_cli/tests/execution_success/custom_entry/Nargo.toml b/test_programs/execution_success/custom_entry/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/custom_entry/Nargo.toml rename to test_programs/execution_success/custom_entry/Nargo.toml diff --git a/tooling/nargo_cli/tests/execution_success/custom_entry/Prover.toml b/test_programs/execution_success/custom_entry/Prover.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/custom_entry/Prover.toml rename to test_programs/execution_success/custom_entry/Prover.toml diff --git a/tooling/nargo_cli/tests/execution_success/custom_entry/src/foobarbaz.nr b/test_programs/execution_success/custom_entry/src/foobarbaz.nr similarity index 100% rename from tooling/nargo_cli/tests/execution_success/custom_entry/src/foobarbaz.nr rename to test_programs/execution_success/custom_entry/src/foobarbaz.nr diff --git a/test_programs/execution_success/databus/Nargo.toml b/test_programs/execution_success/databus/Nargo.toml new file mode 100644 index 00000000000..72360f7aefe --- /dev/null +++ b/test_programs/execution_success/databus/Nargo.toml @@ -0,0 +1,6 @@ +[package] +name = "databus" +type = "bin" +authors = [""] + +[dependencies] diff --git a/test_programs/execution_success/databus/Prover.toml b/test_programs/execution_success/databus/Prover.toml new file mode 100644 index 00000000000..2d034508dd8 --- /dev/null +++ b/test_programs/execution_success/databus/Prover.toml @@ -0,0 +1,3 @@ +x = "3" +y = "4" +z = [1,2,3,4] diff --git a/test_programs/execution_success/databus/src/main.nr b/test_programs/execution_success/databus/src/main.nr new file mode 100644 index 00000000000..631331ef2d7 --- /dev/null +++ b/test_programs/execution_success/databus/src/main.nr @@ -0,0 +1,10 @@ +// Tests the databus: `y` and `z` are passed via `call_data` and the result is returned via `return_data`. +// The program returns z[x] + y. +use dep::std; + +fn main(mut x: u32, y: call_data u32, z: call_data [u32;4] ) -> return_data u32 { + +let a = z[x]; + a+y + +} diff --git a/tooling/nargo_cli/tests/execution_success/debug_logs/Nargo.toml b/test_programs/execution_success/debug_logs/Nargo.toml similarity index 100% rename from
tooling/nargo_cli/tests/execution_success/debug_logs/Nargo.toml rename to test_programs/execution_success/debug_logs/Nargo.toml diff --git a/tooling/nargo_cli/tests/execution_success/debug_logs/Prover.toml b/test_programs/execution_success/debug_logs/Prover.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/debug_logs/Prover.toml rename to test_programs/execution_success/debug_logs/Prover.toml diff --git a/tooling/nargo_cli/tests/execution_success/debug_logs/src/main.nr b/test_programs/execution_success/debug_logs/src/main.nr similarity index 100% rename from tooling/nargo_cli/tests/execution_success/debug_logs/src/main.nr rename to test_programs/execution_success/debug_logs/src/main.nr diff --git a/tooling/nargo_cli/tests/execution_success/diamond_deps_0/Nargo.toml b/test_programs/execution_success/diamond_deps_0/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/diamond_deps_0/Nargo.toml rename to test_programs/execution_success/diamond_deps_0/Nargo.toml diff --git a/tooling/nargo_cli/tests/execution_success/diamond_deps_0/Prover.toml b/test_programs/execution_success/diamond_deps_0/Prover.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/diamond_deps_0/Prover.toml rename to test_programs/execution_success/diamond_deps_0/Prover.toml diff --git a/tooling/nargo_cli/tests/execution_success/diamond_deps_0/src/main.nr b/test_programs/execution_success/diamond_deps_0/src/main.nr similarity index 100% rename from tooling/nargo_cli/tests/execution_success/diamond_deps_0/src/main.nr rename to test_programs/execution_success/diamond_deps_0/src/main.nr diff --git a/tooling/nargo_cli/tests/execution_success/distinct_keyword/Nargo.toml b/test_programs/execution_success/distinct_keyword/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/distinct_keyword/Nargo.toml rename to test_programs/execution_success/distinct_keyword/Nargo.toml diff --git a/tooling/nargo_cli/tests/execution_success/distinct_keyword/Prover.toml b/test_programs/execution_success/distinct_keyword/Prover.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/distinct_keyword/Prover.toml rename to test_programs/execution_success/distinct_keyword/Prover.toml diff --git a/tooling/nargo_cli/tests/execution_success/distinct_keyword/src/main.nr b/test_programs/execution_success/distinct_keyword/src/main.nr similarity index 100% rename from tooling/nargo_cli/tests/execution_success/distinct_keyword/src/main.nr rename to test_programs/execution_success/distinct_keyword/src/main.nr diff --git a/tooling/nargo_cli/tests/execution_success/double_verify_proof/Nargo.toml b/test_programs/execution_success/double_verify_proof/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/double_verify_proof/Nargo.toml rename to test_programs/execution_success/double_verify_proof/Nargo.toml diff --git a/test_programs/execution_success/double_verify_proof/Prover.toml b/test_programs/execution_success/double_verify_proof/Prover.toml new file mode 100644 index 00000000000..dff48212e50 --- /dev/null +++ b/test_programs/execution_success/double_verify_proof/Prover.toml @@ -0,0 +1,5 @@ +key_hash = "0x096129b1c6e108252fc5c829c4cc9b7e8f0d1fd9f29c2532b563d6396645e08f" +proof = 
["0x000000000000000000000000000000d62b795bec274279129a71195796825fcc","0x00000000000000000000000000000000000793ab763140f20a68a6bd2721fd74","0x00000000000000000000000000000053141d06d3307b36153f321511199e579c","0x00000000000000000000000000000000000a4b55d6c21f98a9c434911dcb5c67","0x0000000000000000000000000000005f9d324c0abd22cec92d99dbec438e9491","0x0000000000000000000000000000000000240dfafe1b53dc27147cbab14ea893","0x000000000000000000000000000000044a61d3aac32c6931247cf334a19d9611","0x000000000000000000000000000000000003f0f8cf4207bfa85c23ec9f8d0c88","0x00000000000000000000000000000002168a470e39ba2ac266f6b474de12045f","0x000000000000000000000000000000000025791e7d3feab542345c00ec5a30df","0x000000000000000000000000000000dcafd76d4c3640969c80e017b951ef6397","0x00000000000000000000000000000000001d27f75a1256771e88e0c86fc42dbc","0x0000000000000000000000000000007347ae7d2d9d7fc2b8f0baa014ee1fed9f","0x000000000000000000000000000000000018bd927f42bf7caf9555f56f09000d","0x000000000000000000000000000000041f765f83cbe5904c8f453f70a4531d10","0x00000000000000000000000000000000001858aabeeb5331a221419f4fed1c19","0x000000000000000000000000000000d254a54caaedf8287b9af951b2f2611121","0x000000000000000000000000000000000005ab493623c9563cf2e55ba5f18200","0x00000000000000000000000000000014f24cddc1a02440dc63637df8032c8074","0x000000000000000000000000000000000011950c16cef98471b1d78b935195a4","0x000000000000000000000000000000b0340b459e6bd5cc8f031c8654a502897f","0x00000000000000000000000000000000000e1cf3968dac4545a76a2ae58e512c","0x0000000000000000000000000000002adf7218aa06ca0d2c2e600dcc39193a2d","0x00000000000000000000000000000000001302e7e4b0f14749bd885ca25588b6","0x00000000000000000000000000000092009ce4056e79ab815d8cdfd4491138ae","0x000000000000000000000000000000000018af11e853c6cf2f0f6274b0da8133","0x000000000000000000000000000000dd3dc6f49232141718527b3a0e4b26e21d","0x00000000000000000000000000000000001a877853348a8b695c4f9a9aa4ce68","0x000000000000000000000000000000aecfc56ba07155450b368140d6324023b5","0x000000000000000000000000000000000029c11052798c57ece614617d33fcc2","0x000000000000000000000000000000eb106ffc816d16fb84e84b0b61157b2603","0x000000000000000000000000000000000026c3cac16206899a21cb5126841446","0x000000000000000000000000000000a782ed54805fe845068b362b58e2fa34ec","0x00000000000000000000000000000000000cf046a1bfcc666b7f28b572676073","0x000000000000000000000000000000b931c8dda60bb4aca4cc817f5540f1209f","0x000000000000000000000000000000000024ad50c3936fafc3d190e6a4874223","0x000000000000000000000000000000cce90cfbaf5671c8c8652db28a3a9566f7","0x000000000000000000000000000000000003574db9d0f84380c9635660f86354","0x0000000000000000000000000000003eb3e1dc31846a90f721e7a08c6d6dc4f7","0x000000000000000000000000000000000028999a700cd1abae1a288eebb9a91c","0x000000000000000000000000000000c1be4d385b11387e14eb9817050d772f78","0x000000000000000000000000000000000003c56b5bad8b4484c66ac921f1f102","0x000000000000000000000000000000ace245cabf0f00dc7fd253dd8af0377a14","0x0000000000000000000000000000000000107f1731fcf34b364c813599fa1df7","0x035b937d404932b542b706eb810ef4a7dca4566d4dde1ad6a8717f46167ead7e","0x17608cef3dc7960f41cb1295706df663727d45ee598a61e05e989d111449fb65","0x054712a950ad67da3aa860e49e6891f99b586b7f37caff94eb013fdb374b61ee","0x04b755083086c769b7f593e0e48d68dc54be808203351380ca5566a48149d8bb","0x17d7670b0915235f626fdc1d7e1134d2be906ef138d7843384b3ebc23b1d630f","0x064cf544ab5f4e3dab47960502cccc83321fb275068dfbdd3a2fcbc6dddcaa65","0x083338262712e2b66769ea40d9f412b18caa1bc81a51ff5a50b6c41f8c4b3d23","0x0cdd38958cab97defde00f4a5961b6fd
676e29d9f2c352f6bb2c68b91f83f8af","0x02c8bdd005c2f43a0a8cbb2744916ce5c322dfa5b23367a829c12699f4036d32","0x25bac73c7e7b659fbea3135b7a0decf9db8dc3045bd2837dae337c64cc722546","0x19eb361aa419d37bce3d2e8b2b7692a02a9559e83d7f3d8fe9169970fbbc2cba","0x2494bd5106d00e05c7ea60e632e9fe03773b7f2c5b662aa37ec512a01f4a0775","0x18c52c2f2c6e7be1d7847c15e452a3a9c64316103d12e4b5b9a82fac4e940ee9","0x0e0342810456ef78f498c1bfa085a5f3cbc06db1f32fabd0ea9ad27dccac1680","0x024c13d6ef56af33ed7164ea8e47ddecc8a487b000d8b1b45edcd3895a503ba2","0x26e0d127f626bd39b55bc5d0c131dbf03fe006dc5c3edc57dda1e629799a4317","0x1b1140061bc52b15c4f5e100729a81968ee79dc03deb966a18850335a8e44a8b","0x1bb76f945199e71d531a89288912087a02dd0e83020e65d671485bf2e5e86e1a","0x29269900859c6d86e404185b415bf3b279cd100f38cfdb0077e8d6a299c4fd35","0x22b5e94bae2f6f0cdb424a3b12c4bf82cec3fb228e012c1974ed457827bbe012","0x18d3543a93249778e7a57936170dae85ffc47c2567f2d0076a32c0bb86fcf10a","0x03721dc2670206cde42a175fd56bcce32cf6cb8801450a8e8e4b3d4e07785973","0x2806db136dd214d3ac1478460855cae6a4324ab45cab35320d104fee26c260e8","0x1c3749f1937082afbbae9375b9be708cf339e1983e57ef4447f36cfa560c685c","0x1067b8cfb90ef08bcb48aea56b2716334241787c2004a95682d68a0685566fd0","0x0f41aee4416398f1d48ffc302403273cddef34a41f98507c53682041d82e51ff","0x10d854c9f0bfbdff7ca91a68f4978e9a79e7b14243d92f465f17bdf88d9f64f8","0x00000000000000000000000000000000018938b11099e0cdc05ddab84a153a97","0x0000000000000000000000000000000001d7dda1471f0dc3b3a3d3438c197982","0x00000000000000000000000000000000022682917da43ab9a6e9cbcece1db86d","0x2453913e6b0f36eab883ac4b0e0604d56aaeb9c55e641135173e63c342f1a660","0x05216c1b58dc43a49d01aaba3113b0e86be450fc17d28016e648e7162a1b67fb","0x152b34845a0222a2b41354c0d395a250d8363dc18748647d85acd89d6934ec56","0x1dfc6e971ce82b7dcda1f7f282713c6e22a8c79258a61209bda69719806da544","0x2968dd8b3af8e3953f1fbbd72f4c49b8270597bb27d4037adc157ac6083bee60","0x1b9425b88a4c7d39b3d75afe66917a9aa1d2055724392bc01fb918d84ff1410e","0x04ab571f236d8e750904dc307dd274003d9130f1a7110e4c1521cfb408877c73","0x2ad84f26fdc5831545272d02b806bb0e6dae44e71f73552c4eb9ff06030748c7","0x020e632b99d325db774b8630fb50b9a4e74d35b7f27d9fc02c65087ee747e42c","0x09a8c5a3171268cb61c02515c01c109889200ed13f415ae54df2078bbb887f92","0x1143281a9451abbb4c34c3fa84e7678c2af2e7ea8c05160a6f7f06988fc91af8","0x000000000000000000000000000000cbda736ca5cf6bc75413c2cc9e28ab0a68","0x00000000000000000000000000000000001ee78c9cc56aa5991062ae2e338587","0x000000000000000000000000000000bc9bfcdebb486f4cb314e681d2cc5f8df6","0x00000000000000000000000000000000000ad538431d04771bca7f633cb659ff","0x000000000000000000000000000000d45b317afcefa466a59bba9e171f1af70c","0x0000000000000000000000000000000000133c50180ea17932e4881124e7a7c6","0x000000000000000000000000000000fc9ed37f543775849f3e84eaa06f77f992","0x00000000000000000000000000000000001372873c9c051d1baff99248b8f70e"] +public_inputs = ["0x0000000000000000000000000000000000000000000000000000000000000003"] +verification_key = 
["0x2b337de1c8c14f22ec9b9e2f96afef3652627366f8170a0a948dad4ac1bd5e80","0x0000000000000000000000000000000000000000000000000000000000000008","0x0000000000000000000000000000000000000000000000000000000000000005","0x0000000000000000000000000000000000000000000000000000000000000008","0x0000000000000000000000000000000000000000000000000000000000000001","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x00000000000000000000000000000092139c61bae1a44f0fc7689507414be688","0x00000000000000000000000000000000000160ce4e279582f91bde4f03f5e9a2","0x0000000000000000000000000000005dc2d37f658c3b2d60f24740eb13b65d79","0x000000000000000000000000000000000007e3e8a5d98a1177ec85bf88f163a5","0x000000000000000000000000000000dc3035fbd7ff16412a8fd7da587a935298","0x000000000000000000000000000000000023d08e2817ac16990004ed11d8fc66","0x000000000000000000000000000000356a5ad59c646c746a8d09f5d154e47c4f","0x00000000000000000000000000000000000708529196af3c8e16ffa580c26182","0x0000000000000000000000000000002ddfe70eb7a1280596e8e4a804f118a6dd","0x000000000000000000000000000000000013757e15a0905f298303784a161b21","0x000000000000000000000000000000a23a729df796935c7824e3a26be794829b","0x000000000000000000000000000000000005775b6c146c4a59856e869fe5a70e","0x000000000000000000000000000000eef0c9e088fd2d45aa40311082d1f2809b","0x00000000000000000000000000000000001d539ccbfc556d0ad59307a218de65","0x000000000000000000000000000000a2c848beceb6ab7806fd3b88037b8410fc","0x0000000000000000000000000000000000177004deeb1f9d401fd7b1af1a5ac8","0x0000000000000000000000000000002508eb63672a733f20de1a97644be4f540","0x00000000000000000000000000000000000d82d51f2f75d806285fd248c819b8","0x000000000000000000000000000000d002f9100cbba8a29f13b11513c53c59d0","0x000000000000000000000000000000000006cd3b0e3460533b9e5ea2cdc0fcbb","0x000000000000000000000000000000f45ea38a93b2f810c5633ddb54927c1c96","0x000000000000000000000000000000000021791de65f9a28ec7024b1a87ab4f3","0x000000000000000000000000000000926511a0439502c86885a8c6f0327aa7ad","0x000000000000000000000000000000000029fa14a969c5d81ed3abbbfb11220a","0x000000000000000000000000000000b84c3258e8206f560e5b5b18cbeafef87e","0x00000000000000000000000000000000002a910445cd8fc895e5d235cd8ea185","0x000000000000000000000000000000887e67f15e84bcb8507a5064a363f6043b","0x000000000000000000000000000000000014dc6643d801c3ef27c2066b6e2bb4","0x000000000000000000000000000000e38e900b42c314ba803088e8fbf125203f","0x00000000000000000000000000000000
0020690fd4869db418306046b38161dc","0x0000000000000000000000000000001e2fa856bf7951b8292b1e88185993629c","0x0000000000000000000000000000000000048a85e0bbac7c60ad3d78f601f63c","0x0000000000000000000000000000006f457719495073d3666d77a625aeab0c51","0x00000000000000000000000000000000002623ad892dc62b1fa7d0a650f0d470","0x000000000000000000000000000000dbfcc8a467e021c03b13f74a9f79c3a10c","0x0000000000000000000000000000000000295f6f10976c37bd9c6f96bb7187d5","0x000000000000000000000000000000c13ef9a937cc12420fb38d9ab8e848e85e","0x000000000000000000000000000000000003560a3b334e887532f605c9cb7628","0x0000000000000000000000000000009bcebf08a4599cdda0fb96312d4dc0c7a9","0x000000000000000000000000000000000015adc8bb1e01c835f48959d1237bd6","0x00000000000000000000000000000047762ab839e4ff63c77605a9f383da37c2","0x000000000000000000000000000000000016a8c3c53d89660cf271522cd301fb","0x000000000000000000000000000000f0c8539a0b5f94420a513f9c305b932bfe","0x00000000000000000000000000000000002957ba01d9de5638f808f88a692533","0x000000000000000000000000000000ab17c6189d67d3bf5dd2f3885de0151b6f","0x0000000000000000000000000000000000060d8aa43fdc434d1942263f364d95","0x0000000000000000000000000000005d292333b3adb497f00b4bc32d45229060","0x00000000000000000000000000000000001a1018a66221883639f2898a66f345","0x00000000000000000000000000000006555a806b1993291deba0dc44e2abf431","0x00000000000000000000000000000000000cacff7099a9d5e35a21f4a00b2dc3","0x000000000000000000000000000000f50c11ba95d349c36d143eefd12e494950","0x00000000000000000000000000000000001022e8c5f02d639bc9dd8bc4407f99","0x000000000000000000000000000000c76828795098eda73d50b4b585c60afc60","0x00000000000000000000000000000000002bf09c0ec7011e93888962f2406630","0x00000000000000000000000000000049e5c83a8978d832fb8e144548e3ca1adb","0x00000000000000000000000000000000000e0ec242c2e160a984f61ca5adf5f5","0x0000000000000000000000000000009c5d6e08a6605ab4513748ac0fa017dd1c","0x00000000000000000000000000000000001f54baa07558e5fb055bd9ba49c067","0x0000000000000000000000000000001e1ee7ee29bbb5e4b080c6091c1433ce62","0x000000000000000000000000000000000024aec62a9d9763499267dc98c33428","0x0000000000000000000000000000001a96755946ff16f0d6632365f0eb0ab4d4","0x000000000000000000000000000000000028cf3e22bcd53782ebc3e0490e27e5","0x00000000000000000000000000000043148d7d8c9ba43f2133fab4201435a364","0x0000000000000000000000000000000000234ce541f1f5117dd404cfaf01a229","0x000000000000000000000000000000a7fb95ffb461d9514a1070e2d2403982ef","0x00000000000000000000000000000000003016955028b6390f446c3fd0c5b424","0x00000000000000000000000000000008863c3b7cd7cddc20ba79ce915051c56e","0x000000000000000000000000000000000013ef666111b0be56a235983d397d2a","0x000000000000000000000000000000e3993f465fc9f56e93ac769e597b752c1c","0x0000000000000000000000000000000000217f7c4235161e9a3c16c45b6ca499","0x0000000000000000000000000000008ffa4cd96bc67b0b7df5678271e1114075","0x0000000000000000000000000000000000256467bfcb63d9fdcb5dde397757ad","0x00000000000000000000000000000054e5eb270bb64bde6e6ececadfd8c3236c","0x00000000000000000000000000000000000e52d1bd75812c33c6f3d79ee4b94c","0x000000000000000000000000000000484a2c641dce55bc2dd64ef0cd790a7fea","0x00000000000000000000000000000000000ff417d256be43e73c8b1aa85bdda3","0x0000000000000000000000000000000b72e7b7713ab5da44e0f864182e748a23","0x00000000000000000000000000000000001a221055f1625ad833a44705f5f74e","0x00000000000000000000000000000067a99a34e9b81a17ad001db02e29bcb82a","0x000000000000000000000000000000000018a6c02e398389827568fa960e86e2","0x000000000000000000000000000000bb29f26f9890d6cc6401f4921d5884edca","0
x00000000000000000000000000000000000868357b28039385c5a5058b6d358e","0x00000000000000000000000000000036fb6e229dde8edf7ec858b12d7e8be485","0x00000000000000000000000000000000001060afe929554ca473103f5e68193c","0x00000000000000000000000000000015226e07e207744c0857074dcab883af4a","0x00000000000000000000000000000000000b1c02619282755533457230b19b4a","0x0000000000000000000000000000001f2a0277e4807e6e1cbabca21dde5eb5e1","0x00000000000000000000000000000000000d928deafed363659688ed4ccdef52","0x000000000000000000000000000000363f0c994e91cecad25835338edee2294f","0x00000000000000000000000000000000002eea648c8732596b1314fe2a4d2f05","0x000000000000000000000000000000b2671d2ae51d31c1210433c3972bb64578","0x00000000000000000000000000000000000ab49886c2b94bd0bd3f6ed1dbbe2c"] +proof_b = ["0x000000000000000000000000000000f05c69448ca29bdf52076f9b073bb30fed","0x000000000000000000000000000000000028c86bb3e27b4aaaaef126f7df5349","0x00000000000000000000000000000026ae031fc93594375dfc7f3bbe027f97d5","0x000000000000000000000000000000000000dd12c7290fe7f775796a233b8590","0x000000000000000000000000000000c1ee6631704de424d010c5c4ac8293ac49","0x00000000000000000000000000000000002f41818c9aa83f5c8d9bdd128015b9","0x000000000000000000000000000000b50a5801482f7e3a5de8ab3cce0f10b0d3","0x000000000000000000000000000000000022a0bc69c293dbf293b25bc9eef7f8","0x0000000000000000000000000000003b02abf1967ef394154dc15d763135e903","0x00000000000000000000000000000000000d8a2ee46acc6d1ed8d517b56d47c8","0x00000000000000000000000000000039bf0d1b3d8cf9de898f101c626e978d78","0x0000000000000000000000000000000000008faa7df2451a24d291a9b584f1a5","0x000000000000000000000000000000c1dae329ed7adf63a2d89a5f16fb98b6d8","0x00000000000000000000000000000000001ff0bc16fc0bd4aa2d6255690453c2","0x000000000000000000000000000000d12d7589f853a9b472613efa56689beaf1","0x00000000000000000000000000000000002d6fbc798f4403751df6aeee8bedd3","0x0000000000000000000000000000007c1fa069cb17194fecf88db9dd54a4ee36","0x0000000000000000000000000000000000268e026f9814822a42b2d59eec5d24","0x000000000000000000000000000000c3fb56beab774218cd63498fc050a5fd9b","0x00000000000000000000000000000000000071c014d7b5063f005a0bc2ee1af4","0x000000000000000000000000000000ae12b25371c6af42bbe0a85cddd2eaebc7","0x000000000000000000000000000000000026d270e1ffc9c7c344c694dfadda83","0x00000000000000000000000000000080280858c6be461716921caa3c26f3f6f3","0x000000000000000000000000000000000001dcdd3f39e27d0ce6aa5d14dff4c1","0x000000000000000000000000000000080e1d2c913c834ebcf7e0600c076c08fd","0x00000000000000000000000000000000002df3d142217694e65fb7c355d62764","0x000000000000000000000000000000e5e336f3f59d77e500f49771bfbeb12e83","0x000000000000000000000000000000000028fffe08bdc4c0690643d2e1a1275f","0x000000000000000000000000000000db5618b32afc13e18f21b39f3fbede9d11","0x00000000000000000000000000000000001d244818370d43fb7e8bc67e03787b","0x0000000000000000000000000000006bcc1fd3f9f78449ad1df1bc11bc379edd","0x000000000000000000000000000000000009ac9cbb285edbf5b3a973f3f5f1cb","0x000000000000000000000000000000fd885905b6c0fc95bb4dd0b11f6797d4b3","0x000000000000000000000000000000000021f07995cdd835145e19c38127c562","0x000000000000000000000000000000bbbf2b975c2c97ae4b45c4a52059e53ee3","0x000000000000000000000000000000000024158163788841cf4590bbc1e89a90","0x0000000000000000000000000000009aca93d2b1386ea412d4b36ea5bb9894a8","0x00000000000000000000000000000000002532d1d210e8ed4c2f5c00cbaaa475","0x000000000000000000000000000000634a88caa1d77cb6b5fe77cac31458fc31","0x00000000000000000000000000000000000bdf18bae92fce7cfddab5520cac6e","0x0000000000000000000000
00000000622e9626255170ccec77602c755aa193e1","0x000000000000000000000000000000000001d4edba370e04436a988bad05dada","0x000000000000000000000000000000b52934323a0aec8f803cdaafee2ab7bfb2","0x0000000000000000000000000000000000155312af5e0e25ca9fd61aef9e58ed","0x06270b517855f6f6a608e432883d1d1030a12a1e33022dc142b7728691421da2","0x2af7c794d7b720b25eb1df0afd8c8e3c15b6e518194c3caea7966a5f8210ff04","0x073fe573aeb27d81a5713be93e1365390dcbc3c8e7439ff1d36a84cc014f5642","0x11351b961147431e54535248b58b35cf5cddb9b13827899167617d7a96794d64","0x297c9421c9c3db286770787c35b86bc41583386491b4ae55e5fa81aefa21efc4","0x0f4eeca3ff4a3495f859898937688652d33f9b4dd3e003e12adf15278e0997c3","0x133e3d8b82721d40d919f2326810ba6f07eff3f7d20d86b2bde692a811522019","0x2c502f53c9698b73bb8c8f9b9cf2d705d16a64a7040348b4b39c637a2064316c","0x0cbc1971e1c566cde9d9125c91cdc88e817db182692f836c1a5170a6246eaf73","0x12c47793e7db706c637cd4b4d96d227f569850176b852b1fe8ad522ddb38ef0e","0x0cd7b300e9309a135285be1aeb02b152f97931a7357ab6d609a2cb1970aab877","0x2a7789dfe286c9d0a7592f1c9316e730cb14c9d843aefc4764d76e7f8571c96a","0x248ac54ce3dbf37796621882a4ac76046df5ab680da487fd85cce76b1ae392d3","0x149d1d07cebe320f77b03533e34912545cedeae62bd9778d37724728762b5710","0x00fe29daebdaed61309790e70e2dcefa3f3af4c6c965ce424b8dbcf09b8e4b49","0x2b75b3bace61b731d7f0c003a144b62b0a4fbe9f0d14ca89b0652b70210014b3","0x2588ef27cfb6e0d8c6f9a969b2da44fead30a02ed70a563fd15aa45bb671de1c","0x2b74d7674b55642697b4a1e226eddb0e4918b2d57aa5b99093dc46cadcdea000","0x244c626845d3a5040f08f01e9611f968ad675ca857789149b13a0cfa83a2e064","0x2cb8d02f90cae33fd7bcfb80af4aff067c4f5fc4b3f9228d5b8f768bc8f6c971","0x1372f3d1f04e0c39a50e823d5da03d70bebe19a1b8e28f8c2ff601cc0bfc0095","0x19af6601d2613426a50b7c35d60562a5f2f2634e6af56dac13459632e15570ee","0x13c2a16ed3b65dcd9414659be79af17995d344de34eaf962343b0f1e76c73a57","0x0dd5dcdbd50b8774831d4f01f930804d38b4266dfee085185530880a0c3903c0","0x07e91848d660b11b722638680ac60f20db9507fdc8d610ce762600f5a1aacd29","0x1f9c2a94d10c0a7fb60292cfc46fd3d2501181bea0ffe1f5f2501d474be3a785","0x14edb9c5bd389eae08a5ea2a7a1662894e1e878c142084d966a625bef68cf7c3","0x00000000000000000000000000000000cecd01810814d175f0a533f0067618c4","0x00000000000000000000000000000000f82935013ce5c82720c63e533af41db8","0x000000000000000000000000000000012185688171b6bed850e748b66f7222ac","0x2dd7f5ff2150155c2ac86ebe28d9ecbca2eea812b0021ab2bceae111cfea8325","0x04ea6c2daf2b9e827d2213c3d03953410dcf1ed67ba34a3c00e772be92606a8b","0x163f2bd18dcde52f99b9867c944780fd718d1612927053b139b280fc55013d1b","0x05e388fd160ccac30a8f7b18a4bd042f705e92b5937e8c0e9478e2ff623907c6","0x00ba3f6f527d6ed3ff17a63b1d5be3c42bdfae88fdf63311fc7b871157939309","0x16187d9daa8c2e5a1a9ab15be7ca6a8feebfb31bea76f9a3ca69381881c70561","0x0f64522e4904edb7377b14a7b9dad848829167324ef5c016346b3ad8251191ee","0x273bbe6000a4001dce369e5a36cc0b0ca3fd351665b688238aa8c556a6ca6b8e","0x022d2232efb2faa8307846c9a4c697aabad1b7f1336b35ad72fa8922975b49d9","0x0d82d478bff3955c4b0a34ef94427ca5f9da23147ad953c89f2e428277ec2825","0x18d886be90343010659c231583be61a138e28e37c24771e3cb61fbe2587d0671","0x000000000000000000000000000000196ba6a58dbeb7c34cb1d6287e23d434de","0x00000000000000000000000000000000001df8ae8a1589590f8863c1fefd8dfd","0x000000000000000000000000000000f30e11b2c5fbefa166cbb9f58c5f8e1a4c","0x000000000000000000000000000000000026420ade7666bc0ab1cf1fd9d0c534","0x0000000000000000000000000000000feb5b7d8260d25a1ee1ce76ff461673fc","0x00000000000000000000000000000000002bd2ac6223a80671b777bf5dca70a4","0x000000000000000000000000000000690f757006d2fa1ddb0114c9f268
783537","0x000000000000000000000000000000000023ad36feadd91e50118f32e97a0204"] \ No newline at end of file diff --git a/test_programs/execution_success/double_verify_proof/src/main.nr b/test_programs/execution_success/double_verify_proof/src/main.nr new file mode 100644 index 00000000000..ce087dc4e61 --- /dev/null +++ b/test_programs/execution_success/double_verify_proof/src/main.nr @@ -0,0 +1,28 @@ +use dep::std; + +fn main( + verification_key: [Field; 114], + // This is the proof without public inputs attached. + // + // This means: the size of this does not change with the number of public inputs. + proof: [Field; 93], + public_inputs: [Field; 1], + // This is currently not public. It is fine given that the vk is a part of the circuit definition. + // I believe we want to eventually make it public too though. + key_hash: Field, + proof_b: [Field; 93] +) { + std::verify_proof( + verification_key.as_slice(), + proof.as_slice(), + public_inputs.as_slice(), + key_hash + ); + + std::verify_proof( + verification_key.as_slice(), + proof_b.as_slice(), + public_inputs.as_slice(), + key_hash + ); +} diff --git a/tooling/nargo_cli/tests/execution_success/ecdsa_secp256k1/Nargo.toml b/test_programs/execution_success/ecdsa_secp256k1/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/ecdsa_secp256k1/Nargo.toml rename to test_programs/execution_success/ecdsa_secp256k1/Nargo.toml diff --git a/tooling/nargo_cli/tests/execution_success/ecdsa_secp256k1/Prover.toml b/test_programs/execution_success/ecdsa_secp256k1/Prover.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/ecdsa_secp256k1/Prover.toml rename to test_programs/execution_success/ecdsa_secp256k1/Prover.toml diff --git a/test_programs/execution_success/ecdsa_secp256k1/src/main.nr b/test_programs/execution_success/ecdsa_secp256k1/src/main.nr new file mode 100644 index 00000000000..2f410755f74 --- /dev/null +++ b/test_programs/execution_success/ecdsa_secp256k1/src/main.nr @@ -0,0 +1,16 @@ +use dep::std; + +fn main( + message: [u8;38], + hashed_message: [u8;32], + pub_key_x: [u8;32], + pub_key_y: [u8;32], + signature: [u8;64] +) { + // Hash the message, since secp256k1 expects a hashed_message + let expected = std::hash::sha256(message); + assert(hashed_message == expected); + + let valid_signature = std::ecdsa_secp256k1::verify_signature(pub_key_x, pub_key_y, signature, hashed_message); + assert(valid_signature); +} diff --git a/tooling/nargo_cli/tests/execution_success/ecdsa_secp256r1/Nargo.toml b/test_programs/execution_success/ecdsa_secp256r1/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/ecdsa_secp256r1/Nargo.toml rename to test_programs/execution_success/ecdsa_secp256r1/Nargo.toml diff --git a/test_programs/execution_success/ecdsa_secp256r1/Prover.toml b/test_programs/execution_success/ecdsa_secp256r1/Prover.toml new file mode 100644 index 00000000000..a45f799877b --- /dev/null +++ b/test_programs/execution_success/ecdsa_secp256r1/Prover.toml @@ -0,0 +1,20 @@ +hashed_message = [ + 84, 112, 91, 163, 186, 175, 219, 223, 186, 140, 95, 154, 112, 247, 168, 155, 238, 152, + 217, 6, 181, 62, 49, 7, 77, 167, 186, 236, 220, 13, 169, 173, +] +pub_key_x = [ + 85, 15, 71, 16, 3, 243, 223, 151, 195, 223, 80, 106, 199, 151, 246, 114, 31, 177, 161, + 251, 123, 143, 111, 131, 210, 36, 73, 138, 101, 200, 142, 36, +] +pub_key_y = [ + 19, 96, 147, 215, 1, 46, 80, 154, 115, 113, 92, 189, 11, 0, 163, 204, 15, 244, 181, + 192, 27, 63, 250, 25, 106, 177, 251, 50, 
112, 54, 184, 230, +] +signature = [ + 44, 112, 168, 208, 132, 182, 43, 252, 92, 224, 54, 65, 202, 249, 247, 42, + 212, 218, 140, 129, 191, 230, 236, 148, 135, 187, 94, 27, 239, 98, 161, 50, + 24, 173, 158, 226, 158, 175, 53, 31, 220, 80, 241, 82, 12, 66, 94, 155, + 144, 138, 7, 39, 139, 67, 176, 236, 123, 135, 39, 120, 193, 78, 7, 132 +] + + diff --git a/tooling/nargo_cli/tests/execution_success/ecdsa_secp256r1/src/main.nr b/test_programs/execution_success/ecdsa_secp256r1/src/main.nr similarity index 100% rename from tooling/nargo_cli/tests/execution_success/ecdsa_secp256r1/src/main.nr rename to test_programs/execution_success/ecdsa_secp256r1/src/main.nr diff --git a/tooling/nargo_cli/tests/execution_success/eddsa/Nargo.toml b/test_programs/execution_success/eddsa/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/eddsa/Nargo.toml rename to test_programs/execution_success/eddsa/Nargo.toml diff --git a/tooling/nargo_cli/tests/execution_success/eddsa/Prover.toml b/test_programs/execution_success/eddsa/Prover.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/eddsa/Prover.toml rename to test_programs/execution_success/eddsa/Prover.toml diff --git a/tooling/nargo_cli/tests/execution_success/eddsa/src/main.nr b/test_programs/execution_success/eddsa/src/main.nr similarity index 100% rename from tooling/nargo_cli/tests/execution_success/eddsa/src/main.nr rename to test_programs/execution_success/eddsa/src/main.nr diff --git a/tooling/nargo_cli/tests/execution_success/field_attribute/Nargo.toml b/test_programs/execution_success/field_attribute/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/field_attribute/Nargo.toml rename to test_programs/execution_success/field_attribute/Nargo.toml diff --git a/tooling/nargo_cli/tests/execution_success/field_attribute/Prover.toml b/test_programs/execution_success/field_attribute/Prover.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/field_attribute/Prover.toml rename to test_programs/execution_success/field_attribute/Prover.toml diff --git a/tooling/nargo_cli/tests/execution_success/field_attribute/src/main.nr b/test_programs/execution_success/field_attribute/src/main.nr similarity index 100% rename from tooling/nargo_cli/tests/execution_success/field_attribute/src/main.nr rename to test_programs/execution_success/field_attribute/src/main.nr diff --git a/tooling/nargo_cli/tests/execution_success/generics/Nargo.toml b/test_programs/execution_success/generics/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/generics/Nargo.toml rename to test_programs/execution_success/generics/Nargo.toml diff --git a/tooling/nargo_cli/tests/execution_success/generics/Prover.toml b/test_programs/execution_success/generics/Prover.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/generics/Prover.toml rename to test_programs/execution_success/generics/Prover.toml diff --git a/tooling/nargo_cli/tests/execution_success/generics/src/main.nr b/test_programs/execution_success/generics/src/main.nr similarity index 100% rename from tooling/nargo_cli/tests/execution_success/generics/src/main.nr rename to test_programs/execution_success/generics/src/main.nr diff --git a/tooling/nargo_cli/tests/execution_success/global_consts/Nargo.toml b/test_programs/execution_success/global_consts/Nargo.toml similarity index 100% rename from 
tooling/nargo_cli/tests/execution_success/global_consts/Nargo.toml rename to test_programs/execution_success/global_consts/Nargo.toml diff --git a/tooling/nargo_cli/tests/execution_success/global_consts/Prover.toml b/test_programs/execution_success/global_consts/Prover.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/global_consts/Prover.toml rename to test_programs/execution_success/global_consts/Prover.toml diff --git a/tooling/nargo_cli/tests/execution_success/global_consts/src/baz.nr b/test_programs/execution_success/global_consts/src/baz.nr similarity index 100% rename from tooling/nargo_cli/tests/execution_success/global_consts/src/baz.nr rename to test_programs/execution_success/global_consts/src/baz.nr diff --git a/tooling/nargo_cli/tests/execution_success/global_consts/src/foo.nr b/test_programs/execution_success/global_consts/src/foo.nr similarity index 100% rename from tooling/nargo_cli/tests/execution_success/global_consts/src/foo.nr rename to test_programs/execution_success/global_consts/src/foo.nr diff --git a/tooling/nargo_cli/tests/execution_success/global_consts/src/foo/bar.nr b/test_programs/execution_success/global_consts/src/foo/bar.nr similarity index 100% rename from tooling/nargo_cli/tests/execution_success/global_consts/src/foo/bar.nr rename to test_programs/execution_success/global_consts/src/foo/bar.nr diff --git a/test_programs/execution_success/global_consts/src/main.nr b/test_programs/execution_success/global_consts/src/main.nr new file mode 100644 index 00000000000..70c7a745a22 --- /dev/null +++ b/test_programs/execution_success/global_consts/src/main.nr @@ -0,0 +1,96 @@ +mod foo; +mod baz; + +global M: Field = 32; +global L: Field = 10; // Unused globals currently allowed +global N: Field = 5; +global T_LEN = 2; // Type inference is allowed on globals +//global N: Field = 5; // Uncomment to see duplicate globals error +struct Dummy { + x: [Field; N], + y: [Field; foo::MAGIC_NUMBER] +} + +struct Test { + v: Field, +} +global VALS: [Test; 1] = [Test { v: 100 }]; +global NESTED = [VALS, VALS]; + +fn main( + a: [Field; M + N - N], + b: [Field; 30 + N / 2], + c: pub [Field; foo::MAGIC_NUMBER], + d: [Field; foo::bar::N] +) { + let test_struct = Dummy { x: d, y: c }; + + for i in 0..foo::MAGIC_NUMBER { + assert(c[i] == foo::MAGIC_NUMBER); + assert(test_struct.y[i] == foo::MAGIC_NUMBER); + assert(test_struct.y[i] != NESTED[1][0].v); + } + + assert(N != M); + + let expected: u32 = 42; + assert(foo::TYPE_INFERRED == expected); + + let mut y = 5; + let mut x = M; + for i in 0..N * N { + let M: Field = 10; + x = M; + + y = i; + } + assert(y == 24); + assert(x == 10); + + let q = multiplyByM(3); + assert(q == 96); + + arrays_neq(a, b); + + let t: [Field; T_LEN] = [N, M]; + assert(t[1] == 32); + + assert(15 == my_submodule::my_helper()); + + let add_submodules_N = my_submodule::N + foo::bar::N; + assert(15 == add_submodules_N); + let add_from_bar_N = my_submodule::N + foo::bar::from_bar(1); + assert(15 == add_from_bar_N); + // Example showing an array filled with (my_submodule::N + 2) 0's + let sugared = [0; my_submodule::N + 2]; + assert(sugared[my_submodule::N + 1] == 0); + + let arr: [Field; my_submodule::N] = [N; 10]; + assert((arr[0] == 5) & (arr[9] == 5)); + + foo::from_foo(d); + baz::from_baz(c); +} + +fn multiplyByM(x: Field) -> Field { + x * M +} + +fn arrays_neq(a: [Field; M], b: [Field; M]) { + assert(a != b); +} + +mod my_submodule { + global N: Field = 10; + global L: Field = 50; + + fn my_bool_or(x: u1, y: u1) { + assert(x | y 
== 1); + } + + pub fn my_helper() -> Field { + let N: Field = 15; // Like in Rust, local variables override globals + let x = N; + x + } +} diff --git a/tooling/nargo_cli/tests/execution_success/hash_to_field/Nargo.toml b/test_programs/execution_success/hash_to_field/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/hash_to_field/Nargo.toml rename to test_programs/execution_success/hash_to_field/Nargo.toml diff --git a/tooling/nargo_cli/tests/execution_success/hash_to_field/Prover.toml b/test_programs/execution_success/hash_to_field/Prover.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/hash_to_field/Prover.toml rename to test_programs/execution_success/hash_to_field/Prover.toml diff --git a/tooling/nargo_cli/tests/execution_success/hash_to_field/src/main.nr b/test_programs/execution_success/hash_to_field/src/main.nr similarity index 100% rename from tooling/nargo_cli/tests/execution_success/hash_to_field/src/main.nr rename to test_programs/execution_success/hash_to_field/src/main.nr diff --git a/tooling/nargo_cli/tests/execution_success/higher_order_functions/Nargo.toml b/test_programs/execution_success/higher_order_functions/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/higher_order_functions/Nargo.toml rename to test_programs/execution_success/higher_order_functions/Nargo.toml diff --git a/tooling/nargo_cli/tests/execution_success/higher_order_functions/Prover.toml b/test_programs/execution_success/higher_order_functions/Prover.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/higher_order_functions/Prover.toml rename to test_programs/execution_success/higher_order_functions/Prover.toml diff --git a/test_programs/execution_success/higher_order_functions/src/main.nr b/test_programs/execution_success/higher_order_functions/src/main.nr new file mode 100644 index 00000000000..6583f961d58 --- /dev/null +++ b/test_programs/execution_success/higher_order_functions/src/main.nr @@ -0,0 +1,99 @@ +fn main(w: Field) -> pub Field { + let f = if 3 * 7 > 200 as u32 { foo } else { bar }; + assert(f()[1] == 2); + // Lambdas: + assert(twice(|x| x * 2, 5) == 20); + assert((|x, y| x + y + 1)(2, 3) == 6); + // nested lambdas + assert( + (|a, b| { + a + (|c| c + 2)(b) + })(0, 1) + == 3 + ); + // Closures: + let a = 42; + let g = || a; + assert(g() == 42); + // When you copy mutable variables, + // the capture of the copies shouldn't change: + let mut x = 2; + x = x + 1; + let z = x; + // Add extra mutations to ensure we can mutate x without the + // captured z changing. 
+ x = x + 1; + assert((|y| y + z)(1) == 4); + // When you capture mutable variables, + // again, the captured variable doesn't change: + let closure_capturing_mutable = (|y| y + x); + assert(closure_capturing_mutable(1) == 5); + x += 1; + assert(closure_capturing_mutable(1) == 5); + + regression_2154(); + + let ret = twice(add1, 3); + + test_array_functions(); + w + ret +} +/// Test the array functions in std::array +fn test_array_functions() { + let two = 2; // giving this a name, to ensure that the Option functions work with closures + let myarray: [i32; 3] = [1, 2, 3]; + assert(myarray.any(|n| n > 2)); + assert(myarray.any(|n| n > two)); + + let evens: [i32; 3] = myarray.map(|n| n * two); // [2, 4, 6] + assert(evens.all(|n| n > 1)); + assert(evens.all(|n| n >= two)); + + assert(evens.fold(0, |a, b| a + b) == 12); + assert(evens.fold(0, |a, b| a + b + two) == 18); + assert(evens.reduce(|a, b| a + b) == 12); + assert(evens.reduce(|a, b| a + b + two) == 16); + // TODO: is this a sort_via issue with the new backend, + // or something more general? + // + // currently it fails only with `--experimental-ssa` with + // "not yet implemented: Cast into signed" + // but it worked with the original ssa backend + // (before dropping it) + // + // opened #2121 for it + // https://github.com/noir-lang/noir/issues/2121 + // let descending = myarray.sort_via(|a, b| a > b); + // assert(descending == [3, 2, 1]); + assert(evens.map(|n| n / 2) == myarray); + assert(evens.map(|n| n / two) == myarray); +} + +fn foo() -> [u32; 2] { + [1, 3] +} + +fn bar() -> [u32; 2] { + [3, 2] +} + +fn add1(x: Field) -> Field { + x + 1 +} + +fn twice(f: fn(Field) -> Field, x: Field) -> Field { + f(f(x)) +} +// Fixing an ICE, where rewriting the closures +// during monomorphization didn't correspond +// to an internal `if` type +// found by @jfecher: +// https://github.com/noir-lang/noir/pull/1959#issuecomment-1658992989 +// issue https://github.com/noir-lang/noir/issues/2154 +fn regression_2154() { + let x: u32 = 32; + + let closure_if_else = if x > 2 { || x } else { || x + 2342 }; + + assert(closure_if_else() == 32); +} diff --git a/tooling/nargo_cli/tests/execution_success/if_else_chain/Nargo.toml b/test_programs/execution_success/if_else_chain/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/if_else_chain/Nargo.toml rename to test_programs/execution_success/if_else_chain/Nargo.toml diff --git a/tooling/nargo_cli/tests/execution_success/if_else_chain/Prover.toml b/test_programs/execution_success/if_else_chain/Prover.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/if_else_chain/Prover.toml rename to test_programs/execution_success/if_else_chain/Prover.toml diff --git a/tooling/nargo_cli/tests/execution_success/if_else_chain/src/main.nr b/test_programs/execution_success/if_else_chain/src/main.nr similarity index 100% rename from tooling/nargo_cli/tests/execution_success/if_else_chain/src/main.nr rename to test_programs/execution_success/if_else_chain/src/main.nr diff --git a/tooling/nargo_cli/tests/execution_success/import/Nargo.toml b/test_programs/execution_success/import/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/import/Nargo.toml rename to test_programs/execution_success/import/Nargo.toml diff --git a/tooling/nargo_cli/tests/execution_success/import/Prover.toml b/test_programs/execution_success/import/Prover.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/import/Prover.toml 
rename to test_programs/execution_success/import/Prover.toml diff --git a/tooling/nargo_cli/tests/execution_success/import/src/import.nr b/test_programs/execution_success/import/src/import.nr similarity index 100% rename from tooling/nargo_cli/tests/execution_success/import/src/import.nr rename to test_programs/execution_success/import/src/import.nr diff --git a/tooling/nargo_cli/tests/execution_success/import/src/main.nr b/test_programs/execution_success/import/src/main.nr similarity index 100% rename from tooling/nargo_cli/tests/execution_success/import/src/main.nr rename to test_programs/execution_success/import/src/main.nr diff --git a/tooling/nargo_cli/tests/execution_success/integer_array_indexing/Nargo.toml b/test_programs/execution_success/integer_array_indexing/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/integer_array_indexing/Nargo.toml rename to test_programs/execution_success/integer_array_indexing/Nargo.toml diff --git a/tooling/nargo_cli/tests/execution_success/integer_array_indexing/Prover.toml b/test_programs/execution_success/integer_array_indexing/Prover.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/integer_array_indexing/Prover.toml rename to test_programs/execution_success/integer_array_indexing/Prover.toml diff --git a/tooling/nargo_cli/tests/execution_success/integer_array_indexing/src/main.nr b/test_programs/execution_success/integer_array_indexing/src/main.nr similarity index 100% rename from tooling/nargo_cli/tests/execution_success/integer_array_indexing/src/main.nr rename to test_programs/execution_success/integer_array_indexing/src/main.nr diff --git a/tooling/nargo_cli/tests/execution_success/keccak256/Nargo.toml b/test_programs/execution_success/keccak256/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/keccak256/Nargo.toml rename to test_programs/execution_success/keccak256/Nargo.toml diff --git a/tooling/nargo_cli/tests/execution_success/keccak256/Prover.toml b/test_programs/execution_success/keccak256/Prover.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/keccak256/Prover.toml rename to test_programs/execution_success/keccak256/Prover.toml diff --git a/tooling/nargo_cli/tests/execution_success/keccak256/src/main.nr b/test_programs/execution_success/keccak256/src/main.nr similarity index 100% rename from tooling/nargo_cli/tests/execution_success/keccak256/src/main.nr rename to test_programs/execution_success/keccak256/src/main.nr diff --git a/tooling/nargo_cli/tests/execution_success/main_bool_arg/Nargo.toml b/test_programs/execution_success/main_bool_arg/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/main_bool_arg/Nargo.toml rename to test_programs/execution_success/main_bool_arg/Nargo.toml diff --git a/tooling/nargo_cli/tests/execution_success/main_bool_arg/Prover.toml b/test_programs/execution_success/main_bool_arg/Prover.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/main_bool_arg/Prover.toml rename to test_programs/execution_success/main_bool_arg/Prover.toml diff --git a/tooling/nargo_cli/tests/execution_success/main_bool_arg/src/main.nr b/test_programs/execution_success/main_bool_arg/src/main.nr similarity index 100% rename from tooling/nargo_cli/tests/execution_success/main_bool_arg/src/main.nr rename to test_programs/execution_success/main_bool_arg/src/main.nr diff --git 
a/tooling/nargo_cli/tests/execution_success/merkle_insert/Nargo.toml b/test_programs/execution_success/merkle_insert/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/merkle_insert/Nargo.toml rename to test_programs/execution_success/merkle_insert/Nargo.toml diff --git a/tooling/nargo_cli/tests/execution_success/merkle_insert/Prover.toml b/test_programs/execution_success/merkle_insert/Prover.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/merkle_insert/Prover.toml rename to test_programs/execution_success/merkle_insert/Prover.toml diff --git a/tooling/nargo_cli/tests/execution_success/merkle_insert/src/main.nr b/test_programs/execution_success/merkle_insert/src/main.nr similarity index 100% rename from tooling/nargo_cli/tests/execution_success/merkle_insert/src/main.nr rename to test_programs/execution_success/merkle_insert/src/main.nr diff --git a/tooling/nargo_cli/tests/execution_success/mock_oracle/Nargo.toml b/test_programs/execution_success/mock_oracle/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/mock_oracle/Nargo.toml rename to test_programs/execution_success/mock_oracle/Nargo.toml diff --git a/tooling/nargo_cli/tests/execution_success/mock_oracle/Prover.toml b/test_programs/execution_success/mock_oracle/Prover.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/mock_oracle/Prover.toml rename to test_programs/execution_success/mock_oracle/Prover.toml diff --git a/tooling/nargo_cli/tests/execution_success/mock_oracle/src/main.nr b/test_programs/execution_success/mock_oracle/src/main.nr similarity index 100% rename from tooling/nargo_cli/tests/execution_success/mock_oracle/src/main.nr rename to test_programs/execution_success/mock_oracle/src/main.nr diff --git a/tooling/nargo_cli/tests/execution_success/modules/Nargo.toml b/test_programs/execution_success/modules/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/modules/Nargo.toml rename to test_programs/execution_success/modules/Nargo.toml diff --git a/tooling/nargo_cli/tests/execution_success/modules/Prover.toml b/test_programs/execution_success/modules/Prover.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/modules/Prover.toml rename to test_programs/execution_success/modules/Prover.toml diff --git a/tooling/nargo_cli/tests/execution_success/modules/src/foo.nr b/test_programs/execution_success/modules/src/foo.nr similarity index 100% rename from tooling/nargo_cli/tests/execution_success/modules/src/foo.nr rename to test_programs/execution_success/modules/src/foo.nr diff --git a/tooling/nargo_cli/tests/execution_success/modules/src/main.nr b/test_programs/execution_success/modules/src/main.nr similarity index 100% rename from tooling/nargo_cli/tests/execution_success/modules/src/main.nr rename to test_programs/execution_success/modules/src/main.nr diff --git a/tooling/nargo_cli/tests/execution_success/modules_more/Nargo.toml b/test_programs/execution_success/modules_more/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/modules_more/Nargo.toml rename to test_programs/execution_success/modules_more/Nargo.toml diff --git a/tooling/nargo_cli/tests/execution_success/modules_more/Prover.toml b/test_programs/execution_success/modules_more/Prover.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/modules_more/Prover.toml rename to 
test_programs/execution_success/modules_more/Prover.toml diff --git a/tooling/nargo_cli/tests/execution_success/modules_more/src/foo.nr b/test_programs/execution_success/modules_more/src/foo.nr similarity index 100% rename from tooling/nargo_cli/tests/execution_success/modules_more/src/foo.nr rename to test_programs/execution_success/modules_more/src/foo.nr diff --git a/tooling/nargo_cli/tests/execution_success/modules_more/src/foo/bar.nr b/test_programs/execution_success/modules_more/src/foo/bar.nr similarity index 100% rename from tooling/nargo_cli/tests/execution_success/modules_more/src/foo/bar.nr rename to test_programs/execution_success/modules_more/src/foo/bar.nr diff --git a/tooling/nargo_cli/tests/execution_success/modules_more/src/main.nr b/test_programs/execution_success/modules_more/src/main.nr similarity index 100% rename from tooling/nargo_cli/tests/execution_success/modules_more/src/main.nr rename to test_programs/execution_success/modules_more/src/main.nr diff --git a/tooling/nargo_cli/tests/execution_success/modulus/Nargo.toml b/test_programs/execution_success/modulus/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/modulus/Nargo.toml rename to test_programs/execution_success/modulus/Nargo.toml diff --git a/tooling/nargo_cli/tests/execution_success/modulus/Prover.toml b/test_programs/execution_success/modulus/Prover.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/modulus/Prover.toml rename to test_programs/execution_success/modulus/Prover.toml diff --git a/tooling/nargo_cli/tests/execution_success/modulus/src/main.nr b/test_programs/execution_success/modulus/src/main.nr similarity index 100% rename from tooling/nargo_cli/tests/execution_success/modulus/src/main.nr rename to test_programs/execution_success/modulus/src/main.nr diff --git a/tooling/nargo_cli/tests/execution_success/nested_array_dynamic/Nargo.toml b/test_programs/execution_success/nested_array_dynamic/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/nested_array_dynamic/Nargo.toml rename to test_programs/execution_success/nested_array_dynamic/Nargo.toml diff --git a/tooling/nargo_cli/tests/execution_success/nested_array_dynamic/Prover.toml b/test_programs/execution_success/nested_array_dynamic/Prover.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/nested_array_dynamic/Prover.toml rename to test_programs/execution_success/nested_array_dynamic/Prover.toml diff --git a/tooling/nargo_cli/tests/execution_success/nested_array_dynamic/src/main.nr b/test_programs/execution_success/nested_array_dynamic/src/main.nr similarity index 100% rename from tooling/nargo_cli/tests/execution_success/nested_array_dynamic/src/main.nr rename to test_programs/execution_success/nested_array_dynamic/src/main.nr diff --git a/tooling/nargo_cli/tests/execution_success/nested_arrays_from_brillig/Nargo.toml b/test_programs/execution_success/nested_arrays_from_brillig/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/nested_arrays_from_brillig/Nargo.toml rename to test_programs/execution_success/nested_arrays_from_brillig/Nargo.toml diff --git a/tooling/nargo_cli/tests/execution_success/nested_arrays_from_brillig/Prover.toml b/test_programs/execution_success/nested_arrays_from_brillig/Prover.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/nested_arrays_from_brillig/Prover.toml rename to 
test_programs/execution_success/nested_arrays_from_brillig/Prover.toml diff --git a/tooling/nargo_cli/tests/execution_success/nested_arrays_from_brillig/src/main.nr b/test_programs/execution_success/nested_arrays_from_brillig/src/main.nr similarity index 100% rename from tooling/nargo_cli/tests/execution_success/nested_arrays_from_brillig/src/main.nr rename to test_programs/execution_success/nested_arrays_from_brillig/src/main.nr diff --git a/tooling/nargo_cli/tests/execution_success/nested_slice_dynamic/Nargo.toml b/test_programs/execution_success/nested_slice_dynamic/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/nested_slice_dynamic/Nargo.toml rename to test_programs/execution_success/nested_slice_dynamic/Nargo.toml diff --git a/tooling/nargo_cli/tests/execution_success/nested_slice_dynamic/Prover.toml b/test_programs/execution_success/nested_slice_dynamic/Prover.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/nested_slice_dynamic/Prover.toml rename to test_programs/execution_success/nested_slice_dynamic/Prover.toml diff --git a/tooling/nargo_cli/tests/execution_success/nested_slice_dynamic/src/main.nr b/test_programs/execution_success/nested_slice_dynamic/src/main.nr similarity index 100% rename from tooling/nargo_cli/tests/execution_success/nested_slice_dynamic/src/main.nr rename to test_programs/execution_success/nested_slice_dynamic/src/main.nr diff --git a/test_programs/execution_success/operator_overloading/Nargo.toml b/test_programs/execution_success/operator_overloading/Nargo.toml new file mode 100644 index 00000000000..7f9f18ff567 --- /dev/null +++ b/test_programs/execution_success/operator_overloading/Nargo.toml @@ -0,0 +1,7 @@ +[package] +name = "operator_overloading" +type = "bin" +authors = [""] +compiler_version = ">=0.20.0" + +[dependencies] diff --git a/test_programs/execution_success/operator_overloading/Prover.toml b/test_programs/execution_success/operator_overloading/Prover.toml new file mode 100644 index 00000000000..516b7b4074c --- /dev/null +++ b/test_programs/execution_success/operator_overloading/Prover.toml @@ -0,0 +1,2 @@ +x = 3 +y = 9 diff --git a/test_programs/execution_success/operator_overloading/src/main.nr b/test_programs/execution_success/operator_overloading/src/main.nr new file mode 100644 index 00000000000..3867531abca --- /dev/null +++ b/test_programs/execution_success/operator_overloading/src/main.nr @@ -0,0 +1,154 @@ +use dep::std::ops::{ Add, Sub, Mul, Div, Rem, BitAnd, BitOr, BitXor, Shl, Shr }; +use dep::std::cmp::Ordering; + +// x = 3, y = 9 +fn main(x: u32, y: u32) { + let wx = Wrapper::new(x); + let wy = Wrapper::new(y); + + // expected x and expected y values + let ex: u32 = 3; + let ey: u32 = 9; + + assert((wx + wy).inner == ex + ey); + assert((wy - wx).inner == ey - ex); + assert((wx * wy).inner == ex * ey); + assert((wx / wy).inner == ex / ey); + assert((wx % wy).inner == ex % ey); + + assert((wx & wy).inner == (ex & ey)); + assert((wx | wy).inner == (ex | ey)); + assert((wx ^ wy).inner == (ex ^ ey)); + + assert((wy << wx).inner == (ey << ex)); + assert((wy >> wx).inner == (ey >> ex)); + + assert((wx == wy) == (ex == ey)); + assert((wx < wy) == (ex < ey)); + assert((wx <= wy) == (ex <= ey)); + assert((wx > wy) == (ex > ey)); + assert((wx >= wy) == (ex >= ey)); + assert(wx.cmp(wy) == ex.cmp(ey)); + + // Ensure operator overloading still works with more complex types + let pair_ascending = Pair { x: wx, y: wy }; + let pair_descending = Pair { x: wy, y: wx }; + + 
assert(pair_ascending != pair_descending); + + assert(pair_ascending < pair_descending); + assert(pair_ascending <= pair_descending); + assert(pair_descending > pair_ascending); + assert(pair_descending >= pair_ascending); + + assert(pair_ascending.cmp(pair_descending) == Ordering::less()); +} + +struct Wrapper { + inner: u32 +} + +impl Wrapper { + fn new(inner: u32) -> Self { + Wrapper { inner } + } +} + +impl Add for Wrapper { + fn add(self, other: Self) -> Self { + Wrapper::new(self.inner + other.inner) + } +} + +impl Sub for Wrapper { + fn sub(self, other: Self) -> Self { + Wrapper::new(self.inner - other.inner) + } +} + +impl Mul for Wrapper { + fn mul(self, other: Self) -> Self { + Wrapper::new(self.inner * other.inner) + } +} + +impl Div for Wrapper { + fn div(self, other: Self) -> Self { + Wrapper::new(self.inner / other.inner) + } +} + +impl Rem for Wrapper { + fn rem(self, other: Self) -> Self { + Wrapper::new(self.inner % other.inner) + } +} + +impl BitAnd for Wrapper { + fn bitand(self, other: Self) -> Self { + Wrapper::new(self.inner & other.inner) + } +} + +impl BitOr for Wrapper { + fn bitor(self, other: Self) -> Self { + Wrapper::new(self.inner | other.inner) + } +} + +impl BitXor for Wrapper { + fn bitxor(self, other: Self) -> Self { + Wrapper::new(self.inner ^ other.inner) + } +} + +impl Shl for Wrapper { + fn shl(self, other: Self) -> Self { + Wrapper::new(self.inner << other.inner) + } +} + +impl Shr for Wrapper { + fn shr(self, other: Self) -> Self { + Wrapper::new(self.inner >> other.inner) + } +} + +impl Eq for Wrapper { + fn eq(self, other: Self) -> bool { + self.inner == other.inner + } +} + +impl Ord for Wrapper { + fn cmp(self, other: Self) -> Ordering { + self.inner.cmp(other.inner) + } +} + + + + + +struct Pair { + x: Wrapper, + y: Wrapper, +} + +impl Eq for Pair { + fn eq(self, o: Self) -> bool { + (self.x == o.x) & (self.y == o.y) + } +} + +impl Ord for Pair { + fn cmp(self, o: Self) -> Ordering { + let mut result = self.x.cmp(o.x); + + if result == Ordering::equal() { + result = self.y.cmp(o.y); + } + + result + } +} diff --git a/tooling/nargo_cli/tests/execution_success/pedersen_check/Nargo.toml b/test_programs/execution_success/pedersen_check/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/pedersen_check/Nargo.toml rename to test_programs/execution_success/pedersen_check/Nargo.toml diff --git a/tooling/nargo_cli/tests/execution_success/pedersen_check/Prover.toml b/test_programs/execution_success/pedersen_check/Prover.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/pedersen_check/Prover.toml rename to test_programs/execution_success/pedersen_check/Prover.toml diff --git a/tooling/nargo_cli/tests/execution_success/pedersen_check/src/main.nr b/test_programs/execution_success/pedersen_check/src/main.nr similarity index 100% rename from tooling/nargo_cli/tests/execution_success/pedersen_check/src/main.nr rename to test_programs/execution_success/pedersen_check/src/main.nr diff --git a/tooling/nargo_cli/tests/execution_success/poseidon_bn254_hash/Nargo.toml b/test_programs/execution_success/poseidon_bn254_hash/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/poseidon_bn254_hash/Nargo.toml rename to test_programs/execution_success/poseidon_bn254_hash/Nargo.toml diff --git a/tooling/nargo_cli/tests/execution_success/poseidon_bn254_hash/Prover.toml b/test_programs/execution_success/poseidon_bn254_hash/Prover.toml similarity index 100% rename from 
tooling/nargo_cli/tests/execution_success/poseidon_bn254_hash/Prover.toml rename to test_programs/execution_success/poseidon_bn254_hash/Prover.toml diff --git a/tooling/nargo_cli/tests/execution_success/poseidon_bn254_hash/src/main.nr b/test_programs/execution_success/poseidon_bn254_hash/src/main.nr similarity index 100% rename from tooling/nargo_cli/tests/execution_success/poseidon_bn254_hash/src/main.nr rename to test_programs/execution_success/poseidon_bn254_hash/src/main.nr diff --git a/tooling/nargo_cli/tests/execution_success/poseidonsponge_x5_254/Nargo.toml b/test_programs/execution_success/poseidonsponge_x5_254/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/poseidonsponge_x5_254/Nargo.toml rename to test_programs/execution_success/poseidonsponge_x5_254/Nargo.toml diff --git a/tooling/nargo_cli/tests/execution_success/poseidonsponge_x5_254/Prover.toml b/test_programs/execution_success/poseidonsponge_x5_254/Prover.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/poseidonsponge_x5_254/Prover.toml rename to test_programs/execution_success/poseidonsponge_x5_254/Prover.toml diff --git a/tooling/nargo_cli/tests/execution_success/poseidonsponge_x5_254/src/main.nr b/test_programs/execution_success/poseidonsponge_x5_254/src/main.nr similarity index 100% rename from tooling/nargo_cli/tests/execution_success/poseidonsponge_x5_254/src/main.nr rename to test_programs/execution_success/poseidonsponge_x5_254/src/main.nr diff --git a/tooling/nargo_cli/tests/execution_success/pred_eq/Nargo.toml b/test_programs/execution_success/pred_eq/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/pred_eq/Nargo.toml rename to test_programs/execution_success/pred_eq/Nargo.toml diff --git a/tooling/nargo_cli/tests/execution_success/pred_eq/Prover.toml b/test_programs/execution_success/pred_eq/Prover.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/pred_eq/Prover.toml rename to test_programs/execution_success/pred_eq/Prover.toml diff --git a/tooling/nargo_cli/tests/execution_success/pred_eq/src/main.nr b/test_programs/execution_success/pred_eq/src/main.nr similarity index 100% rename from tooling/nargo_cli/tests/execution_success/pred_eq/src/main.nr rename to test_programs/execution_success/pred_eq/src/main.nr diff --git a/test_programs/execution_success/prelude/Nargo.toml b/test_programs/execution_success/prelude/Nargo.toml new file mode 100644 index 00000000000..35f223bce02 --- /dev/null +++ b/test_programs/execution_success/prelude/Nargo.toml @@ -0,0 +1,7 @@ +[package] +name = "prelude" +type = "bin" +authors = [""] +compiler_version = ">=0.20.0" + +[dependencies] \ No newline at end of file diff --git a/test_programs/execution_success/prelude/src/main.nr b/test_programs/execution_success/prelude/src/main.nr new file mode 100644 index 00000000000..c9ae448c486 --- /dev/null +++ b/test_programs/execution_success/prelude/src/main.nr @@ -0,0 +1,31 @@ +fn main() { + let _xs = Vec::new(); + let _option = Option::none(); + + print("42\n"); + println("42"); +} + +mod a { + // We don't want to give an error due to re-importing elements that are already in the prelude. 
+ use dep::std::collections::vec::Vec; + use dep::std::option::Option; + + fn main() { + let _xs = Vec::new(); + let _option = Option::none(); + + print("42\n"); + println("42"); + } +} + +mod b { + fn main() { + let _xs = Vec::new(); + let _option = Option::none(); + + print("42\n"); + println("42"); + } +} diff --git a/tooling/nargo_cli/tests/execution_success/references/Nargo.toml b/test_programs/execution_success/references/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/references/Nargo.toml rename to test_programs/execution_success/references/Nargo.toml diff --git a/tooling/nargo_cli/tests/execution_success/references/Prover.toml b/test_programs/execution_success/references/Prover.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/references/Prover.toml rename to test_programs/execution_success/references/Prover.toml diff --git a/tooling/nargo_cli/tests/execution_success/references/src/main.nr b/test_programs/execution_success/references/src/main.nr similarity index 100% rename from tooling/nargo_cli/tests/execution_success/references/src/main.nr rename to test_programs/execution_success/references/src/main.nr diff --git a/tooling/nargo_cli/tests/execution_success/regression/Nargo.toml b/test_programs/execution_success/regression/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/regression/Nargo.toml rename to test_programs/execution_success/regression/Nargo.toml diff --git a/tooling/nargo_cli/tests/execution_success/regression/Prover.toml b/test_programs/execution_success/regression/Prover.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/regression/Prover.toml rename to test_programs/execution_success/regression/Prover.toml diff --git a/test_programs/execution_success/regression/src/main.nr b/test_programs/execution_success/regression/src/main.nr new file mode 100644 index 00000000000..08112d4c616 --- /dev/null +++ b/test_programs/execution_success/regression/src/main.nr @@ -0,0 +1,109 @@ +global NIBBLE_LENGTH: Field = 16; + +fn compact_decode(input: [u8; N], length: Field) -> ([u4; NIBBLE_LENGTH], Field) { + assert(2 * input.len() as u64 <= NIBBLE_LENGTH as u64); + assert(length as u64 <= input.len() as u64); + + let mut nibble = [0 as u4; NIBBLE_LENGTH]; + + let first_nibble = (input[0] >> 4) as u4; + let parity = first_nibble as u1; + + if parity == 1 { + nibble[0] = (input[0] & 0x0f) as u4; + for i in 1..input.len() { + if i as u64 < length as u64 { + let x = input[i]; + nibble[2*i - 1] = (x >> 4) as u4; + nibble[2*i] = (x & 0x0f) as u4; + } + } + } else { + for i in 0..2 { + if (i as u64) < length as u64 - 1 { + let x = input[i + 1]; + nibble[2*i] = (x >> 4) as u4; + nibble[2*i + 1] = (x & 0x0f) as u4; + } + } + } + + let out = (nibble, 2 * length + (parity as Field) - 2); + + out +} + +fn enc(value: [u8; N], value_length: Field) -> ([u8; 32], Field) { + assert(value.len() as u8 >= value_length as u8); + let mut out_value = [0; 32]; + if value_length == 0 { + let out = (out_value, value_length); + out + } else if value_length as u8 < 31 { + out_value[0] = 0x80 + value_length as u8; + + for i in 1..value.len() { + out_value[i] = value[i-1]; + } + + let out = (out_value, value_length + 1); + + out + } else { + let out = (out_value, 32); + out + } +} + +fn bitshift_literal_0() -> u64 { + let mut bits: u64 = 0; + bits |= 1 << 0; + + bits +} +fn bitshift_literal_4() -> u64 { + let mut bits: u64 = 0; + bits |= 1 << 4; + + bits +} +fn 
bitshift_variable(idx: u64) -> u64 { + let mut bits: u64 = 0; + bits |= 1 << idx; + + bits +} + +fn main(x: [u8; 5], z: Field) { + //Issue 1144 + let (nib, len) = compact_decode(x, z); + assert(len == 5); + assert([nib[0], nib[1], nib[2], nib[3], nib[4]] == [15, 1, 12, 11, 8]); + // Issue 1169 + let val1 = [ + 0xb8, 0x8f, 0x61, 0xe6, 0xfb, 0xda, 0x83, 0xfb, 0xff, 0xfa, 0xbe, 0x36, 0x41, 0x12, 0x13, + 0x74, 0x80, 0x39, 0x80, 0x18, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00 + ]; + let val1_length = 20; + + let enc_val1 = enc(val1, val1_length); + + assert( + enc_val1.0 == [ + 0x94, 0xb8, 0x8f, 0x61, 0xe6, 0xfb, 0xda, 0x83, 0xfb, 0xff, 0xfa, 0xbe, 0x36, 0x41, + 0x12, 0x13, 0x74, 0x80, 0x39, 0x80, 0x18, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00 + ] + ); + assert(enc_val1.1 == 21); + // Issue 2399 + let result_0 = bitshift_literal_0(); + assert(result_0 == 1); + let result_4 = bitshift_literal_4(); + assert(result_4 == 16); + let result_0 = bitshift_variable(0); + assert(result_0 == 1); + let result_4 = bitshift_variable(4); + assert(result_4 == 16); +} diff --git a/test_programs/execution_success/regression_2660/Nargo.toml b/test_programs/execution_success/regression_2660/Nargo.toml new file mode 100644 index 00000000000..1ea0058684b --- /dev/null +++ b/test_programs/execution_success/regression_2660/Nargo.toml @@ -0,0 +1,7 @@ +[package] +name = "regression_2660" +version = "0.1.0" +type = "bin" +authors = [""] + +[dependencies] \ No newline at end of file diff --git a/test_programs/execution_success/regression_2660/Prover.toml b/test_programs/execution_success/regression_2660/Prover.toml new file mode 100644 index 00000000000..4dd6b405159 --- /dev/null +++ b/test_programs/execution_success/regression_2660/Prover.toml @@ -0,0 +1 @@ +x = "1" diff --git a/test_programs/execution_success/regression_2660/src/main.nr b/test_programs/execution_success/regression_2660/src/main.nr new file mode 100644 index 00000000000..f32bc924e3a --- /dev/null +++ b/test_programs/execution_success/regression_2660/src/main.nr @@ -0,0 +1,6 @@ +global foo = -1; + +fn main(x: i32) { + let y = x + foo; + assert(y == 0); +} diff --git a/tooling/nargo_cli/tests/execution_success/regression_2854/Nargo.toml b/test_programs/execution_success/regression_2854/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/regression_2854/Nargo.toml rename to test_programs/execution_success/regression_2854/Nargo.toml diff --git a/tooling/nargo_cli/tests/execution_success/regression_2854/Prover.toml b/test_programs/execution_success/regression_2854/Prover.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/regression_2854/Prover.toml rename to test_programs/execution_success/regression_2854/Prover.toml diff --git a/tooling/nargo_cli/tests/execution_success/regression_2854/src/main.nr b/test_programs/execution_success/regression_2854/src/main.nr similarity index 100% rename from tooling/nargo_cli/tests/execution_success/regression_2854/src/main.nr rename to test_programs/execution_success/regression_2854/src/main.nr diff --git a/test_programs/execution_success/regression_3394/Nargo.toml b/test_programs/execution_success/regression_3394/Nargo.toml new file mode 100644 index 00000000000..4949962b16a --- /dev/null +++ b/test_programs/execution_success/regression_3394/Nargo.toml @@ -0,0 +1,6 @@ +[package] +name = "regression_3394" +type = "bin" +authors = [""] + +[dependencies] diff --git 
a/tooling/nargo_cli/tests/execution_success/regression_method_cannot_be_found/Prover.toml b/test_programs/execution_success/regression_3394/Prover.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/regression_method_cannot_be_found/Prover.toml rename to test_programs/execution_success/regression_3394/Prover.toml diff --git a/test_programs/execution_success/regression_3394/src/main.nr b/test_programs/execution_success/regression_3394/src/main.nr new file mode 100644 index 00000000000..cc45487b98b --- /dev/null +++ b/test_programs/execution_success/regression_3394/src/main.nr @@ -0,0 +1,6 @@ +use dep::std; + +fn main() { + let x : i8 = -128; + std::println(x); +} \ No newline at end of file diff --git a/test_programs/execution_success/regression_3607/Nargo.toml b/test_programs/execution_success/regression_3607/Nargo.toml new file mode 100644 index 00000000000..8757f88b90e --- /dev/null +++ b/test_programs/execution_success/regression_3607/Nargo.toml @@ -0,0 +1,6 @@ +[package] +name = "regression_3607" +type = "bin" +authors = [""] + +[dependencies] diff --git a/test_programs/execution_success/regression_3607/Prover.toml b/test_programs/execution_success/regression_3607/Prover.toml new file mode 100644 index 00000000000..4dd6b405159 --- /dev/null +++ b/test_programs/execution_success/regression_3607/Prover.toml @@ -0,0 +1 @@ +x = "1" diff --git a/test_programs/execution_success/regression_3607/src/main.nr b/test_programs/execution_success/regression_3607/src/main.nr new file mode 100644 index 00000000000..c09211c2810 --- /dev/null +++ b/test_programs/execution_success/regression_3607/src/main.nr @@ -0,0 +1,8 @@ +fn main(mut x: u32) { + if x == 0 { + x = (x+1) / x; + } else { + x = (x+1) / x; + } + assert(x != 0); +} \ No newline at end of file diff --git a/test_programs/execution_success/regression_3889/Nargo.toml b/test_programs/execution_success/regression_3889/Nargo.toml new file mode 100644 index 00000000000..d212d24473f --- /dev/null +++ b/test_programs/execution_success/regression_3889/Nargo.toml @@ -0,0 +1,7 @@ +[package] +name = "regression_3889" +version = "0.1.0" +type = "bin" +authors = [""] + +[dependencies] diff --git a/test_programs/execution_success/regression_3889/Prover.toml b/test_programs/execution_success/regression_3889/Prover.toml new file mode 100644 index 00000000000..a81ab67fe3e --- /dev/null +++ b/test_programs/execution_success/regression_3889/Prover.toml @@ -0,0 +1,10 @@ +[works] +a = "5" + +[fails] +a = "6" + + +[also_fails] +a = "7" + diff --git a/test_programs/execution_success/regression_3889/src/main.nr b/test_programs/execution_success/regression_3889/src/main.nr new file mode 100644 index 00000000000..10b8ecabee3 --- /dev/null +++ b/test_programs/execution_success/regression_3889/src/main.nr @@ -0,0 +1,23 @@ +mod Foo { + struct NewType{ + a: Field, + } +} + +mod Bar { + use crate::Foo::NewType as BarStruct; + use crate::Foo::NewType; +} + +mod Baz { + struct Works { + a: Field, + } + use crate::Bar::BarStruct; + use crate::Bar::NewType; +} + + +fn main(works: Baz::Works, fails: Baz::BarStruct, also_fails: Bar::NewType) -> pub Field { + works.a + fails.a + also_fails.a +} diff --git a/tooling/nargo_cli/tests/execution_success/regression_mem_op_predicate/Nargo.toml b/test_programs/execution_success/regression_mem_op_predicate/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/regression_mem_op_predicate/Nargo.toml rename to test_programs/execution_success/regression_mem_op_predicate/Nargo.toml 
diff --git a/tooling/nargo_cli/tests/execution_success/regression_mem_op_predicate/Prover.toml b/test_programs/execution_success/regression_mem_op_predicate/Prover.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/regression_mem_op_predicate/Prover.toml rename to test_programs/execution_success/regression_mem_op_predicate/Prover.toml diff --git a/tooling/nargo_cli/tests/execution_success/regression_mem_op_predicate/src/main.nr b/test_programs/execution_success/regression_mem_op_predicate/src/main.nr similarity index 100% rename from tooling/nargo_cli/tests/execution_success/regression_mem_op_predicate/src/main.nr rename to test_programs/execution_success/regression_mem_op_predicate/src/main.nr diff --git a/tooling/nargo_cli/tests/execution_success/regression_method_cannot_be_found/Nargo.toml b/test_programs/execution_success/regression_method_cannot_be_found/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/regression_method_cannot_be_found/Nargo.toml rename to test_programs/execution_success/regression_method_cannot_be_found/Nargo.toml diff --git a/test_programs/execution_success/regression_method_cannot_be_found/Prover.toml b/test_programs/execution_success/regression_method_cannot_be_found/Prover.toml new file mode 100644 index 00000000000..8b137891791 --- /dev/null +++ b/test_programs/execution_success/regression_method_cannot_be_found/Prover.toml @@ -0,0 +1 @@ + diff --git a/tooling/nargo_cli/tests/execution_success/regression_method_cannot_be_found/src/main.nr b/test_programs/execution_success/regression_method_cannot_be_found/src/main.nr similarity index 100% rename from tooling/nargo_cli/tests/execution_success/regression_method_cannot_be_found/src/main.nr rename to test_programs/execution_success/regression_method_cannot_be_found/src/main.nr diff --git a/tooling/nargo_cli/tests/execution_success/scalar_mul/Nargo.toml b/test_programs/execution_success/scalar_mul/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/scalar_mul/Nargo.toml rename to test_programs/execution_success/scalar_mul/Nargo.toml diff --git a/tooling/nargo_cli/tests/execution_success/scalar_mul/Prover.toml b/test_programs/execution_success/scalar_mul/Prover.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/scalar_mul/Prover.toml rename to test_programs/execution_success/scalar_mul/Prover.toml diff --git a/test_programs/execution_success/scalar_mul/src/main.nr b/test_programs/execution_success/scalar_mul/src/main.nr new file mode 100644 index 00000000000..2ddf22cf71e --- /dev/null +++ b/test_programs/execution_success/scalar_mul/src/main.nr @@ -0,0 +1,23 @@ +use dep::std; + +fn main( + a: Field, + a_pub_x: pub Field, + a_pub_y: pub Field, + b: Field, + b_pub_x: pub Field, + b_pub_y: pub Field +) { + let mut priv_key = a; + let mut pub_x: Field = a_pub_x; + let mut pub_y: Field = a_pub_y; + if a != 1 { + // Change `a` in Prover.toml to test input `b` + priv_key = b; + pub_x = b_pub_x; + pub_y = b_pub_y; + } + let res = std::scalar_mul::fixed_base_embedded_curve(priv_key, 0); + assert(res[0] == pub_x); + assert(res[1] == pub_y); +} diff --git a/tooling/nargo_cli/tests/execution_success/schnorr/Nargo.toml b/test_programs/execution_success/schnorr/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/schnorr/Nargo.toml rename to test_programs/execution_success/schnorr/Nargo.toml diff --git a/tooling/nargo_cli/tests/execution_success/schnorr/Prover.toml 
b/test_programs/execution_success/schnorr/Prover.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/schnorr/Prover.toml rename to test_programs/execution_success/schnorr/Prover.toml diff --git a/test_programs/execution_success/schnorr/src/main.nr b/test_programs/execution_success/schnorr/src/main.nr new file mode 100644 index 00000000000..107af152625 --- /dev/null +++ b/test_programs/execution_success/schnorr/src/main.nr @@ -0,0 +1,25 @@ +use dep::std; +// Note: If main has any unsized types, then the verifier will never be able +// to figure out the circuit instance +fn main( + message: [u8; 10], + message_field: Field, + pub_key_x: Field, + pub_key_y: Field, + signature: [u8; 64] +) { + // Regression for issue #2421 + // We want to make sure that we can accurately verify a signature whose message is a slice vs. an array + let message_field_bytes = message_field.to_be_bytes(10); + for i in 0..10 { + assert(message[i] == message_field_bytes[i]); + } + // Is there ever a situation where someone would want + // to ensure that a signature was invalid? + // Check that passing a slice as the message is valid + let valid_signature = std::schnorr::verify_signature(pub_key_x, pub_key_y, signature, message_field_bytes); + assert(valid_signature); + // Check that passing an array as the message is valid + let valid_signature = std::schnorr::verify_signature(pub_key_x, pub_key_y, signature, message); + assert(valid_signature); +} diff --git a/tooling/nargo_cli/tests/execution_success/sha256/Nargo.toml b/test_programs/execution_success/sha256/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/sha256/Nargo.toml rename to test_programs/execution_success/sha256/Nargo.toml diff --git a/tooling/nargo_cli/tests/execution_success/sha256/Prover.toml b/test_programs/execution_success/sha256/Prover.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/sha256/Prover.toml rename to test_programs/execution_success/sha256/Prover.toml diff --git a/tooling/nargo_cli/tests/execution_success/sha256/src/main.nr b/test_programs/execution_success/sha256/src/main.nr similarity index 100% rename from tooling/nargo_cli/tests/execution_success/sha256/src/main.nr rename to test_programs/execution_success/sha256/src/main.nr diff --git a/tooling/nargo_cli/tests/execution_success/sha2_byte/Nargo.toml b/test_programs/execution_success/sha2_byte/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/sha2_byte/Nargo.toml rename to test_programs/execution_success/sha2_byte/Nargo.toml diff --git a/tooling/nargo_cli/tests/execution_success/sha2_byte/Prover.toml b/test_programs/execution_success/sha2_byte/Prover.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/sha2_byte/Prover.toml rename to test_programs/execution_success/sha2_byte/Prover.toml diff --git a/tooling/nargo_cli/tests/execution_success/sha2_byte/src/main.nr b/test_programs/execution_success/sha2_byte/src/main.nr similarity index 100% rename from tooling/nargo_cli/tests/execution_success/sha2_byte/src/main.nr rename to test_programs/execution_success/sha2_byte/src/main.nr diff --git a/test_programs/execution_success/side_effects_constrain_array/Nargo.toml b/test_programs/execution_success/side_effects_constrain_array/Nargo.toml new file mode 100644 index 00000000000..fc817f0921f --- /dev/null +++ b/test_programs/execution_success/side_effects_constrain_array/Nargo.toml @@ -0,0 +1,7 @@ +[package] +name = 
"side_effects_constrain_array" +type = "bin" +authors = [""] +compiler_version = ">=0.20.0" + +[dependencies] \ No newline at end of file diff --git a/tooling/nargo_cli/tests/execution_success/slice_struct_field/Prover.toml b/test_programs/execution_success/side_effects_constrain_array/Prover.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/slice_struct_field/Prover.toml rename to test_programs/execution_success/side_effects_constrain_array/Prover.toml diff --git a/test_programs/execution_success/side_effects_constrain_array/src/main.nr b/test_programs/execution_success/side_effects_constrain_array/src/main.nr new file mode 100644 index 00000000000..fb3c346a460 --- /dev/null +++ b/test_programs/execution_success/side_effects_constrain_array/src/main.nr @@ -0,0 +1,17 @@ +struct Bar { + inner: [Field; 3], +} + +fn main(y: pub u32) { + let bar = Bar { inner: [100, 101, 102] }; + + // The assert inside the if should be hit + if y < 10 { + assert(bar.inner == [100, 101, 102]); + } + + // The assert inside the if should not be hit + if y > 10 { + assert(bar.inner == [0, 1, 2]); + } +} \ No newline at end of file diff --git a/tooling/nargo_cli/tests/execution_success/signed_arithmetic/Nargo.toml b/test_programs/execution_success/signed_arithmetic/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/signed_arithmetic/Nargo.toml rename to test_programs/execution_success/signed_arithmetic/Nargo.toml diff --git a/tooling/nargo_cli/tests/execution_success/signed_arithmetic/Prover.toml b/test_programs/execution_success/signed_arithmetic/Prover.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/signed_arithmetic/Prover.toml rename to test_programs/execution_success/signed_arithmetic/Prover.toml diff --git a/tooling/nargo_cli/tests/execution_success/signed_arithmetic/src/main.nr b/test_programs/execution_success/signed_arithmetic/src/main.nr similarity index 100% rename from tooling/nargo_cli/tests/execution_success/signed_arithmetic/src/main.nr rename to test_programs/execution_success/signed_arithmetic/src/main.nr diff --git a/test_programs/execution_success/signed_comparison/Nargo.toml b/test_programs/execution_success/signed_comparison/Nargo.toml new file mode 100644 index 00000000000..c8de162877b --- /dev/null +++ b/test_programs/execution_success/signed_comparison/Nargo.toml @@ -0,0 +1,6 @@ +[package] +name = "signed_comparison" +type = "bin" +authors = [""] + +[dependencies] diff --git a/test_programs/execution_success/signed_comparison/Prover.toml b/test_programs/execution_success/signed_comparison/Prover.toml new file mode 100644 index 00000000000..e0e584b7380 --- /dev/null +++ b/test_programs/execution_success/signed_comparison/Prover.toml @@ -0,0 +1,3 @@ +x = "5" +y = "8" +z = "-15" diff --git a/test_programs/execution_success/signed_comparison/src/main.nr b/test_programs/execution_success/signed_comparison/src/main.nr new file mode 100644 index 00000000000..d020be380fb --- /dev/null +++ b/test_programs/execution_success/signed_comparison/src/main.nr @@ -0,0 +1,13 @@ +use dep::std; + +fn main(mut x: i8, mut y: i8, z: i8) { + let mut s1: i8 = 5; + let mut s2: i8 = 8; + assert(-1 as i8 < 0); + assert(x < y); + assert(-x < y); + assert(-y < -x); + assert((z > x) == false); + assert(x <= s1); + assert(z < x - y - s2); +} diff --git a/tooling/nargo_cli/tests/execution_success/signed_division/Nargo.toml b/test_programs/execution_success/signed_division/Nargo.toml similarity index 100% rename from 
tooling/nargo_cli/tests/execution_success/signed_division/Nargo.toml rename to test_programs/execution_success/signed_division/Nargo.toml diff --git a/tooling/nargo_cli/tests/execution_success/signed_division/Prover.toml b/test_programs/execution_success/signed_division/Prover.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/signed_division/Prover.toml rename to test_programs/execution_success/signed_division/Prover.toml diff --git a/tooling/nargo_cli/tests/execution_success/signed_division/src/main.nr b/test_programs/execution_success/signed_division/src/main.nr similarity index 100% rename from tooling/nargo_cli/tests/execution_success/signed_division/src/main.nr rename to test_programs/execution_success/signed_division/src/main.nr diff --git a/tooling/nargo_cli/tests/execution_success/simple_2d_array/Nargo.toml b/test_programs/execution_success/simple_2d_array/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/simple_2d_array/Nargo.toml rename to test_programs/execution_success/simple_2d_array/Nargo.toml diff --git a/tooling/nargo_cli/tests/execution_success/simple_2d_array/Prover.toml b/test_programs/execution_success/simple_2d_array/Prover.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/simple_2d_array/Prover.toml rename to test_programs/execution_success/simple_2d_array/Prover.toml diff --git a/tooling/nargo_cli/tests/execution_success/simple_2d_array/src/main.nr b/test_programs/execution_success/simple_2d_array/src/main.nr similarity index 100% rename from tooling/nargo_cli/tests/execution_success/simple_2d_array/src/main.nr rename to test_programs/execution_success/simple_2d_array/src/main.nr diff --git a/tooling/nargo_cli/tests/execution_success/simple_add_and_ret_arr/Nargo.toml b/test_programs/execution_success/simple_add_and_ret_arr/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/simple_add_and_ret_arr/Nargo.toml rename to test_programs/execution_success/simple_add_and_ret_arr/Nargo.toml diff --git a/tooling/nargo_cli/tests/execution_success/simple_add_and_ret_arr/Prover.toml b/test_programs/execution_success/simple_add_and_ret_arr/Prover.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/simple_add_and_ret_arr/Prover.toml rename to test_programs/execution_success/simple_add_and_ret_arr/Prover.toml diff --git a/tooling/nargo_cli/tests/execution_success/simple_add_and_ret_arr/src/main.nr b/test_programs/execution_success/simple_add_and_ret_arr/src/main.nr similarity index 100% rename from tooling/nargo_cli/tests/execution_success/simple_add_and_ret_arr/src/main.nr rename to test_programs/execution_success/simple_add_and_ret_arr/src/main.nr diff --git a/tooling/nargo_cli/tests/execution_success/simple_bitwise/Nargo.toml b/test_programs/execution_success/simple_bitwise/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/simple_bitwise/Nargo.toml rename to test_programs/execution_success/simple_bitwise/Nargo.toml diff --git a/tooling/nargo_cli/tests/execution_success/simple_bitwise/Prover.toml b/test_programs/execution_success/simple_bitwise/Prover.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/simple_bitwise/Prover.toml rename to test_programs/execution_success/simple_bitwise/Prover.toml diff --git a/tooling/nargo_cli/tests/execution_success/simple_bitwise/src/main.nr b/test_programs/execution_success/simple_bitwise/src/main.nr similarity index 
100% rename from tooling/nargo_cli/tests/execution_success/simple_bitwise/src/main.nr rename to test_programs/execution_success/simple_bitwise/src/main.nr diff --git a/tooling/nargo_cli/tests/execution_success/simple_comparison/Nargo.toml b/test_programs/execution_success/simple_comparison/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/simple_comparison/Nargo.toml rename to test_programs/execution_success/simple_comparison/Nargo.toml diff --git a/tooling/nargo_cli/tests/execution_success/simple_comparison/Prover.toml b/test_programs/execution_success/simple_comparison/Prover.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/simple_comparison/Prover.toml rename to test_programs/execution_success/simple_comparison/Prover.toml diff --git a/tooling/nargo_cli/tests/execution_success/simple_comparison/src/main.nr b/test_programs/execution_success/simple_comparison/src/main.nr similarity index 100% rename from tooling/nargo_cli/tests/execution_success/simple_comparison/src/main.nr rename to test_programs/execution_success/simple_comparison/src/main.nr diff --git a/tooling/nargo_cli/tests/execution_success/simple_mut/Nargo.toml b/test_programs/execution_success/simple_mut/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/simple_mut/Nargo.toml rename to test_programs/execution_success/simple_mut/Nargo.toml diff --git a/tooling/nargo_cli/tests/execution_success/simple_mut/Prover.toml b/test_programs/execution_success/simple_mut/Prover.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/simple_mut/Prover.toml rename to test_programs/execution_success/simple_mut/Prover.toml diff --git a/tooling/nargo_cli/tests/execution_success/simple_mut/src/main.nr b/test_programs/execution_success/simple_mut/src/main.nr similarity index 100% rename from tooling/nargo_cli/tests/execution_success/simple_mut/src/main.nr rename to test_programs/execution_success/simple_mut/src/main.nr diff --git a/tooling/nargo_cli/tests/execution_success/simple_not/Nargo.toml b/test_programs/execution_success/simple_not/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/simple_not/Nargo.toml rename to test_programs/execution_success/simple_not/Nargo.toml diff --git a/tooling/nargo_cli/tests/execution_success/simple_not/Prover.toml b/test_programs/execution_success/simple_not/Prover.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/simple_not/Prover.toml rename to test_programs/execution_success/simple_not/Prover.toml diff --git a/tooling/nargo_cli/tests/execution_success/simple_not/src/main.nr b/test_programs/execution_success/simple_not/src/main.nr similarity index 100% rename from tooling/nargo_cli/tests/execution_success/simple_not/src/main.nr rename to test_programs/execution_success/simple_not/src/main.nr diff --git a/tooling/nargo_cli/tests/execution_success/simple_print/Nargo.toml b/test_programs/execution_success/simple_print/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/simple_print/Nargo.toml rename to test_programs/execution_success/simple_print/Nargo.toml diff --git a/tooling/nargo_cli/tests/compile_failure/orphaned_trait_impl/Prover.toml b/test_programs/execution_success/simple_print/Prover.toml similarity index 100% rename from tooling/nargo_cli/tests/compile_failure/orphaned_trait_impl/Prover.toml rename to test_programs/execution_success/simple_print/Prover.toml diff --git 
a/tooling/nargo_cli/tests/execution_success/simple_print/src/main.nr b/test_programs/execution_success/simple_print/src/main.nr similarity index 100% rename from tooling/nargo_cli/tests/execution_success/simple_print/src/main.nr rename to test_programs/execution_success/simple_print/src/main.nr diff --git a/tooling/nargo_cli/tests/execution_success/simple_program_addition/Nargo.toml b/test_programs/execution_success/simple_program_addition/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/simple_program_addition/Nargo.toml rename to test_programs/execution_success/simple_program_addition/Nargo.toml diff --git a/tooling/nargo_cli/tests/execution_success/simple_program_addition/Prover.toml b/test_programs/execution_success/simple_program_addition/Prover.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/simple_program_addition/Prover.toml rename to test_programs/execution_success/simple_program_addition/Prover.toml diff --git a/tooling/nargo_cli/tests/execution_success/simple_program_addition/src/main.nr b/test_programs/execution_success/simple_program_addition/src/main.nr similarity index 100% rename from tooling/nargo_cli/tests/execution_success/simple_program_addition/src/main.nr rename to test_programs/execution_success/simple_program_addition/src/main.nr diff --git a/tooling/nargo_cli/tests/execution_success/simple_radix/Nargo.toml b/test_programs/execution_success/simple_radix/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/simple_radix/Nargo.toml rename to test_programs/execution_success/simple_radix/Nargo.toml diff --git a/tooling/nargo_cli/tests/execution_success/simple_radix/Prover.toml b/test_programs/execution_success/simple_radix/Prover.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/simple_radix/Prover.toml rename to test_programs/execution_success/simple_radix/Prover.toml diff --git a/tooling/nargo_cli/tests/execution_success/simple_radix/src/main.nr b/test_programs/execution_success/simple_radix/src/main.nr similarity index 100% rename from tooling/nargo_cli/tests/execution_success/simple_radix/src/main.nr rename to test_programs/execution_success/simple_radix/src/main.nr diff --git a/tooling/nargo_cli/tests/execution_success/simple_shield/Nargo.toml b/test_programs/execution_success/simple_shield/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/simple_shield/Nargo.toml rename to test_programs/execution_success/simple_shield/Nargo.toml diff --git a/tooling/nargo_cli/tests/execution_success/simple_shield/Prover.toml b/test_programs/execution_success/simple_shield/Prover.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/simple_shield/Prover.toml rename to test_programs/execution_success/simple_shield/Prover.toml diff --git a/tooling/nargo_cli/tests/execution_success/simple_shield/src/main.nr b/test_programs/execution_success/simple_shield/src/main.nr similarity index 100% rename from tooling/nargo_cli/tests/execution_success/simple_shield/src/main.nr rename to test_programs/execution_success/simple_shield/src/main.nr diff --git a/tooling/nargo_cli/tests/execution_success/simple_shift_left_right/Nargo.toml b/test_programs/execution_success/simple_shift_left_right/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/simple_shift_left_right/Nargo.toml rename to test_programs/execution_success/simple_shift_left_right/Nargo.toml diff --git 
a/tooling/nargo_cli/tests/execution_success/simple_shift_left_right/Prover.toml b/test_programs/execution_success/simple_shift_left_right/Prover.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/simple_shift_left_right/Prover.toml rename to test_programs/execution_success/simple_shift_left_right/Prover.toml diff --git a/tooling/nargo_cli/tests/execution_success/simple_shift_left_right/src/main.nr b/test_programs/execution_success/simple_shift_left_right/src/main.nr similarity index 100% rename from tooling/nargo_cli/tests/execution_success/simple_shift_left_right/src/main.nr rename to test_programs/execution_success/simple_shift_left_right/src/main.nr diff --git a/tooling/nargo_cli/tests/execution_success/slice_dynamic_index/Nargo.toml b/test_programs/execution_success/slice_dynamic_index/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/slice_dynamic_index/Nargo.toml rename to test_programs/execution_success/slice_dynamic_index/Nargo.toml diff --git a/tooling/nargo_cli/tests/execution_success/slice_dynamic_index/Prover.toml b/test_programs/execution_success/slice_dynamic_index/Prover.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/slice_dynamic_index/Prover.toml rename to test_programs/execution_success/slice_dynamic_index/Prover.toml diff --git a/test_programs/execution_success/slice_dynamic_index/src/main.nr b/test_programs/execution_success/slice_dynamic_index/src/main.nr new file mode 100644 index 00000000000..374d2ba4c26 --- /dev/null +++ b/test_programs/execution_success/slice_dynamic_index/src/main.nr @@ -0,0 +1,308 @@ +fn main(x: Field) { + // The parameters to this function must come directly from witness values (inputs to main). + regression_dynamic_slice_index(x - 1, x - 4); +} + +fn regression_dynamic_slice_index(x: Field, y: Field) { + let mut slice = []; + for i in 0..5 { + slice = slice.push_back(i); + } + assert(slice.len() == 5); + + dynamic_slice_index_set_if(slice, x, y); + dynamic_slice_index_set_else(slice, x, y); + dynamic_slice_index_set_nested_if_else_else(slice, x, y); + dynamic_slice_index_set_nested_if_else_if(slice, x, y + 1); + dynamic_slice_index_if(slice, x); + dynamic_array_index_if([0, 1, 2, 3, 4], x); + dynamic_slice_index_else(slice, x); + + dynamic_slice_merge_if(slice, x); + dynamic_slice_merge_else(slice, x); + dynamic_slice_merge_two_ifs(slice, x); + dynamic_slice_merge_mutate_between_ifs(slice, x, y); + dynamic_slice_merge_push_then_pop(slice, x, y); +} + +fn dynamic_slice_index_set_if(mut slice: [Field], x: Field, y: Field) { + assert(slice[x] == 4); + assert(slice[y] == 1); + slice[y] = 0; + assert(slice[x] == 4); + assert(slice[1] == 0); + if x as u32 < 10 { + assert(slice[x] == 4); + slice[x] = slice[x] - 2; + slice[x - 1] = slice[x]; + } else { + slice[x] = 0; + } + assert(slice[3] == 2); + assert(slice[4] == 2); +} + +fn dynamic_slice_index_set_else(mut slice: [Field], x: Field, y: Field) { + assert(slice[x] == 4); + assert(slice[y] == 1); + slice[y] = 0; + assert(slice[x] == 4); + assert(slice[1] == 0); + if x as u32 > 10 { + assert(slice[x] == 4); + slice[x] = slice[x] - 2; + slice[x - 1] = slice[x]; + } else { + slice[x] = 0; + } + assert(slice[4] == 0); +} +// This tests the case of missing a store instruction in the else branch +// of merging slices +fn dynamic_slice_index_if(mut slice: [Field], x: Field) { + if x as u32 < 10 { + assert(slice[x] == 4); + slice[x] = slice[x] - 2; + } else { + assert(slice[x] == 0); + } + assert(slice[4] == 
2); +} + +fn dynamic_array_index_if(mut array: [Field; 5], x: Field) { + if x as u32 < 10 { + assert(array[x] == 4); + array[x] = array[x] - 2; + } else { + assert(array[x] == 0); + } + assert(array[4] == 2); +} +// This tests the case of missing a store instruction in the then branch +// of merging slices +fn dynamic_slice_index_else(mut slice: [Field], x: Field) { + if x as u32 > 10 { + assert(slice[x] == 0); + } else { + assert(slice[x] == 4); + slice[x] = slice[x] - 2; + } + assert(slice[4] == 2); +} + +fn dynamic_slice_merge_if(mut slice: [Field], x: Field) { + if x as u32 < 10 { + assert(slice[x] == 4); + slice[x] = slice[x] - 2; + + slice = slice.push_back(10); + // Having an array set here checks whether we appropriately + // handle a slice length that is not yet resolving to a constant + // during flattening + slice[x] = 10; + assert(slice[slice.len() - 1] == 10); + assert(slice.len() == 6); + + slice[x] = 20; + slice[x] = slice[x] + 10; + + slice = slice.push_front(11); + assert(slice[0] == 11); + assert(slice.len() == 7); + assert(slice[5] == 30); + + slice = slice.push_front(12); + assert(slice[0] == 12); + assert(slice.len() == 8); + assert(slice[6] == 30); + + let (popped_slice, last_elem) = slice.pop_back(); + assert(last_elem == 10); + assert(popped_slice.len() == 7); + + let (first_elem, rest_of_slice) = popped_slice.pop_front(); + assert(first_elem == 12); + assert(rest_of_slice.len() == 6); + + slice = rest_of_slice.insert(x - 2, 20); + assert(slice[2] == 20); + assert(slice[6] == 30); + assert(slice.len() == 7); + + let (removed_slice, removed_elem) = slice.remove(x - 1); + // The deconstructed tuple assigns to the slice but is not seen outside of the if statement + // without a direct assignment + slice = removed_slice; + + assert(removed_elem == 1); + assert(slice.len() == 6); + } else { + assert(slice[x] == 0); + slice = slice.push_back(20); + } + + assert(slice.len() == 6); + assert(slice[slice.len() - 1] == 30); +} + +fn dynamic_slice_merge_else(mut slice: [Field], x: Field) { + if x as u32 > 10 { + assert(slice[x] == 0); + slice[x] = 2; + } else { + assert(slice[x] == 4); + slice[x] = slice[x] - 2; + slice = slice.push_back(10); + } + assert(slice.len() == 6); + assert(slice[slice.len() - 1] == 10); + + slice = slice.push_back(20); + assert(slice.len() == 7); + assert(slice[slice.len() - 1] == 20); +} + +fn dynamic_slice_index_set_nested_if_else_else(mut slice: [Field], x: Field, y: Field) { + assert(slice[x] == 4); + assert(slice[y] == 1); + slice[y] = 0; + assert(slice[x] == 4); + assert(slice[1] == 0); + if x as u32 < 10 { + slice[x] = slice[x] - 2; + if y != 1 { + slice[x] = slice[x] + 20; + } else { + if x == 5 { + // We should not hit this case + assert(slice[x] == 22); + } else { + slice[x] = 10; + slice = slice.push_back(15); + assert(slice.len() == 6); + } + assert(slice[4] == 10); + } + } else { + slice[x] = 0; + } + assert(slice[4] == 10); + assert(slice.len() == 6); + assert(slice[slice.len() - 1] == 15); + + slice = slice.push_back(20); + assert(slice.len() == 7); + assert(slice[slice.len() - 1] == 20); +} + +fn dynamic_slice_index_set_nested_if_else_if(mut slice: [Field], x: Field, y: Field) { + assert(slice[x] == 4); + assert(slice[y] == 2); + slice[y] = 0; + assert(slice[x] == 4); + assert(slice[2] == 0); + if x as u32 < 10 { + slice[x] = slice[x] - 2; + // TODO: this panics as we have a load for the slice in flattening + if y == 1 { + slice[x] = slice[x] + 20; + } else { + if x == 4 { + slice[x] = 5; + } + assert(slice[4] == 5); + } + } else { + 
slice[x] = 0; + } + assert(slice[4] == 5); +} + +fn dynamic_slice_merge_two_ifs(mut slice: [Field], x: Field) { + if x as u32 > 10 { + assert(slice[x] == 0); + slice[x] = 2; + } else { + assert(slice[x] == 4); + slice[x] = slice[x] - 2; + slice = slice.push_back(10); + } + + assert(slice.len() == 6); + assert(slice[slice.len() - 1] == 10); + + if x == 20 { + slice = slice.push_back(20); + } + + slice = slice.push_back(15); + + assert(slice.len() == 7); + assert(slice[slice.len() - 1] == 15); + + slice = slice.push_back(20); + assert(slice.len() == 8); + assert(slice[slice.len() - 1] == 20); +} + +fn dynamic_slice_merge_mutate_between_ifs(mut slice: [Field], x: Field, y: Field) { + if x != y { + slice[x] = 50; + slice = slice.push_back(y); + slice = slice.push_back(x); + } else { + slice[x] = slice[x] - 2; + slice = slice.push_back(x); + } + + slice = slice.push_back(30); + assert(slice.len() == 8); + + if x == 20 { + slice = slice.push_back(20); + } + + slice = slice.push_back(15); + + if x != 20 { + slice = slice.push_back(50); + } + + slice = slice.push_back(60); + assert(slice.len() == 11); + assert(slice[x] == 50); + assert(slice[slice.len() - 4] == 30); + assert(slice[slice.len() - 3] == 15); + assert(slice[slice.len() - 2] == 50); + assert(slice[slice.len() - 1] == 60); +} + +fn dynamic_slice_merge_push_then_pop(mut slice: [Field], x: Field, y: Field) { + if x != y { + slice[x] = 5; + slice = slice.push_back(y); + slice = slice.push_back(x); + assert(slice.len() == 7); + + let (popped_slice, elem) = slice.pop_back(); + assert(slice.len() == 7); + assert(elem == x); + slice = popped_slice; + } else { + slice = slice.push_back(x); + } + + slice = slice.push_back(30); + assert(slice.len() == 7); + + if x == 20 { + slice = slice.push_back(20); + } + + let (slice, elem) = slice.pop_back(); + assert(elem == 30); + + let (_, elem) = slice.pop_back(); + assert(elem == y); +} + diff --git a/tooling/nargo_cli/tests/execution_success/slice_struct_field/Nargo.toml b/test_programs/execution_success/slice_struct_field/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/slice_struct_field/Nargo.toml rename to test_programs/execution_success/slice_struct_field/Nargo.toml diff --git a/test_programs/execution_success/slice_struct_field/Prover.toml b/test_programs/execution_success/slice_struct_field/Prover.toml new file mode 100644 index 00000000000..7127baac5bf --- /dev/null +++ b/test_programs/execution_success/slice_struct_field/Prover.toml @@ -0,0 +1 @@ +y = "3" diff --git a/test_programs/execution_success/slice_struct_field/src/main.nr b/test_programs/execution_success/slice_struct_field/src/main.nr new file mode 100644 index 00000000000..a5b971ada4b --- /dev/null +++ b/test_programs/execution_success/slice_struct_field/src/main.nr @@ -0,0 +1,472 @@ +struct FooParent { + parent_arr: [Field; 3], + foos: [Foo], +} + +struct Bar { + inner: [Field; 3], +} + +struct Foo { + a: Field, + b: [Field], + bar: Bar, +} + +fn main(y: pub Field) { + let mut b_one = [2, 3, 20]; + b_one = b_one.push_back(20); + let foo_one = Foo { a: 1, b: b_one, bar: Bar { inner: [100, 101, 102] } }; + + let mut b_two = [5, 6, 21]; + b_two = b_two.push_back(21); + let foo_two = Foo { a: 4, b: b_two, bar: Bar { inner: [103, 104, 105] } }; + + let foo_three = Foo { a: 7, b: [8, 9, 22], bar: Bar { inner: [106, 107, 108] } }; + let mut foo_four = Foo { a: 10, b: [11, 12, 23], bar: Bar { inner: [109, 110, 111] } }; + + let mut x = [foo_one, foo_two]; + x = x.push_back(foo_three); + x = 
x.push_back(foo_four); + + assert(x[y - 3].a == 1); + let struct_slice = x[y - 3].b; + for i in 0..4 { + assert(struct_slice[i] == b_one[i]); + } + + assert(x[y - 2].a == 4); + let struct_slice = x[y - 2].b; + for i in 0..4 { + assert(struct_slice[i] == b_two[i]); + } + + assert(x[y - 1].a == 7); + let struct_slice = x[y - 1].b; + assert(struct_slice[0] == 8); + assert(struct_slice[1] == 9); + assert(struct_slice[2] == 22); + + assert(x[y].a == 10); + let struct_slice = x[y].b; + assert(struct_slice[0] == 11); + assert(struct_slice[1] == 12); + assert(struct_slice[2] == 23); + assert(x[y].bar.inner == [109, 110, 111]); + + assert(x[y - 3].bar.inner == [100, 101, 102]); + assert(x[y - 2].bar.inner == [103, 104, 105]); + assert(x[y - 1].bar.inner == [106, 107, 108]); + assert(x[y].bar.inner == [109, 110, 111]); + // Check that switching the lhs and rhs is still valid + assert([109, 110, 111] == x[y].bar.inner); + + assert(x[y - 3].bar.inner == [100, 101, 102]); + assert(x[y - 2].bar.inner == [103, 104, 105]); + assert(x[y - 1].bar.inner == [106, 107, 108]); + assert(x[y].bar.inner == [109, 110, 111]); + // Check that switching the lhs and rhs is still valid + assert([109, 110, 111] == x[y].bar.inner); + + // TODO: Enable merging nested slices + // if y != 2 { + // x[y].a = 50; + // } else { + // x[y].a = 100; + // } + // assert(x[3].a == 50); + // if y == 2 { + // x[y - 1].b = [50, 51, 52]; + // } else { + // x[y - 1].b = [100, 101, 102]; + // } + // assert(x[2].b[0] == 100); + // assert(x[2].b[1] == 101); + // assert(x[2].b[2] == 102); + + let q = x.push_back(foo_four); + let foo_parent_one = FooParent { parent_arr: [0, 1, 2], foos: x }; + let foo_parent_two = FooParent { parent_arr: [3, 4, 5], foos: q }; + let mut foo_parents = [foo_parent_one]; + foo_parents = foo_parents.push_back(foo_parent_two); + // TODO: make a separate test for entirely compile time + // foo_parents[1].foos.push_back(foo_four); + // TODO: Merging nested slices is broken + // if y == 3 { + // foo_parents[y - 2].foos[y - 1].b[y - 1] = 5000; + // } else { + // foo_parents[y - 2].foos[y - 1].b[y - 1] = 1000; + // } + + assert(foo_parents[y - 2].foos[y - 2].b[y - 1] == 21); + foo_parents[y - 2].foos[y - 2].b[y - 1] = 5000; + assert(foo_parents[y - 2].foos[y - 2].b[y - 1] == 5000); + + let b_array = foo_parents[y - 2].foos[y - 3].b; + assert(foo_parents[y - 2].foos[y - 3].a == 1); + assert(b_array[0] == 2); + assert(b_array[1] == 3); + assert(b_array[2] == 20); + assert(b_array[3] == 20); + + let b_array = foo_parents[y - 2].foos[y - 2].b; + assert(foo_parents[y - 2].foos[y - 2].a == 4); + assert(b_array[0] == 5); + assert(b_array[1] == 6); + assert(b_array[2] == 5000); + assert(b_array[3] == 21); + + assert(foo_parents[y - 2].foos[y - 1].a == 7); + foo_parents[y - 2].foos[y - 1].a = 50; + assert(foo_parents[y - 2].foos[y - 1].a == 50); + + let b_array = foo_parents[y - 2].foos[y - 1].b; + assert(b_array[0] == 8); + assert(b_array[1] == 9); + assert(b_array[2] == 22); + assert(b_array.len() == 3); + + // // Test setting a nested array with non-dynamic + let x = [5, 6, 5000, 21, 100, 101].as_slice(); + foo_parents[y - 2].foos[y - 1].b = x; + + assert(foo_parents[y - 2].foos[y - 1].b.len() == 6); + assert(foo_parents[y - 2].foos[y - 1].b[4] == 100); + assert(foo_parents[y - 2].foos[y - 1].b[5] == 101); + + // Need to account for that foo_parents is not modified outside of this function + test_basic_intrinsics_nested_slices(foo_parents, y); + test_complex_intrinsic_nested_slices(foo_parents, y); + + foo_parents[y - 2].foos[y 
- 1].b = foo_parents[y - 2].foos[y - 1].b.push_back(500); + assert(foo_parents[y - 2].foos[y - 1].b.len() == 7); + assert(foo_parents[y - 2].foos[y - 1].b[6] == 500); + + let (popped_slice, last_elem) = foo_parents[y - 2].foos[y - 1].b.pop_back(); + foo_parents[y - 2].foos[y - 1].b = popped_slice; + assert(foo_parents[y - 2].foos[y - 1].b.len() == 6); + assert(last_elem == 500); + + foo_parents[y - 2].foos[y - 1].b = foo_parents[y - 2].foos[y - 1].b.push_front(11); + assert(foo_parents[y - 2].foos[y - 1].b.len() == 7); + assert(foo_parents[y - 2].foos[y - 1].b[0] == 11); + + assert(foo_parents[y - 2].foos.len() == 5); + foo_four.a = 40; + foo_parents[y - 2].foos = foo_parents[y - 2].foos.push_back(foo_four); + assert(foo_parents[y - 2].foos.len() == 6); + assert(foo_parents[y - 2].foos[y + 2].bar.inner == [109, 110, 111]); + + foo_parents[y - 2].foos = foo_parents[y - 2].foos.push_back(foo_four); + assert(foo_parents[y - 2].foos.len() == 7); + assert(foo_parents[y - 2].foos[6].a == 40); + assert(foo_parents[y - 2].foos[5].bar.inner == [109, 110, 111]); + assert(foo_parents[y - 2].foos[6].bar.inner == [109, 110, 111]); + + foo_parents[y - 2].foos = foo_parents[y - 2].foos.push_back(foo_four); + assert(foo_parents[y - 2].foos.len() == 8); + assert(foo_parents[y - 2].foos[6].a == 40); + assert(foo_parents[y - 2].foos[5].bar.inner == [109, 110, 111]); + assert(foo_parents[y - 2].foos[6].bar.inner == [109, 110, 111]); + + foo_parents[y - 2].foos = foo_parents[y - 2].foos.push_back(foo_four); + assert(foo_parents[y - 2].foos.len() == 9); + + foo_parents[y - 2].foos = foo_parents[y - 2].foos.push_back(foo_four); + assert(foo_parents[y - 2].foos.len() == 10); + + let b_array = foo_parents[y - 2].foos[y - 1].b; + assert(b_array[0] == 11); + assert(b_array[1] == 5); + assert(b_array[2] == 6); + assert(b_array[3] == 5000); + + let b_array = foo_parents[y - 2].foos[y].b; + assert(foo_parents[y - 2].foos[y].a == 10); + assert(b_array[0] == 11); + assert(b_array[1] == 12); + assert(b_array[2] == 23); + + assert(foo_parents[y - 2].foos[y - 3].bar.inner == [100, 101, 102]); + assert(foo_parents[y - 2].foos[y - 2].bar.inner == [103, 104, 105]); + assert(foo_parents[y - 2].foos[y - 1].bar.inner == [106, 107, 108]); + assert(foo_parents[y - 2].foos[y].bar.inner == [109, 110, 111]); +} + +fn test_basic_intrinsics_nested_slices(mut foo_parents: [FooParent], y: Field) { + foo_parents[y - 2].foos[y - 1].b = foo_parents[y - 2].foos[y - 1].b.push_back(500); + assert(foo_parents[y - 2].foos[y - 1].b.len() == 7); + assert(foo_parents[y - 2].foos[y - 1].b[6] == 500); + + let (popped_slice, last_elem) = foo_parents[y - 2].foos[y - 1].b.pop_back(); + foo_parents[y - 2].foos[y - 1].b = popped_slice; + assert(foo_parents[y - 2].foos[y - 1].b.len() == 6); + assert(last_elem == 500); + + foo_parents[y - 2].foos[y - 1].b = foo_parents[y - 2].foos[y - 1].b.push_front(11); + assert(foo_parents[y - 2].foos[y - 1].b.len() == 7); + assert(foo_parents[y - 2].foos[y - 1].b[0] == 11); + + let (first_elem, rest_of_slice) = foo_parents[y - 2].foos[y - 1].b.pop_front(); + foo_parents[y - 2].foos[y - 1].b = rest_of_slice; + assert(foo_parents[y - 2].foos[y - 1].b.len() == 6); + assert(first_elem == 11); + + foo_parents[y - 2].foos[y - 1].b = foo_parents[y - 2].foos[y - 1].b.insert(2, 20); + assert(foo_parents[y - 2].foos[y - 1].b.len() == 7); + assert(foo_parents[y - 2].foos[y - 1].b[y - 1] == 20); + assert(foo_parents[y - 2].foos[y - 1].b[y] == 5000); + assert(foo_parents[y - 2].foos[y - 1].b[6] == 101); + + let (rest_of_slice, 
removed_elem) = foo_parents[y - 2].foos[y - 1].b.remove(3); + foo_parents[y - 2].foos[y - 1].b = rest_of_slice; + assert(removed_elem == 5000); + assert(foo_parents[y - 2].foos[y - 1].b.len() == 6); + assert(foo_parents[y - 2].foos[y - 1].b[2] == 20); + assert(foo_parents[y - 2].foos[y - 1].b[3] == 21); +} + +// This method test intrinsics on nested slices with complex inputs such as +// pushing a `Foo` struct onto a slice in `FooParents`. +fn test_complex_intrinsic_nested_slices(mut foo_parents: [FooParent], y: Field) { + let mut foo = Foo { a: 13, b: [14, 15, 16], bar: Bar { inner: [109, 110, 111] } }; + assert(foo_parents[y - 2].foos.len() == 5); + foo.a = 40; + foo_parents[y - 2].foos = foo_parents[y - 2].foos.push_back(foo); + assert(foo_parents[1].foos.len() == 6); + assert(foo_parents[1].foos[5].a == 40); + assert(foo_parents[1].foos[5].b[0] == 14); + assert(foo_parents[1].foos[5].b[2] == 16); + assert(foo_parents[1].foos[5].b.len() == 3); + assert(foo_parents[1].foos[5].bar.inner == [109, 110, 111]); + + foo_parents[y - 2].foos[y - 1].b = foo_parents[y - 2].foos[y - 1].b.push_back(500); + assert(foo_parents[1].foos[2].b.len() == 7); + assert(foo_parents[1].foos[2].b[6] == 500); + assert(foo_parents[1].foos[2].bar.inner == [106, 107, 108]); + assert(foo_parents[1].foos[5].a == 40); + assert(foo_parents[1].foos[5].b[0] == 14); + assert(foo_parents[1].foos[5].b[2] == 16); + assert(foo_parents[1].foos[5].b.len() == 3); + assert(foo_parents[1].foos[5].bar.inner == [109, 110, 111]); + + let (popped_slice, last_foo) = foo_parents[y - 2].foos.pop_back(); + foo_parents[y - 2].foos = popped_slice; + assert(foo_parents[y - 2].foos.len() == 5); + assert(last_foo.a == 40); + assert(last_foo.b[0] == 14); + assert(last_foo.b[1] == 15); + assert(last_foo.b[2] == 16); + assert(last_foo.bar.inner == [109, 110, 111]); + + foo_parents[y - 2].foos = foo_parents[y - 2].foos.push_front(foo); + assert(foo_parents[1].foos.len() == 6); + assert(foo_parents[1].foos[0].a == 40); + assert(foo_parents[1].foos[0].b[0] == 14); + assert(foo_parents[1].foos[0].b[1] == 15); + assert(foo_parents[1].foos[0].b[2] == 16); + assert(foo_parents[1].foos[5].a == 10); + assert(foo_parents[1].foos[5].b.len() == 3); + assert(foo_parents[1].foos[5].b[0] == 11); + assert(foo_parents[1].foos[5].b[2] == 23); + assert(foo_parents[1].foos[5].bar.inner == [109, 110, 111]); + + assert(foo_parents[1].foos[1].a == 1); + assert(foo_parents[1].foos[1].bar.inner == [100, 101, 102]); + + let (first_foo, rest_of_slice) = foo_parents[y - 2].foos.pop_front(); + + foo_parents[y - 2].foos = rest_of_slice; + assert(first_foo.a == 40); + assert(first_foo.b[0] == 14); + assert(first_foo.b[1] == 15); + assert(first_foo.b[2] == 16); + assert(first_foo.bar.inner == [109, 110, 111]); + + assert(foo_parents[1].foos[0].a == 1); + assert(foo_parents[1].foos[0].b[0] == 2); + assert(foo_parents[1].foos[0].b[1] == 3); + assert(foo_parents[1].foos[0].b[2] == 20); + assert(foo_parents[1].foos[0].b[3] == 20); + assert(foo_parents[1].foos[0].bar.inner == [100, 101, 102]); + + test_insert_remove_const_index(foo_parents, y, foo); + + // Check values before insertion + assert(foo_parents[1].foos[1].a == 4); + assert(foo_parents[1].foos[1].b[0] == 5); + assert(foo_parents[1].foos[1].b[1] == 6); + assert(foo_parents[1].foos[1].b[2] == 5000); + assert(foo_parents[1].foos[1].b[3] == 21); + assert(foo_parents[1].foos[1].bar.inner == [103, 104, 105]); + + assert(foo_parents[1].foos.len() == 5); + assert(foo_parents[1].foos[2].a == 50); + 
assert(foo_parents[1].foos[2].b[0] == 5); + assert(foo_parents[1].foos[2].b[2] == 5000); + assert(foo_parents[1].foos[2].bar.inner == [106, 107, 108]); + + assert(foo_parents[1].foos[3].a == 10); + assert(foo_parents[1].foos[3].b[0] == 11); + assert(foo_parents[1].foos[3].b[2] == 23); + assert(foo_parents[1].foos[3].bar.inner == [109, 110, 111]); + + foo_parents[y - 2].foos = foo_parents[y - 2].foos.insert(y - 1, foo); + assert(foo_parents[1].foos.len() == 6); + + // Check values correctly moved after insertion + assert(foo_parents[1].foos[0].a == 1); + assert(foo_parents[1].foos[0].b[0] == 2); + assert(foo_parents[1].foos[0].b[1] == 3); + assert(foo_parents[1].foos[0].b[2] == 20); + assert(foo_parents[1].foos[0].b[3] == 20); + assert(foo_parents[1].foos[0].bar.inner == [100, 101, 102]); + + assert(foo_parents[1].foos[1].a == 4); + assert(foo_parents[1].foos[1].b[0] == 5); + assert(foo_parents[1].foos[1].b[1] == 6); + assert(foo_parents[1].foos[1].b[2] == 5000); + assert(foo_parents[1].foos[1].b[3] == 21); + assert(foo_parents[1].foos[1].bar.inner == [103, 104, 105]); + + assert(foo_parents[1].foos[2].a == 40); + assert(foo_parents[1].foos[2].b[0] == 14); + assert(foo_parents[1].foos[2].b[2] == 16); + assert(foo_parents[1].foos[2].bar.inner == [109, 110, 111]); + + assert(foo_parents[1].foos[3].a == 50); + assert(foo_parents[1].foos[3].b[0] == 5); + assert(foo_parents[1].foos[3].b[2] == 5000); + assert(foo_parents[1].foos[3].bar.inner == [106, 107, 108]); + + assert(foo_parents[1].foos[4].a == 10); + assert(foo_parents[1].foos[4].b[0] == 11); + assert(foo_parents[1].foos[4].b[2] == 23); + assert(foo_parents[1].foos[4].bar.inner == [109, 110, 111]); + + assert(foo_parents[1].foos[5].a == 10); + assert(foo_parents[1].foos[5].b[0] == 11); + assert(foo_parents[1].foos[5].b[2] == 23); + assert(foo_parents[1].foos[5].bar.inner == [109, 110, 111]); + + let (rest_of_slice, removed_elem) = foo_parents[y - 2].foos.remove(y - 1); + foo_parents[1].foos = rest_of_slice; + + // Check that the accurate element was removed + assert(removed_elem.a == 40); + assert(removed_elem.b[0] == 14); + assert(removed_elem.b[2] == 16); + assert(removed_elem.bar.inner == [109, 110, 111]); + + // Check that we have altered our slice accurately following a removal + assert(foo_parents[1].foos[1].a == 4); + assert(foo_parents[1].foos[1].b[0] == 5); + assert(foo_parents[1].foos[1].b[1] == 6); + assert(foo_parents[1].foos[1].b[2] == 5000); + assert(foo_parents[1].foos[1].b[3] == 21); + assert(foo_parents[1].foos[1].bar.inner == [103, 104, 105]); + + assert(foo_parents[1].foos[2].a == 50); + assert(foo_parents[1].foos[2].b[0] == 5); + assert(foo_parents[1].foos[2].b[2] == 5000); + assert(foo_parents[1].foos[2].bar.inner == [106, 107, 108]); + + assert(foo_parents[1].foos[3].a == 10); + assert(foo_parents[1].foos[3].b[0] == 11); + assert(foo_parents[1].foos[3].b[2] == 23); + assert(foo_parents[1].foos[3].bar.inner == [109, 110, 111]); + + assert(foo_parents[1].foos[4].b[0] == 11); + assert(foo_parents[1].foos[4].b[2] == 23); + assert(foo_parents[1].foos[4].bar.inner == [109, 110, 111]); +} + +fn test_insert_remove_const_index(mut foo_parents: [FooParent], y: Field, foo: Foo) { + // Check values before insertion + assert(foo_parents[1].foos[1].a == 4); + assert(foo_parents[1].foos[1].b[0] == 5); + assert(foo_parents[1].foos[1].b[1] == 6); + assert(foo_parents[1].foos[1].b[2] == 5000); + assert(foo_parents[1].foos[1].b[3] == 21); + assert(foo_parents[1].foos[1].bar.inner == [103, 104, 105]); + + assert(foo_parents[1].foos.len() 
== 5); + assert(foo_parents[1].foos[2].a == 50); + assert(foo_parents[1].foos[2].b[0] == 5); + assert(foo_parents[1].foos[2].b[2] == 5000); + assert(foo_parents[1].foos[2].bar.inner == [106, 107, 108]); + + assert(foo_parents[1].foos[3].a == 10); + assert(foo_parents[1].foos[3].b[0] == 11); + assert(foo_parents[1].foos[3].b[2] == 23); + assert(foo_parents[1].foos[3].bar.inner == [109, 110, 111]); + + foo_parents[y - 2].foos = foo_parents[y - 2].foos.insert(2, foo); + assert(foo_parents[1].foos.len() == 6); + + // Check values correctly moved after insertion + assert(foo_parents[1].foos[0].a == 1); + assert(foo_parents[1].foos[0].b[0] == 2); + assert(foo_parents[1].foos[0].b[1] == 3); + assert(foo_parents[1].foos[0].b[2] == 20); + assert(foo_parents[1].foos[0].b[3] == 20); + assert(foo_parents[1].foos[0].bar.inner == [100, 101, 102]); + + assert(foo_parents[1].foos[1].a == 4); + assert(foo_parents[1].foos[1].b[0] == 5); + assert(foo_parents[1].foos[1].b[1] == 6); + assert(foo_parents[1].foos[1].b[2] == 5000); + assert(foo_parents[1].foos[1].b[3] == 21); + assert(foo_parents[1].foos[1].bar.inner == [103, 104, 105]); + + assert(foo_parents[1].foos[2].a == 40); + assert(foo_parents[1].foos[2].b[0] == 14); + assert(foo_parents[1].foos[2].b[2] == 16); + assert(foo_parents[1].foos[2].bar.inner == [109, 110, 111]); + + assert(foo_parents[1].foos[3].a == 50); + assert(foo_parents[1].foos[3].b[0] == 5); + assert(foo_parents[1].foos[3].b[2] == 5000); + assert(foo_parents[1].foos[3].bar.inner == [106, 107, 108]); + + assert(foo_parents[1].foos[4].a == 10); + assert(foo_parents[1].foos[4].b[0] == 11); + assert(foo_parents[1].foos[4].b[2] == 23); + assert(foo_parents[1].foos[4].bar.inner == [109, 110, 111]); + + assert(foo_parents[1].foos[5].a == 10); + assert(foo_parents[1].foos[5].b[0] == 11); + assert(foo_parents[1].foos[5].b[2] == 23); + assert(foo_parents[1].foos[5].bar.inner == [109, 110, 111]); + + let (rest_of_slice, removed_elem) = foo_parents[y - 2].foos.remove(2); + foo_parents[1].foos = rest_of_slice; + + // Check that the accurate element was removed + assert(removed_elem.a == 40); + assert(removed_elem.b[0] == 14); + assert(removed_elem.b[2] == 16); + assert(removed_elem.bar.inner == [109, 110, 111]); + + // Check that we have altered our slice accurately following a removal + assert(foo_parents[1].foos[1].a == 4); + assert(foo_parents[1].foos[1].b[0] == 5); + assert(foo_parents[1].foos[1].b[1] == 6); + assert(foo_parents[1].foos[1].b[2] == 5000); + assert(foo_parents[1].foos[1].b[3] == 21); + assert(foo_parents[1].foos[1].bar.inner == [103, 104, 105]); + + assert(foo_parents[1].foos[2].a == 50); + assert(foo_parents[1].foos[2].b[0] == 5); + assert(foo_parents[1].foos[2].b[2] == 5000); + assert(foo_parents[1].foos[2].bar.inner == [106, 107, 108]); + + assert(foo_parents[1].foos[3].a == 10); + assert(foo_parents[1].foos[3].b[0] == 11); + assert(foo_parents[1].foos[3].b[2] == 23); + assert(foo_parents[1].foos[3].bar.inner == [109, 110, 111]); + + assert(foo_parents[1].foos[4].b[0] == 11); + assert(foo_parents[1].foos[4].b[2] == 23); + assert(foo_parents[1].foos[4].bar.inner == [109, 110, 111]); +} diff --git a/tooling/nargo_cli/tests/execution_success/slices/Nargo.toml b/test_programs/execution_success/slices/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/slices/Nargo.toml rename to test_programs/execution_success/slices/Nargo.toml diff --git a/tooling/nargo_cli/tests/execution_success/slices/Prover.toml 
b/test_programs/execution_success/slices/Prover.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/slices/Prover.toml rename to test_programs/execution_success/slices/Prover.toml diff --git a/tooling/nargo_cli/tests/execution_success/slices/src/main.nr b/test_programs/execution_success/slices/src/main.nr similarity index 100% rename from tooling/nargo_cli/tests/execution_success/slices/src/main.nr rename to test_programs/execution_success/slices/src/main.nr diff --git a/tooling/nargo_cli/tests/execution_success/strings/Nargo.toml b/test_programs/execution_success/strings/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/strings/Nargo.toml rename to test_programs/execution_success/strings/Nargo.toml diff --git a/tooling/nargo_cli/tests/execution_success/strings/Prover.toml b/test_programs/execution_success/strings/Prover.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/strings/Prover.toml rename to test_programs/execution_success/strings/Prover.toml diff --git a/test_programs/execution_success/strings/src/main.nr b/test_programs/execution_success/strings/src/main.nr new file mode 100644 index 00000000000..cff229d368a --- /dev/null +++ b/test_programs/execution_success/strings/src/main.nr @@ -0,0 +1,85 @@ +use dep::std; +// Test global string literals +global HELLO_WORLD = "hello world"; + +fn main(message: pub str<11>, y: Field, hex_as_string: str<4>, hex_as_field: Field) { + let mut bad_message = "hello world"; + + assert(message == "hello world"); + assert(message == HELLO_WORLD); + let x = 10; + let z = x * 5; + std::println(10); + std::print(10); + + std::println(z); // x * 5 in println not yet supported + std::print(z); + std::println(x); + std::print(x); + + let array = [1, 2, 3, 5, 8]; + assert(y == 5); // Change to y != 5 to see how the later print statements are not called + std::println(array); + std::print(array); + + bad_message = "hell\0\"world"; + std::println(bad_message); + std::print(bad_message); + assert(message != bad_message); + + let hash = std::hash::pedersen_commitment([x]); + std::println(hash); + std::print(hash); + + assert(hex_as_string == "0x41"); + // assert(hex_as_string != 0x41); This will fail with a type mismatch between str[4] and Field + assert(hex_as_field == 0x41); + + // Single digit & odd length hex literals are valid + assert(hex_as_field == 0x041); + assert(hex_as_field != 0x1); +} + +#[test] +fn test_prints_strings() { + let message = "hello world!"; + + std::println(message); + std::println("goodbye world"); + + std::print(message); + std::print("\n"); + std::print("goodbye world\n"); +} + +#[test] +fn test_prints_array() { + let array = [1, 2, 3, 5, 8]; + + let s = Test { a: 1, b: 2, c: [3, 4] }; + std::println(s); + + std::println(array); + + let hash = std::hash::pedersen_commitment(array); + std::println(hash); +} + +fn failed_constraint(hex_as_field: Field) { + // When this method is called from a test method or with constant values + // a `Failed constraint` compile error will be caught before this `println` + // is executed as the input will be a constant. 
+ std::println(hex_as_field); + assert(hex_as_field != 0x41); +} + +#[test] +fn test_failed_constraint() { + failed_constraint(0x41); +} + +struct Test { + a: Field, + b: Field, + c: [Field; 2], +} diff --git a/tooling/nargo_cli/tests/execution_success/struct/Nargo.toml b/test_programs/execution_success/struct/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/struct/Nargo.toml rename to test_programs/execution_success/struct/Nargo.toml diff --git a/tooling/nargo_cli/tests/execution_success/struct/Prover.toml b/test_programs/execution_success/struct/Prover.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/struct/Prover.toml rename to test_programs/execution_success/struct/Prover.toml diff --git a/tooling/nargo_cli/tests/execution_success/struct/src/main.nr b/test_programs/execution_success/struct/src/main.nr similarity index 100% rename from tooling/nargo_cli/tests/execution_success/struct/src/main.nr rename to test_programs/execution_success/struct/src/main.nr diff --git a/tooling/nargo_cli/tests/execution_success/struct_array_inputs/Nargo.toml b/test_programs/execution_success/struct_array_inputs/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/struct_array_inputs/Nargo.toml rename to test_programs/execution_success/struct_array_inputs/Nargo.toml diff --git a/tooling/nargo_cli/tests/execution_success/struct_array_inputs/Prover.toml b/test_programs/execution_success/struct_array_inputs/Prover.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/struct_array_inputs/Prover.toml rename to test_programs/execution_success/struct_array_inputs/Prover.toml diff --git a/tooling/nargo_cli/tests/execution_success/struct_array_inputs/src/main.nr b/test_programs/execution_success/struct_array_inputs/src/main.nr similarity index 100% rename from tooling/nargo_cli/tests/execution_success/struct_array_inputs/src/main.nr rename to test_programs/execution_success/struct_array_inputs/src/main.nr diff --git a/tooling/nargo_cli/tests/execution_success/struct_fields_ordering/Nargo.toml b/test_programs/execution_success/struct_fields_ordering/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/struct_fields_ordering/Nargo.toml rename to test_programs/execution_success/struct_fields_ordering/Nargo.toml diff --git a/tooling/nargo_cli/tests/execution_success/struct_fields_ordering/Prover.toml b/test_programs/execution_success/struct_fields_ordering/Prover.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/struct_fields_ordering/Prover.toml rename to test_programs/execution_success/struct_fields_ordering/Prover.toml diff --git a/tooling/nargo_cli/tests/execution_success/struct_fields_ordering/src/main.nr b/test_programs/execution_success/struct_fields_ordering/src/main.nr similarity index 100% rename from tooling/nargo_cli/tests/execution_success/struct_fields_ordering/src/main.nr rename to test_programs/execution_success/struct_fields_ordering/src/main.nr diff --git a/tooling/nargo_cli/tests/execution_success/struct_inputs/Nargo.toml b/test_programs/execution_success/struct_inputs/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/struct_inputs/Nargo.toml rename to test_programs/execution_success/struct_inputs/Nargo.toml diff --git a/tooling/nargo_cli/tests/execution_success/struct_inputs/Prover.toml b/test_programs/execution_success/struct_inputs/Prover.toml similarity index 100% 
rename from tooling/nargo_cli/tests/execution_success/struct_inputs/Prover.toml rename to test_programs/execution_success/struct_inputs/Prover.toml diff --git a/tooling/nargo_cli/tests/execution_success/struct_inputs/src/foo.nr b/test_programs/execution_success/struct_inputs/src/foo.nr similarity index 100% rename from tooling/nargo_cli/tests/execution_success/struct_inputs/src/foo.nr rename to test_programs/execution_success/struct_inputs/src/foo.nr diff --git a/tooling/nargo_cli/tests/execution_success/struct_inputs/src/foo/bar.nr b/test_programs/execution_success/struct_inputs/src/foo/bar.nr similarity index 100% rename from tooling/nargo_cli/tests/execution_success/struct_inputs/src/foo/bar.nr rename to test_programs/execution_success/struct_inputs/src/foo/bar.nr diff --git a/tooling/nargo_cli/tests/execution_success/struct_inputs/src/main.nr b/test_programs/execution_success/struct_inputs/src/main.nr similarity index 100% rename from tooling/nargo_cli/tests/execution_success/struct_inputs/src/main.nr rename to test_programs/execution_success/struct_inputs/src/main.nr diff --git a/tooling/nargo_cli/tests/execution_success/submodules/Nargo.toml b/test_programs/execution_success/submodules/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/submodules/Nargo.toml rename to test_programs/execution_success/submodules/Nargo.toml diff --git a/tooling/nargo_cli/tests/execution_success/submodules/Prover.toml b/test_programs/execution_success/submodules/Prover.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/submodules/Prover.toml rename to test_programs/execution_success/submodules/Prover.toml diff --git a/test_programs/execution_success/submodules/src/main.nr b/test_programs/execution_success/submodules/src/main.nr new file mode 100644 index 00000000000..f937af74627 --- /dev/null +++ b/test_programs/execution_success/submodules/src/main.nr @@ -0,0 +1,15 @@ +use my_submodule::my_helper; + +fn main(x: u1, y: u1) { + my_helper(); + my_submodule::my_bool_or(x, y); +} + +mod my_submodule { + pub fn my_bool_or(x: u1, y: u1) { + assert(x | y == 1); + } + + pub fn my_helper() {} +} + diff --git a/tooling/nargo_cli/tests/execution_success/to_be_bytes/Nargo.toml b/test_programs/execution_success/to_be_bytes/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/to_be_bytes/Nargo.toml rename to test_programs/execution_success/to_be_bytes/Nargo.toml diff --git a/tooling/nargo_cli/tests/execution_success/to_be_bytes/Prover.toml b/test_programs/execution_success/to_be_bytes/Prover.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/to_be_bytes/Prover.toml rename to test_programs/execution_success/to_be_bytes/Prover.toml diff --git a/tooling/nargo_cli/tests/execution_success/to_be_bytes/src/main.nr b/test_programs/execution_success/to_be_bytes/src/main.nr similarity index 100% rename from tooling/nargo_cli/tests/execution_success/to_be_bytes/src/main.nr rename to test_programs/execution_success/to_be_bytes/src/main.nr diff --git a/tooling/nargo_cli/tests/execution_success/to_bytes_consistent/Nargo.toml b/test_programs/execution_success/to_bytes_consistent/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/to_bytes_consistent/Nargo.toml rename to test_programs/execution_success/to_bytes_consistent/Nargo.toml diff --git a/tooling/nargo_cli/tests/execution_success/to_bytes_consistent/Prover.toml 
b/test_programs/execution_success/to_bytes_consistent/Prover.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/to_bytes_consistent/Prover.toml rename to test_programs/execution_success/to_bytes_consistent/Prover.toml diff --git a/tooling/nargo_cli/tests/execution_success/to_bytes_consistent/src/main.nr b/test_programs/execution_success/to_bytes_consistent/src/main.nr similarity index 100% rename from tooling/nargo_cli/tests/execution_success/to_bytes_consistent/src/main.nr rename to test_programs/execution_success/to_bytes_consistent/src/main.nr diff --git a/tooling/nargo_cli/tests/execution_success/to_bytes_integration/Nargo.toml b/test_programs/execution_success/to_bytes_integration/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/to_bytes_integration/Nargo.toml rename to test_programs/execution_success/to_bytes_integration/Nargo.toml diff --git a/tooling/nargo_cli/tests/execution_success/to_bytes_integration/Prover.toml b/test_programs/execution_success/to_bytes_integration/Prover.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/to_bytes_integration/Prover.toml rename to test_programs/execution_success/to_bytes_integration/Prover.toml diff --git a/tooling/nargo_cli/tests/execution_success/to_bytes_integration/src/main.nr b/test_programs/execution_success/to_bytes_integration/src/main.nr similarity index 100% rename from tooling/nargo_cli/tests/execution_success/to_bytes_integration/src/main.nr rename to test_programs/execution_success/to_bytes_integration/src/main.nr diff --git a/tooling/nargo_cli/tests/execution_success/to_le_bytes/Nargo.toml b/test_programs/execution_success/to_le_bytes/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/to_le_bytes/Nargo.toml rename to test_programs/execution_success/to_le_bytes/Nargo.toml diff --git a/tooling/nargo_cli/tests/execution_success/to_le_bytes/Prover.toml b/test_programs/execution_success/to_le_bytes/Prover.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/to_le_bytes/Prover.toml rename to test_programs/execution_success/to_le_bytes/Prover.toml diff --git a/tooling/nargo_cli/tests/execution_success/to_le_bytes/src/main.nr b/test_programs/execution_success/to_le_bytes/src/main.nr similarity index 100% rename from tooling/nargo_cli/tests/execution_success/to_le_bytes/src/main.nr rename to test_programs/execution_success/to_le_bytes/src/main.nr diff --git a/tooling/nargo_cli/tests/execution_success/trait_as_return_type/Nargo.toml b/test_programs/execution_success/trait_as_return_type/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/trait_as_return_type/Nargo.toml rename to test_programs/execution_success/trait_as_return_type/Nargo.toml diff --git a/tooling/nargo_cli/tests/execution_success/trait_as_return_type/Prover.toml b/test_programs/execution_success/trait_as_return_type/Prover.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/trait_as_return_type/Prover.toml rename to test_programs/execution_success/trait_as_return_type/Prover.toml diff --git a/tooling/nargo_cli/tests/execution_success/trait_as_return_type/src/main.nr b/test_programs/execution_success/trait_as_return_type/src/main.nr similarity index 100% rename from tooling/nargo_cli/tests/execution_success/trait_as_return_type/src/main.nr rename to test_programs/execution_success/trait_as_return_type/src/main.nr diff --git 
a/tooling/nargo_cli/tests/execution_success/trait_impl_base_type/Nargo.toml b/test_programs/execution_success/trait_impl_base_type/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/trait_impl_base_type/Nargo.toml rename to test_programs/execution_success/trait_impl_base_type/Nargo.toml diff --git a/tooling/nargo_cli/tests/execution_success/trait_impl_base_type/Prover.toml b/test_programs/execution_success/trait_impl_base_type/Prover.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/trait_impl_base_type/Prover.toml rename to test_programs/execution_success/trait_impl_base_type/Prover.toml diff --git a/tooling/nargo_cli/tests/execution_success/trait_impl_base_type/src/main.nr b/test_programs/execution_success/trait_impl_base_type/src/main.nr similarity index 100% rename from tooling/nargo_cli/tests/execution_success/trait_impl_base_type/src/main.nr rename to test_programs/execution_success/trait_impl_base_type/src/main.nr diff --git a/tooling/nargo_cli/tests/execution_success/traits_in_crates_1/Nargo.toml b/test_programs/execution_success/traits_in_crates_1/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/traits_in_crates_1/Nargo.toml rename to test_programs/execution_success/traits_in_crates_1/Nargo.toml diff --git a/tooling/nargo_cli/tests/execution_success/traits_in_crates_1/Prover.toml b/test_programs/execution_success/traits_in_crates_1/Prover.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/traits_in_crates_1/Prover.toml rename to test_programs/execution_success/traits_in_crates_1/Prover.toml diff --git a/tooling/nargo_cli/tests/execution_success/traits_in_crates_1/crate1/Nargo.toml b/test_programs/execution_success/traits_in_crates_1/crate1/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/traits_in_crates_1/crate1/Nargo.toml rename to test_programs/execution_success/traits_in_crates_1/crate1/Nargo.toml diff --git a/tooling/nargo_cli/tests/execution_success/traits_in_crates_1/crate1/src/lib.nr b/test_programs/execution_success/traits_in_crates_1/crate1/src/lib.nr similarity index 100% rename from tooling/nargo_cli/tests/execution_success/traits_in_crates_1/crate1/src/lib.nr rename to test_programs/execution_success/traits_in_crates_1/crate1/src/lib.nr diff --git a/tooling/nargo_cli/tests/execution_success/traits_in_crates_1/crate2/Nargo.toml b/test_programs/execution_success/traits_in_crates_1/crate2/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/traits_in_crates_1/crate2/Nargo.toml rename to test_programs/execution_success/traits_in_crates_1/crate2/Nargo.toml diff --git a/tooling/nargo_cli/tests/execution_success/traits_in_crates_1/crate2/src/lib.nr b/test_programs/execution_success/traits_in_crates_1/crate2/src/lib.nr similarity index 100% rename from tooling/nargo_cli/tests/execution_success/traits_in_crates_1/crate2/src/lib.nr rename to test_programs/execution_success/traits_in_crates_1/crate2/src/lib.nr diff --git a/tooling/nargo_cli/tests/execution_success/traits_in_crates_1/src/main.nr b/test_programs/execution_success/traits_in_crates_1/src/main.nr similarity index 100% rename from tooling/nargo_cli/tests/execution_success/traits_in_crates_1/src/main.nr rename to test_programs/execution_success/traits_in_crates_1/src/main.nr diff --git a/tooling/nargo_cli/tests/execution_success/traits_in_crates_2/Nargo.toml 
b/test_programs/execution_success/traits_in_crates_2/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/traits_in_crates_2/Nargo.toml rename to test_programs/execution_success/traits_in_crates_2/Nargo.toml diff --git a/tooling/nargo_cli/tests/execution_success/traits_in_crates_2/Prover.toml b/test_programs/execution_success/traits_in_crates_2/Prover.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/traits_in_crates_2/Prover.toml rename to test_programs/execution_success/traits_in_crates_2/Prover.toml diff --git a/tooling/nargo_cli/tests/execution_success/traits_in_crates_2/crate1/Nargo.toml b/test_programs/execution_success/traits_in_crates_2/crate1/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/traits_in_crates_2/crate1/Nargo.toml rename to test_programs/execution_success/traits_in_crates_2/crate1/Nargo.toml diff --git a/tooling/nargo_cli/tests/execution_success/traits_in_crates_2/crate1/src/lib.nr b/test_programs/execution_success/traits_in_crates_2/crate1/src/lib.nr similarity index 100% rename from tooling/nargo_cli/tests/execution_success/traits_in_crates_2/crate1/src/lib.nr rename to test_programs/execution_success/traits_in_crates_2/crate1/src/lib.nr diff --git a/tooling/nargo_cli/tests/execution_success/traits_in_crates_2/crate2/Nargo.toml b/test_programs/execution_success/traits_in_crates_2/crate2/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/traits_in_crates_2/crate2/Nargo.toml rename to test_programs/execution_success/traits_in_crates_2/crate2/Nargo.toml diff --git a/tooling/nargo_cli/tests/execution_success/traits_in_crates_2/crate2/src/lib.nr b/test_programs/execution_success/traits_in_crates_2/crate2/src/lib.nr similarity index 100% rename from tooling/nargo_cli/tests/execution_success/traits_in_crates_2/crate2/src/lib.nr rename to test_programs/execution_success/traits_in_crates_2/crate2/src/lib.nr diff --git a/tooling/nargo_cli/tests/execution_success/traits_in_crates_2/src/main.nr b/test_programs/execution_success/traits_in_crates_2/src/main.nr similarity index 100% rename from tooling/nargo_cli/tests/execution_success/traits_in_crates_2/src/main.nr rename to test_programs/execution_success/traits_in_crates_2/src/main.nr diff --git a/tooling/nargo_cli/tests/execution_success/tuple_inputs/Nargo.toml b/test_programs/execution_success/tuple_inputs/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/tuple_inputs/Nargo.toml rename to test_programs/execution_success/tuple_inputs/Nargo.toml diff --git a/tooling/nargo_cli/tests/execution_success/tuple_inputs/Prover.toml b/test_programs/execution_success/tuple_inputs/Prover.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/tuple_inputs/Prover.toml rename to test_programs/execution_success/tuple_inputs/Prover.toml diff --git a/tooling/nargo_cli/tests/execution_success/tuple_inputs/src/main.nr b/test_programs/execution_success/tuple_inputs/src/main.nr similarity index 100% rename from tooling/nargo_cli/tests/execution_success/tuple_inputs/src/main.nr rename to test_programs/execution_success/tuple_inputs/src/main.nr diff --git a/tooling/nargo_cli/tests/execution_success/tuples/Nargo.toml b/test_programs/execution_success/tuples/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/tuples/Nargo.toml rename to test_programs/execution_success/tuples/Nargo.toml diff --git 
a/tooling/nargo_cli/tests/execution_success/tuples/Prover.toml b/test_programs/execution_success/tuples/Prover.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/tuples/Prover.toml rename to test_programs/execution_success/tuples/Prover.toml diff --git a/tooling/nargo_cli/tests/execution_success/tuples/src/main.nr b/test_programs/execution_success/tuples/src/main.nr similarity index 100% rename from tooling/nargo_cli/tests/execution_success/tuples/src/main.nr rename to test_programs/execution_success/tuples/src/main.nr diff --git a/tooling/nargo_cli/tests/execution_success/type_aliases/Nargo.toml b/test_programs/execution_success/type_aliases/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/type_aliases/Nargo.toml rename to test_programs/execution_success/type_aliases/Nargo.toml diff --git a/tooling/nargo_cli/tests/execution_success/type_aliases/Prover.toml b/test_programs/execution_success/type_aliases/Prover.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/type_aliases/Prover.toml rename to test_programs/execution_success/type_aliases/Prover.toml diff --git a/tooling/nargo_cli/tests/execution_success/type_aliases/src/main.nr b/test_programs/execution_success/type_aliases/src/main.nr similarity index 100% rename from tooling/nargo_cli/tests/execution_success/type_aliases/src/main.nr rename to test_programs/execution_success/type_aliases/src/main.nr diff --git a/test_programs/execution_success/u128/Nargo.toml b/test_programs/execution_success/u128/Nargo.toml new file mode 100644 index 00000000000..c1dcd84db04 --- /dev/null +++ b/test_programs/execution_success/u128/Nargo.toml @@ -0,0 +1,6 @@ +[package] +name = "u128" +type = "bin" +authors = [""] + +[dependencies] diff --git a/test_programs/execution_success/u128/Prover.toml b/test_programs/execution_success/u128/Prover.toml new file mode 100644 index 00000000000..961db9825a7 --- /dev/null +++ b/test_programs/execution_success/u128/Prover.toml @@ -0,0 +1,7 @@ +x = "3" +y = "4" +z = "7" +hexa ="0x1f03a" +[big_int] +lo = 1 +hi = 2 \ No newline at end of file diff --git a/test_programs/execution_success/u128/src/main.nr b/test_programs/execution_success/u128/src/main.nr new file mode 100644 index 00000000000..4c734f3a8f9 --- /dev/null +++ b/test_programs/execution_success/u128/src/main.nr @@ -0,0 +1,44 @@ +use dep::std; + +fn main(mut x: u32, y: u32, z: u32, big_int: U128, hexa: str<7>) { + let a = U128::from_u64s_le(x as u64, x as u64); + let b = U128::from_u64s_le(y as u64, x as u64); + let c = a + b; + assert(c.lo == z as Field); + assert(c.hi == 2 * x as Field); + assert(U128::from_hex(hexa).lo == 0x1f03a); + let t1 = U128::from_hex("0x9d9c7a87771f03a23783f9d9c7a8777"); + let t2 = U128::from_hex("0x45a26c708BFCF39041"); + let t = t1 + t2; + assert(t.lo == 0xc5e4b029996e17b8); + assert(t.hi == 0x09d9c7a87771f07f); + let t3 = U128::from_le_bytes(t.to_le_bytes()); + assert(t == t3); + + let t4 = t - t2; + assert(t4 == t1); + + let t5 = U128::from_u64s_le(0, 1); + let t6 = U128::from_u64s_le(1, 0); + assert((t5 - t6).hi == 0); + + assert( + (U128::from_hex("0x71f03a23783f9d9c7a8777") * U128::from_hex("0x8BFCF39041")).hi + == U128::from_hex("0x3e4e0471b873470e247c824e61445537").hi + ); + let q = U128::from_hex("0x3e4e0471b873470e247c824e61445537") / U128::from_hex("0x8BFCF39041"); + assert(q == U128::from_hex("0x71f03a23783f9d9c7a8777")); + + assert(big_int.hi == 2); + + let mut small_int = U128::from_integer(x); + assert(small_int.lo == x as 
Field); + assert(x == small_int.to_integer()); + let shift = small_int << small_int; + assert(shift == U128::from_integer(x << x)); + assert(shift >> small_int == small_int); + assert(shift >> U128::from_integer(127) == U128::from_integer(0)); + assert(shift << U128::from_integer(127) == U128::from_integer(0)); + +} + diff --git a/tooling/nargo_cli/tests/compile_success_empty/unconstrained_empty/Nargo.toml b/test_programs/execution_success/unconstrained_empty/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/compile_success_empty/unconstrained_empty/Nargo.toml rename to test_programs/execution_success/unconstrained_empty/Nargo.toml diff --git a/tooling/nargo_cli/tests/compile_success_empty/unconstrained_empty/src/main.nr b/test_programs/execution_success/unconstrained_empty/src/main.nr similarity index 100% rename from tooling/nargo_cli/tests/compile_success_empty/unconstrained_empty/src/main.nr rename to test_programs/execution_success/unconstrained_empty/src/main.nr diff --git a/test_programs/execution_success/unsafe_range_constraint/Nargo.toml b/test_programs/execution_success/unsafe_range_constraint/Nargo.toml new file mode 100644 index 00000000000..8714d95ed54 --- /dev/null +++ b/test_programs/execution_success/unsafe_range_constraint/Nargo.toml @@ -0,0 +1,7 @@ +[package] +name = "unsafe_range_constraint" +version = "0.1.0" +type = "bin" +authors = [""] + +[dependencies] diff --git a/test_programs/execution_success/unsafe_range_constraint/Prover.toml b/test_programs/execution_success/unsafe_range_constraint/Prover.toml new file mode 100644 index 00000000000..07890234a19 --- /dev/null +++ b/test_programs/execution_success/unsafe_range_constraint/Prover.toml @@ -0,0 +1 @@ +x = "3" diff --git a/test_programs/execution_success/unsafe_range_constraint/src/main.nr b/test_programs/execution_success/unsafe_range_constraint/src/main.nr new file mode 100644 index 00000000000..ead5613bcce --- /dev/null +++ b/test_programs/execution_success/unsafe_range_constraint/src/main.nr @@ -0,0 +1,5 @@ +// Test that we can apply a range constraint to a field using +// a builtin. 
+fn main(x: Field) { + x.assert_max_bit_size(48); +} diff --git a/tooling/nargo_cli/tests/execution_success/workspace/Nargo.toml b/test_programs/execution_success/workspace/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/workspace/Nargo.toml rename to test_programs/execution_success/workspace/Nargo.toml diff --git a/tooling/nargo_cli/tests/execution_success/workspace/Prover.toml b/test_programs/execution_success/workspace/Prover.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/workspace/Prover.toml rename to test_programs/execution_success/workspace/Prover.toml diff --git a/tooling/nargo_cli/tests/execution_success/workspace/crates/a/Nargo.toml b/test_programs/execution_success/workspace/crates/a/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/workspace/crates/a/Nargo.toml rename to test_programs/execution_success/workspace/crates/a/Nargo.toml diff --git a/tooling/nargo_cli/tests/execution_success/workspace/crates/a/Prover.toml b/test_programs/execution_success/workspace/crates/a/Prover.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/workspace/crates/a/Prover.toml rename to test_programs/execution_success/workspace/crates/a/Prover.toml diff --git a/tooling/nargo_cli/tests/execution_success/workspace_default_member/a/src/main.nr b/test_programs/execution_success/workspace/crates/a/src/main.nr similarity index 100% rename from tooling/nargo_cli/tests/execution_success/workspace_default_member/a/src/main.nr rename to test_programs/execution_success/workspace/crates/a/src/main.nr diff --git a/tooling/nargo_cli/tests/execution_success/workspace/crates/b/Nargo.toml b/test_programs/execution_success/workspace/crates/b/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/workspace/crates/b/Nargo.toml rename to test_programs/execution_success/workspace/crates/b/Nargo.toml diff --git a/tooling/nargo_cli/tests/execution_success/workspace/crates/b/Prover.toml b/test_programs/execution_success/workspace/crates/b/Prover.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/workspace/crates/b/Prover.toml rename to test_programs/execution_success/workspace/crates/b/Prover.toml diff --git a/tooling/nargo_cli/tests/execution_success/workspace/crates/b/src/main.nr b/test_programs/execution_success/workspace/crates/b/src/main.nr similarity index 100% rename from tooling/nargo_cli/tests/execution_success/workspace/crates/b/src/main.nr rename to test_programs/execution_success/workspace/crates/b/src/main.nr diff --git a/tooling/nargo_cli/tests/execution_success/workspace_default_member/Nargo.toml b/test_programs/execution_success/workspace_default_member/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/workspace_default_member/Nargo.toml rename to test_programs/execution_success/workspace_default_member/Nargo.toml diff --git a/tooling/nargo_cli/tests/execution_success/workspace_default_member/Prover.toml b/test_programs/execution_success/workspace_default_member/Prover.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/workspace_default_member/Prover.toml rename to test_programs/execution_success/workspace_default_member/Prover.toml diff --git a/tooling/nargo_cli/tests/execution_success/workspace_default_member/a/Nargo.toml b/test_programs/execution_success/workspace_default_member/a/Nargo.toml similarity index 100% rename from 
tooling/nargo_cli/tests/execution_success/workspace_default_member/a/Nargo.toml rename to test_programs/execution_success/workspace_default_member/a/Nargo.toml diff --git a/tooling/nargo_cli/tests/execution_success/workspace_default_member/a/Prover.toml b/test_programs/execution_success/workspace_default_member/a/Prover.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/workspace_default_member/a/Prover.toml rename to test_programs/execution_success/workspace_default_member/a/Prover.toml diff --git a/test_programs/execution_success/workspace_default_member/a/src/main.nr b/test_programs/execution_success/workspace_default_member/a/src/main.nr new file mode 100644 index 00000000000..cf72627da2e --- /dev/null +++ b/test_programs/execution_success/workspace_default_member/a/src/main.nr @@ -0,0 +1,3 @@ +fn main(x: Field, y: pub Field) { + assert(x == y); +} diff --git a/tooling/nargo_cli/tests/execution_success/workspace_default_member/b/Nargo.toml b/test_programs/execution_success/workspace_default_member/b/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/workspace_default_member/b/Nargo.toml rename to test_programs/execution_success/workspace_default_member/b/Nargo.toml diff --git a/tooling/nargo_cli/tests/execution_success/workspace_default_member/b/Prover.toml b/test_programs/execution_success/workspace_default_member/b/Prover.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/workspace_default_member/b/Prover.toml rename to test_programs/execution_success/workspace_default_member/b/Prover.toml diff --git a/tooling/nargo_cli/tests/compile_failure/workspace_missing_toml/crates/b/src/main.nr b/test_programs/execution_success/workspace_default_member/b/src/main.nr similarity index 100% rename from tooling/nargo_cli/tests/compile_failure/workspace_missing_toml/crates/b/src/main.nr rename to test_programs/execution_success/workspace_default_member/b/src/main.nr diff --git a/tooling/nargo_cli/tests/execution_success/xor/Nargo.toml b/test_programs/execution_success/xor/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/xor/Nargo.toml rename to test_programs/execution_success/xor/Nargo.toml diff --git a/tooling/nargo_cli/tests/execution_success/xor/Prover.toml b/test_programs/execution_success/xor/Prover.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/xor/Prover.toml rename to test_programs/execution_success/xor/Prover.toml diff --git a/tooling/nargo_cli/tests/execution_success/xor/src/main.nr b/test_programs/execution_success/xor/src/main.nr similarity index 100% rename from tooling/nargo_cli/tests/execution_success/xor/src/main.nr rename to test_programs/execution_success/xor/src/main.nr diff --git a/tooling/nargo_cli/tests/gates_report.sh b/test_programs/gates_report.sh similarity index 97% rename from tooling/nargo_cli/tests/gates_report.sh rename to test_programs/gates_report.sh index e06e6812e9d..4192c581376 100755 --- a/tooling/nargo_cli/tests/gates_report.sh +++ b/test_programs/gates_report.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash set -e # These tests are incompatible with gas reporting diff --git a/tooling/nargo_cli/tests/noir_test_failure/should_fail_mismatch/Nargo.toml b/test_programs/noir_test_failure/should_fail_mismatch/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/noir_test_failure/should_fail_mismatch/Nargo.toml rename to test_programs/noir_test_failure/should_fail_mismatch/Nargo.toml 
diff --git a/tooling/nargo_cli/tests/test_libraries/bad_name/src/lib.nr b/test_programs/noir_test_failure/should_fail_mismatch/Prover.toml similarity index 100% rename from tooling/nargo_cli/tests/test_libraries/bad_name/src/lib.nr rename to test_programs/noir_test_failure/should_fail_mismatch/Prover.toml diff --git a/test_programs/noir_test_failure/should_fail_mismatch/src/main.nr b/test_programs/noir_test_failure/should_fail_mismatch/src/main.nr new file mode 100644 index 00000000000..253e999ce07 --- /dev/null +++ b/test_programs/noir_test_failure/should_fail_mismatch/src/main.nr @@ -0,0 +1,15 @@ +#[test(should_fail_with = "Not equal")] +fn test_different_string() { + assert_eq(0, 1, "Different string"); +} +// The assert message has a space +#[test(should_fail_with = "Not equal")] +fn test_with_extra_space() { + assert_eq(0, 1, "Not equal "); +} +// The assert message has a space +#[test(should_fail_with = "Not equal")] +fn test_runtime_mismatch() { + // We use a pedersen commitment here so that the assertion failure is only known at runtime. + assert_eq(dep::std::hash::pedersen_commitment([27]).x, 0, "Not equal "); +} diff --git a/tooling/nargo_cli/tests/noir_test_success/should_fail_with_matches/Nargo.toml b/test_programs/noir_test_success/should_fail_with_matches/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/noir_test_success/should_fail_with_matches/Nargo.toml rename to test_programs/noir_test_success/should_fail_with_matches/Nargo.toml diff --git a/test_programs/noir_test_success/should_fail_with_matches/Prover.toml b/test_programs/noir_test_success/should_fail_with_matches/Prover.toml new file mode 100644 index 00000000000..e69de29bb2d diff --git a/tooling/nargo_cli/tests/noir_test_success/should_fail_with_matches/src/main.nr b/test_programs/noir_test_success/should_fail_with_matches/src/main.nr similarity index 100% rename from tooling/nargo_cli/tests/noir_test_success/should_fail_with_matches/src/main.nr rename to test_programs/noir_test_success/should_fail_with_matches/src/main.nr diff --git a/test_programs/rebuild.sh b/test_programs/rebuild.sh new file mode 100755 index 00000000000..a3137920fd5 --- /dev/null +++ b/test_programs/rebuild.sh @@ -0,0 +1,62 @@ +#!/usr/bin/env bash +set -e + +process_dir() { + local dir=$1 + local current_dir=$2 + local dir_name=$(basename "$dir") + + if [[ ! -d "$current_dir/acir_artifacts/$dir_name" ]]; then + mkdir -p $current_dir/acir_artifacts/$dir_name + fi + + cd $dir + if [ -d ./target/ ]; then + rm -r ./target/ + fi + nargo compile --only-acir && nargo execute witness + + if [ -d "$current_dir/acir_artifacts/$dir_name/target" ]; then + rm -r "$current_dir/acir_artifacts/$dir_name/target" + fi + mkdir $current_dir/acir_artifacts/$dir_name/target + + mv ./target/*.gz $current_dir/acir_artifacts/$dir_name/target/ + + cd $current_dir +} + +export -f process_dir + +excluded_dirs=("workspace" "workspace_default_member") +current_dir=$(pwd) +base_path="$current_dir/execution_success" + +rm -rf $current_dir/acir_artifacts +mkdir -p $current_dir/acir_artifacts + +# Gather directories to process. +dirs_to_process=() +for dir in $base_path/*; do + if [[ ! -d $dir ]] || [[ " ${excluded_dirs[@]} " =~ " $(basename "$dir") " ]]; then + continue + fi + dirs_to_process+=("$dir") +done + +# Process each directory in parallel +pids=() +for dir in "${dirs_to_process[@]}"; do + process_dir "$dir" "$current_dir" & + pids+=($!) +done + +# Check the exit status of each background job. 
+for pid in "${pids[@]}"; do + wait $pid || exit_status=$? +done + +# Exit with a failure status if any job failed. +if [ ! -z "$exit_status" ]; then + exit $exit_status +fi \ No newline at end of file diff --git a/tooling/nargo_cli/tests/test_libraries/bad_impl/Nargo.toml b/test_programs/test_libraries/bad_impl/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/test_libraries/bad_impl/Nargo.toml rename to test_programs/test_libraries/bad_impl/Nargo.toml diff --git a/tooling/nargo_cli/tests/test_libraries/bad_impl/src/lib.nr b/test_programs/test_libraries/bad_impl/src/lib.nr similarity index 100% rename from tooling/nargo_cli/tests/test_libraries/bad_impl/src/lib.nr rename to test_programs/test_libraries/bad_impl/src/lib.nr diff --git a/tooling/nargo_cli/tests/test_libraries/bad_name/Nargo.toml b/test_programs/test_libraries/bad_name/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/test_libraries/bad_name/Nargo.toml rename to test_programs/test_libraries/bad_name/Nargo.toml diff --git a/test_programs/test_libraries/bad_name/src/lib.nr b/test_programs/test_libraries/bad_name/src/lib.nr new file mode 100644 index 00000000000..e69de29bb2d diff --git a/tooling/nargo_cli/tests/test_libraries/bin_dep/Nargo.toml b/test_programs/test_libraries/bin_dep/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/test_libraries/bin_dep/Nargo.toml rename to test_programs/test_libraries/bin_dep/Nargo.toml diff --git a/tooling/nargo_cli/tests/test_libraries/bin_dep/src/main.nr b/test_programs/test_libraries/bin_dep/src/main.nr similarity index 100% rename from tooling/nargo_cli/tests/test_libraries/bin_dep/src/main.nr rename to test_programs/test_libraries/bin_dep/src/main.nr diff --git a/tooling/nargo_cli/tests/test_libraries/diamond_deps_1/Nargo.toml b/test_programs/test_libraries/diamond_deps_1/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/test_libraries/diamond_deps_1/Nargo.toml rename to test_programs/test_libraries/diamond_deps_1/Nargo.toml diff --git a/tooling/nargo_cli/tests/test_libraries/diamond_deps_1/src/lib.nr b/test_programs/test_libraries/diamond_deps_1/src/lib.nr similarity index 100% rename from tooling/nargo_cli/tests/test_libraries/diamond_deps_1/src/lib.nr rename to test_programs/test_libraries/diamond_deps_1/src/lib.nr diff --git a/tooling/nargo_cli/tests/test_libraries/diamond_deps_2/Nargo.toml b/test_programs/test_libraries/diamond_deps_2/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/test_libraries/diamond_deps_2/Nargo.toml rename to test_programs/test_libraries/diamond_deps_2/Nargo.toml diff --git a/tooling/nargo_cli/tests/test_libraries/diamond_deps_2/src/lib.nr b/test_programs/test_libraries/diamond_deps_2/src/lib.nr similarity index 100% rename from tooling/nargo_cli/tests/test_libraries/diamond_deps_2/src/lib.nr rename to test_programs/test_libraries/diamond_deps_2/src/lib.nr diff --git a/test_programs/test_libraries/exporting_lib/Nargo.toml b/test_programs/test_libraries/exporting_lib/Nargo.toml new file mode 100644 index 00000000000..628418c0608 --- /dev/null +++ b/test_programs/test_libraries/exporting_lib/Nargo.toml @@ -0,0 +1,6 @@ +[package] +name = "exporting_lib" +type = "lib" +authors = [""] + +[dependencies] diff --git a/test_programs/test_libraries/exporting_lib/src/lib.nr b/test_programs/test_libraries/exporting_lib/src/lib.nr new file mode 100644 index 00000000000..af1fd7a32de --- /dev/null +++ b/test_programs/test_libraries/exporting_lib/src/lib.nr @@ -0,0 +1,10 @@ + 
+struct MyStruct { + inner: Field +} + +type FooStruct = MyStruct; + +fn is_struct_zero(val: MyStruct) -> bool { + val.inner == 0 +} diff --git a/test_programs/test_libraries/reexporting_lib/Nargo.toml b/test_programs/test_libraries/reexporting_lib/Nargo.toml new file mode 100644 index 00000000000..c26ce501e56 --- /dev/null +++ b/test_programs/test_libraries/reexporting_lib/Nargo.toml @@ -0,0 +1,7 @@ +[package] +name = "reexporting_lib" +type = "lib" +authors = [""] + +[dependencies] +exporting_lib = { path = "../exporting_lib" } diff --git a/test_programs/test_libraries/reexporting_lib/src/lib.nr b/test_programs/test_libraries/reexporting_lib/src/lib.nr new file mode 100644 index 00000000000..f12dfe01ecd --- /dev/null +++ b/test_programs/test_libraries/reexporting_lib/src/lib.nr @@ -0,0 +1,3 @@ +use dep::exporting_lib::{MyStruct, FooStruct}; + +use dep::exporting_lib as lib; diff --git a/tooling/backend_interface/CHANGELOG.md b/tooling/backend_interface/CHANGELOG.md index 4387d8ccb5f..9ebde989add 100644 --- a/tooling/backend_interface/CHANGELOG.md +++ b/tooling/backend_interface/CHANGELOG.md @@ -214,7 +214,7 @@ * Avoid exposing internals of Assignments type ([614c81b](https://github.com/noir-lang/acvm-backend-barretenberg/commit/614c81b0ea5e110bbf5a61a526bb0173f4fe377a)) * avoid exposing internals of Assignments type ([#119](https://github.com/noir-lang/acvm-backend-barretenberg/issues/119)) ([614c81b](https://github.com/noir-lang/acvm-backend-barretenberg/commit/614c81b0ea5e110bbf5a61a526bb0173f4fe377a)) -* fix serialisation of arithmetic expressions ([#145](https://github.com/noir-lang/acvm-backend-barretenberg/issues/145)) ([7f42535](https://github.com/noir-lang/acvm-backend-barretenberg/commit/7f4253570257d9dedcfa8c8fb96b9d097ef06419)) +* fix serialization of arithmetic expressions ([#145](https://github.com/noir-lang/acvm-backend-barretenberg/issues/145)) ([7f42535](https://github.com/noir-lang/acvm-backend-barretenberg/commit/7f4253570257d9dedcfa8c8fb96b9d097ef06419)) * Implement random_get for wasm backend ([#102](https://github.com/noir-lang/acvm-backend-barretenberg/issues/102)) ([9c0f06e](https://github.com/noir-lang/acvm-backend-barretenberg/commit/9c0f06ef56f23e2b5794e810f433e36ff2c5d6b5)) * rename gates to opcodes ([#59](https://github.com/noir-lang/acvm-backend-barretenberg/issues/59)) ([6e05307](https://github.com/noir-lang/acvm-backend-barretenberg/commit/6e053072d8b9c5d93c296f10782251ccb597f902)) * reorganize and ensure contracts can be compiled in Remix ([#112](https://github.com/noir-lang/acvm-backend-barretenberg/issues/112)) ([7ec5693](https://github.com/noir-lang/acvm-backend-barretenberg/commit/7ec5693f194a79c379ae2952bc17a31ee63a42b9)) diff --git a/tooling/backend_interface/Cargo.toml b/tooling/backend_interface/Cargo.toml index 14b1609dd4a..2d991f9ae6c 100644 --- a/tooling/backend_interface/Cargo.toml +++ b/tooling/backend_interface/Cargo.toml @@ -16,8 +16,9 @@ thiserror.workspace = true serde.workspace = true serde_json.workspace = true bb_abstraction_leaks.workspace = true +tracing.workspace = true -tempfile = "3.6.0" +tempfile.workspace = true ## bb binary downloading tar = "~0.4.15" diff --git a/tooling/backend_interface/src/cli/info.rs b/tooling/backend_interface/src/cli/info.rs index d3fd89bd2bc..81b811f0e32 100644 --- a/tooling/backend_interface/src/cli/info.rs +++ b/tooling/backend_interface/src/cli/info.rs @@ -1,9 +1,8 @@ -use acvm::Language; +use acvm::ExpressionWidth; use serde::Deserialize; -use std::collections::HashSet; use std::path::{Path, 
PathBuf}; -use crate::{BackendError, BackendOpcodeSupport}; +use crate::BackendError; use super::string_from_stderr; @@ -14,7 +13,11 @@ pub(crate) struct InfoCommand { #[derive(Deserialize)] struct InfoResponse { language: LanguageResponse, + #[allow(dead_code)] + #[deprecated(note = "This field is deprecated and will be removed in the future")] opcodes_supported: Vec<String>, + #[allow(dead_code)] + #[deprecated(note = "This field is deprecated and will be removed in the future")] black_box_functions_supported: Vec<String>, } @@ -24,20 +27,8 @@ struct LanguageResponse { width: Option<usize>, } -impl BackendOpcodeSupport { - fn new(info: InfoResponse) -> Self { - let opcodes: HashSet<String> = info.opcodes_supported.into_iter().collect(); - let black_box_functions: HashSet<String> = - info.black_box_functions_supported.into_iter().collect(); - Self { opcodes, black_box_functions } - } -} - impl InfoCommand { - pub(crate) fn run( - self, - binary_path: &Path, - ) -> Result<(Language, BackendOpcodeSupport), BackendError> { + pub(crate) fn run(self, binary_path: &Path) -> Result<ExpressionWidth, BackendError> { let mut command = std::process::Command::new(binary_path); command.arg("info").arg("-c").arg(self.crs_path).arg("-o").arg("-"); @@ -50,32 +41,27 @@ impl InfoCommand { let backend_info: InfoResponse = serde_json::from_slice(&output.stdout).expect("Backend should return valid json"); - let language: Language = match backend_info.language.name.as_str() { + let expression_width: ExpressionWidth = match backend_info.language.name.as_str() { "PLONK-CSAT" => { let width = backend_info.language.width.unwrap(); - Language::PLONKCSat { width } + ExpressionWidth::Bounded { width } } - "R1CS" => Language::R1CS, - _ => panic!("Unknown langauge"), + "R1CS" => ExpressionWidth::Unbounded, + _ => panic!("Unknown Expression width configuration"), }; - Ok((language, BackendOpcodeSupport::new(backend_info))) + Ok(expression_width) } } #[test] fn info_command() -> Result<(), BackendError> { - use acvm::acir::circuit::opcodes::Opcode; - - use acvm::acir::native_types::Expression; - let backend = crate::get_mock_backend()?; let crs_path = backend.backend_directory(); - let (language, opcode_support) = InfoCommand { crs_path }.run(backend.binary_path())?; + let expression_width = InfoCommand { crs_path }.run(backend.binary_path())?; - assert!(matches!(language, Language::PLONKCSat { width: 3 })); - assert!(opcode_support.is_opcode_supported(&Opcode::Arithmetic(Expression::default()))); + assert!(matches!(expression_width, ExpressionWidth::Bounded { width: 3 })); Ok(()) } diff --git a/tooling/backend_interface/src/cli/write_vk.rs b/tooling/backend_interface/src/cli/write_vk.rs index 8d4aa9cc7e3..da9fc04cbef 100644 --- a/tooling/backend_interface/src/cli/write_vk.rs +++ b/tooling/backend_interface/src/cli/write_vk.rs @@ -12,6 +12,7 @@ pub(crate) struct WriteVkCommand { } impl WriteVkCommand { + #[tracing::instrument(level = "trace", name = "vk_generation", skip_all)] pub(crate) fn run(self, binary_path: &Path) -> Result<(), BackendError> { let mut command = std::process::Command::new(binary_path); diff --git a/tooling/backend_interface/src/lib.rs b/tooling/backend_interface/src/lib.rs index 6c91c181a92..eab98852555 100644 --- a/tooling/backend_interface/src/lib.rs +++ b/tooling/backend_interface/src/lib.rs @@ -1,18 +1,18 @@ #![warn(unused_crate_dependencies, unused_extern_crates)] #![warn(unreachable_pub)] -use std::{collections::HashSet, path::PathBuf}; +use std::path::PathBuf; mod cli; mod download; mod proof_system; mod smart_contract; -use acvm::acir::circuit::Opcode; -use
bb_abstraction_leaks::ACVM_BACKEND_BARRETENBERG; +pub use bb_abstraction_leaks::ACVM_BACKEND_BARRETENBERG; use bb_abstraction_leaks::BB_VERSION; use cli::VersionCommand; pub use download::download_backend; +use tracing::warn; const BACKENDS_DIR: &str = ".nargo/backends"; @@ -116,7 +116,7 @@ impl Backend { // If version doesn't match then download the correct version. Ok(version_string) => { - println!("`{ACVM_BACKEND_BARRETENBERG}` version `{version_string}` is different from expected `{BB_VERSION}`. Downloading expected version..."); + warn!("`{ACVM_BACKEND_BARRETENBERG}` version `{version_string}` is different from expected `{BB_VERSION}`. Downloading expected version..."); let bb_url = std::env::var("BB_BINARY_URL") .unwrap_or_else(|_| bb_abstraction_leaks::BB_DOWNLOAD_URL.to_owned()); download_backend(&bb_url, binary_path)?; @@ -124,7 +124,7 @@ impl Backend { // If `bb` fails to report its version, then attempt to fix it by re-downloading the binary. Err(_) => { - println!("Could not determine version of `{ACVM_BACKEND_BARRETENBERG}`. Downloading expected version..."); + warn!("Could not determine version of `{ACVM_BACKEND_BARRETENBERG}`. Downloading expected version..."); let bb_url = std::env::var("BB_BINARY_URL") .unwrap_or_else(|_| bb_abstraction_leaks::BB_DOWNLOAD_URL.to_owned()); download_backend(&bb_url, binary_path)?; @@ -135,33 +135,13 @@ impl Backend { } } -pub struct BackendOpcodeSupport { - opcodes: HashSet<String>, - black_box_functions: HashSet<String>, -} - -impl BackendOpcodeSupport { - pub fn is_opcode_supported(&self, opcode: &Opcode) -> bool { - match opcode { - Opcode::Arithmetic(_) => self.opcodes.contains("arithmetic"), - Opcode::Directive(_) => self.opcodes.contains("directive"), - Opcode::Brillig(_) => self.opcodes.contains("brillig"), - Opcode::MemoryInit { .. } => self.opcodes.contains("memory_init"), - Opcode::MemoryOp { ..
} => self.opcodes.contains("memory_op"), - Opcode::BlackBoxFuncCall(func) => { - self.black_box_functions.contains(func.get_black_box_func().name()) - } - } - } -} - #[cfg(test)] mod backend { use crate::{Backend, BackendError}; #[test] fn raises_error_on_missing_binary() { - let bad_backend = Backend::new("i_dont_exist".to_string()); + let bad_backend = Backend::new("i_don't_exist".to_string()); let binary_path = bad_backend.assert_binary_exists(); diff --git a/tooling/backend_interface/src/proof_system.rs b/tooling/backend_interface/src/proof_system.rs index 95da1462d4e..595cd7e2020 100644 --- a/tooling/backend_interface/src/proof_system.rs +++ b/tooling/backend_interface/src/proof_system.rs @@ -3,15 +3,16 @@ use std::io::Write; use std::path::Path; use acvm::acir::{circuit::Circuit, native_types::WitnessMap}; +use acvm::ExpressionWidth; use acvm::FieldElement; -use acvm::Language; use tempfile::tempdir; +use tracing::warn; use crate::cli::{ GatesCommand, InfoCommand, ProofAsFieldsCommand, ProveCommand, VerifyCommand, VkAsFieldsCommand, WriteVkCommand, }; -use crate::{Backend, BackendError, BackendOpcodeSupport}; +use crate::{Backend, BackendError}; impl Backend { pub fn get_exact_circuit_size(&self, circuit: &Circuit) -> Result<u32, BackendError> { @@ -30,12 +31,26 @@ impl Backend { .run(binary_path) } - pub fn get_backend_info(&self) -> Result<(Language, BackendOpcodeSupport), BackendError> { + pub fn get_backend_info(&self) -> Result<ExpressionWidth, BackendError> { let binary_path = self.assert_binary_exists()?; self.assert_correct_version()?; InfoCommand { crs_path: self.crs_directory() }.run(binary_path) } + /// If we cannot get a valid backend, returns `ExpressionWidth::Bounded { width: 3 }`. + /// The function also logs a warning saying we could not find a backend. + pub fn get_backend_info_or_default(&self) -> ExpressionWidth { + if let Ok(expression_width) = self.get_backend_info() { + expression_width + } else { + warn!( + "No valid backend found, ExpressionWidth defaulting to Bounded with a width of 3" + ); + ExpressionWidth::Bounded { width: 3 } + } + } + + #[tracing::instrument(level = "trace", skip_all)] pub fn prove( &self, circuit: &Circuit, @@ -76,6 +91,7 @@ impl Backend { Ok(proof) } + #[tracing::instrument(level = "trace", skip_all)] pub fn verify( &self, proof: &[u8], diff --git a/tooling/backend_interface/src/smart_contract.rs b/tooling/backend_interface/src/smart_contract.rs index 5dac57c4072..2548079f8e3 100644 --- a/tooling/backend_interface/src/smart_contract.rs +++ b/tooling/backend_interface/src/smart_contract.rs @@ -47,7 +47,7 @@ mod tests { #[test] fn test_smart_contract() -> Result<(), BackendError> { let expression = &(Witness(1) + Witness(2)) - &Expression::from(Witness(3)); - let constraint = Opcode::Arithmetic(expression); + let constraint = Opcode::AssertZero(expression); let circuit = Circuit { current_witness_index: 4, diff --git a/tooling/backend_interface/test-binaries/mock_backend/src/info_cmd.rs b/tooling/backend_interface/test-binaries/mock_backend/src/info_cmd.rs index e9a7842ba24..fd8cf602125 100644 --- a/tooling/backend_interface/test-binaries/mock_backend/src/info_cmd.rs +++ b/tooling/backend_interface/test-binaries/mock_backend/src/info_cmd.rs @@ -14,11 +14,11 @@ const INFO_RESPONSE: &str = r#"{ "range", "sha256", "blake2s", + "blake3", "keccak256", "schnorr_verify", "pedersen", "pedersen_hash", - "hash_to_field_128_security", "ecdsa_secp256k1", "ecdsa_secp256r1", "fixed_base_scalar_mul", diff --git a/tooling/bb_abstraction_leaks/build.rs b/tooling/bb_abstraction_leaks/build.rs index
fe6861a80b8..965a57747f9 100644 --- a/tooling/bb_abstraction_leaks/build.rs +++ b/tooling/bb_abstraction_leaks/build.rs @@ -10,7 +10,7 @@ use const_format::formatcp; const USERNAME: &str = "AztecProtocol"; const REPO: &str = "aztec-packages"; -const VERSION: &str = "0.12.0"; +const VERSION: &str = "0.17.0"; const TAG: &str = formatcp!("aztec-packages-v{}", VERSION); const API_URL: &str = @@ -30,8 +30,11 @@ fn main() -> Result<(), String> { }; // Arm builds of linux are not supported + // We do not panic because we allow users to run nargo without a backend. if let (Os::Linux, Arch::AARCH64) = (&os, &arch) { - panic!("ARM64 builds of linux are not supported") + println!( + "cargo:warning=ARM64 builds of linux are not supported for the barretenberg binary" + ); }; println!("cargo:rustc-env=BB_BINARY_URL={}", get_bb_download_url(arch, os)); diff --git a/tooling/bb_abstraction_leaks/src/contract.sol b/tooling/bb_abstraction_leaks/src/contract.sol deleted file mode 100644 index 814c81d235e..00000000000 --- a/tooling/bb_abstraction_leaks/src/contract.sol +++ /dev/null @@ -1,2575 +0,0 @@ -/** - * @title Ultra Plonk proof verification contract - * @dev Top level Plonk proof verification contract, which allows Plonk proof to be verified - */ -abstract contract BaseUltraVerifier { - // VERIFICATION KEY MEMORY LOCATIONS - uint256 internal constant N_LOC = 0x380; - uint256 internal constant NUM_INPUTS_LOC = 0x3a0; - uint256 internal constant OMEGA_LOC = 0x3c0; - uint256 internal constant DOMAIN_INVERSE_LOC = 0x3e0; - uint256 internal constant Q1_X_LOC = 0x400; - uint256 internal constant Q1_Y_LOC = 0x420; - uint256 internal constant Q2_X_LOC = 0x440; - uint256 internal constant Q2_Y_LOC = 0x460; - uint256 internal constant Q3_X_LOC = 0x480; - uint256 internal constant Q3_Y_LOC = 0x4a0; - uint256 internal constant Q4_X_LOC = 0x4c0; - uint256 internal constant Q4_Y_LOC = 0x4e0; - uint256 internal constant QM_X_LOC = 0x500; - uint256 internal constant QM_Y_LOC = 0x520; - uint256 internal constant QC_X_LOC = 0x540; - uint256 internal constant QC_Y_LOC = 0x560; - uint256 internal constant QARITH_X_LOC = 0x580; - uint256 internal constant QARITH_Y_LOC = 0x5a0; - uint256 internal constant QSORT_X_LOC = 0x5c0; - uint256 internal constant QSORT_Y_LOC = 0x5e0; - uint256 internal constant QELLIPTIC_X_LOC = 0x600; - uint256 internal constant QELLIPTIC_Y_LOC = 0x620; - uint256 internal constant QAUX_X_LOC = 0x640; - uint256 internal constant QAUX_Y_LOC = 0x660; - uint256 internal constant SIGMA1_X_LOC = 0x680; - uint256 internal constant SIGMA1_Y_LOC = 0x6a0; - uint256 internal constant SIGMA2_X_LOC = 0x6c0; - uint256 internal constant SIGMA2_Y_LOC = 0x6e0; - uint256 internal constant SIGMA3_X_LOC = 0x700; - uint256 internal constant SIGMA3_Y_LOC = 0x720; - uint256 internal constant SIGMA4_X_LOC = 0x740; - uint256 internal constant SIGMA4_Y_LOC = 0x760; - uint256 internal constant TABLE1_X_LOC = 0x780; - uint256 internal constant TABLE1_Y_LOC = 0x7a0; - uint256 internal constant TABLE2_X_LOC = 0x7c0; - uint256 internal constant TABLE2_Y_LOC = 0x7e0; - uint256 internal constant TABLE3_X_LOC = 0x800; - uint256 internal constant TABLE3_Y_LOC = 0x820; - uint256 internal constant TABLE4_X_LOC = 0x840; - uint256 internal constant TABLE4_Y_LOC = 0x860; - uint256 internal constant TABLE_TYPE_X_LOC = 0x880; - uint256 internal constant TABLE_TYPE_Y_LOC = 0x8a0; - uint256 internal constant ID1_X_LOC = 0x8c0; - uint256 internal constant ID1_Y_LOC = 0x8e0; - uint256 internal constant ID2_X_LOC = 0x900; - uint256 internal 
constant ID2_Y_LOC = 0x920; - uint256 internal constant ID3_X_LOC = 0x940; - uint256 internal constant ID3_Y_LOC = 0x960; - uint256 internal constant ID4_X_LOC = 0x980; - uint256 internal constant ID4_Y_LOC = 0x9a0; - uint256 internal constant CONTAINS_RECURSIVE_PROOF_LOC = 0x9c0; - uint256 internal constant RECURSIVE_PROOF_PUBLIC_INPUT_INDICES_LOC = 0x9e0; - uint256 internal constant G2X_X0_LOC = 0xa00; - uint256 internal constant G2X_X1_LOC = 0xa20; - uint256 internal constant G2X_Y0_LOC = 0xa40; - uint256 internal constant G2X_Y1_LOC = 0xa60; - - // ### PROOF DATA MEMORY LOCATIONS - uint256 internal constant W1_X_LOC = 0x1200; - uint256 internal constant W1_Y_LOC = 0x1220; - uint256 internal constant W2_X_LOC = 0x1240; - uint256 internal constant W2_Y_LOC = 0x1260; - uint256 internal constant W3_X_LOC = 0x1280; - uint256 internal constant W3_Y_LOC = 0x12a0; - uint256 internal constant W4_X_LOC = 0x12c0; - uint256 internal constant W4_Y_LOC = 0x12e0; - uint256 internal constant S_X_LOC = 0x1300; - uint256 internal constant S_Y_LOC = 0x1320; - uint256 internal constant Z_X_LOC = 0x1340; - uint256 internal constant Z_Y_LOC = 0x1360; - uint256 internal constant Z_LOOKUP_X_LOC = 0x1380; - uint256 internal constant Z_LOOKUP_Y_LOC = 0x13a0; - uint256 internal constant T1_X_LOC = 0x13c0; - uint256 internal constant T1_Y_LOC = 0x13e0; - uint256 internal constant T2_X_LOC = 0x1400; - uint256 internal constant T2_Y_LOC = 0x1420; - uint256 internal constant T3_X_LOC = 0x1440; - uint256 internal constant T3_Y_LOC = 0x1460; - uint256 internal constant T4_X_LOC = 0x1480; - uint256 internal constant T4_Y_LOC = 0x14a0; - - uint256 internal constant W1_EVAL_LOC = 0x1600; - uint256 internal constant W2_EVAL_LOC = 0x1620; - uint256 internal constant W3_EVAL_LOC = 0x1640; - uint256 internal constant W4_EVAL_LOC = 0x1660; - uint256 internal constant S_EVAL_LOC = 0x1680; - uint256 internal constant Z_EVAL_LOC = 0x16a0; - uint256 internal constant Z_LOOKUP_EVAL_LOC = 0x16c0; - uint256 internal constant Q1_EVAL_LOC = 0x16e0; - uint256 internal constant Q2_EVAL_LOC = 0x1700; - uint256 internal constant Q3_EVAL_LOC = 0x1720; - uint256 internal constant Q4_EVAL_LOC = 0x1740; - uint256 internal constant QM_EVAL_LOC = 0x1760; - uint256 internal constant QC_EVAL_LOC = 0x1780; - uint256 internal constant QARITH_EVAL_LOC = 0x17a0; - uint256 internal constant QSORT_EVAL_LOC = 0x17c0; - uint256 internal constant QELLIPTIC_EVAL_LOC = 0x17e0; - uint256 internal constant QAUX_EVAL_LOC = 0x1800; - uint256 internal constant TABLE1_EVAL_LOC = 0x1840; - uint256 internal constant TABLE2_EVAL_LOC = 0x1860; - uint256 internal constant TABLE3_EVAL_LOC = 0x1880; - uint256 internal constant TABLE4_EVAL_LOC = 0x18a0; - uint256 internal constant TABLE_TYPE_EVAL_LOC = 0x18c0; - uint256 internal constant ID1_EVAL_LOC = 0x18e0; - uint256 internal constant ID2_EVAL_LOC = 0x1900; - uint256 internal constant ID3_EVAL_LOC = 0x1920; - uint256 internal constant ID4_EVAL_LOC = 0x1940; - uint256 internal constant SIGMA1_EVAL_LOC = 0x1960; - uint256 internal constant SIGMA2_EVAL_LOC = 0x1980; - uint256 internal constant SIGMA3_EVAL_LOC = 0x19a0; - uint256 internal constant SIGMA4_EVAL_LOC = 0x19c0; - uint256 internal constant W1_OMEGA_EVAL_LOC = 0x19e0; - uint256 internal constant W2_OMEGA_EVAL_LOC = 0x2000; - uint256 internal constant W3_OMEGA_EVAL_LOC = 0x2020; - uint256 internal constant W4_OMEGA_EVAL_LOC = 0x2040; - uint256 internal constant S_OMEGA_EVAL_LOC = 0x2060; - uint256 internal constant Z_OMEGA_EVAL_LOC = 0x2080; - uint256 internal 
constant Z_LOOKUP_OMEGA_EVAL_LOC = 0x20a0; - uint256 internal constant TABLE1_OMEGA_EVAL_LOC = 0x20c0; - uint256 internal constant TABLE2_OMEGA_EVAL_LOC = 0x20e0; - uint256 internal constant TABLE3_OMEGA_EVAL_LOC = 0x2100; - uint256 internal constant TABLE4_OMEGA_EVAL_LOC = 0x2120; - - uint256 internal constant PI_Z_X_LOC = 0x2300; - uint256 internal constant PI_Z_Y_LOC = 0x2320; - uint256 internal constant PI_Z_OMEGA_X_LOC = 0x2340; - uint256 internal constant PI_Z_OMEGA_Y_LOC = 0x2360; - - // Used for elliptic widget. These are alias names for wire + shifted wire evaluations - uint256 internal constant X1_EVAL_LOC = W2_EVAL_LOC; - uint256 internal constant X2_EVAL_LOC = W1_OMEGA_EVAL_LOC; - uint256 internal constant X3_EVAL_LOC = W2_OMEGA_EVAL_LOC; - uint256 internal constant Y1_EVAL_LOC = W3_EVAL_LOC; - uint256 internal constant Y2_EVAL_LOC = W4_OMEGA_EVAL_LOC; - uint256 internal constant Y3_EVAL_LOC = W3_OMEGA_EVAL_LOC; - uint256 internal constant QBETA_LOC = Q3_EVAL_LOC; - uint256 internal constant QBETA_SQR_LOC = Q4_EVAL_LOC; - uint256 internal constant QSIGN_LOC = Q1_EVAL_LOC; - - // ### CHALLENGES MEMORY OFFSETS - - uint256 internal constant C_BETA_LOC = 0x2600; - uint256 internal constant C_GAMMA_LOC = 0x2620; - uint256 internal constant C_ALPHA_LOC = 0x2640; - uint256 internal constant C_ETA_LOC = 0x2660; - uint256 internal constant C_ETA_SQR_LOC = 0x2680; - uint256 internal constant C_ETA_CUBE_LOC = 0x26a0; - - uint256 internal constant C_ZETA_LOC = 0x26c0; - uint256 internal constant C_CURRENT_LOC = 0x26e0; - uint256 internal constant C_V0_LOC = 0x2700; - uint256 internal constant C_V1_LOC = 0x2720; - uint256 internal constant C_V2_LOC = 0x2740; - uint256 internal constant C_V3_LOC = 0x2760; - uint256 internal constant C_V4_LOC = 0x2780; - uint256 internal constant C_V5_LOC = 0x27a0; - uint256 internal constant C_V6_LOC = 0x27c0; - uint256 internal constant C_V7_LOC = 0x27e0; - uint256 internal constant C_V8_LOC = 0x2800; - uint256 internal constant C_V9_LOC = 0x2820; - uint256 internal constant C_V10_LOC = 0x2840; - uint256 internal constant C_V11_LOC = 0x2860; - uint256 internal constant C_V12_LOC = 0x2880; - uint256 internal constant C_V13_LOC = 0x28a0; - uint256 internal constant C_V14_LOC = 0x28c0; - uint256 internal constant C_V15_LOC = 0x28e0; - uint256 internal constant C_V16_LOC = 0x2900; - uint256 internal constant C_V17_LOC = 0x2920; - uint256 internal constant C_V18_LOC = 0x2940; - uint256 internal constant C_V19_LOC = 0x2960; - uint256 internal constant C_V20_LOC = 0x2980; - uint256 internal constant C_V21_LOC = 0x29a0; - uint256 internal constant C_V22_LOC = 0x29c0; - uint256 internal constant C_V23_LOC = 0x29e0; - uint256 internal constant C_V24_LOC = 0x2a00; - uint256 internal constant C_V25_LOC = 0x2a20; - uint256 internal constant C_V26_LOC = 0x2a40; - uint256 internal constant C_V27_LOC = 0x2a60; - uint256 internal constant C_V28_LOC = 0x2a80; - uint256 internal constant C_V29_LOC = 0x2aa0; - uint256 internal constant C_V30_LOC = 0x2ac0; - - uint256 internal constant C_U_LOC = 0x2b00; - - // ### LOCAL VARIABLES MEMORY OFFSETS - uint256 internal constant DELTA_NUMERATOR_LOC = 0x3000; - uint256 internal constant DELTA_DENOMINATOR_LOC = 0x3020; - uint256 internal constant ZETA_POW_N_LOC = 0x3040; - uint256 internal constant PUBLIC_INPUT_DELTA_LOC = 0x3060; - uint256 internal constant ZERO_POLY_LOC = 0x3080; - uint256 internal constant L_START_LOC = 0x30a0; - uint256 internal constant L_END_LOC = 0x30c0; - uint256 internal constant R_ZERO_EVAL_LOC = 0x30e0; - - 
uint256 internal constant PLOOKUP_DELTA_NUMERATOR_LOC = 0x3100; - uint256 internal constant PLOOKUP_DELTA_DENOMINATOR_LOC = 0x3120; - uint256 internal constant PLOOKUP_DELTA_LOC = 0x3140; - - uint256 internal constant ACCUMULATOR_X_LOC = 0x3160; - uint256 internal constant ACCUMULATOR_Y_LOC = 0x3180; - uint256 internal constant ACCUMULATOR2_X_LOC = 0x31a0; - uint256 internal constant ACCUMULATOR2_Y_LOC = 0x31c0; - uint256 internal constant PAIRING_LHS_X_LOC = 0x31e0; - uint256 internal constant PAIRING_LHS_Y_LOC = 0x3200; - uint256 internal constant PAIRING_RHS_X_LOC = 0x3220; - uint256 internal constant PAIRING_RHS_Y_LOC = 0x3240; - - // ### SUCCESS FLAG MEMORY LOCATIONS - uint256 internal constant GRAND_PRODUCT_SUCCESS_FLAG = 0x3300; - uint256 internal constant ARITHMETIC_TERM_SUCCESS_FLAG = 0x3020; - uint256 internal constant BATCH_OPENING_SUCCESS_FLAG = 0x3340; - uint256 internal constant OPENING_COMMITMENT_SUCCESS_FLAG = 0x3360; - uint256 internal constant PAIRING_PREAMBLE_SUCCESS_FLAG = 0x3380; - uint256 internal constant PAIRING_SUCCESS_FLAG = 0x33a0; - uint256 internal constant RESULT_FLAG = 0x33c0; - - // misc stuff - uint256 internal constant OMEGA_INVERSE_LOC = 0x3400; - uint256 internal constant C_ALPHA_SQR_LOC = 0x3420; - uint256 internal constant C_ALPHA_CUBE_LOC = 0x3440; - uint256 internal constant C_ALPHA_QUAD_LOC = 0x3460; - uint256 internal constant C_ALPHA_BASE_LOC = 0x3480; - - // ### RECURSION VARIABLE MEMORY LOCATIONS - uint256 internal constant RECURSIVE_P1_X_LOC = 0x3500; - uint256 internal constant RECURSIVE_P1_Y_LOC = 0x3520; - uint256 internal constant RECURSIVE_P2_X_LOC = 0x3540; - uint256 internal constant RECURSIVE_P2_Y_LOC = 0x3560; - - uint256 internal constant PUBLIC_INPUTS_HASH_LOCATION = 0x3580; - - // sub-identity storage - uint256 internal constant PERMUTATION_IDENTITY = 0x3600; - uint256 internal constant PLOOKUP_IDENTITY = 0x3620; - uint256 internal constant ARITHMETIC_IDENTITY = 0x3640; - uint256 internal constant SORT_IDENTITY = 0x3660; - uint256 internal constant ELLIPTIC_IDENTITY = 0x3680; - uint256 internal constant AUX_IDENTITY = 0x36a0; - uint256 internal constant AUX_NON_NATIVE_FIELD_EVALUATION = 0x36c0; - uint256 internal constant AUX_LIMB_ACCUMULATOR_EVALUATION = 0x36e0; - uint256 internal constant AUX_RAM_CONSISTENCY_EVALUATION = 0x3700; - uint256 internal constant AUX_ROM_CONSISTENCY_EVALUATION = 0x3720; - uint256 internal constant AUX_MEMORY_EVALUATION = 0x3740; - - uint256 internal constant QUOTIENT_EVAL_LOC = 0x3760; - uint256 internal constant ZERO_POLY_INVERSE_LOC = 0x3780; - - // when hashing public inputs we use memory at NU_CHALLENGE_INPUT_LOC_A, as the hash input size is unknown at compile time - uint256 internal constant NU_CHALLENGE_INPUT_LOC_A = 0x37a0; - uint256 internal constant NU_CHALLENGE_INPUT_LOC_B = 0x37c0; - uint256 internal constant NU_CHALLENGE_INPUT_LOC_C = 0x37e0; - - bytes4 internal constant PUBLIC_INPUT_INVALID_BN128_G1_POINT_SELECTOR = 0xeba9f4a6; - bytes4 internal constant PUBLIC_INPUT_GE_P_SELECTOR = 0x374a972f; - bytes4 internal constant MOD_EXP_FAILURE_SELECTOR = 0xf894a7bc; - bytes4 internal constant EC_SCALAR_MUL_FAILURE_SELECTOR = 0xf755f369; - bytes4 internal constant PROOF_FAILURE_SELECTOR = 0x0711fcec; - - uint256 internal constant ETA_INPUT_LENGTH = 0xc0; // W1, W2, W3 = 6 * 0x20 bytes - - // We need to hash 41 field elements when generating the NU challenge - // w1, w2, w3, w4, s, z, z_lookup, q1, q2, q3, q4, qm, qc, qarith (14) - // qsort, qelliptic, qaux, sigma1, sigma2, sigma, sigma4, (7) - // 
table1, table2, table3, table4, tabletype, id1, id2, id3, id4, (9) - // w1_omega, w2_omega, w3_omega, w4_omega, s_omega, z_omega, z_lookup_omega, (7) - // table1_omega, table2_omega, table3_omega, table4_omega (4) - uint256 internal constant NU_INPUT_LENGTH = 0x520; // 0x520 = 41 * 0x20 - - // There are ELEVEN G1 group elements added into the transcript in the `beta` round, that we need to skip over - // W1, W2, W3, W4, S, Z, Z_LOOKUP, T1, T2, T3, T4 - uint256 internal constant NU_CALLDATA_SKIP_LENGTH = 0x2c0; // 11 * 0x40 = 0x2c0 - - uint256 internal constant NEGATIVE_INVERSE_OF_2_MODULO_P = - 0x183227397098d014dc2822db40c0ac2e9419f4243cdcb848a1f0fac9f8000000; - uint256 internal constant LIMB_SIZE = 0x100000000000000000; // 2<<68 - uint256 internal constant SUBLIMB_SHIFT = 0x4000; // 2<<14 - - // y^2 = x^3 + ax + b - // for Grumpkin, a = 0 and b = -17. We use b in a custom gate relation that evaluates elliptic curve arithmetic - uint256 internal constant GRUMPKIN_CURVE_B_PARAMETER_NEGATED = 17; - error PUBLIC_INPUT_COUNT_INVALID(uint256 expected, uint256 actual); - error PUBLIC_INPUT_INVALID_BN128_G1_POINT(); - error PUBLIC_INPUT_GE_P(); - error MOD_EXP_FAILURE(); - error EC_SCALAR_MUL_FAILURE(); - error PROOF_FAILURE(); - - function getVerificationKeyHash() public pure virtual returns (bytes32); - - function loadVerificationKey(uint256 _vk, uint256 _omegaInverseLoc) internal pure virtual; - - /** - * @notice Verify a Ultra Plonk proof - * @param _proof - The serialized proof - * @param _publicInputs - An array of the public inputs - * @return True if proof is valid, reverts otherwise - */ - function verify(bytes calldata _proof, bytes32[] calldata _publicInputs) external view returns (bool) { - loadVerificationKey(N_LOC, OMEGA_INVERSE_LOC); - - uint256 requiredPublicInputCount; - assembly { - requiredPublicInputCount := mload(NUM_INPUTS_LOC) - } - if (requiredPublicInputCount != _publicInputs.length) { - revert PUBLIC_INPUT_COUNT_INVALID(requiredPublicInputCount, _publicInputs.length); - } - - assembly { - let q := 21888242871839275222246405745257275088696311157297823662689037894645226208583 // EC group order - let p := 21888242871839275222246405745257275088548364400416034343698204186575808495617 // Prime field order - - /** - * LOAD PROOF FROM CALLDATA - */ - { - let data_ptr := add(calldataload(0x04), 0x24) - - mstore(W1_Y_LOC, mod(calldataload(data_ptr), q)) - mstore(W1_X_LOC, mod(calldataload(add(data_ptr, 0x20)), q)) - - mstore(W2_Y_LOC, mod(calldataload(add(data_ptr, 0x40)), q)) - mstore(W2_X_LOC, mod(calldataload(add(data_ptr, 0x60)), q)) - - mstore(W3_Y_LOC, mod(calldataload(add(data_ptr, 0x80)), q)) - mstore(W3_X_LOC, mod(calldataload(add(data_ptr, 0xa0)), q)) - - mstore(W4_Y_LOC, mod(calldataload(add(data_ptr, 0xc0)), q)) - mstore(W4_X_LOC, mod(calldataload(add(data_ptr, 0xe0)), q)) - - mstore(S_Y_LOC, mod(calldataload(add(data_ptr, 0x100)), q)) - mstore(S_X_LOC, mod(calldataload(add(data_ptr, 0x120)), q)) - mstore(Z_Y_LOC, mod(calldataload(add(data_ptr, 0x140)), q)) - mstore(Z_X_LOC, mod(calldataload(add(data_ptr, 0x160)), q)) - mstore(Z_LOOKUP_Y_LOC, mod(calldataload(add(data_ptr, 0x180)), q)) - mstore(Z_LOOKUP_X_LOC, mod(calldataload(add(data_ptr, 0x1a0)), q)) - mstore(T1_Y_LOC, mod(calldataload(add(data_ptr, 0x1c0)), q)) - mstore(T1_X_LOC, mod(calldataload(add(data_ptr, 0x1e0)), q)) - - mstore(T2_Y_LOC, mod(calldataload(add(data_ptr, 0x200)), q)) - mstore(T2_X_LOC, mod(calldataload(add(data_ptr, 0x220)), q)) - - mstore(T3_Y_LOC, mod(calldataload(add(data_ptr, 0x240)), q)) - 
mstore(T3_X_LOC, mod(calldataload(add(data_ptr, 0x260)), q)) - - mstore(T4_Y_LOC, mod(calldataload(add(data_ptr, 0x280)), q)) - mstore(T4_X_LOC, mod(calldataload(add(data_ptr, 0x2a0)), q)) - - mstore(W1_EVAL_LOC, mod(calldataload(add(data_ptr, 0x2c0)), p)) - mstore(W2_EVAL_LOC, mod(calldataload(add(data_ptr, 0x2e0)), p)) - mstore(W3_EVAL_LOC, mod(calldataload(add(data_ptr, 0x300)), p)) - mstore(W4_EVAL_LOC, mod(calldataload(add(data_ptr, 0x320)), p)) - mstore(S_EVAL_LOC, mod(calldataload(add(data_ptr, 0x340)), p)) - mstore(Z_EVAL_LOC, mod(calldataload(add(data_ptr, 0x360)), p)) - mstore(Z_LOOKUP_EVAL_LOC, mod(calldataload(add(data_ptr, 0x380)), p)) - mstore(Q1_EVAL_LOC, mod(calldataload(add(data_ptr, 0x3a0)), p)) - mstore(Q2_EVAL_LOC, mod(calldataload(add(data_ptr, 0x3c0)), p)) - mstore(Q3_EVAL_LOC, mod(calldataload(add(data_ptr, 0x3e0)), p)) - mstore(Q4_EVAL_LOC, mod(calldataload(add(data_ptr, 0x400)), p)) - mstore(QM_EVAL_LOC, mod(calldataload(add(data_ptr, 0x420)), p)) - mstore(QC_EVAL_LOC, mod(calldataload(add(data_ptr, 0x440)), p)) - mstore(QARITH_EVAL_LOC, mod(calldataload(add(data_ptr, 0x460)), p)) - mstore(QSORT_EVAL_LOC, mod(calldataload(add(data_ptr, 0x480)), p)) - mstore(QELLIPTIC_EVAL_LOC, mod(calldataload(add(data_ptr, 0x4a0)), p)) - mstore(QAUX_EVAL_LOC, mod(calldataload(add(data_ptr, 0x4c0)), p)) - - mstore(SIGMA1_EVAL_LOC, mod(calldataload(add(data_ptr, 0x4e0)), p)) - mstore(SIGMA2_EVAL_LOC, mod(calldataload(add(data_ptr, 0x500)), p)) - - mstore(SIGMA3_EVAL_LOC, mod(calldataload(add(data_ptr, 0x520)), p)) - mstore(SIGMA4_EVAL_LOC, mod(calldataload(add(data_ptr, 0x540)), p)) - - mstore(TABLE1_EVAL_LOC, mod(calldataload(add(data_ptr, 0x560)), p)) - mstore(TABLE2_EVAL_LOC, mod(calldataload(add(data_ptr, 0x580)), p)) - mstore(TABLE3_EVAL_LOC, mod(calldataload(add(data_ptr, 0x5a0)), p)) - mstore(TABLE4_EVAL_LOC, mod(calldataload(add(data_ptr, 0x5c0)), p)) - mstore(TABLE_TYPE_EVAL_LOC, mod(calldataload(add(data_ptr, 0x5e0)), p)) - - mstore(ID1_EVAL_LOC, mod(calldataload(add(data_ptr, 0x600)), p)) - mstore(ID2_EVAL_LOC, mod(calldataload(add(data_ptr, 0x620)), p)) - mstore(ID3_EVAL_LOC, mod(calldataload(add(data_ptr, 0x640)), p)) - mstore(ID4_EVAL_LOC, mod(calldataload(add(data_ptr, 0x660)), p)) - - mstore(W1_OMEGA_EVAL_LOC, mod(calldataload(add(data_ptr, 0x680)), p)) - mstore(W2_OMEGA_EVAL_LOC, mod(calldataload(add(data_ptr, 0x6a0)), p)) - mstore(W3_OMEGA_EVAL_LOC, mod(calldataload(add(data_ptr, 0x6c0)), p)) - mstore(W4_OMEGA_EVAL_LOC, mod(calldataload(add(data_ptr, 0x6e0)), p)) - mstore(S_OMEGA_EVAL_LOC, mod(calldataload(add(data_ptr, 0x700)), p)) - - mstore(Z_OMEGA_EVAL_LOC, mod(calldataload(add(data_ptr, 0x720)), p)) - - mstore(Z_LOOKUP_OMEGA_EVAL_LOC, mod(calldataload(add(data_ptr, 0x740)), p)) - mstore(TABLE1_OMEGA_EVAL_LOC, mod(calldataload(add(data_ptr, 0x760)), p)) - mstore(TABLE2_OMEGA_EVAL_LOC, mod(calldataload(add(data_ptr, 0x780)), p)) - mstore(TABLE3_OMEGA_EVAL_LOC, mod(calldataload(add(data_ptr, 0x7a0)), p)) - mstore(TABLE4_OMEGA_EVAL_LOC, mod(calldataload(add(data_ptr, 0x7c0)), p)) - - mstore(PI_Z_Y_LOC, mod(calldataload(add(data_ptr, 0x7e0)), q)) - mstore(PI_Z_X_LOC, mod(calldataload(add(data_ptr, 0x800)), q)) - - mstore(PI_Z_OMEGA_Y_LOC, mod(calldataload(add(data_ptr, 0x820)), q)) - mstore(PI_Z_OMEGA_X_LOC, mod(calldataload(add(data_ptr, 0x840)), q)) - } - - /** - * LOAD RECURSIVE PROOF INTO MEMORY - */ - { - if mload(CONTAINS_RECURSIVE_PROOF_LOC) { - let public_inputs_ptr := add(calldataload(0x24), 0x24) - let index_counter := add(shl(5, 
mload(RECURSIVE_PROOF_PUBLIC_INPUT_INDICES_LOC)), public_inputs_ptr) - - let x0 := calldataload(index_counter) - x0 := add(x0, shl(68, calldataload(add(index_counter, 0x20)))) - x0 := add(x0, shl(136, calldataload(add(index_counter, 0x40)))) - x0 := add(x0, shl(204, calldataload(add(index_counter, 0x60)))) - let y0 := calldataload(add(index_counter, 0x80)) - y0 := add(y0, shl(68, calldataload(add(index_counter, 0xa0)))) - y0 := add(y0, shl(136, calldataload(add(index_counter, 0xc0)))) - y0 := add(y0, shl(204, calldataload(add(index_counter, 0xe0)))) - let x1 := calldataload(add(index_counter, 0x100)) - x1 := add(x1, shl(68, calldataload(add(index_counter, 0x120)))) - x1 := add(x1, shl(136, calldataload(add(index_counter, 0x140)))) - x1 := add(x1, shl(204, calldataload(add(index_counter, 0x160)))) - let y1 := calldataload(add(index_counter, 0x180)) - y1 := add(y1, shl(68, calldataload(add(index_counter, 0x1a0)))) - y1 := add(y1, shl(136, calldataload(add(index_counter, 0x1c0)))) - y1 := add(y1, shl(204, calldataload(add(index_counter, 0x1e0)))) - mstore(RECURSIVE_P1_X_LOC, x0) - mstore(RECURSIVE_P1_Y_LOC, y0) - mstore(RECURSIVE_P2_X_LOC, x1) - mstore(RECURSIVE_P2_Y_LOC, y1) - - // validate these are valid bn128 G1 points - if iszero(and(and(lt(x0, q), lt(x1, q)), and(lt(y0, q), lt(y1, q)))) { - mstore(0x00, PUBLIC_INPUT_INVALID_BN128_G1_POINT_SELECTOR) - revert(0x00, 0x04) - } - } - } - - { - /** - * Generate initial challenge - */ - mstore(0x00, shl(224, mload(N_LOC))) - mstore(0x04, shl(224, mload(NUM_INPUTS_LOC))) - let challenge := keccak256(0x00, 0x08) - - /** - * Generate eta challenge - */ - mstore(PUBLIC_INPUTS_HASH_LOCATION, challenge) - // The public input location is stored at 0x24, we then add 0x24 to skip selector and the length of public inputs - let public_inputs_start := add(calldataload(0x24), 0x24) - // copy the public inputs over - let public_input_size := mul(mload(NUM_INPUTS_LOC), 0x20) - calldatacopy(add(PUBLIC_INPUTS_HASH_LOCATION, 0x20), public_inputs_start, public_input_size) - - // copy W1, W2, W3 into challenge. 
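When a recursive proof is present, each of the four coordinates above is rebuilt from four public inputs, each carrying a 68-bit limb, by shifting successive limbs into place (shl by 68, 136 and 204) and summing; the recombined coordinates are then required to be valid BN254 base-field elements (less than q). A small sketch of that recombination in plain Solidity (the function name and the explicit limb bound are illustrative):

pragma solidity ^0.8.0;

// Recombine four 68-bit limbs (least significant first) into one coordinate,
// mirroring the shl(68)/shl(136)/shl(204) additions above.
function recombineLimbs(uint256[4] memory limbs) pure returns (uint256 x) {
    for (uint256 i = 0; i < 4; i++) {
        require(limbs[i] < (1 << 68), "limb exceeds 68 bits");
        x |= limbs[i] << (68 * i);
    }
}

The assembly adds the shifted limbs rather than ORing them, which is equivalent once each limb fits in 68 bits.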
Each point is 0x40 bytes, so load 0xc0 = 3 * 0x40 bytes (ETA input length) - let w_start := add(calldataload(0x04), 0x24) - calldatacopy(add(add(PUBLIC_INPUTS_HASH_LOCATION, 0x20), public_input_size), w_start, ETA_INPUT_LENGTH) - - // Challenge is the old challenge + public inputs + W1, W2, W3 (0x20 + public_input_size + 0xc0) - let challenge_bytes_size := add(0x20, add(public_input_size, ETA_INPUT_LENGTH)) - - challenge := keccak256(PUBLIC_INPUTS_HASH_LOCATION, challenge_bytes_size) - { - let eta := mod(challenge, p) - mstore(C_ETA_LOC, eta) - mstore(C_ETA_SQR_LOC, mulmod(eta, eta, p)) - mstore(C_ETA_CUBE_LOC, mulmod(mload(C_ETA_SQR_LOC), eta, p)) - } - - /** - * Generate beta challenge - */ - mstore(0x00, challenge) - mstore(0x20, mload(W4_Y_LOC)) - mstore(0x40, mload(W4_X_LOC)) - mstore(0x60, mload(S_Y_LOC)) - mstore(0x80, mload(S_X_LOC)) - challenge := keccak256(0x00, 0xa0) - mstore(C_BETA_LOC, mod(challenge, p)) - - /** - * Generate gamma challenge - */ - mstore(0x00, challenge) - mstore8(0x20, 0x01) - challenge := keccak256(0x00, 0x21) - mstore(C_GAMMA_LOC, mod(challenge, p)) - - /** - * Generate alpha challenge - */ - mstore(0x00, challenge) - mstore(0x20, mload(Z_Y_LOC)) - mstore(0x40, mload(Z_X_LOC)) - mstore(0x60, mload(Z_LOOKUP_Y_LOC)) - mstore(0x80, mload(Z_LOOKUP_X_LOC)) - challenge := keccak256(0x00, 0xa0) - mstore(C_ALPHA_LOC, mod(challenge, p)) - - /** - * Compute and store some powers of alpha for future computations - */ - let alpha := mload(C_ALPHA_LOC) - mstore(C_ALPHA_SQR_LOC, mulmod(alpha, alpha, p)) - mstore(C_ALPHA_CUBE_LOC, mulmod(mload(C_ALPHA_SQR_LOC), alpha, p)) - mstore(C_ALPHA_QUAD_LOC, mulmod(mload(C_ALPHA_CUBE_LOC), alpha, p)) - mstore(C_ALPHA_BASE_LOC, alpha) - - /** - * Generate zeta challenge - */ - mstore(0x00, challenge) - mstore(0x20, mload(T1_Y_LOC)) - mstore(0x40, mload(T1_X_LOC)) - mstore(0x60, mload(T2_Y_LOC)) - mstore(0x80, mload(T2_X_LOC)) - mstore(0xa0, mload(T3_Y_LOC)) - mstore(0xc0, mload(T3_X_LOC)) - mstore(0xe0, mload(T4_Y_LOC)) - mstore(0x100, mload(T4_X_LOC)) - - challenge := keccak256(0x00, 0x120) - - mstore(C_ZETA_LOC, mod(challenge, p)) - mstore(C_CURRENT_LOC, challenge) - } - - /** - * EVALUATE FIELD OPERATIONS - */ - - /** - * COMPUTE PUBLIC INPUT DELTA - * ΔPI = ∏ᵢ∈ℓ(wᵢ + β σ(i) + γ) / ∏ᵢ∈ℓ(wᵢ + β σ'(i) + γ) - */ - { - let beta := mload(C_BETA_LOC) // β - let gamma := mload(C_GAMMA_LOC) // γ - let work_root := mload(OMEGA_LOC) // ω - let numerator_value := 1 - let denominator_value := 1 - - let p_clone := p // move p to the front of the stack - let valid_inputs := true - - // Load the starting point of the public inputs (jump over the selector and the length of public inputs [0x24]) - let public_inputs_ptr := add(calldataload(0x24), 0x24) - - // endpoint_ptr = public_inputs_ptr + num_inputs * 0x20. 
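Every challenge generated above follows the same Fiat-Shamir pattern: keccak the previous challenge together with the newly absorbed transcript elements, then reduce the digest modulo p; where an extra challenge is needed without absorbing new data (gamma from beta here, and the nu challenges later), a single counter byte is appended before re-hashing. A hedged sketch of both operations outside assembly (library and function names are illustrative):

pragma solidity ^0.8.0;

library TranscriptSketch {
    uint256 internal constant P =
        21888242871839275222246405745257275088548364400416034343698204186575808495617;

    // Absorb new transcript data and squeeze the next challenge.
    // The raw digest is kept for chaining; the reduced value is what the verifier stores.
    function squeeze(bytes32 previous, bytes memory transcriptData)
        internal pure returns (bytes32 digest, uint256 asField)
    {
        digest = keccak256(abi.encodePacked(previous, transcriptData));
        asField = uint256(digest) % P;
    }

    // Derive an additional challenge from the same state by appending a counter byte,
    // mirroring `mstore8(0x20, 0x01)` followed by `keccak256(0x00, 0x21)`.
    function subChallenge(bytes32 previous, uint8 counter)
        internal pure returns (uint256 asField)
    {
        asField = uint256(keccak256(abi.encodePacked(previous, counter))) % P;
    }
}

For instance, subChallenge(betaDigest, 0x01) should reproduce the one-byte-suffix hash used to derive gamma from the beta-round digest.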
// every public input is 0x20 bytes - let endpoint_ptr := add(public_inputs_ptr, mul(mload(NUM_INPUTS_LOC), 0x20)) - - // root_1 = β * 0x05 - let root_1 := mulmod(beta, 0x05, p_clone) // k1.β - // root_2 = β * 0x0c - let root_2 := mulmod(beta, 0x0c, p_clone) - // @note 0x05 + 0x07 == 0x0c == external coset generator - - for {} lt(public_inputs_ptr, endpoint_ptr) { public_inputs_ptr := add(public_inputs_ptr, 0x20) } { - /** - * input = public_input[i] - * valid_inputs &= input < p - * temp = input + gamma - * numerator_value *= (β.σ(i) + wᵢ + γ) // σ(i) = 0x05.ωⁱ - * denominator_value *= (β.σ'(i) + wᵢ + γ) // σ'(i) = 0x0c.ωⁱ - * root_1 *= ω - * root_2 *= ω - */ - - let input := calldataload(public_inputs_ptr) - valid_inputs := and(valid_inputs, lt(input, p_clone)) - let temp := addmod(input, gamma, p_clone) - - numerator_value := mulmod(numerator_value, add(root_1, temp), p_clone) - denominator_value := mulmod(denominator_value, add(root_2, temp), p_clone) - - root_1 := mulmod(root_1, work_root, p_clone) - root_2 := mulmod(root_2, work_root, p_clone) - } - - // Revert if not all public inputs are field elements (i.e. < p) - if iszero(valid_inputs) { - mstore(0x00, PUBLIC_INPUT_GE_P_SELECTOR) - revert(0x00, 0x04) - } - - mstore(DELTA_NUMERATOR_LOC, numerator_value) - mstore(DELTA_DENOMINATOR_LOC, denominator_value) - } - - /** - * Compute Plookup delta factor [γ(1 + β)]^{n-k} - * k = num roots cut out of Z_H = 4 - */ - { - let delta_base := mulmod(mload(C_GAMMA_LOC), addmod(mload(C_BETA_LOC), 1, p), p) - let delta_numerator := delta_base - { - let exponent := mload(N_LOC) - let count := 1 - for {} lt(count, exponent) { count := add(count, count) } { - delta_numerator := mulmod(delta_numerator, delta_numerator, p) - } - } - mstore(PLOOKUP_DELTA_NUMERATOR_LOC, delta_numerator) - - let delta_denominator := mulmod(delta_base, delta_base, p) - delta_denominator := mulmod(delta_denominator, delta_denominator, p) - mstore(PLOOKUP_DELTA_DENOMINATOR_LOC, delta_denominator) - } - /** - * Compute lagrange poly and vanishing poly fractions - */ - { - /** - * vanishing_numerator = zeta - * ZETA_POW_N = zeta^n - * vanishing_numerator -= 1 - * accumulating_root = omega_inverse - * work_root = p - accumulating_root - * domain_inverse = domain_inverse - * vanishing_denominator = zeta + work_root - * work_root *= accumulating_root - * vanishing_denominator *= (zeta + work_root) - * work_root *= accumulating_root - * vanishing_denominator *= (zeta + work_root) - * vanishing_denominator *= (zeta + (zeta + accumulating_root)) - * work_root = omega - * lagrange_numerator = vanishing_numerator * domain_inverse - * l_start_denominator = zeta - 1 - * accumulating_root = work_root^2 - * l_end_denominator = accumulating_root^2 * work_root * zeta - 1 - * Note: l_end_denominator term contains a term \omega^5 to cut out 5 roots of unity from vanishing poly - */ - - let zeta := mload(C_ZETA_LOC) - - // compute zeta^n, where n is a power of 2 - let vanishing_numerator := zeta - { - // pow_small - let exponent := mload(N_LOC) - let count := 1 - for {} lt(count, exponent) { count := add(count, count) } { - vanishing_numerator := mulmod(vanishing_numerator, vanishing_numerator, p) - } - } - mstore(ZETA_POW_N_LOC, vanishing_numerator) - vanishing_numerator := addmod(vanishing_numerator, sub(p, 1), p) - - let accumulating_root := mload(OMEGA_INVERSE_LOC) - let work_root := sub(p, accumulating_root) - let domain_inverse := mload(DOMAIN_INVERSE_LOC) - - let vanishing_denominator := addmod(zeta, work_root, p) - work_root := 
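Both the Plookup delta factor and zeta^n above exploit the fact that the exponent n loaded from N_LOC (the circuit size) is a power of two: the loop `for {} lt(count, exponent) { count := add(count, count) }` just squares the accumulator log2(n) times. The same computation as a small free function (assuming, as the verifier does, a power-of-two exponent; the name is illustrative):

pragma solidity ^0.8.0;

// base^n mod p for a power-of-two exponent n, by repeated squaring.
function powPowerOfTwo(uint256 base, uint256 n, uint256 p) pure returns (uint256 result) {
    result = base;
    for (uint256 count = 1; count < n; count += count) {
        result = mulmod(result, result, p);
    }
}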
mulmod(work_root, accumulating_root, p) - vanishing_denominator := mulmod(vanishing_denominator, addmod(zeta, work_root, p), p) - work_root := mulmod(work_root, accumulating_root, p) - vanishing_denominator := mulmod(vanishing_denominator, addmod(zeta, work_root, p), p) - vanishing_denominator := - mulmod(vanishing_denominator, addmod(zeta, mulmod(work_root, accumulating_root, p), p), p) - - work_root := mload(OMEGA_LOC) - - let lagrange_numerator := mulmod(vanishing_numerator, domain_inverse, p) - let l_start_denominator := addmod(zeta, sub(p, 1), p) - - accumulating_root := mulmod(work_root, work_root, p) - - let l_end_denominator := - addmod( - mulmod(mulmod(mulmod(accumulating_root, accumulating_root, p), work_root, p), zeta, p), sub(p, 1), p - ) - - /** - * Compute inversions using Montgomery's batch inversion trick - */ - let accumulator := mload(DELTA_DENOMINATOR_LOC) - let t0 := accumulator - accumulator := mulmod(accumulator, vanishing_denominator, p) - let t1 := accumulator - accumulator := mulmod(accumulator, vanishing_numerator, p) - let t2 := accumulator - accumulator := mulmod(accumulator, l_start_denominator, p) - let t3 := accumulator - accumulator := mulmod(accumulator, mload(PLOOKUP_DELTA_DENOMINATOR_LOC), p) - let t4 := accumulator - { - mstore(0, 0x20) - mstore(0x20, 0x20) - mstore(0x40, 0x20) - mstore(0x60, mulmod(accumulator, l_end_denominator, p)) - mstore(0x80, sub(p, 2)) - mstore(0xa0, p) - if iszero(staticcall(gas(), 0x05, 0x00, 0xc0, 0x00, 0x20)) { - mstore(0x0, MOD_EXP_FAILURE_SELECTOR) - revert(0x00, 0x04) - } - accumulator := mload(0x00) - } - - t4 := mulmod(accumulator, t4, p) - accumulator := mulmod(accumulator, l_end_denominator, p) - - t3 := mulmod(accumulator, t3, p) - accumulator := mulmod(accumulator, mload(PLOOKUP_DELTA_DENOMINATOR_LOC), p) - - t2 := mulmod(accumulator, t2, p) - accumulator := mulmod(accumulator, l_start_denominator, p) - - t1 := mulmod(accumulator, t1, p) - accumulator := mulmod(accumulator, vanishing_numerator, p) - - t0 := mulmod(accumulator, t0, p) - accumulator := mulmod(accumulator, vanishing_denominator, p) - - accumulator := mulmod(mulmod(accumulator, accumulator, p), mload(DELTA_DENOMINATOR_LOC), p) - - mstore(PUBLIC_INPUT_DELTA_LOC, mulmod(mload(DELTA_NUMERATOR_LOC), accumulator, p)) - mstore(ZERO_POLY_LOC, mulmod(vanishing_numerator, t0, p)) - mstore(ZERO_POLY_INVERSE_LOC, mulmod(vanishing_denominator, t1, p)) - mstore(L_START_LOC, mulmod(lagrange_numerator, t2, p)) - mstore(PLOOKUP_DELTA_LOC, mulmod(mload(PLOOKUP_DELTA_NUMERATOR_LOC), t3, p)) - mstore(L_END_LOC, mulmod(lagrange_numerator, t4, p)) - } - - /** - * UltraPlonk Widget Ordering: - * - * 1. Permutation widget - * 2. Plookup widget - * 3. Arithmetic widget - * 4. Fixed base widget (?) - * 5. GenPermSort widget - * 6. Elliptic widget - * 7. 
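The batch inversion above is Montgomery's trick: multiply all the denominators into one running product, invert that single product with the modexp precompile (Fermat's little theorem, x^(p-2) mod p), then walk the running products backwards to peel off each individual inverse with one multiplication apiece, so five inversions cost one modexp. A standalone sketch of the same trick over an array (library and function names are illustrative; zero inputs are not handled):

pragma solidity ^0.8.0;

library BatchInvertSketch {
    uint256 internal constant P =
        21888242871839275222246405745257275088548364400416034343698204186575808495617;

    // x^(P-2) mod P via the modexp precompile at 0x05, as the verifier does.
    function modInverse(uint256 x) internal view returns (uint256) {
        (bool ok, bytes memory out) =
            address(0x05).staticcall(abi.encode(32, 32, 32, x, P - 2, P));
        require(ok, "modexp failed");
        return abi.decode(out, (uint256));
    }

    // Montgomery batch inversion: one modexp for the whole array, one mulmod per element.
    function batchInverse(uint256[] memory xs) internal view returns (uint256[] memory inv) {
        uint256 n = xs.length;
        inv = new uint256[](n);
        uint256[] memory prefix = new uint256[](n);
        uint256 acc = 1;
        for (uint256 i = 0; i < n; i++) {
            acc = mulmod(acc, xs[i], P);
            prefix[i] = acc; // xs[0] * ... * xs[i]
        }
        uint256 accInv = modInverse(acc); // the only inversion
        for (uint256 i = n; i > 0; i--) {
            uint256 j = i - 1;
            inv[j] = mulmod(accInv, j == 0 ? 1 : prefix[j - 1], P);
            accInv = mulmod(accInv, xs[j], P); // now the inverse of prefix[j - 1]
        }
    }
}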
Auxiliary widget - */ - - /** - * COMPUTE PERMUTATION WIDGET EVALUATION - */ - { - let alpha := mload(C_ALPHA_LOC) - let beta := mload(C_BETA_LOC) - let gamma := mload(C_GAMMA_LOC) - - /** - * t1 = (W1 + gamma + beta * ID1) * (W2 + gamma + beta * ID2) - * t2 = (W3 + gamma + beta * ID3) * (W4 + gamma + beta * ID4) - * result = alpha_base * z_eval * t1 * t2 - * t1 = (W1 + gamma + beta * sigma_1_eval) * (W2 + gamma + beta * sigma_2_eval) - * t2 = (W2 + gamma + beta * sigma_3_eval) * (W3 + gamma + beta * sigma_4_eval) - * result -= (alpha_base * z_omega_eval * t1 * t2) - */ - let t1 := - mulmod( - add(add(mload(W1_EVAL_LOC), gamma), mulmod(beta, mload(ID1_EVAL_LOC), p)), - add(add(mload(W2_EVAL_LOC), gamma), mulmod(beta, mload(ID2_EVAL_LOC), p)), - p - ) - let t2 := - mulmod( - add(add(mload(W3_EVAL_LOC), gamma), mulmod(beta, mload(ID3_EVAL_LOC), p)), - add(add(mload(W4_EVAL_LOC), gamma), mulmod(beta, mload(ID4_EVAL_LOC), p)), - p - ) - let result := mulmod(mload(C_ALPHA_BASE_LOC), mulmod(mload(Z_EVAL_LOC), mulmod(t1, t2, p), p), p) - t1 := - mulmod( - add(add(mload(W1_EVAL_LOC), gamma), mulmod(beta, mload(SIGMA1_EVAL_LOC), p)), - add(add(mload(W2_EVAL_LOC), gamma), mulmod(beta, mload(SIGMA2_EVAL_LOC), p)), - p - ) - t2 := - mulmod( - add(add(mload(W3_EVAL_LOC), gamma), mulmod(beta, mload(SIGMA3_EVAL_LOC), p)), - add(add(mload(W4_EVAL_LOC), gamma), mulmod(beta, mload(SIGMA4_EVAL_LOC), p)), - p - ) - result := - addmod( - result, - sub(p, mulmod(mload(C_ALPHA_BASE_LOC), mulmod(mload(Z_OMEGA_EVAL_LOC), mulmod(t1, t2, p), p), p)), - p - ) - - /** - * alpha_base *= alpha - * result += alpha_base . (L_{n-k}(ʓ) . (z(ʓ.ω) - ∆_{PI})) - * alpha_base *= alpha - * result += alpha_base . (L_1(ʓ)(Z(ʓ) - 1)) - * alpha_Base *= alpha - */ - mstore(C_ALPHA_BASE_LOC, mulmod(mload(C_ALPHA_BASE_LOC), mload(C_ALPHA_LOC), p)) - result := - addmod( - result, - mulmod( - mload(C_ALPHA_BASE_LOC), - mulmod( - mload(L_END_LOC), - addmod(mload(Z_OMEGA_EVAL_LOC), sub(p, mload(PUBLIC_INPUT_DELTA_LOC)), p), - p - ), - p - ), - p - ) - mstore(C_ALPHA_BASE_LOC, mulmod(mload(C_ALPHA_BASE_LOC), mload(C_ALPHA_LOC), p)) - mstore( - PERMUTATION_IDENTITY, - addmod( - result, - mulmod( - mload(C_ALPHA_BASE_LOC), - mulmod(mload(L_START_LOC), addmod(mload(Z_EVAL_LOC), sub(p, 1), p), p), - p - ), - p - ) - ) - mstore(C_ALPHA_BASE_LOC, mulmod(mload(C_ALPHA_BASE_LOC), mload(C_ALPHA_LOC), p)) - } - - /** - * COMPUTE PLOOKUP WIDGET EVALUATION - */ - { - /** - * Goal: f = (w1(z) + q2.w1(zω)) + η(w2(z) + qm.w2(zω)) + η²(w3(z) + qc.w_3(zω)) + q3(z).η³ - * f = η.q3(z) - * f += (w3(z) + qc.w_3(zω)) - * f *= η - * f += (w2(z) + qm.w2(zω)) - * f *= η - * f += (w1(z) + q2.w1(zω)) - */ - let f := mulmod(mload(C_ETA_LOC), mload(Q3_EVAL_LOC), p) - f := - addmod(f, addmod(mload(W3_EVAL_LOC), mulmod(mload(QC_EVAL_LOC), mload(W3_OMEGA_EVAL_LOC), p), p), p) - f := mulmod(f, mload(C_ETA_LOC), p) - f := - addmod(f, addmod(mload(W2_EVAL_LOC), mulmod(mload(QM_EVAL_LOC), mload(W2_OMEGA_EVAL_LOC), p), p), p) - f := mulmod(f, mload(C_ETA_LOC), p) - f := - addmod(f, addmod(mload(W1_EVAL_LOC), mulmod(mload(Q2_EVAL_LOC), mload(W1_OMEGA_EVAL_LOC), p), p), p) - - // t(z) = table4(z).η³ + table3(z).η² + table2(z).η + table1(z) - let t := - addmod( - addmod( - addmod( - mulmod(mload(TABLE4_EVAL_LOC), mload(C_ETA_CUBE_LOC), p), - mulmod(mload(TABLE3_EVAL_LOC), mload(C_ETA_SQR_LOC), p), - p - ), - mulmod(mload(TABLE2_EVAL_LOC), mload(C_ETA_LOC), p), - p - ), - mload(TABLE1_EVAL_LOC), - p - ) - - // t(zw) = table4(zw).η³ + table3(zw).η² + table2(zw).η + table1(zw) - let 
t_omega := - addmod( - addmod( - addmod( - mulmod(mload(TABLE4_OMEGA_EVAL_LOC), mload(C_ETA_CUBE_LOC), p), - mulmod(mload(TABLE3_OMEGA_EVAL_LOC), mload(C_ETA_SQR_LOC), p), - p - ), - mulmod(mload(TABLE2_OMEGA_EVAL_LOC), mload(C_ETA_LOC), p), - p - ), - mload(TABLE1_OMEGA_EVAL_LOC), - p - ) - - /** - * Goal: numerator = (TABLE_TYPE_EVAL * f(z) + γ) * (t(z) + βt(zω) + γ(β + 1)) * (β + 1) - * gamma_beta_constant = γ(β + 1) - * numerator = f * TABLE_TYPE_EVAL + gamma - * temp0 = t(z) + t(zω) * β + gamma_beta_constant - * numerator *= temp0 - * numerator *= (β + 1) - * temp0 = alpha * l_1 - * numerator += temp0 - * numerator *= z_lookup(z) - * numerator -= temp0 - */ - let gamma_beta_constant := mulmod(mload(C_GAMMA_LOC), addmod(mload(C_BETA_LOC), 1, p), p) - let numerator := addmod(mulmod(f, mload(TABLE_TYPE_EVAL_LOC), p), mload(C_GAMMA_LOC), p) - let temp0 := addmod(addmod(t, mulmod(t_omega, mload(C_BETA_LOC), p), p), gamma_beta_constant, p) - numerator := mulmod(numerator, temp0, p) - numerator := mulmod(numerator, addmod(mload(C_BETA_LOC), 1, p), p) - temp0 := mulmod(mload(C_ALPHA_LOC), mload(L_START_LOC), p) - numerator := addmod(numerator, temp0, p) - numerator := mulmod(numerator, mload(Z_LOOKUP_EVAL_LOC), p) - numerator := addmod(numerator, sub(p, temp0), p) - - /** - * Goal: denominator = z_lookup(zω)*[s(z) + βs(zω) + γ(1 + β)] - [z_lookup(zω) - [γ(1 + β)]^{n-k}]*α²L_end(z) - * note: delta_factor = [γ(1 + β)]^{n-k} - * denominator = s(z) + βs(zω) + γ(β + 1) - * temp1 = α²L_end(z) - * denominator -= temp1 - * denominator *= z_lookup(zω) - * denominator += temp1 * delta_factor - * PLOOKUP_IDENTITY = (numerator - denominator).alpha_base - * alpha_base *= alpha^3 - */ - let denominator := - addmod( - addmod(mload(S_EVAL_LOC), mulmod(mload(S_OMEGA_EVAL_LOC), mload(C_BETA_LOC), p), p), - gamma_beta_constant, - p - ) - let temp1 := mulmod(mload(C_ALPHA_SQR_LOC), mload(L_END_LOC), p) - denominator := addmod(denominator, sub(p, temp1), p) - denominator := mulmod(denominator, mload(Z_LOOKUP_OMEGA_EVAL_LOC), p) - denominator := addmod(denominator, mulmod(temp1, mload(PLOOKUP_DELTA_LOC), p), p) - - mstore(PLOOKUP_IDENTITY, mulmod(addmod(numerator, sub(p, denominator), p), mload(C_ALPHA_BASE_LOC), p)) - - // update alpha - mstore(C_ALPHA_BASE_LOC, mulmod(mload(C_ALPHA_BASE_LOC), mload(C_ALPHA_CUBE_LOC), p)) - } - - /** - * COMPUTE ARITHMETIC WIDGET EVALUATION - */ - { - /** - * The basic arithmetic gate identity in standard plonk is as follows. - * (w_1 . w_2 . q_m) + (w_1 . q_1) + (w_2 . q_2) + (w_3 . q_3) + (w_4 . q_4) + q_c = 0 - * However, for Ultraplonk, we extend this to support "passing" wires between rows (shown without alpha scaling below): - * q_arith * ( ( (-1/2) * (q_arith - 3) * q_m * w_1 * w_2 + q_1 * w_1 + q_2 * w_2 + q_3 * w_3 + q_4 * w_4 + q_c ) + - * (q_arith - 1)*( α * (q_arith - 2) * (w_1 + w_4 - w_1_omega + q_m) + w_4_omega) ) = 0 - * - * This formula results in several cases depending on q_arith: - * 1. q_arith == 0: Arithmetic gate is completely disabled - * - * 2. q_arith == 1: Everything in the minigate on the right is disabled. The equation is just a standard plonk equation - * with extra wires: q_m * w_1 * w_2 + q_1 * w_1 + q_2 * w_2 + q_3 * w_3 + q_4 * w_4 + q_c = 0 - * - * 3. q_arith == 2: The (w_1 + w_4 - ...) term is disabled. THe equation is: - * (1/2) * q_m * w_1 * w_2 + q_1 * w_1 + q_2 * w_2 + q_3 * w_3 + q_4 * w_4 + q_c + w_4_omega = 0 - * It allows defining w_4 at next index (w_4_omega) in terms of current wire values - * - * 4. 
q_arith == 3: The product of w_1 and w_2 is disabled, but a mini addition gate is enabled. α allows us to split - * the equation into two: - * - * q_1 * w_1 + q_2 * w_2 + q_3 * w_3 + q_4 * w_4 + q_c + 2 * w_4_omega = 0 - * and - * w_1 + w_4 - w_1_omega + q_m = 0 (we are reusing q_m here) - * - * 5. q_arith > 3: The product of w_1 and w_2 is scaled by (q_arith - 3), while the w_4_omega term is scaled by (q_arith - 1). - * The equation can be split into two: - * - * (q_arith - 3)* q_m * w_1 * w_ 2 + q_1 * w_1 + q_2 * w_2 + q_3 * w_3 + q_4 * w_4 + q_c + (q_arith - 1) * w_4_omega = 0 - * and - * w_1 + w_4 - w_1_omega + q_m = 0 - * - * The problem that q_m is used both in both equations can be dealt with by appropriately changing selector values at - * the next gate. Then we can treat (q_arith - 1) as a simulated q_6 selector and scale q_m to handle (q_arith - 3) at - * product. - */ - - let w1q1 := mulmod(mload(W1_EVAL_LOC), mload(Q1_EVAL_LOC), p) - let w2q2 := mulmod(mload(W2_EVAL_LOC), mload(Q2_EVAL_LOC), p) - let w3q3 := mulmod(mload(W3_EVAL_LOC), mload(Q3_EVAL_LOC), p) - let w4q3 := mulmod(mload(W4_EVAL_LOC), mload(Q4_EVAL_LOC), p) - - // @todo - Add a explicit test that hits QARITH == 3 - // w1w2qm := (w_1 . w_2 . q_m . (QARITH_EVAL_LOC - 3)) / 2 - let w1w2qm := - mulmod( - mulmod( - mulmod(mulmod(mload(W1_EVAL_LOC), mload(W2_EVAL_LOC), p), mload(QM_EVAL_LOC), p), - addmod(mload(QARITH_EVAL_LOC), sub(p, 3), p), - p - ), - NEGATIVE_INVERSE_OF_2_MODULO_P, - p - ) - - // (w_1 . w_2 . q_m . (q_arith - 3)) / -2) + (w_1 . q_1) + (w_2 . q_2) + (w_3 . q_3) + (w_4 . q_4) + q_c - let identity := - addmod( - mload(QC_EVAL_LOC), addmod(w4q3, addmod(w3q3, addmod(w2q2, addmod(w1q1, w1w2qm, p), p), p), p), p - ) - - // if q_arith == 3 we evaluate an additional mini addition gate (on top of the regular one), where: - // w_1 + w_4 - w_1_omega + q_m = 0 - // we use this gate to save an addition gate when adding or subtracting non-native field elements - // α * (q_arith - 2) * (w_1 + w_4 - w_1_omega + q_m) - let extra_small_addition_gate_identity := - mulmod( - mload(C_ALPHA_LOC), - mulmod( - addmod(mload(QARITH_EVAL_LOC), sub(p, 2), p), - addmod( - mload(QM_EVAL_LOC), - addmod( - sub(p, mload(W1_OMEGA_EVAL_LOC)), addmod(mload(W1_EVAL_LOC), mload(W4_EVAL_LOC), p), p - ), - p - ), - p - ), - p - ) - - // if q_arith == 2 OR q_arith == 3 we add the 4th wire of the NEXT gate into the arithmetic identity - // N.B. if q_arith > 2, this wire value will be scaled by (q_arith - 1) relative to the other gate wires! - // alpha_base * q_arith * (identity + (q_arith - 1) * (w_4_omega + extra_small_addition_gate_identity)) - mstore( - ARITHMETIC_IDENTITY, - mulmod( - mload(C_ALPHA_BASE_LOC), - mulmod( - mload(QARITH_EVAL_LOC), - addmod( - identity, - mulmod( - addmod(mload(QARITH_EVAL_LOC), sub(p, 1), p), - addmod(mload(W4_OMEGA_EVAL_LOC), extra_small_addition_gate_identity, p), - p - ), - p - ), - p - ), - p - ) - ) - - // update alpha - mstore(C_ALPHA_BASE_LOC, mulmod(mload(C_ALPHA_BASE_LOC), mload(C_ALPHA_SQR_LOC), p)) - } - - /** - * COMPUTE GENPERMSORT WIDGET EVALUATION - */ - { - /** - * D1 = (w2 - w1) - * D2 = (w3 - w2) - * D3 = (w4 - w3) - * D4 = (w1_omega - w4) - * - * α_a = alpha_base - * α_b = alpha_base * α - * α_c = alpha_base * α^2 - * α_d = alpha_base * α^3 - * - * range_accumulator = ( - * D1(D1 - 1)(D1 - 2)(D1 - 3).α_a + - * D2(D2 - 1)(D2 - 2)(D2 - 3).α_b + - * D3(D3 - 1)(D3 - 2)(D3 - 3).α_c + - * D4(D4 - 1)(D4 - 2)(D4 - 3).α_d + - * ) . 
q_sort - */ - let minus_two := sub(p, 2) - let minus_three := sub(p, 3) - let d1 := addmod(mload(W2_EVAL_LOC), sub(p, mload(W1_EVAL_LOC)), p) - let d2 := addmod(mload(W3_EVAL_LOC), sub(p, mload(W2_EVAL_LOC)), p) - let d3 := addmod(mload(W4_EVAL_LOC), sub(p, mload(W3_EVAL_LOC)), p) - let d4 := addmod(mload(W1_OMEGA_EVAL_LOC), sub(p, mload(W4_EVAL_LOC)), p) - - let range_accumulator := - mulmod( - mulmod( - mulmod(addmod(mulmod(d1, d1, p), sub(p, d1), p), addmod(d1, minus_two, p), p), - addmod(d1, minus_three, p), - p - ), - mload(C_ALPHA_BASE_LOC), - p - ) - range_accumulator := - addmod( - range_accumulator, - mulmod( - mulmod( - mulmod(addmod(mulmod(d2, d2, p), sub(p, d2), p), addmod(d2, minus_two, p), p), - addmod(d2, minus_three, p), - p - ), - mulmod(mload(C_ALPHA_BASE_LOC), mload(C_ALPHA_LOC), p), - p - ), - p - ) - range_accumulator := - addmod( - range_accumulator, - mulmod( - mulmod( - mulmod(addmod(mulmod(d3, d3, p), sub(p, d3), p), addmod(d3, minus_two, p), p), - addmod(d3, minus_three, p), - p - ), - mulmod(mload(C_ALPHA_BASE_LOC), mload(C_ALPHA_SQR_LOC), p), - p - ), - p - ) - range_accumulator := - addmod( - range_accumulator, - mulmod( - mulmod( - mulmod(addmod(mulmod(d4, d4, p), sub(p, d4), p), addmod(d4, minus_two, p), p), - addmod(d4, minus_three, p), - p - ), - mulmod(mload(C_ALPHA_BASE_LOC), mload(C_ALPHA_CUBE_LOC), p), - p - ), - p - ) - range_accumulator := mulmod(range_accumulator, mload(QSORT_EVAL_LOC), p) - - mstore(SORT_IDENTITY, range_accumulator) - - // update alpha - mstore(C_ALPHA_BASE_LOC, mulmod(mload(C_ALPHA_BASE_LOC), mload(C_ALPHA_QUAD_LOC), p)) - } - - /** - * COMPUTE ELLIPTIC WIDGET EVALUATION - */ - { - /** - * endo_term = (-x_2) * x_1 * (x_3 * 2 + x_1) * q_beta - * endo_sqr_term = x_2^2 - * endo_sqr_term *= (x_3 - x_1) - * endo_sqr_term *= q_beta^2 - * leftovers = x_2^2 - * leftovers *= x_2 - * leftovers += x_1^2 * (x_3 + x_1) @follow-up Invalid comment in BB widget - * leftovers -= (y_2^2 + y_1^2) - * sign_term = y_2 * y_1 - * sign_term += sign_term - * sign_term *= q_sign - */ - // q_elliptic * (x3 + x2 + x1)(x2 - x1)(x2 - x1) - y2^2 - y1^2 + 2(y2y1)*q_sign = 0 - let x_diff := addmod(mload(X2_EVAL_LOC), sub(p, mload(X1_EVAL_LOC)), p) - let y2_sqr := mulmod(mload(Y2_EVAL_LOC), mload(Y2_EVAL_LOC), p) - let y1_sqr := mulmod(mload(Y1_EVAL_LOC), mload(Y1_EVAL_LOC), p) - let y1y2 := mulmod(mulmod(mload(Y1_EVAL_LOC), mload(Y2_EVAL_LOC), p), mload(QSIGN_LOC), p) - - let x_add_identity := - addmod( - mulmod( - addmod(mload(X3_EVAL_LOC), addmod(mload(X2_EVAL_LOC), mload(X1_EVAL_LOC), p), p), - mulmod(x_diff, x_diff, p), - p - ), - addmod( - sub( - p, - addmod(y2_sqr, y1_sqr, p) - ), - addmod(y1y2, y1y2, p), - p - ), - p - ) - x_add_identity := - mulmod( - mulmod( - x_add_identity, - addmod( - 1, - sub(p, mload(QM_EVAL_LOC)), - p - ), - p - ), - mload(C_ALPHA_BASE_LOC), - p - ) - - // q_elliptic * (x3 + x2 + x1)(x2 - x1)(x2 - x1) - y2^2 - y1^2 + 2(y2y1)*q_sign = 0 - let y1_plus_y3 := addmod( - mload(Y1_EVAL_LOC), - mload(Y3_EVAL_LOC), - p - ) - let y_diff := addmod(mulmod(mload(Y2_EVAL_LOC), mload(QSIGN_LOC), p), sub(p, mload(Y1_EVAL_LOC)), p) - let y_add_identity := - addmod( - mulmod(y1_plus_y3, x_diff, p), - mulmod(addmod(mload(X3_EVAL_LOC), sub(p, mload(X1_EVAL_LOC)), p), y_diff, p), - p - ) - y_add_identity := - mulmod( - mulmod(y_add_identity, addmod(1, sub(p, mload(QM_EVAL_LOC)), p), p), - mulmod(mload(C_ALPHA_BASE_LOC), mload(C_ALPHA_LOC), p), - p - ) - - // ELLIPTIC_IDENTITY = (x_identity + y_identity) * Q_ELLIPTIC_EVAL - mstore( - ELLIPTIC_IDENTITY, 
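The range term used four times in the sort widget above, D(D - 1)(D - 2)(D - 3), vanishes exactly when D is in {0, 1, 2, 3}; weighting the four terms by increasing powers of alpha and by q_sort is how GenPermSort enforces 2-bit steps between adjacent sorted wire values. A one-function sketch of the per-difference term (the name is illustrative; p is the scalar field modulus):

pragma solidity ^0.8.0;

// D(D - 1)(D - 2)(D - 3) mod p; zero iff D is in {0, 1, 2, 3}.
function sortRangeTerm(uint256 d, uint256 p) pure returns (uint256 t) {
    t = mulmod(d, addmod(d, p - 1, p), p); // D(D - 1), written as D^2 - D in the assembly
    t = mulmod(t, addmod(d, p - 2, p), p); // * (D - 2)
    t = mulmod(t, addmod(d, p - 3, p), p); // * (D - 3)
}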
mulmod(addmod(x_add_identity, y_add_identity, p), mload(QELLIPTIC_EVAL_LOC), p) - ) - } - { - /** - * x_pow_4 = (y_1_sqr - curve_b) * x_1; - * y_1_sqr_mul_4 = y_1_sqr + y_1_sqr; - * y_1_sqr_mul_4 += y_1_sqr_mul_4; - * x_1_pow_4_mul_9 = x_pow_4; - * x_1_pow_4_mul_9 += x_1_pow_4_mul_9; - * x_1_pow_4_mul_9 += x_1_pow_4_mul_9; - * x_1_pow_4_mul_9 += x_1_pow_4_mul_9; - * x_1_pow_4_mul_9 += x_pow_4; - * x_1_sqr_mul_3 = x_1_sqr + x_1_sqr + x_1_sqr; - * x_double_identity = (x_3 + x_1 + x_1) * y_1_sqr_mul_4 - x_1_pow_4_mul_9; - * y_double_identity = x_1_sqr_mul_3 * (x_1 - x_3) - (y_1 + y_1) * (y_1 + y_3); - */ - // (x3 + x1 + x1) (4y1*y1) - 9 * x1 * x1 * x1 * x1 = 0 - let x1_sqr := mulmod(mload(X1_EVAL_LOC), mload(X1_EVAL_LOC), p) - let y1_sqr := mulmod(mload(Y1_EVAL_LOC), mload(Y1_EVAL_LOC), p) - let x_pow_4 := mulmod(addmod(y1_sqr, GRUMPKIN_CURVE_B_PARAMETER_NEGATED, p), mload(X1_EVAL_LOC), p) - let y1_sqr_mul_4 := mulmod(y1_sqr, 4, p) - let x1_pow_4_mul_9 := mulmod(x_pow_4, 9, p) - let x1_sqr_mul_3 := mulmod(x1_sqr, 3, p) - let x_double_identity := - addmod( - mulmod( - addmod(mload(X3_EVAL_LOC), addmod(mload(X1_EVAL_LOC), mload(X1_EVAL_LOC), p), p), - y1_sqr_mul_4, - p - ), - sub(p, x1_pow_4_mul_9), - p - ) - // (y1 + y1) (2y1) - (3 * x1 * x1)(x1 - x3) = 0 - let y_double_identity := - addmod( - mulmod(x1_sqr_mul_3, addmod(mload(X1_EVAL_LOC), sub(p, mload(X3_EVAL_LOC)), p), p), - sub( - p, - mulmod( - addmod(mload(Y1_EVAL_LOC), mload(Y1_EVAL_LOC), p), - addmod(mload(Y1_EVAL_LOC), mload(Y3_EVAL_LOC), p), - p - ) - ), - p - ) - x_double_identity := mulmod(x_double_identity, mload(C_ALPHA_BASE_LOC), p) - y_double_identity := - mulmod(y_double_identity, mulmod(mload(C_ALPHA_BASE_LOC), mload(C_ALPHA_LOC), p), p) - x_double_identity := mulmod(x_double_identity, mload(QM_EVAL_LOC), p) - y_double_identity := mulmod(y_double_identity, mload(QM_EVAL_LOC), p) - // ELLIPTIC_IDENTITY += (x_double_identity + y_double_identity) * Q_DOUBLE_EVAL - mstore( - ELLIPTIC_IDENTITY, - addmod( - mload(ELLIPTIC_IDENTITY), - mulmod(addmod(x_double_identity, y_double_identity, p), mload(QELLIPTIC_EVAL_LOC), p), - p - ) - ) - - // update alpha - mstore(C_ALPHA_BASE_LOC, mulmod(mload(C_ALPHA_BASE_LOC), mload(C_ALPHA_QUAD_LOC), p)) - } - - /** - * COMPUTE AUXILIARY WIDGET EVALUATION - */ - { - { - /** - * Non native field arithmetic gate 2 - * _ _ - * / _ _ _ 14 \ - * q_2 . q_4 | (w_1 . w_2) + (w_1 . w_2) + (w_1 . w_4 + w_2 . w_3 - w_3) . 2 - w_3 - w_4 | - * \_ _/ - * - * limb_subproduct = w_1 . w_2_omega + w_1_omega . 
w_2 - * non_native_field_gate_2 = w_1 * w_4 + w_4 * w_3 - w_3_omega - * non_native_field_gate_2 = non_native_field_gate_2 * limb_size - * non_native_field_gate_2 -= w_4_omega - * non_native_field_gate_2 += limb_subproduct - * non_native_field_gate_2 *= q_4 - * limb_subproduct *= limb_size - * limb_subproduct += w_1_omega * w_2_omega - * non_native_field_gate_1 = (limb_subproduct + w_3 + w_4) * q_3 - * non_native_field_gate_3 = (limb_subproduct + w_4 - (w_3_omega + w_4_omega)) * q_m - * non_native_field_identity = (non_native_field_gate_1 + non_native_field_gate_2 + non_native_field_gate_3) * q_2 - */ - - let limb_subproduct := - addmod( - mulmod(mload(W1_EVAL_LOC), mload(W2_OMEGA_EVAL_LOC), p), - mulmod(mload(W1_OMEGA_EVAL_LOC), mload(W2_EVAL_LOC), p), - p - ) - - let non_native_field_gate_2 := - addmod( - addmod( - mulmod(mload(W1_EVAL_LOC), mload(W4_EVAL_LOC), p), - mulmod(mload(W2_EVAL_LOC), mload(W3_EVAL_LOC), p), - p - ), - sub(p, mload(W3_OMEGA_EVAL_LOC)), - p - ) - non_native_field_gate_2 := mulmod(non_native_field_gate_2, LIMB_SIZE, p) - non_native_field_gate_2 := addmod(non_native_field_gate_2, sub(p, mload(W4_OMEGA_EVAL_LOC)), p) - non_native_field_gate_2 := addmod(non_native_field_gate_2, limb_subproduct, p) - non_native_field_gate_2 := mulmod(non_native_field_gate_2, mload(Q4_EVAL_LOC), p) - limb_subproduct := mulmod(limb_subproduct, LIMB_SIZE, p) - limb_subproduct := - addmod(limb_subproduct, mulmod(mload(W1_OMEGA_EVAL_LOC), mload(W2_OMEGA_EVAL_LOC), p), p) - let non_native_field_gate_1 := - mulmod( - addmod(limb_subproduct, sub(p, addmod(mload(W3_EVAL_LOC), mload(W4_EVAL_LOC), p)), p), - mload(Q3_EVAL_LOC), - p - ) - let non_native_field_gate_3 := - mulmod( - addmod( - addmod(limb_subproduct, mload(W4_EVAL_LOC), p), - sub(p, addmod(mload(W3_OMEGA_EVAL_LOC), mload(W4_OMEGA_EVAL_LOC), p)), - p - ), - mload(QM_EVAL_LOC), - p - ) - let non_native_field_identity := - mulmod( - addmod(addmod(non_native_field_gate_1, non_native_field_gate_2, p), non_native_field_gate_3, p), - mload(Q2_EVAL_LOC), - p - ) - - mstore(AUX_NON_NATIVE_FIELD_EVALUATION, non_native_field_identity) - } - - { - /** - * limb_accumulator_1 = w_2_omega; - * limb_accumulator_1 *= SUBLIMB_SHIFT; - * limb_accumulator_1 += w_1_omega; - * limb_accumulator_1 *= SUBLIMB_SHIFT; - * limb_accumulator_1 += w_3; - * limb_accumulator_1 *= SUBLIMB_SHIFT; - * limb_accumulator_1 += w_2; - * limb_accumulator_1 *= SUBLIMB_SHIFT; - * limb_accumulator_1 += w_1; - * limb_accumulator_1 -= w_4; - * limb_accumulator_1 *= q_4; - */ - let limb_accumulator_1 := mulmod(mload(W2_OMEGA_EVAL_LOC), SUBLIMB_SHIFT, p) - limb_accumulator_1 := addmod(limb_accumulator_1, mload(W1_OMEGA_EVAL_LOC), p) - limb_accumulator_1 := mulmod(limb_accumulator_1, SUBLIMB_SHIFT, p) - limb_accumulator_1 := addmod(limb_accumulator_1, mload(W3_EVAL_LOC), p) - limb_accumulator_1 := mulmod(limb_accumulator_1, SUBLIMB_SHIFT, p) - limb_accumulator_1 := addmod(limb_accumulator_1, mload(W2_EVAL_LOC), p) - limb_accumulator_1 := mulmod(limb_accumulator_1, SUBLIMB_SHIFT, p) - limb_accumulator_1 := addmod(limb_accumulator_1, mload(W1_EVAL_LOC), p) - limb_accumulator_1 := addmod(limb_accumulator_1, sub(p, mload(W4_EVAL_LOC)), p) - limb_accumulator_1 := mulmod(limb_accumulator_1, mload(Q4_EVAL_LOC), p) - - /** - * limb_accumulator_2 = w_3_omega; - * limb_accumulator_2 *= SUBLIMB_SHIFT; - * limb_accumulator_2 += w_2_omega; - * limb_accumulator_2 *= SUBLIMB_SHIFT; - * limb_accumulator_2 += w_1_omega; - * limb_accumulator_2 *= SUBLIMB_SHIFT; - * limb_accumulator_2 += w_4; - * 
limb_accumulator_2 *= SUBLIMB_SHIFT; - * limb_accumulator_2 += w_3; - * limb_accumulator_2 -= w_4_omega; - * limb_accumulator_2 *= q_m; - */ - let limb_accumulator_2 := mulmod(mload(W3_OMEGA_EVAL_LOC), SUBLIMB_SHIFT, p) - limb_accumulator_2 := addmod(limb_accumulator_2, mload(W2_OMEGA_EVAL_LOC), p) - limb_accumulator_2 := mulmod(limb_accumulator_2, SUBLIMB_SHIFT, p) - limb_accumulator_2 := addmod(limb_accumulator_2, mload(W1_OMEGA_EVAL_LOC), p) - limb_accumulator_2 := mulmod(limb_accumulator_2, SUBLIMB_SHIFT, p) - limb_accumulator_2 := addmod(limb_accumulator_2, mload(W4_EVAL_LOC), p) - limb_accumulator_2 := mulmod(limb_accumulator_2, SUBLIMB_SHIFT, p) - limb_accumulator_2 := addmod(limb_accumulator_2, mload(W3_EVAL_LOC), p) - limb_accumulator_2 := addmod(limb_accumulator_2, sub(p, mload(W4_OMEGA_EVAL_LOC)), p) - limb_accumulator_2 := mulmod(limb_accumulator_2, mload(QM_EVAL_LOC), p) - - mstore( - AUX_LIMB_ACCUMULATOR_EVALUATION, - mulmod(addmod(limb_accumulator_1, limb_accumulator_2, p), mload(Q3_EVAL_LOC), p) - ) - } - - { - /** - * memory_record_check = w_3; - * memory_record_check *= eta; - * memory_record_check += w_2; - * memory_record_check *= eta; - * memory_record_check += w_1; - * memory_record_check *= eta; - * memory_record_check += q_c; - * - * partial_record_check = memory_record_check; - * - * memory_record_check -= w_4; - */ - - let memory_record_check := mulmod(mload(W3_EVAL_LOC), mload(C_ETA_LOC), p) - memory_record_check := addmod(memory_record_check, mload(W2_EVAL_LOC), p) - memory_record_check := mulmod(memory_record_check, mload(C_ETA_LOC), p) - memory_record_check := addmod(memory_record_check, mload(W1_EVAL_LOC), p) - memory_record_check := mulmod(memory_record_check, mload(C_ETA_LOC), p) - memory_record_check := addmod(memory_record_check, mload(QC_EVAL_LOC), p) - - let partial_record_check := memory_record_check - memory_record_check := addmod(memory_record_check, sub(p, mload(W4_EVAL_LOC)), p) - - mstore(AUX_MEMORY_EVALUATION, memory_record_check) - - // index_delta = w_1_omega - w_1 - let index_delta := addmod(mload(W1_OMEGA_EVAL_LOC), sub(p, mload(W1_EVAL_LOC)), p) - // record_delta = w_4_omega - w_4 - let record_delta := addmod(mload(W4_OMEGA_EVAL_LOC), sub(p, mload(W4_EVAL_LOC)), p) - // index_is_monotonically_increasing = index_delta * (index_delta - 1) - let index_is_monotonically_increasing := mulmod(index_delta, addmod(index_delta, sub(p, 1), p), p) - - // adjacent_values_match_if_adjacent_indices_match = record_delta * (1 - index_delta) - let adjacent_values_match_if_adjacent_indices_match := - mulmod(record_delta, addmod(1, sub(p, index_delta), p), p) - - // AUX_ROM_CONSISTENCY_EVALUATION = ((adjacent_values_match_if_adjacent_indices_match * alpha) + index_is_monotonically_increasing) * alpha + partial_record_check - mstore( - AUX_ROM_CONSISTENCY_EVALUATION, - addmod( - mulmod( - addmod( - mulmod(adjacent_values_match_if_adjacent_indices_match, mload(C_ALPHA_LOC), p), - index_is_monotonically_increasing, - p - ), - mload(C_ALPHA_LOC), - p - ), - memory_record_check, - p - ) - ) - - { - /** - * next_gate_access_type = w_3_omega; - * next_gate_access_type *= eta; - * next_gate_access_type += w_2_omega; - * next_gate_access_type *= eta; - * next_gate_access_type += w_1_omega; - * next_gate_access_type *= eta; - * next_gate_access_type = w_4_omega - next_gate_access_type; - */ - let next_gate_access_type := mulmod(mload(W3_OMEGA_EVAL_LOC), mload(C_ETA_LOC), p) - next_gate_access_type := addmod(next_gate_access_type, mload(W2_OMEGA_EVAL_LOC), p) - 
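The memory_record_check above (and the next_gate_access_type begun just below) is a Horner-style accumulation in eta: ((w3.eta + w2).eta + w1).eta + q_c equals w3.eta^3 + w2.eta^2 + w1.eta + q_c, so an entire ROM/RAM record is committed as a single field element. A small sketch of that accumulation (the function name is illustrative):

pragma solidity ^0.8.0;

// Horner accumulation in eta: returns w3*eta^3 + w2*eta^2 + w1*eta + c (mod p),
// the single-field-element encoding used for ROM/RAM records.
function etaRecord(uint256 w3, uint256 w2, uint256 w1, uint256 c, uint256 eta, uint256 p)
    pure
    returns (uint256 acc)
{
    acc = mulmod(w3, eta, p);
    acc = addmod(acc, w2, p);
    acc = mulmod(acc, eta, p);
    acc = addmod(acc, w1, p);
    acc = mulmod(acc, eta, p);
    acc = addmod(acc, c, p);
}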
next_gate_access_type := mulmod(next_gate_access_type, mload(C_ETA_LOC), p) - next_gate_access_type := addmod(next_gate_access_type, mload(W1_OMEGA_EVAL_LOC), p) - next_gate_access_type := mulmod(next_gate_access_type, mload(C_ETA_LOC), p) - next_gate_access_type := addmod(mload(W4_OMEGA_EVAL_LOC), sub(p, next_gate_access_type), p) - - // value_delta = w_3_omega - w_3 - let value_delta := addmod(mload(W3_OMEGA_EVAL_LOC), sub(p, mload(W3_EVAL_LOC)), p) - // adjacent_values_match_if_adjacent_indices_match_and_next_access_is_a_read_operation = (1 - index_delta) * value_delta * (1 - next_gate_access_type); - - let adjacent_values_match_if_adjacent_indices_match_and_next_access_is_a_read_operation := - mulmod( - addmod(1, sub(p, index_delta), p), - mulmod(value_delta, addmod(1, sub(p, next_gate_access_type), p), p), - p - ) - - // AUX_RAM_CONSISTENCY_EVALUATION - - /** - * access_type = w_4 - partial_record_check - * access_check = access_type^2 - access_type - * next_gate_access_type_is_boolean = next_gate_access_type^2 - next_gate_access_type - * RAM_consistency_check_identity = adjacent_values_match_if_adjacent_indices_match_and_next_access_is_a_read_operation; - * RAM_consistency_check_identity *= alpha; - * RAM_consistency_check_identity += index_is_monotonically_increasing; - * RAM_consistency_check_identity *= alpha; - * RAM_consistency_check_identity += next_gate_access_type_is_boolean; - * RAM_consistency_check_identity *= alpha; - * RAM_consistency_check_identity += access_check; - */ - - let access_type := addmod(mload(W4_EVAL_LOC), sub(p, partial_record_check), p) - let access_check := mulmod(access_type, addmod(access_type, sub(p, 1), p), p) - let next_gate_access_type_is_boolean := - mulmod(next_gate_access_type, addmod(next_gate_access_type, sub(p, 1), p), p) - let RAM_cci := - mulmod( - adjacent_values_match_if_adjacent_indices_match_and_next_access_is_a_read_operation, - mload(C_ALPHA_LOC), - p - ) - RAM_cci := addmod(RAM_cci, index_is_monotonically_increasing, p) - RAM_cci := mulmod(RAM_cci, mload(C_ALPHA_LOC), p) - RAM_cci := addmod(RAM_cci, next_gate_access_type_is_boolean, p) - RAM_cci := mulmod(RAM_cci, mload(C_ALPHA_LOC), p) - RAM_cci := addmod(RAM_cci, access_check, p) - - mstore(AUX_RAM_CONSISTENCY_EVALUATION, RAM_cci) - } - - { - // timestamp_delta = w_2_omega - w_2 - let timestamp_delta := addmod(mload(W2_OMEGA_EVAL_LOC), sub(p, mload(W2_EVAL_LOC)), p) - - // RAM_timestamp_check_identity = (1 - index_delta) * timestamp_delta - w_3 - let RAM_timestamp_check_identity := - addmod( - mulmod(timestamp_delta, addmod(1, sub(p, index_delta), p), p), sub(p, mload(W3_EVAL_LOC)), p - ) - - /** - * memory_identity = ROM_consistency_check_identity * q_2; - * memory_identity += RAM_timestamp_check_identity * q_4; - * memory_identity += memory_record_check * q_m; - * memory_identity *= q_1; - * memory_identity += (RAM_consistency_check_identity * q_arith); - * - * auxiliary_identity = memory_identity + non_native_field_identity + limb_accumulator_identity; - * auxiliary_identity *= q_aux; - * auxiliary_identity *= alpha_base; - */ - let memory_identity := mulmod(mload(AUX_ROM_CONSISTENCY_EVALUATION), mload(Q2_EVAL_LOC), p) - memory_identity := - addmod(memory_identity, mulmod(RAM_timestamp_check_identity, mload(Q4_EVAL_LOC), p), p) - memory_identity := - addmod(memory_identity, mulmod(mload(AUX_MEMORY_EVALUATION), mload(QM_EVAL_LOC), p), p) - memory_identity := mulmod(memory_identity, mload(Q1_EVAL_LOC), p) - memory_identity := - addmod( - memory_identity, 
mulmod(mload(AUX_RAM_CONSISTENCY_EVALUATION), mload(QARITH_EVAL_LOC), p), p - ) - - let auxiliary_identity := addmod(memory_identity, mload(AUX_NON_NATIVE_FIELD_EVALUATION), p) - auxiliary_identity := addmod(auxiliary_identity, mload(AUX_LIMB_ACCUMULATOR_EVALUATION), p) - auxiliary_identity := mulmod(auxiliary_identity, mload(QAUX_EVAL_LOC), p) - auxiliary_identity := mulmod(auxiliary_identity, mload(C_ALPHA_BASE_LOC), p) - - mstore(AUX_IDENTITY, auxiliary_identity) - - // update alpha - mstore(C_ALPHA_BASE_LOC, mulmod(mload(C_ALPHA_BASE_LOC), mload(C_ALPHA_CUBE_LOC), p)) - } - } - } - - { - /** - * quotient = ARITHMETIC_IDENTITY - * quotient += PERMUTATION_IDENTITY - * quotient += PLOOKUP_IDENTITY - * quotient += SORT_IDENTITY - * quotient += ELLIPTIC_IDENTITY - * quotient += AUX_IDENTITY - * quotient *= ZERO_POLY_INVERSE - */ - mstore( - QUOTIENT_EVAL_LOC, - mulmod( - addmod( - addmod( - addmod( - addmod( - addmod(mload(PERMUTATION_IDENTITY), mload(PLOOKUP_IDENTITY), p), - mload(ARITHMETIC_IDENTITY), - p - ), - mload(SORT_IDENTITY), - p - ), - mload(ELLIPTIC_IDENTITY), - p - ), - mload(AUX_IDENTITY), - p - ), - mload(ZERO_POLY_INVERSE_LOC), - p - ) - ) - } - - /** - * GENERATE NU AND SEPARATOR CHALLENGES - */ - { - let current_challenge := mload(C_CURRENT_LOC) - // get a calldata pointer that points to the start of the data we want to copy - let calldata_ptr := add(calldataload(0x04), 0x24) - - calldata_ptr := add(calldata_ptr, NU_CALLDATA_SKIP_LENGTH) - - mstore(NU_CHALLENGE_INPUT_LOC_A, current_challenge) - mstore(NU_CHALLENGE_INPUT_LOC_B, mload(QUOTIENT_EVAL_LOC)) - calldatacopy(NU_CHALLENGE_INPUT_LOC_C, calldata_ptr, NU_INPUT_LENGTH) - - // hash length = (0x20 + num field elements), we include the previous challenge in the hash - let challenge := keccak256(NU_CHALLENGE_INPUT_LOC_A, add(NU_INPUT_LENGTH, 0x40)) - - mstore(C_V0_LOC, mod(challenge, p)) - // We need THIRTY-ONE independent nu challenges! 
- mstore(0x00, challenge) - mstore8(0x20, 0x01) - mstore(C_V1_LOC, mod(keccak256(0x00, 0x21), p)) - mstore8(0x20, 0x02) - mstore(C_V2_LOC, mod(keccak256(0x00, 0x21), p)) - mstore8(0x20, 0x03) - mstore(C_V3_LOC, mod(keccak256(0x00, 0x21), p)) - mstore8(0x20, 0x04) - mstore(C_V4_LOC, mod(keccak256(0x00, 0x21), p)) - mstore8(0x20, 0x05) - mstore(C_V5_LOC, mod(keccak256(0x00, 0x21), p)) - mstore8(0x20, 0x06) - mstore(C_V6_LOC, mod(keccak256(0x00, 0x21), p)) - mstore8(0x20, 0x07) - mstore(C_V7_LOC, mod(keccak256(0x00, 0x21), p)) - mstore8(0x20, 0x08) - mstore(C_V8_LOC, mod(keccak256(0x00, 0x21), p)) - mstore8(0x20, 0x09) - mstore(C_V9_LOC, mod(keccak256(0x00, 0x21), p)) - mstore8(0x20, 0x0a) - mstore(C_V10_LOC, mod(keccak256(0x00, 0x21), p)) - mstore8(0x20, 0x0b) - mstore(C_V11_LOC, mod(keccak256(0x00, 0x21), p)) - mstore8(0x20, 0x0c) - mstore(C_V12_LOC, mod(keccak256(0x00, 0x21), p)) - mstore8(0x20, 0x0d) - mstore(C_V13_LOC, mod(keccak256(0x00, 0x21), p)) - mstore8(0x20, 0x0e) - mstore(C_V14_LOC, mod(keccak256(0x00, 0x21), p)) - mstore8(0x20, 0x0f) - mstore(C_V15_LOC, mod(keccak256(0x00, 0x21), p)) - mstore8(0x20, 0x10) - mstore(C_V16_LOC, mod(keccak256(0x00, 0x21), p)) - mstore8(0x20, 0x11) - mstore(C_V17_LOC, mod(keccak256(0x00, 0x21), p)) - mstore8(0x20, 0x12) - mstore(C_V18_LOC, mod(keccak256(0x00, 0x21), p)) - mstore8(0x20, 0x13) - mstore(C_V19_LOC, mod(keccak256(0x00, 0x21), p)) - mstore8(0x20, 0x14) - mstore(C_V20_LOC, mod(keccak256(0x00, 0x21), p)) - mstore8(0x20, 0x15) - mstore(C_V21_LOC, mod(keccak256(0x00, 0x21), p)) - mstore8(0x20, 0x16) - mstore(C_V22_LOC, mod(keccak256(0x00, 0x21), p)) - mstore8(0x20, 0x17) - mstore(C_V23_LOC, mod(keccak256(0x00, 0x21), p)) - mstore8(0x20, 0x18) - mstore(C_V24_LOC, mod(keccak256(0x00, 0x21), p)) - mstore8(0x20, 0x19) - mstore(C_V25_LOC, mod(keccak256(0x00, 0x21), p)) - mstore8(0x20, 0x1a) - mstore(C_V26_LOC, mod(keccak256(0x00, 0x21), p)) - mstore8(0x20, 0x1b) - mstore(C_V27_LOC, mod(keccak256(0x00, 0x21), p)) - mstore8(0x20, 0x1c) - mstore(C_V28_LOC, mod(keccak256(0x00, 0x21), p)) - mstore8(0x20, 0x1d) - mstore(C_V29_LOC, mod(keccak256(0x00, 0x21), p)) - - // @follow-up - Why are both v29 and v30 using appending 0x1d to the prior challenge and hashing, should it not change? 
- mstore8(0x20, 0x1d) - challenge := keccak256(0x00, 0x21) - mstore(C_V30_LOC, mod(challenge, p)) - - // separator - mstore(0x00, challenge) - mstore(0x20, mload(PI_Z_Y_LOC)) - mstore(0x40, mload(PI_Z_X_LOC)) - mstore(0x60, mload(PI_Z_OMEGA_Y_LOC)) - mstore(0x80, mload(PI_Z_OMEGA_X_LOC)) - - mstore(C_U_LOC, mod(keccak256(0x00, 0xa0), p)) - } - - let success := 0 - // VALIDATE T1 - { - let x := mload(T1_X_LOC) - let y := mload(T1_Y_LOC) - let xx := mulmod(x, x, q) - // validate on curve - success := eq(mulmod(y, y, q), addmod(mulmod(x, xx, q), 3, q)) - mstore(ACCUMULATOR_X_LOC, x) - mstore(add(ACCUMULATOR_X_LOC, 0x20), y) - } - // VALIDATE T2 - { - let x := mload(T2_X_LOC) // 0x1400 - let y := mload(T2_Y_LOC) // 0x1420 - let xx := mulmod(x, x, q) - // validate on curve - success := and(success, eq(mulmod(y, y, q), addmod(mulmod(x, xx, q), 3, q))) - mstore(0x00, x) - mstore(0x20, y) - } - mstore(0x40, mload(ZETA_POW_N_LOC)) - // accumulator_2 = [T2].zeta^n - success := and(success, staticcall(gas(), 7, 0x00, 0x60, ACCUMULATOR2_X_LOC, 0x40)) - // accumulator = [T1] + accumulator_2 - success := and(success, staticcall(gas(), 6, ACCUMULATOR_X_LOC, 0x80, ACCUMULATOR_X_LOC, 0x40)) - - // VALIDATE T3 - { - let x := mload(T3_X_LOC) - let y := mload(T3_Y_LOC) - let xx := mulmod(x, x, q) - // validate on curve - success := and(success, eq(mulmod(y, y, q), addmod(mulmod(x, xx, q), 3, q))) - mstore(0x00, x) - mstore(0x20, y) - } - mstore(0x40, mulmod(mload(ZETA_POW_N_LOC), mload(ZETA_POW_N_LOC), p)) - // accumulator_2 = [T3].zeta^{2n} - success := and(success, staticcall(gas(), 7, 0x00, 0x60, ACCUMULATOR2_X_LOC, 0x40)) - // accumulator = accumulator + accumulator_2 - success := and(success, staticcall(gas(), 6, ACCUMULATOR_X_LOC, 0x80, ACCUMULATOR_X_LOC, 0x40)) - - // VALIDATE T4 - { - let x := mload(T4_X_LOC) - let y := mload(T4_Y_LOC) - let xx := mulmod(x, x, q) - // validate on curve - success := and(success, eq(mulmod(y, y, q), addmod(mulmod(x, xx, q), 3, q))) - mstore(0x00, x) - mstore(0x20, y) - } - mstore(0x40, mulmod(mulmod(mload(ZETA_POW_N_LOC), mload(ZETA_POW_N_LOC), p), mload(ZETA_POW_N_LOC), p)) - // accumulator_2 = [T4].zeta^{3n} - success := and(success, staticcall(gas(), 7, 0x00, 0x60, ACCUMULATOR2_X_LOC, 0x40)) - // accumulator = accumulator + accumulator_2 - success := and(success, staticcall(gas(), 6, ACCUMULATOR_X_LOC, 0x80, ACCUMULATOR_X_LOC, 0x40)) - - // VALIDATE W1 - { - let x := mload(W1_X_LOC) - let y := mload(W1_Y_LOC) - let xx := mulmod(x, x, q) - // validate on curve - success := and(success, eq(mulmod(y, y, q), addmod(mulmod(x, xx, q), 3, q))) - mstore(0x00, x) - mstore(0x20, y) - } - mstore(0x40, mulmod(addmod(mload(C_U_LOC), 0x1, p), mload(C_V0_LOC), p)) - // accumulator_2 = v0.(u + 1).[W1] - success := and(success, staticcall(gas(), 7, 0x00, 0x60, ACCUMULATOR2_X_LOC, 0x40)) - // accumulator = accumulator + accumulator_2 - success := and(success, staticcall(gas(), 6, ACCUMULATOR_X_LOC, 0x80, ACCUMULATOR_X_LOC, 0x40)) - - // VALIDATE W2 - { - let x := mload(W2_X_LOC) - let y := mload(W2_Y_LOC) - let xx := mulmod(x, x, q) - // validate on curve - success := and(success, eq(mulmod(y, y, q), addmod(mulmod(x, xx, q), 3, q))) - mstore(0x00, x) - mstore(0x20, y) - } - mstore(0x40, mulmod(addmod(mload(C_U_LOC), 0x1, p), mload(C_V1_LOC), p)) - // accumulator_2 = v1.(u + 1).[W2] - success := and(success, staticcall(gas(), 7, 0x00, 0x60, ACCUMULATOR2_X_LOC, 0x40)) - // accumulator = accumulator + accumulator_2 - success := and(success, staticcall(gas(), 6, ACCUMULATOR_X_LOC, 0x80, 
ACCUMULATOR_X_LOC, 0x40)) - - // VALIDATE W3 - { - let x := mload(W3_X_LOC) - let y := mload(W3_Y_LOC) - let xx := mulmod(x, x, q) - // validate on curve - success := and(success, eq(mulmod(y, y, q), addmod(mulmod(x, xx, q), 3, q))) - mstore(0x00, x) - mstore(0x20, y) - } - mstore(0x40, mulmod(addmod(mload(C_U_LOC), 0x1, p), mload(C_V2_LOC), p)) - // accumulator_2 = v2.(u + 1).[W3] - success := and(success, staticcall(gas(), 7, 0x00, 0x60, ACCUMULATOR2_X_LOC, 0x40)) - // accumulator = accumulator + accumulator_2 - success := and(success, staticcall(gas(), 6, ACCUMULATOR_X_LOC, 0x80, ACCUMULATOR_X_LOC, 0x40)) - - // VALIDATE W4 - { - let x := mload(W4_X_LOC) - let y := mload(W4_Y_LOC) - let xx := mulmod(x, x, q) - // validate on curve - success := and(success, eq(mulmod(y, y, q), addmod(mulmod(x, xx, q), 3, q))) - mstore(0x00, x) - mstore(0x20, y) - } - mstore(0x40, mulmod(addmod(mload(C_U_LOC), 0x1, p), mload(C_V3_LOC), p)) - // accumulator_2 = v3.(u + 1).[W4] - success := and(success, staticcall(gas(), 7, 0x00, 0x60, ACCUMULATOR2_X_LOC, 0x40)) - // accumulator = accumulator + accumulator_2 - success := and(success, staticcall(gas(), 6, ACCUMULATOR_X_LOC, 0x80, ACCUMULATOR_X_LOC, 0x40)) - - // VALIDATE S - { - let x := mload(S_X_LOC) - let y := mload(S_Y_LOC) - let xx := mulmod(x, x, q) - // validate on curve - success := and(success, eq(mulmod(y, y, q), addmod(mulmod(x, xx, q), 3, q))) - mstore(0x00, x) - mstore(0x20, y) - } - mstore(0x40, mulmod(addmod(mload(C_U_LOC), 0x1, p), mload(C_V4_LOC), p)) - // accumulator_2 = v4.(u + 1).[S] - success := and(success, staticcall(gas(), 7, 0x00, 0x60, ACCUMULATOR2_X_LOC, 0x40)) - // accumulator = accumulator + accumulator_2 - success := and(success, staticcall(gas(), 6, ACCUMULATOR_X_LOC, 0x80, ACCUMULATOR_X_LOC, 0x40)) - - // VALIDATE Z - { - let x := mload(Z_X_LOC) - let y := mload(Z_Y_LOC) - let xx := mulmod(x, x, q) - // validate on curve - success := and(success, eq(mulmod(y, y, q), addmod(mulmod(x, xx, q), 3, q))) - mstore(0x00, x) - mstore(0x20, y) - } - mstore(0x40, mulmod(addmod(mload(C_U_LOC), 0x1, p), mload(C_V5_LOC), p)) - // accumulator_2 = v5.(u + 1).[Z] - success := and(success, staticcall(gas(), 7, 0x00, 0x60, ACCUMULATOR2_X_LOC, 0x40)) - // accumulator = accumulator + accumulator_2 - success := and(success, staticcall(gas(), 6, ACCUMULATOR_X_LOC, 0x80, ACCUMULATOR_X_LOC, 0x40)) - - // VALIDATE Z_LOOKUP - { - let x := mload(Z_LOOKUP_X_LOC) - let y := mload(Z_LOOKUP_Y_LOC) - let xx := mulmod(x, x, q) - // validate on curve - success := and(success, eq(mulmod(y, y, q), addmod(mulmod(x, xx, q), 3, q))) - mstore(0x00, x) - mstore(0x20, y) - } - mstore(0x40, mulmod(addmod(mload(C_U_LOC), 0x1, p), mload(C_V6_LOC), p)) - // accumulator_2 = v6.(u + 1).[Z_LOOKUP] - success := and(success, staticcall(gas(), 7, 0x00, 0x60, ACCUMULATOR2_X_LOC, 0x40)) - // accumulator = accumulator + accumulator_2 - success := and(success, staticcall(gas(), 6, ACCUMULATOR_X_LOC, 0x80, ACCUMULATOR_X_LOC, 0x40)) - - // VALIDATE Q1 - { - let x := mload(Q1_X_LOC) - let y := mload(Q1_Y_LOC) - let xx := mulmod(x, x, q) - // validate on curve - success := and(success, eq(mulmod(y, y, q), addmod(mulmod(x, xx, q), 3, q))) - mstore(0x00, x) - mstore(0x20, y) - } - mstore(0x40, mload(C_V7_LOC)) - // accumulator_2 = v7.[Q1] - success := and(success, staticcall(gas(), 7, 0x00, 0x60, ACCUMULATOR2_X_LOC, 0x40)) - // accumulator = accumulator + accumulator_2 - success := and(success, staticcall(gas(), 6, ACCUMULATOR_X_LOC, 0x80, ACCUMULATOR_X_LOC, 0x40)) - - // VALIDATE Q2 - { 
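Every VALIDATE block in this stretch repeats one pattern: check that the committed point lies on BN254 (y^2 = x^3 + 3 over the base field q), multiply it by its nu scalar (times (u + 1) for commitments that are also opened at zeta.omega) with the ecMul precompile at 0x07, and fold the result into the running accumulator with the ecAdd precompile at 0x06. A hedged sketch of that pattern in plain Solidity rather than scratch-space assembly (struct, library and function names are illustrative):

pragma solidity ^0.8.0;

library G1Sketch {
    // BN254 base field modulus (the coordinate field), `q` in the assembly.
    uint256 internal constant Q =
        21888242871839275222246405745257275088696311157297823662689037894645226208583;

    struct Point { uint256 x; uint256 y; }

    // The on-curve check used by each VALIDATE block: y^2 == x^3 + 3 (mod q).
    function isOnCurve(Point memory pt) internal pure returns (bool) {
        uint256 lhs = mulmod(pt.y, pt.y, Q);
        uint256 rhs = addmod(mulmod(mulmod(pt.x, pt.x, Q), pt.x, Q), 3, Q);
        return lhs == rhs;
    }

    // acc + scalar * pt, via the ecMul (0x07) and ecAdd (0x06) precompiles.
    function accumulate(Point memory acc, Point memory pt, uint256 scalar)
        internal view returns (Point memory out)
    {
        (bool okMul, bytes memory mulOut) =
            address(0x07).staticcall(abi.encode(pt.x, pt.y, scalar));
        require(okMul, "ecMul failed");
        (uint256 mx, uint256 my) = abi.decode(mulOut, (uint256, uint256));
        (bool okAdd, bytes memory addOut) =
            address(0x06).staticcall(abi.encode(acc.x, acc.y, mx, my));
        require(okAdd, "ecAdd failed");
        (out.x, out.y) = abi.decode(addOut, (uint256, uint256));
    }
}

The assembly version ANDs each precompile's return flag into `success` instead of reverting per step, deferring the failure check.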
- let x := mload(Q2_X_LOC) - let y := mload(Q2_Y_LOC) - let xx := mulmod(x, x, q) - // validate on curve - success := and(success, eq(mulmod(y, y, q), addmod(mulmod(x, xx, q), 3, q))) - mstore(0x00, x) - mstore(0x20, y) - } - mstore(0x40, mload(C_V8_LOC)) - // accumulator_2 = v8.[Q2] - success := and(success, staticcall(gas(), 7, 0x00, 0x60, ACCUMULATOR2_X_LOC, 0x40)) - // accumulator = accumulator + accumulator_2 - success := and(success, staticcall(gas(), 6, ACCUMULATOR_X_LOC, 0x80, ACCUMULATOR_X_LOC, 0x40)) - - // VALIDATE Q3 - { - let x := mload(Q3_X_LOC) - let y := mload(Q3_Y_LOC) - let xx := mulmod(x, x, q) - // validate on curve - success := and(success, eq(mulmod(y, y, q), addmod(mulmod(x, xx, q), 3, q))) - mstore(0x00, x) - mstore(0x20, y) - } - mstore(0x40, mload(C_V9_LOC)) - // accumulator_2 = v9.[Q3] - success := and(success, staticcall(gas(), 7, 0x00, 0x60, ACCUMULATOR2_X_LOC, 0x40)) - // accumulator = accumulator + accumulator_2 - success := and(success, staticcall(gas(), 6, ACCUMULATOR_X_LOC, 0x80, ACCUMULATOR_X_LOC, 0x40)) - - // VALIDATE Q4 - { - let x := mload(Q4_X_LOC) - let y := mload(Q4_Y_LOC) - let xx := mulmod(x, x, q) - // validate on curve - success := and(success, eq(mulmod(y, y, q), addmod(mulmod(x, xx, q), 3, q))) - mstore(0x00, x) - mstore(0x20, y) - } - mstore(0x40, mload(C_V10_LOC)) - // accumulator_2 = v10.[Q4] - success := and(success, staticcall(gas(), 7, 0x00, 0x60, ACCUMULATOR2_X_LOC, 0x40)) - // accumulator = accumulator + accumulator_2 - success := and(success, staticcall(gas(), 6, ACCUMULATOR_X_LOC, 0x80, ACCUMULATOR_X_LOC, 0x40)) - - // VALIDATE QM - { - let x := mload(QM_X_LOC) - let y := mload(QM_Y_LOC) - let xx := mulmod(x, x, q) - // validate on curve - success := and(success, eq(mulmod(y, y, q), addmod(mulmod(x, xx, q), 3, q))) - mstore(0x00, x) - mstore(0x20, y) - } - mstore(0x40, mload(C_V11_LOC)) - // accumulator_2 = v11.[Q;] - success := and(success, staticcall(gas(), 7, 0x00, 0x60, ACCUMULATOR2_X_LOC, 0x40)) - // accumulator = accumulator + accumulator_2 - success := and(success, staticcall(gas(), 6, ACCUMULATOR_X_LOC, 0x80, ACCUMULATOR_X_LOC, 0x40)) - - // VALIDATE QC - { - let x := mload(QC_X_LOC) - let y := mload(QC_Y_LOC) - let xx := mulmod(x, x, q) - // validate on curve - success := and(success, eq(mulmod(y, y, q), addmod(mulmod(x, xx, q), 3, q))) - mstore(0x00, x) - mstore(0x20, y) - } - mstore(0x40, mload(C_V12_LOC)) - // accumulator_2 = v12.[QC] - success := and(success, staticcall(gas(), 7, 0x00, 0x60, ACCUMULATOR2_X_LOC, 0x40)) - // accumulator = accumulator + accumulator_2 - success := and(success, staticcall(gas(), 6, ACCUMULATOR_X_LOC, 0x80, ACCUMULATOR_X_LOC, 0x40)) - - // VALIDATE QARITH - { - let x := mload(QARITH_X_LOC) - let y := mload(QARITH_Y_LOC) - let xx := mulmod(x, x, q) - // validate on curve - success := and(success, eq(mulmod(y, y, q), addmod(mulmod(x, xx, q), 3, q))) - mstore(0x00, x) - mstore(0x20, y) - } - mstore(0x40, mload(C_V13_LOC)) - // accumulator_2 = v13.[QARITH] - success := and(success, staticcall(gas(), 7, 0x00, 0x60, ACCUMULATOR2_X_LOC, 0x40)) - // accumulator = accumulator + accumulator_2 - success := and(success, staticcall(gas(), 6, ACCUMULATOR_X_LOC, 0x80, ACCUMULATOR_X_LOC, 0x40)) - - // VALIDATE QSORT - { - let x := mload(QSORT_X_LOC) - let y := mload(QSORT_Y_LOC) - let xx := mulmod(x, x, q) - // validate on curve - success := and(success, eq(mulmod(y, y, q), addmod(mulmod(x, xx, q), 3, q))) - mstore(0x00, x) - mstore(0x20, y) - } - mstore(0x40, mload(C_V14_LOC)) - // accumulator_2 = 
v14.[QSORT] - success := and(success, staticcall(gas(), 7, 0x00, 0x60, ACCUMULATOR2_X_LOC, 0x40)) - // accumulator = accumulator + accumulator_2 - success := and(success, staticcall(gas(), 6, ACCUMULATOR_X_LOC, 0x80, ACCUMULATOR_X_LOC, 0x40)) - - // VALIDATE QELLIPTIC - { - let x := mload(QELLIPTIC_X_LOC) - let y := mload(QELLIPTIC_Y_LOC) - let xx := mulmod(x, x, q) - // validate on curve - success := and(success, eq(mulmod(y, y, q), addmod(mulmod(x, xx, q), 3, q))) - mstore(0x00, x) - mstore(0x20, y) - } - mstore(0x40, mload(C_V15_LOC)) - // accumulator_2 = v15.[QELLIPTIC] - success := and(success, staticcall(gas(), 7, 0x00, 0x60, ACCUMULATOR2_X_LOC, 0x40)) - // accumulator = accumulator + accumulator_2 - success := and(success, staticcall(gas(), 6, ACCUMULATOR_X_LOC, 0x80, ACCUMULATOR_X_LOC, 0x40)) - - // VALIDATE QAUX - { - let x := mload(QAUX_X_LOC) - let y := mload(QAUX_Y_LOC) - let xx := mulmod(x, x, q) - // validate on curve - success := and(success, eq(mulmod(y, y, q), addmod(mulmod(x, xx, q), 3, q))) - mstore(0x00, x) - mstore(0x20, y) - } - mstore(0x40, mload(C_V16_LOC)) - // accumulator_2 = v15.[Q_AUX] - success := and(success, staticcall(gas(), 7, 0x00, 0x60, ACCUMULATOR2_X_LOC, 0x40)) - // accumulator = accumulator + accumulator_2 - success := and(success, staticcall(gas(), 6, ACCUMULATOR_X_LOC, 0x80, ACCUMULATOR_X_LOC, 0x40)) - - // VALIDATE SIGMA1 - { - let x := mload(SIGMA1_X_LOC) - let y := mload(SIGMA1_Y_LOC) - let xx := mulmod(x, x, q) - // validate on curve - success := and(success, eq(mulmod(y, y, q), addmod(mulmod(x, xx, q), 3, q))) - mstore(0x00, x) - mstore(0x20, y) - } - mstore(0x40, mload(C_V17_LOC)) - // accumulator_2 = v17.[sigma1] - success := and(success, staticcall(gas(), 7, 0x00, 0x60, ACCUMULATOR2_X_LOC, 0x40)) - // accumulator = accumulator + accumulator_2 - success := and(success, staticcall(gas(), 6, ACCUMULATOR_X_LOC, 0x80, ACCUMULATOR_X_LOC, 0x40)) - - // VALIDATE SIGMA2 - { - let x := mload(SIGMA2_X_LOC) - let y := mload(SIGMA2_Y_LOC) - let xx := mulmod(x, x, q) - // validate on curve - success := and(success, eq(mulmod(y, y, q), addmod(mulmod(x, xx, q), 3, q))) - mstore(0x00, x) - mstore(0x20, y) - } - mstore(0x40, mload(C_V18_LOC)) - // accumulator_2 = v18.[sigma2] - success := and(success, staticcall(gas(), 7, 0x00, 0x60, ACCUMULATOR2_X_LOC, 0x40)) - // accumulator = accumulator + accumulator_2 - success := and(success, staticcall(gas(), 6, ACCUMULATOR_X_LOC, 0x80, ACCUMULATOR_X_LOC, 0x40)) - - // VALIDATE SIGMA3 - { - let x := mload(SIGMA3_X_LOC) - let y := mload(SIGMA3_Y_LOC) - let xx := mulmod(x, x, q) - // validate on curve - success := and(success, eq(mulmod(y, y, q), addmod(mulmod(x, xx, q), 3, q))) - mstore(0x00, x) - mstore(0x20, y) - } - mstore(0x40, mload(C_V19_LOC)) - // accumulator_2 = v19.[sigma3] - success := and(success, staticcall(gas(), 7, 0x00, 0x60, ACCUMULATOR2_X_LOC, 0x40)) - // accumulator = accumulator + accumulator_2 - success := and(success, staticcall(gas(), 6, ACCUMULATOR_X_LOC, 0x80, ACCUMULATOR_X_LOC, 0x40)) - - // VALIDATE SIGMA4 - { - let x := mload(SIGMA4_X_LOC) - let y := mload(SIGMA4_Y_LOC) - let xx := mulmod(x, x, q) - // validate on curve - success := and(success, eq(mulmod(y, y, q), addmod(mulmod(x, xx, q), 3, q))) - mstore(0x00, x) - mstore(0x20, y) - } - mstore(0x40, mload(C_V20_LOC)) - // accumulator_2 = v20.[sigma4] - success := and(success, staticcall(gas(), 7, 0x00, 0x60, ACCUMULATOR2_X_LOC, 0x40)) - // accumulator = accumulator + accumulator_2 - success := and(success, staticcall(gas(), 6, 
ACCUMULATOR_X_LOC, 0x80, ACCUMULATOR_X_LOC, 0x40)) - - // VALIDATE TABLE1 - { - let x := mload(TABLE1_X_LOC) - let y := mload(TABLE1_Y_LOC) - let xx := mulmod(x, x, q) - // validate on curve - success := and(success, eq(mulmod(y, y, q), addmod(mulmod(x, xx, q), 3, q))) - mstore(0x00, x) - mstore(0x20, y) - } - mstore(0x40, mulmod(addmod(mload(C_U_LOC), 0x1, p), mload(C_V21_LOC), p)) - // accumulator_2 = u.[table1] - success := and(success, staticcall(gas(), 7, 0x00, 0x60, ACCUMULATOR2_X_LOC, 0x40)) - // accumulator = accumulator + accumulator_2 - success := and(success, staticcall(gas(), 6, ACCUMULATOR_X_LOC, 0x80, ACCUMULATOR_X_LOC, 0x40)) - - // VALIDATE TABLE2 - { - let x := mload(TABLE2_X_LOC) - let y := mload(TABLE2_Y_LOC) - let xx := mulmod(x, x, q) - // validate on curve - success := and(success, eq(mulmod(y, y, q), addmod(mulmod(x, xx, q), 3, q))) - mstore(0x00, x) - mstore(0x20, y) - } - mstore(0x40, mulmod(addmod(mload(C_U_LOC), 0x1, p), mload(C_V22_LOC), p)) - // accumulator_2 = u.[table2] - success := and(success, staticcall(gas(), 7, 0x00, 0x60, ACCUMULATOR2_X_LOC, 0x40)) - // accumulator = accumulator + accumulator_2 - success := and(success, staticcall(gas(), 6, ACCUMULATOR_X_LOC, 0x80, ACCUMULATOR_X_LOC, 0x40)) - - // VALIDATE TABLE3 - { - let x := mload(TABLE3_X_LOC) - let y := mload(TABLE3_Y_LOC) - let xx := mulmod(x, x, q) - // validate on curve - success := and(success, eq(mulmod(y, y, q), addmod(mulmod(x, xx, q), 3, q))) - mstore(0x00, x) - mstore(0x20, y) - } - mstore(0x40, mulmod(addmod(mload(C_U_LOC), 0x1, p), mload(C_V23_LOC), p)) - // accumulator_2 = u.[table3] - success := and(success, staticcall(gas(), 7, 0x00, 0x60, ACCUMULATOR2_X_LOC, 0x40)) - // accumulator = accumulator + accumulator_2 - success := and(success, staticcall(gas(), 6, ACCUMULATOR_X_LOC, 0x80, ACCUMULATOR_X_LOC, 0x40)) - - // VALIDATE TABLE4 - { - let x := mload(TABLE4_X_LOC) - let y := mload(TABLE4_Y_LOC) - let xx := mulmod(x, x, q) - // validate on curve - success := and(success, eq(mulmod(y, y, q), addmod(mulmod(x, xx, q), 3, q))) - mstore(0x00, x) - mstore(0x20, y) - } - mstore(0x40, mulmod(addmod(mload(C_U_LOC), 0x1, p), mload(C_V24_LOC), p)) - // accumulator_2 = u.[table4] - success := and(success, staticcall(gas(), 7, 0x00, 0x60, ACCUMULATOR2_X_LOC, 0x40)) - // accumulator = accumulator + accumulator_2 - success := and(success, staticcall(gas(), 6, ACCUMULATOR_X_LOC, 0x80, ACCUMULATOR_X_LOC, 0x40)) - - // VALIDATE TABLE_TYPE - { - let x := mload(TABLE_TYPE_X_LOC) - let y := mload(TABLE_TYPE_Y_LOC) - let xx := mulmod(x, x, q) - // validate on curve - success := and(success, eq(mulmod(y, y, q), addmod(mulmod(x, xx, q), 3, q))) - mstore(0x00, x) - mstore(0x20, y) - } - mstore(0x40, mload(C_V25_LOC)) - // accumulator_2 = v25.[TableType] - success := and(success, staticcall(gas(), 7, 0x00, 0x60, ACCUMULATOR2_X_LOC, 0x40)) - // accumulator = accumulator + accumulator_2 - success := and(success, staticcall(gas(), 6, ACCUMULATOR_X_LOC, 0x80, ACCUMULATOR_X_LOC, 0x40)) - - // VALIDATE ID1 - { - let x := mload(ID1_X_LOC) - let y := mload(ID1_Y_LOC) - let xx := mulmod(x, x, q) - // validate on curve - success := and(success, eq(mulmod(y, y, q), addmod(mulmod(x, xx, q), 3, q))) - mstore(0x00, x) - mstore(0x20, y) - } - mstore(0x40, mload(C_V26_LOC)) - // accumulator_2 = v26.[ID1] - success := and(success, staticcall(gas(), 7, 0x00, 0x60, ACCUMULATOR2_X_LOC, 0x40)) - // accumulator = accumulator + accumulator_2 - success := and(success, staticcall(gas(), 6, ACCUMULATOR_X_LOC, 0x80, ACCUMULATOR_X_LOC, 
0x40)) - - // VALIDATE ID2 - { - let x := mload(ID2_X_LOC) - let y := mload(ID2_Y_LOC) - let xx := mulmod(x, x, q) - // validate on curve - success := and(success, eq(mulmod(y, y, q), addmod(mulmod(x, xx, q), 3, q))) - mstore(0x00, x) - mstore(0x20, y) - } - mstore(0x40, mload(C_V27_LOC)) - // accumulator_2 = v27.[ID2] - success := and(success, staticcall(gas(), 7, 0x00, 0x60, ACCUMULATOR2_X_LOC, 0x40)) - // accumulator = accumulator + accumulator_2 - success := and(success, staticcall(gas(), 6, ACCUMULATOR_X_LOC, 0x80, ACCUMULATOR_X_LOC, 0x40)) - - // VALIDATE ID3 - { - let x := mload(ID3_X_LOC) - let y := mload(ID3_Y_LOC) - let xx := mulmod(x, x, q) - // validate on curve - success := and(success, eq(mulmod(y, y, q), addmod(mulmod(x, xx, q), 3, q))) - mstore(0x00, x) - mstore(0x20, y) - } - mstore(0x40, mload(C_V28_LOC)) - // accumulator_2 = v28.[ID3] - success := and(success, staticcall(gas(), 7, 0x00, 0x60, ACCUMULATOR2_X_LOC, 0x40)) - // accumulator = accumulator + accumulator_2 - success := and(success, staticcall(gas(), 6, ACCUMULATOR_X_LOC, 0x80, ACCUMULATOR_X_LOC, 0x40)) - - // VALIDATE ID4 - { - let x := mload(ID4_X_LOC) - let y := mload(ID4_Y_LOC) - let xx := mulmod(x, x, q) - // validate on curve - success := and(success, eq(mulmod(y, y, q), addmod(mulmod(x, xx, q), 3, q))) - mstore(0x00, x) - mstore(0x20, y) - } - mstore(0x40, mload(C_V29_LOC)) - // accumulator_2 = v29.[ID4] - success := and(success, staticcall(gas(), 7, 0x00, 0x60, ACCUMULATOR2_X_LOC, 0x40)) - // accumulator = accumulator + accumulator_2 - success := and(success, staticcall(gas(), 6, ACCUMULATOR_X_LOC, 0x80, ACCUMULATOR_X_LOC, 0x40)) - - /** - * COMPUTE BATCH EVALUATION SCALAR MULTIPLIER - */ - { - /** - * batch_evaluation = v0 * (w_1_omega * u + w_1_eval) - * batch_evaluation += v1 * (w_2_omega * u + w_2_eval) - * batch_evaluation += v2 * (w_3_omega * u + w_3_eval) - * batch_evaluation += v3 * (w_4_omega * u + w_4_eval) - * batch_evaluation += v4 * (s_omega_eval * u + s_eval) - * batch_evaluation += v5 * (z_omega_eval * u + z_eval) - * batch_evaluation += v6 * (z_lookup_omega_eval * u + z_lookup_eval) - */ - let batch_evaluation := - mulmod( - mload(C_V0_LOC), - addmod(mulmod(mload(W1_OMEGA_EVAL_LOC), mload(C_U_LOC), p), mload(W1_EVAL_LOC), p), - p - ) - batch_evaluation := - addmod( - batch_evaluation, - mulmod( - mload(C_V1_LOC), - addmod(mulmod(mload(W2_OMEGA_EVAL_LOC), mload(C_U_LOC), p), mload(W2_EVAL_LOC), p), - p - ), - p - ) - batch_evaluation := - addmod( - batch_evaluation, - mulmod( - mload(C_V2_LOC), - addmod(mulmod(mload(W3_OMEGA_EVAL_LOC), mload(C_U_LOC), p), mload(W3_EVAL_LOC), p), - p - ), - p - ) - batch_evaluation := - addmod( - batch_evaluation, - mulmod( - mload(C_V3_LOC), - addmod(mulmod(mload(W4_OMEGA_EVAL_LOC), mload(C_U_LOC), p), mload(W4_EVAL_LOC), p), - p - ), - p - ) - batch_evaluation := - addmod( - batch_evaluation, - mulmod( - mload(C_V4_LOC), - addmod(mulmod(mload(S_OMEGA_EVAL_LOC), mload(C_U_LOC), p), mload(S_EVAL_LOC), p), - p - ), - p - ) - batch_evaluation := - addmod( - batch_evaluation, - mulmod( - mload(C_V5_LOC), - addmod(mulmod(mload(Z_OMEGA_EVAL_LOC), mload(C_U_LOC), p), mload(Z_EVAL_LOC), p), - p - ), - p - ) - batch_evaluation := - addmod( - batch_evaluation, - mulmod( - mload(C_V6_LOC), - addmod(mulmod(mload(Z_LOOKUP_OMEGA_EVAL_LOC), mload(C_U_LOC), p), mload(Z_LOOKUP_EVAL_LOC), p), - p - ), - p - ) - - /** - * batch_evaluation += v7 * Q1_EVAL - * batch_evaluation += v8 * Q2_EVAL - * batch_evaluation += v9 * Q3_EVAL - * batch_evaluation += v10 * Q4_EVAL - * 
batch_evaluation += v11 * QM_EVAL - * batch_evaluation += v12 * QC_EVAL - * batch_evaluation += v13 * QARITH_EVAL - * batch_evaluation += v14 * QSORT_EVAL_LOC - * batch_evaluation += v15 * QELLIPTIC_EVAL_LOC - * batch_evaluation += v16 * QAUX_EVAL_LOC - * batch_evaluation += v17 * SIGMA1_EVAL_LOC - * batch_evaluation += v18 * SIGMA2_EVAL_LOC - * batch_evaluation += v19 * SIGMA3_EVAL_LOC - * batch_evaluation += v20 * SIGMA4_EVAL_LOC - */ - batch_evaluation := addmod(batch_evaluation, mulmod(mload(C_V7_LOC), mload(Q1_EVAL_LOC), p), p) - batch_evaluation := addmod(batch_evaluation, mulmod(mload(C_V8_LOC), mload(Q2_EVAL_LOC), p), p) - batch_evaluation := addmod(batch_evaluation, mulmod(mload(C_V9_LOC), mload(Q3_EVAL_LOC), p), p) - batch_evaluation := addmod(batch_evaluation, mulmod(mload(C_V10_LOC), mload(Q4_EVAL_LOC), p), p) - batch_evaluation := addmod(batch_evaluation, mulmod(mload(C_V11_LOC), mload(QM_EVAL_LOC), p), p) - batch_evaluation := addmod(batch_evaluation, mulmod(mload(C_V12_LOC), mload(QC_EVAL_LOC), p), p) - batch_evaluation := addmod(batch_evaluation, mulmod(mload(C_V13_LOC), mload(QARITH_EVAL_LOC), p), p) - batch_evaluation := addmod(batch_evaluation, mulmod(mload(C_V14_LOC), mload(QSORT_EVAL_LOC), p), p) - batch_evaluation := addmod(batch_evaluation, mulmod(mload(C_V15_LOC), mload(QELLIPTIC_EVAL_LOC), p), p) - batch_evaluation := addmod(batch_evaluation, mulmod(mload(C_V16_LOC), mload(QAUX_EVAL_LOC), p), p) - batch_evaluation := addmod(batch_evaluation, mulmod(mload(C_V17_LOC), mload(SIGMA1_EVAL_LOC), p), p) - batch_evaluation := addmod(batch_evaluation, mulmod(mload(C_V18_LOC), mload(SIGMA2_EVAL_LOC), p), p) - batch_evaluation := addmod(batch_evaluation, mulmod(mload(C_V19_LOC), mload(SIGMA3_EVAL_LOC), p), p) - batch_evaluation := addmod(batch_evaluation, mulmod(mload(C_V20_LOC), mload(SIGMA4_EVAL_LOC), p), p) - - /** - * batch_evaluation += v21 * (table1(zw) * u + table1(z)) - * batch_evaluation += v22 * (table2(zw) * u + table2(z)) - * batch_evaluation += v23 * (table3(zw) * u + table3(z)) - * batch_evaluation += v24 * (table4(zw) * u + table4(z)) - * batch_evaluation += v25 * table_type_eval - * batch_evaluation += v26 * id1_eval - * batch_evaluation += v27 * id2_eval - * batch_evaluation += v28 * id3_eval - * batch_evaluation += v29 * id4_eval - * batch_evaluation += quotient_eval - */ - batch_evaluation := - addmod( - batch_evaluation, - mulmod( - mload(C_V21_LOC), - addmod(mulmod(mload(TABLE1_OMEGA_EVAL_LOC), mload(C_U_LOC), p), mload(TABLE1_EVAL_LOC), p), - p - ), - p - ) - batch_evaluation := - addmod( - batch_evaluation, - mulmod( - mload(C_V22_LOC), - addmod(mulmod(mload(TABLE2_OMEGA_EVAL_LOC), mload(C_U_LOC), p), mload(TABLE2_EVAL_LOC), p), - p - ), - p - ) - batch_evaluation := - addmod( - batch_evaluation, - mulmod( - mload(C_V23_LOC), - addmod(mulmod(mload(TABLE3_OMEGA_EVAL_LOC), mload(C_U_LOC), p), mload(TABLE3_EVAL_LOC), p), - p - ), - p - ) - batch_evaluation := - addmod( - batch_evaluation, - mulmod( - mload(C_V24_LOC), - addmod(mulmod(mload(TABLE4_OMEGA_EVAL_LOC), mload(C_U_LOC), p), mload(TABLE4_EVAL_LOC), p), - p - ), - p - ) - batch_evaluation := addmod(batch_evaluation, mulmod(mload(C_V25_LOC), mload(TABLE_TYPE_EVAL_LOC), p), p) - batch_evaluation := addmod(batch_evaluation, mulmod(mload(C_V26_LOC), mload(ID1_EVAL_LOC), p), p) - batch_evaluation := addmod(batch_evaluation, mulmod(mload(C_V27_LOC), mload(ID2_EVAL_LOC), p), p) - batch_evaluation := addmod(batch_evaluation, mulmod(mload(C_V28_LOC), mload(ID3_EVAL_LOC), p), p) - batch_evaluation := 
addmod(batch_evaluation, mulmod(mload(C_V29_LOC), mload(ID4_EVAL_LOC), p), p) - batch_evaluation := addmod(batch_evaluation, mload(QUOTIENT_EVAL_LOC), p) - - mstore(0x00, 0x01) // [1].x - mstore(0x20, 0x02) // [1].y - mstore(0x40, sub(p, batch_evaluation)) - // accumulator_2 = -[1].(batch_evaluation) - success := and(success, staticcall(gas(), 7, 0x00, 0x60, ACCUMULATOR2_X_LOC, 0x40)) - // accumulator = accumulator + accumulator_2 - success := and(success, staticcall(gas(), 6, ACCUMULATOR_X_LOC, 0x80, ACCUMULATOR_X_LOC, 0x40)) - - mstore(OPENING_COMMITMENT_SUCCESS_FLAG, success) - } - - /** - * PERFORM PAIRING PREAMBLE - */ - { - let u := mload(C_U_LOC) - let zeta := mload(C_ZETA_LOC) - // VALIDATE PI_Z - { - let x := mload(PI_Z_X_LOC) - let y := mload(PI_Z_Y_LOC) - let xx := mulmod(x, x, q) - // validate on curve - success := eq(mulmod(y, y, q), addmod(mulmod(x, xx, q), 3, q)) - mstore(0x00, x) - mstore(0x20, y) - } - // compute zeta.[PI_Z] and add into accumulator - mstore(0x40, zeta) - success := and(success, staticcall(gas(), 7, 0x00, 0x60, ACCUMULATOR2_X_LOC, 0x40)) - // accumulator = accumulator + accumulator_2 - success := and(success, staticcall(gas(), 6, ACCUMULATOR_X_LOC, 0x80, ACCUMULATOR_X_LOC, 0x40)) - - // VALIDATE PI_Z_OMEGA - { - let x := mload(PI_Z_OMEGA_X_LOC) - let y := mload(PI_Z_OMEGA_Y_LOC) - let xx := mulmod(x, x, q) - // validate on curve - success := and(success, eq(mulmod(y, y, q), addmod(mulmod(x, xx, q), 3, q))) - mstore(0x00, x) - mstore(0x20, y) - } - mstore(0x40, mulmod(mulmod(u, zeta, p), mload(OMEGA_LOC), p)) - // accumulator_2 = u.zeta.omega.[PI_Z_OMEGA] - success := and(success, staticcall(gas(), 7, 0x00, 0x60, ACCUMULATOR2_X_LOC, 0x40)) - // PAIRING_RHS = accumulator + accumulator_2 - success := and(success, staticcall(gas(), 6, ACCUMULATOR_X_LOC, 0x80, PAIRING_RHS_X_LOC, 0x40)) - - mstore(0x00, mload(PI_Z_X_LOC)) - mstore(0x20, mload(PI_Z_Y_LOC)) - mstore(0x40, mload(PI_Z_OMEGA_X_LOC)) - mstore(0x60, mload(PI_Z_OMEGA_Y_LOC)) - mstore(0x80, u) - success := and(success, staticcall(gas(), 7, 0x40, 0x60, 0x40, 0x40)) - // PAIRING_LHS = [PI_Z] + [PI_Z_OMEGA] * u - success := and(success, staticcall(gas(), 6, 0x00, 0x80, PAIRING_LHS_X_LOC, 0x40)) - // negate lhs y-coordinate - mstore(PAIRING_LHS_Y_LOC, sub(q, mload(PAIRING_LHS_Y_LOC))) - - if mload(CONTAINS_RECURSIVE_PROOF_LOC) { - // VALIDATE RECURSIVE P1 - { - let x := mload(RECURSIVE_P1_X_LOC) - let y := mload(RECURSIVE_P1_Y_LOC) - let xx := mulmod(x, x, q) - // validate on curve - success := and(success, eq(mulmod(y, y, q), addmod(mulmod(x, xx, q), 3, q))) - mstore(0x00, x) - mstore(0x20, y) - } - - // compute u.u.[recursive_p1] and write into 0x60 - mstore(0x40, mulmod(u, u, p)) - success := and(success, staticcall(gas(), 7, 0x00, 0x60, 0x60, 0x40)) - // VALIDATE RECURSIVE P2 - { - let x := mload(RECURSIVE_P2_X_LOC) - let y := mload(RECURSIVE_P2_Y_LOC) - let xx := mulmod(x, x, q) - // validate on curve - success := and(success, eq(mulmod(y, y, q), addmod(mulmod(x, xx, q), 3, q))) - mstore(0x00, x) - mstore(0x20, y) - } - // compute u.u.[recursive_p2] and write into 0x00 - // 0x40 still contains u*u - success := and(success, staticcall(gas(), 7, 0x00, 0x60, 0x00, 0x40)) - - // compute u.u.[recursiveP1] + rhs and write into rhs - mstore(0xa0, mload(PAIRING_RHS_X_LOC)) - mstore(0xc0, mload(PAIRING_RHS_Y_LOC)) - success := and(success, staticcall(gas(), 6, 0x60, 0x80, PAIRING_RHS_X_LOC, 0x40)) - - // compute u.u.[recursiveP2] + lhs and write into lhs - mstore(0x40, mload(PAIRING_LHS_X_LOC)) - mstore(0x60, 
mload(PAIRING_LHS_Y_LOC)) - success := and(success, staticcall(gas(), 6, 0x00, 0x80, PAIRING_LHS_X_LOC, 0x40)) - } - - if iszero(success) { - mstore(0x0, EC_SCALAR_MUL_FAILURE_SELECTOR) - revert(0x00, 0x04) - } - mstore(PAIRING_PREAMBLE_SUCCESS_FLAG, success) - } - - /** - * PERFORM PAIRING - */ - { - // rhs paired with [1]_2 - // lhs paired with [x]_2 - - mstore(0x00, mload(PAIRING_RHS_X_LOC)) - mstore(0x20, mload(PAIRING_RHS_Y_LOC)) - mstore(0x40, 0x198e9393920d483a7260bfb731fb5d25f1aa493335a9e71297e485b7aef312c2) // this is [1]_2 - mstore(0x60, 0x1800deef121f1e76426a00665e5c4479674322d4f75edadd46debd5cd992f6ed) - mstore(0x80, 0x090689d0585ff075ec9e99ad690c3395bc4b313370b38ef355acdadcd122975b) - mstore(0xa0, 0x12c85ea5db8c6deb4aab71808dcb408fe3d1e7690c43d37b4ce6cc0166fa7daa) - - mstore(0xc0, mload(PAIRING_LHS_X_LOC)) - mstore(0xe0, mload(PAIRING_LHS_Y_LOC)) - mstore(0x100, mload(G2X_X0_LOC)) - mstore(0x120, mload(G2X_X1_LOC)) - mstore(0x140, mload(G2X_Y0_LOC)) - mstore(0x160, mload(G2X_Y1_LOC)) - - success := staticcall(gas(), 8, 0x00, 0x180, 0x00, 0x20) - mstore(PAIRING_SUCCESS_FLAG, success) - mstore(RESULT_FLAG, mload(0x00)) - } - if iszero( - and( - and(and(mload(PAIRING_SUCCESS_FLAG), mload(RESULT_FLAG)), mload(PAIRING_PREAMBLE_SUCCESS_FLAG)), - mload(OPENING_COMMITMENT_SUCCESS_FLAG) - ) - ) { - mstore(0x0, PROOF_FAILURE_SELECTOR) - revert(0x00, 0x04) - } - { - mstore(0x00, 0x01) - return(0x00, 0x20) // Proof succeeded! - } - } - } -} - -contract UltraVerifier is BaseUltraVerifier { - function getVerificationKeyHash() public pure override(BaseUltraVerifier) returns (bytes32) { - return UltraVerificationKey.verificationKeyHash(); - } - - function loadVerificationKey(uint256 vk, uint256 _omegaInverseLoc) internal pure virtual override(BaseUltraVerifier) { - UltraVerificationKey.loadVerificationKey(vk, _omegaInverseLoc); - } -} diff --git a/tooling/bb_abstraction_leaks/src/lib.rs b/tooling/bb_abstraction_leaks/src/lib.rs index fec53809ad4..56a4f58cd21 100644 --- a/tooling/bb_abstraction_leaks/src/lib.rs +++ b/tooling/bb_abstraction_leaks/src/lib.rs @@ -7,13 +7,6 @@ pub const ACVM_BACKEND_BARRETENBERG: &str = "acvm-backend-barretenberg"; pub const BB_DOWNLOAD_URL: &str = env!("BB_BINARY_URL"); pub const BB_VERSION: &str = env!("BB_VERSION"); -/// Embed the Solidity verifier file -const ULTRA_VERIFIER_CONTRACT: &str = include_str!("contract.sol"); - -pub fn complete_barretenberg_verifier_contract(contract: String) -> String { - format!("{contract}{ULTRA_VERIFIER_CONTRACT}") -} - /// Removes the public inputs which are prepended to a proof by Barretenberg. 
pub fn remove_public_inputs(num_pub_inputs: usize, proof: &[u8]) -> Vec<u8> { // Barretenberg prepends the public inputs onto the proof so we need to remove diff --git a/tooling/debugger/Cargo.toml b/tooling/debugger/Cargo.toml index 53c71754da4..4d240c61f90 100644 --- a/tooling/debugger/Cargo.toml +++ b/tooling/debugger/Cargo.toml @@ -6,14 +6,25 @@ authors.workspace = true edition.workspace = true license.workspace = true -# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html +[build-dependencies] +build-data.workspace = true [dependencies] acvm.workspace = true nargo.workspace = true noirc_printable_type.workspace = true noirc_errors.workspace = true +noirc_driver.workspace = true +fm.workspace = true thiserror.workspace = true codespan-reporting.workspace = true +dap.workspace = true easy-repl = "0.2.1" owo-colors = "3" +serde_json.workspace = true + +[dev-dependencies] +assert_cmd = "2.0.12" +rexpect = "0.5.0" +test-binary = "3.0.1" +tempfile.workspace = true diff --git a/tooling/debugger/README.md b/tooling/debugger/README.md new file mode 100644 index 00000000000..0ec3b6f0cd4 --- /dev/null +++ b/tooling/debugger/README.md @@ -0,0 +1,428 @@ +# Noir Debugger + +There are currently two ways of debugging Noir programs, both in active development and in an experimental phase: + +1. The REPL debugger, which currently ships with Nargo behind a feature flag. +2. The VS Code extension, which hasn't yet reached minimum viability, and so must be set up manually. + +This README explains how to use each of them, as well as which features are currently mature and which ones are still unstable. + +## Supported project types + +At the time of writing, the debugger supports debugging binary projects, but not contracts. At the end of this README, we'll elaborate on the current state of Noir contract debugging and the prerequisites for supporting it. + + +## REPL debugger + +In order to use the REPL debugger, you will need to install a new enough version of Nargo. At the time of writing, the nightly version is 0.20.0, so we'll base this guide on it. + +Let's debug a simple circuit: + +``` +fn main(x : Field, y : pub Field) { + assert(x != y); +} +``` + +To start the REPL debugger, open a terminal in a Noir circuit's home directory and run: + +`$ nargo debug` + +You should see this in your terminal: + +``` +[main] Starting debugger +At opcode 0: EXPR [ (-1, _1) (1, _2) (-1, _3) 0 ] +At ~/noir-examples/recursion/circuits/main/src/main.nr:2:12 + 1 fn main(x : Field, y : pub Field) { + 2 -> assert(x != y); + 3 } +> +``` + +The debugger displays the current opcode and the Noir source location associated with it, and then waits for us to drive it. + +Let's first take a look at the available commands. For that we'll use the `help` command.
+ +``` +At ~/noir-examples/recursion/circuits/main/src/main.nr:2:12 + 1 fn main(x : Field, y : pub Field) { + 2 -> assert(x != y); + 3 } +> help +Available commands: + + break LOCATION:OpcodeLocation add a breakpoint at an opcode location + memory show Brillig memory (valid when executing a + Brillig block) + into step into to the next opcode + next step until a new source location is reached + delete LOCATION:OpcodeLocation delete breakpoint at an opcode location + step step to the next ACIR opcode + registers show Brillig registers (valid when executing + a Brillig block) + regset index:usize value:String update a Brillig register with the given + value + restart restart the debugging session + witness show witness map + witness index:u32 display a single witness from the witness map + witness index:u32 value:String update a witness with the given value + continue continue execution until the end of the + program + opcodes display ACIR opcodes + memset index:usize value:String update a Brillig memory cell with the given + value + +Other commands: + + help Show this help message + quit Quit repl +``` + +The command menu is pretty self-explanatory. Some commands operate only at Brillig level, such as `memory`, `memset`, `registers`, `regset`. If you try to use them while execution is paused at an ACIR opcode, the debugger will simply inform you that you are not executing Brillig code: + +``` +> registers +Not executing a Brillig block +> +``` + +Before continuing, we can take a look at the initial witness map: + +``` +> witness +_1 = 1 +_2 = 2 +> +``` + +Cool, since `x==1`, `y==2`, and we want to check that `x != y`, our circuit should succeed. At this point we could intervene and use the witness setter command to change one of the witnesses. Let's set `y=3`, then back to 2: + +``` +> witness +_1 = 1 +_2 = 2 +> witness 2 3 +_2 = 3 +> witness +_1 = 1 +_2 = 3 +> witness 2 2 +_2 = 2 +> witness +_1 = 1 +_2 = 2 +> +``` + +Let's take a look at this circuit's ACIR, using the `opcodes` command: + +``` +> opcodes + 0 -> EXPR [ (-1, _1) (1, _2) (-1, _3) 0 ] + 1 BRILLIG inputs=[Single(Expression { mul_terms: [], linear_combinations: [(1, Witness(3))], q_c: 0 })] + | outputs=[Simple(Witness(4))] + 1.0 | JumpIfNot { condition: RegisterIndex(0), location: 3 } + 1.1 | Const { destination: RegisterIndex(1), value: Value { inner: 1 } } + 1.2 | BinaryFieldOp { destination: RegisterIndex(0), op: Div, lhs: RegisterIndex(1), rhs: RegisterIndex(0) } + 1.3 | Stop + 2 EXPR [ (1, _3, _4) (1, _5) -1 ] + 3 EXPR [ (1, _3, _5) 0 ] + 4 EXPR [ (-1, _5) 0 ] +> +``` + +Note: in future versions of the debugger, we could explore prettier or more compact formats to print opcodes. + +So the next opcode will take us to Brillig execution. Let's step into opcode 1 so we can explore Brillig debugger commands. 
+ +``` +> into +At opcode 1: BRILLIG: inputs: [Single(Expression { mul_terms: [], linear_combinations: [(1, Witness(3))], q_c: 0 })] +outputs: [Simple(Witness(4))] +[JumpIfNot { condition: RegisterIndex(0), location: 3 }, Const { destination: RegisterIndex(1), value: Value { inner: 1 } }, BinaryFieldOp { destination: RegisterIndex(0), op: Div, lhs: RegisterIndex(1), rhs: RegisterIndex(0) }, Stop] + +At /~/noir-examples/recursion/circuits/main/src/main.nr:2:12 + 1 fn main(x : Field, y : pub Field) { + 2 -> assert(x != y); + 3 } +``` + +In disassembly view: + +``` +> op + 0 EXPR [ (-1, _1) (1, _2) (-1, _3) 0 ] + 1 -> BRILLIG inputs=[Single(Expression { mul_terms: [], linear_combinations: [(1, Witness(3))], q_c: 0 })] + | outputs=[Simple(Witness(4))] + 1.0 |-> JumpIfNot { condition: RegisterIndex(0), location: 3 } + 1.1 | Const { destination: RegisterIndex(1), value: Value { inner: 1 } } + 1.2 | BinaryFieldOp { destination: RegisterIndex(0), op: Div, lhs: RegisterIndex(1), rhs: RegisterIndex(0) } + 1.3 | Stop + 2 EXPR [ (1, _3, _4) (1, _5) -1 ] + 3 EXPR [ (1, _3, _5) 0 ] + 4 EXPR [ (-1, _5) 0 ] +> witness +_1 = 1 +_2 = 2 +_3 = 1 +> +``` + +We can see two arrow `->` cursors: one indicates where we are from the perspective of ACIR (opcode 1), and the other one shows us where we are in the context of the current Brillig block (opcode 1.0). + +Note: REPL commands are autocompleted when not ambiguous, so `opcodes` can be run just with `op`, `into` with `i`, etc. + +The next opcode to execute is a `JumpIfNot`, which reads from register 0. Let's inspect Brillig register state: + +``` +> op + 0 EXPR [ (-1, _1) (1, _2) (-1, _3) 0 ] + 1 -> BRILLIG inputs=[Single(Expression { mul_terms: [], linear_combinations: [(1, Witness(3))], q_c: 0 })] + | outputs=[Simple(Witness(4))] + 1.0 |-> JumpIfNot { condition: RegisterIndex(0), location: 3 } + 1.1 | Const { destination: RegisterIndex(1), value: Value { inner: 1 } } + 1.2 | BinaryFieldOp { destination: RegisterIndex(0), op: Div, lhs: RegisterIndex(1), rhs: RegisterIndex(0) } + 1.3 | Stop + 2 EXPR [ (1, _3, _4) (1, _5) -1 ] + 3 EXPR [ (1, _3, _5) 0 ] + 4 EXPR [ (-1, _5) 0 ] +> registers +Brillig VM registers not available +``` + +Oops. This is unexpected, even though we were already in a Brillig block, we couldn't access the Brillig registers. This is a known issue: when just entering the Brillig block, the ACVM has not yet initialized the Brillig VM, so we can't introspect it. + +Note: In order to solve this, we would have to change the way the ACVM works, or add special handling for this case (after all, the debugger does know we're at the first opcode of a Brillig block and could keep track of how registers will be initialized). At the time of writing, we haven't yet solved this case. + +For now, let's just step once more: + +``` +> i +At opcode 1.1: Const { destination: RegisterIndex(1), value: Value { inner: 1 } } +> registers +0 = 1 +> +``` + +Now we can see that register 0 was initialized with a value of 1, so the `JumpIfNot` didn't activate. After executing opcode 1, we should see register 1 gets a value of 1: + +``` +> i +At opcode 1.2: BinaryFieldOp { destination: RegisterIndex(0), op: Div, lhs: RegisterIndex(1), rhs: RegisterIndex(0) } +> regist +0 = 1 +1 = 1 +> +``` + +The last operation will compute `Reg0 <- Reg1 / Reg0`: + +``` +> i +At opcode 1.3: Stop +> registers +0 = 1 +1 = 1 +> +``` + +Once we step again, we'll be out of Brillig and back on ACVM territory. 
We will also find a new witness `_4` corresponding to the result of the Brillig block execution: + +``` +> op + 0 EXPR [ (-1, _1) (1, _2) (-1, _3) 0 ] + 1 BRILLIG inputs=[Single(Expression { mul_terms: [], linear_combinations: [(1, Witness(3))], q_c: 0 })] + | outputs=[Simple(Witness(4))] + 1.0 | JumpIfNot { condition: RegisterIndex(0), location: 3 } + 1.1 | Const { destination: RegisterIndex(1), value: Value { inner: 1 } } + 1.2 | BinaryFieldOp { destination: RegisterIndex(0), op: Div, lhs: RegisterIndex(1), rhs: RegisterIndex(0) } + 1.3 | Stop + 2 -> EXPR [ (1, _3, _4) (1, _5) -1 ] + 3 EXPR [ (1, _3, _5) 0 ] + 4 EXPR [ (-1, _5) 0 ] +> wit +_1 = 1 +_2 = 2 +_3 = 1 +_4 = 1 +> +``` + +At any time, we might also decide to restart from the beginning: + +``` +> restart +Restarted debugging session. +At opcode 0: EXPR [ (-1, _1) (1, _2) (-1, _3) 0 ] +At ~/noir-examples/recursion/circuits/main/src/main.nr:2:12 + 1 fn main(x : Field, y : pub Field) { + 2 -> assert(x != y); + 3 } +> +``` + +Let's set a breakpoint. For that, we can use the opcode ids listed by the `opcodes` command: + +``` +> opcodes + 0 -> EXPR [ (-1, _1) (1, _2) (-1, _3) 0 ] + 1 BRILLIG inputs=[Single(Expression { mul_terms: [], linear_combinations: [(1, Witness(3))], q_c: 0 })] + | outputs=[Simple(Witness(4))] + 1.0 | JumpIfNot { condition: RegisterIndex(0), location: 3 } + 1.1 | Const { destination: RegisterIndex(1), value: Value { inner: 1 } } + 1.2 | BinaryFieldOp { destination: RegisterIndex(0), op: Div, lhs: RegisterIndex(1), rhs: RegisterIndex(0) } + 1.3 | Stop + 2 EXPR [ (1, _3, _4) (1, _5) -1 ] + 3 EXPR [ (1, _3, _5) 0 ] + 4 EXPR [ (-1, _5) 0 ] +> break 1.2 +Added breakpoint at opcode 1.2 +``` + +Now we can have the debugger continue all the way to opcode 1.2: + +``` +> break 1.2 +Added breakpoint at opcode 1.2 +> continue +(Continuing execution...) +Stopped at breakpoint in opcode 1.2 +At opcode 1.2: BinaryFieldOp { destination: RegisterIndex(0), op: Div, lhs: RegisterIndex(1), rhs: RegisterIndex(0) } +> opcodes + 0 EXPR [ (-1, _1) (1, _2) (-1, _3) 0 ] + 1 -> BRILLIG inputs=[Single(Expression { mul_terms: [], linear_combinations: [(1, Witness(3))], q_c: 0 })] + | outputs=[Simple(Witness(4))] + 1.0 | JumpIfNot { condition: RegisterIndex(0), location: 3 } + 1.1 | Const { destination: RegisterIndex(1), value: Value { inner: 1 } } + 1.2 |-> BinaryFieldOp { destination: RegisterIndex(0), op: Div, lhs: RegisterIndex(1), rhs: RegisterIndex(0) } + 1.3 | Stop + 2 EXPR [ (1, _3, _4) (1, _5) -1 ] + 3 EXPR [ (1, _3, _5) 0 ] + 4 EXPR [ (-1, _5) 0 ] +> +``` + +Let's continue to the end: + +``` +> continue +(Continuing execution...) +Finished execution +> q +[main] Circuit witness successfully solved +``` + +Upon quitting the debugger after a solved circuit, the resulting circuit witness gets saved, equivalent to what would happen if we had run the same circuit with `nargo execute`. + + +# Testing experimental features + +There are a number of features that are in active development and that can't yet be merged to the main branch for different reasons. In this section we detail what those features are and how to try them out. + +## Build from experimental branch at fork + +Build Nargo by pulling the source version from https://github.com/manastech/noir/tree/dap-with-vars. + +This will result in a Nargo binary being written to `PROJECT_ROOT/target/debug/nargo`. We will use this path later, so keep it at hand or export it to an env var.
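A minimal build sketch (assuming a standard Rust toolchain is installed and that the fork builds like upstream Noir; the repository URL is inferred from the branch link above):

```
git clone https://github.com/manastech/noir
cd noir
git checkout dap-with-vars
cargo build
```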
For example, to export it: + +`export NARGO_EXP=PROJECT_ROOT/target/debug/nargo` + +## About the experimental features + +There are currently two experimental features in the debugger: + +- Variables inspection +- Stacktrace inspection + +NOTE: Supporting variables inspection requires extensive instrumentation of the compiler, handling all cases of variable creation, types, and value assignment. At the time of writing this README, some cases are still not supported. For example, if your program uses slices or references, this compiler version might panic when trying to compile them, or at some point during the debugger's step-by-step execution. This is the main reason why this feature has not yet been merged into master. + +## Trying out REPL experimental features + +To try out these features, go through the same steps as described in the REPL Debugger section above, but instead of using `nargo debug` use `$NARGO_EXP debug` (assuming you exported your custom-built Nargo binary to NARGO_EXP). + +When entering `help` on this version, you'll find two new commands: + +``` +... +stacktrace display the current stack trace +... +vars show variable values available at this point + in execution +``` + +Running `vars` will print the variables currently in scope, and their current values: + +``` +At /mul_1/src/main.nr:6:5 + 1 // Test unsafe integer multiplication with overflow: 12^8 = 429 981 696 + 2 // The circuit should handle properly the growth of the bit size + 3 fn main(mut x: u32, y: u32, z: u32) { + 4 x *= y; + 5 x *= x; //144 + 6 -> x *= x; //20736 + 7 x *= x; //429 981 696 + 8 assert(x == z); + 9 } +> vars +y:UnsignedInteger { width: 32 }=Field(4), z:UnsignedInteger { width: 32 }=Field(2¹⁶×6561), x:UnsignedInteger { width: 32 }=Field(2⁴×9) +> +``` + +Running `stacktrace` will print information about the current frame in the stacktrace: + +``` +> stacktrace +Frame #0, opcode 12: EXPR [ (1, _5, _5) (-1, _6) 0 ] +At /1_mul/src/main.nr:6:5 + 1 // Test unsafe integer multiplication with overflow: 12^8 = 429 981 696 + 2 // The circuit should handle properly the growth of the bit size + 3 fn main(mut x: u32, y: u32, z: u32) { + 4 x *= y; + 5 x *= x; //144 + 6 -> x *= x; //20736 + 7 x *= x; //429 981 696 + 8 assert(x == z); + 9 } +> +``` + +## Testing the VS Code extension (experimental) + +There is a fork of the official Noir Visual Studio Code extension which enables the debugger in VS Code. This fork is at: https://github.com/manastech/vscode-noir/tree/dap-support. + +In this section, we'll explain how to test the VS Code Noir debugger by combining that extension fork with the experimental features branch discussed above. + +1. First, get a copy of the extension source code from https://github.com/manastech/vscode-noir/tree/dap-support. + +2. Package the extension by running `npm run package`. + +3. Open the root folder of the extension in VS Code. + +4. From VS Code, press fn+F5. This will open a new VS Code window with the extension loaded from source. + +5. Go to Code -> Settings -> Extensions -> Noir Language Server. Look for the property `Nargo Path` and enter the path to the experimental build you got as a result of following the steps at [Trying out REPL experimental features](#trying-out-repl-experimental-features). + +6. In the VS Code sidebar, go to the debugger section (see screenshot). Click "Add configuration". Overwrite the `projectFolder` property with the absolute path to the Nargo project you want to debug. + +(Screenshot: the debugger section in the VS Code sidebar.) + +7. Go to a Noir file you want to debug.
Navigate again to the debug section of VS Code, and click the "play" icon. + +The debugger should now have started. Current features exposed to the debugger include different kinds of stepping interactions, variable inspection and stacktraces. At the time of writing, Brillig registers and memory are not being exposed, but they will soon be. + +![Screen Recording 2023-12-18 at 14 14 28](https://github.com/manastech/noir/assets/651693/36b4becb-953a-4158-9c5a-7a185673f54f) + +## Towards debugging contracts + +### Contracts Runtime + +The execution of Noir contracts relies on a foreign call execution runtime to resolve all the oracle calls that the contract functions make. + +This means that, for the debugger to be usable with contracts, we need to be able to do at least one of the following: + +1. Let users mock out a foreign call executor, and run the debugger with it. +2. Instrument live environments, such as the Sandbox, so that calls and transactions can be driven by the debugger, which ultimately means the debugger would use the same foreign call executor that a live Sandbox uses for normal execution of Noir circuits. + +Both of these scenarios imply making the debugger available to language runtimes external to Noir. The Sandbox/PXE runs on JS runtimes, and a hypothetical mockable foreign call executor could in principle be written in any language. So it seems the most promising way forward is to make sure that the debugger itself is consumable in JS runtimes. + diff --git a/tooling/debugger/build.rs b/tooling/debugger/build.rs new file mode 100644 index 00000000000..cedeebcae86 --- /dev/null +++ b/tooling/debugger/build.rs @@ -0,0 +1,64 @@ +use std::fs::File; +use std::io::Write; +use std::path::{Path, PathBuf}; +use std::{env, fs}; + +const GIT_COMMIT: &&str = &"GIT_COMMIT"; + +fn main() { + // Rebuild if the tests have changed + println!("cargo:rerun-if-changed=tests"); + + // Only use build_data if the environment variable isn't set + // The environment variable is always set when working via Nix + if std::env::var(GIT_COMMIT).is_err() { + build_data::set_GIT_COMMIT(); + build_data::set_GIT_DIRTY(); + build_data::no_debug_rebuilds(); + } + + let out_dir = env::var("OUT_DIR").unwrap(); + let destination = Path::new(&out_dir).join("debug.rs"); + let mut test_file = File::create(destination).unwrap(); + + // Try to find the directory that Cargo sets when it is running; otherwise fallback to assuming the CWD + // is the root of the repository and append the crate path + let root_dir = match std::env::var("CARGO_MANIFEST_DIR") { + Ok(dir) => PathBuf::from(dir).parent().unwrap().parent().unwrap().to_path_buf(), + Err(_) => std::env::current_dir().unwrap(), + }; + let test_dir = root_dir.join("test_programs"); + + generate_debugger_tests(&mut test_file, &test_dir); +} + +fn generate_debugger_tests(test_file: &mut File, test_data_dir: &Path) { + let test_sub_dir = "execution_success"; + let test_data_dir = test_data_dir.join(test_sub_dir); + + let test_case_dirs = + fs::read_dir(test_data_dir).unwrap().flatten().filter(|c| c.path().is_dir()); + + for test_dir in test_case_dirs { + let test_name = + test_dir.file_name().into_string().expect("Directory can't be converted to string"); + if test_name.contains('-') { + panic!( + "Invalid test directory: {test_name}.
Cannot include `-`, please convert to `_`" + ); + }; + let test_dir = &test_dir.path(); + + write!( + test_file, + r#" +#[test] +fn debug_{test_name}() {{ + debugger_execution_success("{test_dir}"); +}} + "#, + test_dir = test_dir.display(), + ) + .expect("Could not write templated test file."); + } +} diff --git a/tooling/debugger/src/context.rs b/tooling/debugger/src/context.rs index a97bc9e8b86..74224ce3795 100644 --- a/tooling/debugger/src/context.rs +++ b/tooling/debugger/src/context.rs @@ -1,9 +1,10 @@ -use acvm::acir::circuit::{Opcode, OpcodeLocation}; +use acvm::acir::circuit::{Circuit, Opcode, OpcodeLocation}; +use acvm::acir::native_types::{Witness, WitnessMap}; +use acvm::brillig_vm::{brillig::Value, Registers}; use acvm::pwg::{ ACVMStatus, BrilligSolver, BrilligSolverStatus, ForeignCallWaitInfo, StepResult, ACVM, }; -use acvm::BlackBoxFunctionSolver; -use acvm::{acir::circuit::Circuit, acir::native_types::WitnessMap}; +use acvm::{BlackBoxFunctionSolver, FieldElement}; use nargo::artifacts::debug::DebugArtifact; use nargo::errors::{ExecutionError, Location}; @@ -23,9 +24,8 @@ pub(super) enum DebugCommandResult { pub(super) struct DebugContext<'a, B: BlackBoxFunctionSolver> { acvm: ACVM<'a, B>, brillig_solver: Option>, - foreign_call_executor: ForeignCallExecutor, + foreign_call_executor: Box, debug_artifact: &'a DebugArtifact, - show_output: bool, breakpoints: HashSet, } @@ -35,13 +35,13 @@ impl<'a, B: BlackBoxFunctionSolver> DebugContext<'a, B> { circuit: &'a Circuit, debug_artifact: &'a DebugArtifact, initial_witness: WitnessMap, + foreign_call_executor: Box, ) -> Self { Self { acvm: ACVM::new(blackbox_solver, &circuit.opcodes, initial_witness), brillig_solver: None, - foreign_call_executor: ForeignCallExecutor::default(), + foreign_call_executor, debug_artifact, - show_output: true, breakpoints: HashSet::new(), } } @@ -50,6 +50,18 @@ impl<'a, B: BlackBoxFunctionSolver> DebugContext<'a, B> { self.acvm.opcodes() } + pub(super) fn get_witness_map(&self) -> &WitnessMap { + self.acvm.witness_map() + } + + pub(super) fn overwrite_witness( + &mut self, + witness: Witness, + value: FieldElement, + ) -> Option { + self.acvm.overwrite_witness(witness, value) + } + pub(super) fn get_current_opcode_location(&self) -> Option { let ip = self.acvm.instruction_pointer(); if ip >= self.get_opcodes().len() { @@ -64,17 +76,120 @@ impl<'a, B: BlackBoxFunctionSolver> DebugContext<'a, B> { } } - // Returns the callstack in source code locations for the currently - // executing opcode. This can be None if the execution finished (and - // get_current_opcode_location() returns None) or if the opcode is not - // mapped to a specific source location in the debug artifact (which can - // happen for certain opcodes inserted synthetically by the compiler) + /// Returns the callstack in source code locations for the currently + /// executing opcode. 
This can be `None` if the execution finished (and + /// `get_current_opcode_location()` returns `None`) or if the opcode is not + /// mapped to a specific source location in the debug artifact (which can + /// happen for certain opcodes inserted synthetically by the compiler) pub(super) fn get_current_source_location(&self) -> Option> { self.get_current_opcode_location() .as_ref() .and_then(|location| self.debug_artifact.debug_symbols[0].opcode_location(location)) } + fn get_opcodes_sizes(&self) -> Vec { + self.get_opcodes() + .iter() + .map(|opcode| match opcode { + Opcode::Brillig(brillig_block) => brillig_block.bytecode.len(), + _ => 1, + }) + .collect() + } + + /// Offsets the given location by the given number of opcodes (including + /// Brillig opcodes). If the offset would move the location outside of a + /// valid circuit location, returns None and the number of remaining + /// opcodes/instructions left which span outside the valid range in the + /// second element of the returned tuple. + pub(super) fn offset_opcode_location( + &self, + location: &Option, + mut offset: i64, + ) -> (Option, i64) { + if offset == 0 { + return (*location, 0); + } + let Some(location) = location else { + return (None, offset); + }; + + let (mut acir_index, mut brillig_index) = match location { + OpcodeLocation::Acir(acir_index) => (*acir_index, 0), + OpcodeLocation::Brillig { acir_index, brillig_index } => (*acir_index, *brillig_index), + }; + let opcode_sizes = self.get_opcodes_sizes(); + if offset > 0 { + while offset > 0 { + let opcode_size = opcode_sizes[acir_index] as i64 - brillig_index as i64; + if offset >= opcode_size { + acir_index += 1; + offset -= opcode_size; + brillig_index = 0; + } else { + brillig_index += offset as usize; + offset = 0; + } + if acir_index >= opcode_sizes.len() { + return (None, offset); + } + } + } else { + while offset < 0 { + if brillig_index > 0 { + if brillig_index > (-offset) as usize { + brillig_index -= (-offset) as usize; + offset = 0; + } else { + offset += brillig_index as i64; + brillig_index = 0; + } + } else { + if acir_index == 0 { + return (None, offset); + } + acir_index -= 1; + let opcode_size = opcode_sizes[acir_index] as i64; + if opcode_size <= -offset { + offset += opcode_size; + } else { + brillig_index = (opcode_size + offset) as usize; + offset = 0; + } + } + } + } + if brillig_index > 0 { + (Some(OpcodeLocation::Brillig { acir_index, brillig_index }), 0) + } else { + (Some(OpcodeLocation::Acir(acir_index)), 0) + } + } + + pub(super) fn render_opcode_at_location(&self, location: &Option) -> String { + let opcodes = self.get_opcodes(); + match location { + None => String::from("invalid"), + Some(OpcodeLocation::Acir(acir_index)) => { + let opcode = &opcodes[*acir_index]; + if let Opcode::Brillig(ref brillig) = opcode { + let first_opcode = &brillig.bytecode[0]; + format!("BRILLIG {first_opcode:?}") + } else { + format!("{opcode:?}") + } + } + Some(OpcodeLocation::Brillig { acir_index, brillig_index }) => { + if let Opcode::Brillig(ref brillig) = opcodes[*acir_index] { + let opcode = &brillig.bytecode[*brillig_index]; + format!(" | {opcode:?}") + } else { + String::from(" | invalid") + } + } + } + } + fn step_brillig_opcode(&mut self) -> DebugCommandResult { let Some(mut solver) = self.brillig_solver.take() else { unreachable!("Missing Brillig solver"); @@ -106,15 +221,19 @@ impl<'a, B: BlackBoxFunctionSolver> DebugContext<'a, B> { } fn handle_foreign_call(&mut self, foreign_call: ForeignCallWaitInfo) -> DebugCommandResult { - let 
foreign_call_result = - self.foreign_call_executor.execute(&foreign_call, self.show_output); + let foreign_call_result = self.foreign_call_executor.execute(&foreign_call); match foreign_call_result { Ok(foreign_call_result) => { - self.acvm.resolve_pending_foreign_call(foreign_call_result); + if let Some(mut solver) = self.brillig_solver.take() { + solver.resolve_pending_foreign_call(foreign_call_result); + self.brillig_solver = Some(solver); + } else { + self.acvm.resolve_pending_foreign_call(foreign_call_result); + } // TODO: should we retry executing the opcode somehow in this case? DebugCommandResult::Ok } - Err(error) => DebugCommandResult::Error(error), + Err(error) => DebugCommandResult::Error(error.into()), } } @@ -158,13 +277,50 @@ impl<'a, B: BlackBoxFunctionSolver> DebugContext<'a, B> { } } + fn currently_executing_brillig(&self) -> bool { + if self.brillig_solver.is_some() { + return true; + } + + match self.get_current_opcode_location() { + Some(OpcodeLocation::Brillig { .. }) => true, + Some(OpcodeLocation::Acir(acir_index)) => { + matches!(self.get_opcodes()[acir_index], Opcode::Brillig(_)) + } + _ => false, + } + } + + fn get_current_acir_index(&self) -> Option { + self.get_current_opcode_location().map(|opcode_location| match opcode_location { + OpcodeLocation::Acir(acir_index) => acir_index, + OpcodeLocation::Brillig { acir_index, .. } => acir_index, + }) + } + + fn step_out_of_brillig_opcode(&mut self) -> DebugCommandResult { + let Some(start_acir_index) = self.get_current_acir_index() else { + return DebugCommandResult::Done; + }; + loop { + let result = self.step_into_opcode(); + if !matches!(result, DebugCommandResult::Ok) { + return result; + } + let new_acir_index = self.get_current_acir_index().unwrap(); + if new_acir_index != start_acir_index { + return DebugCommandResult::Ok; + } + } + } + pub(super) fn step_acir_opcode(&mut self) -> DebugCommandResult { - let status = if let Some(solver) = self.brillig_solver.take() { - self.acvm.finish_brillig_with_solver(solver) + if self.currently_executing_brillig() { + self.step_out_of_brillig_opcode() } else { - self.acvm.solve_opcode() - }; - self.handle_acvm_status(status) + let status = self.acvm.solve_opcode(); + self.handle_acvm_status(status) + } } pub(super) fn next(&mut self) -> DebugCommandResult { @@ -190,6 +346,32 @@ impl<'a, B: BlackBoxFunctionSolver> DebugContext<'a, B> { } } + pub(super) fn is_executing_brillig(&self) -> bool { + let opcodes = self.get_opcodes(); + let acir_index = self.acvm.instruction_pointer(); + acir_index < opcodes.len() && matches!(opcodes[acir_index], Opcode::Brillig(..)) + } + + pub(super) fn get_brillig_registers(&self) -> Option<&Registers> { + self.brillig_solver.as_ref().map(|solver| solver.get_registers()) + } + + pub(super) fn set_brillig_register(&mut self, register_index: usize, value: FieldElement) { + if let Some(solver) = self.brillig_solver.as_mut() { + solver.set_register(register_index, value.into()); + } + } + + pub(super) fn get_brillig_memory(&self) -> Option<&[Value]> { + self.brillig_solver.as_ref().map(|solver| solver.get_memory()) + } + + pub(super) fn write_brillig_memory(&mut self, ptr: usize, value: FieldElement) { + if let Some(solver) = self.brillig_solver.as_mut() { + solver.write_memory_at(ptr, value.into()); + } + } + fn breakpoint_reached(&self) -> bool { if let Some(location) = self.get_current_opcode_location() { self.breakpoints.contains(&location) @@ -232,6 +414,10 @@ impl<'a, B: BlackBoxFunctionSolver> DebugContext<'a, B> { self.breakpoints.iter() } 
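+    /// Clears all breakpoints registered in the current debugging session.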
+ pub(super) fn clear_breakpoints(&mut self) { + self.breakpoints.clear(); + } + pub(super) fn is_solved(&self) -> bool { matches!(self.acvm.get_status(), ACVMStatus::Solved) } @@ -240,3 +426,335 @@ impl<'a, B: BlackBoxFunctionSolver> DebugContext<'a, B> { self.acvm.finalize() } } + +#[cfg(test)] +mod tests { + use super::*; + use crate::context::{DebugCommandResult, DebugContext}; + + use acvm::{ + acir::{ + circuit::{ + brillig::{Brillig, BrilligInputs, BrilligOutputs}, + opcodes::BlockId, + }, + native_types::Expression, + }, + brillig_vm::brillig::{ + BinaryFieldOp, Opcode as BrilligOpcode, RegisterIndex, RegisterOrMemory, + }, + }; + use nargo::{artifacts::debug::DebugArtifact, ops::DefaultForeignCallExecutor}; + use std::collections::BTreeMap; + + struct StubbedSolver; + + impl BlackBoxFunctionSolver for StubbedSolver { + fn schnorr_verify( + &self, + _public_key_x: &FieldElement, + _public_key_y: &FieldElement, + _signature: &[u8], + _message: &[u8], + ) -> Result { + unimplemented!(); + } + + fn pedersen_commitment( + &self, + _inputs: &[FieldElement], + _domain_separator: u32, + ) -> Result<(FieldElement, FieldElement), acvm::BlackBoxResolutionError> { + unimplemented!(); + } + + fn pedersen_hash( + &self, + _inputs: &[FieldElement], + _domain_separator: u32, + ) -> Result { + unimplemented!(); + } + + fn fixed_base_scalar_mul( + &self, + _low: &FieldElement, + _high: &FieldElement, + ) -> Result<(FieldElement, FieldElement), acvm::BlackBoxResolutionError> { + unimplemented!(); + } + } + + #[test] + fn test_resolve_foreign_calls_stepping_into_brillig() { + let fe_0 = FieldElement::zero(); + let fe_1 = FieldElement::one(); + let w_x = Witness(1); + + let blackbox_solver = &StubbedSolver; + + let brillig_opcodes = Brillig { + inputs: vec![BrilligInputs::Single(Expression { + linear_combinations: vec![(fe_1, w_x)], + ..Expression::default() + })], + outputs: vec![], + bytecode: vec![ + BrilligOpcode::Const { + destination: RegisterIndex::from(1), + value: Value::from(fe_0), + }, + BrilligOpcode::ForeignCall { + function: "clear_mock".into(), + destinations: vec![], + inputs: vec![RegisterOrMemory::RegisterIndex(RegisterIndex::from(0))], + }, + BrilligOpcode::Stop, + ], + predicate: None, + }; + let opcodes = vec![Opcode::Brillig(brillig_opcodes)]; + let current_witness_index = 2; + let circuit = &Circuit { current_witness_index, opcodes, ..Circuit::default() }; + + let debug_symbols = vec![]; + let file_map = BTreeMap::new(); + let warnings = vec![]; + let debug_artifact = &DebugArtifact { debug_symbols, file_map, warnings }; + + let initial_witness = BTreeMap::from([(Witness(1), fe_1)]).into(); + + let mut context = DebugContext::new( + blackbox_solver, + circuit, + debug_artifact, + initial_witness, + Box::new(DefaultForeignCallExecutor::new(true, None)), + ); + + assert_eq!(context.get_current_opcode_location(), Some(OpcodeLocation::Acir(0))); + + // execute the first Brillig opcode (const) + let result = context.step_into_opcode(); + assert!(matches!(result, DebugCommandResult::Ok)); + assert_eq!( + context.get_current_opcode_location(), + Some(OpcodeLocation::Brillig { acir_index: 0, brillig_index: 1 }) + ); + + // try to execute the second Brillig opcode (and resolve the foreign call) + let result = context.step_into_opcode(); + assert!(matches!(result, DebugCommandResult::Ok)); + assert_eq!( + context.get_current_opcode_location(), + Some(OpcodeLocation::Brillig { acir_index: 0, brillig_index: 1 }) + ); + + // retry the second Brillig opcode (foreign call should be finished) + 
let result = context.step_into_opcode(); + assert!(matches!(result, DebugCommandResult::Ok)); + assert_eq!( + context.get_current_opcode_location(), + Some(OpcodeLocation::Brillig { acir_index: 0, brillig_index: 2 }) + ); + + // last Brillig opcode + let result = context.step_into_opcode(); + assert!(matches!(result, DebugCommandResult::Done)); + assert_eq!(context.get_current_opcode_location(), None); + } + + #[test] + fn test_break_brillig_block_while_stepping_acir_opcodes() { + let fe_0 = FieldElement::zero(); + let fe_1 = FieldElement::one(); + let w_x = Witness(1); + let w_y = Witness(2); + let w_z = Witness(3); + + let blackbox_solver = &StubbedSolver; + + // This Brillig block is equivalent to: z = x + y + let brillig_opcodes = Brillig { + inputs: vec![ + BrilligInputs::Single(Expression { + linear_combinations: vec![(fe_1, w_x)], + ..Expression::default() + }), + BrilligInputs::Single(Expression { + linear_combinations: vec![(fe_1, w_y)], + ..Expression::default() + }), + ], + outputs: vec![BrilligOutputs::Simple(w_z)], + bytecode: vec![ + BrilligOpcode::BinaryFieldOp { + destination: RegisterIndex::from(0), + op: BinaryFieldOp::Add, + lhs: RegisterIndex::from(0), + rhs: RegisterIndex::from(1), + }, + BrilligOpcode::Stop, + ], + predicate: None, + }; + let opcodes = vec![ + // z = x + y + Opcode::Brillig(brillig_opcodes), + // x + y - z = 0 + Opcode::AssertZero(Expression { + mul_terms: vec![], + linear_combinations: vec![(fe_1, w_x), (fe_1, w_y), (-fe_1, w_z)], + q_c: fe_0, + }), + ]; + let current_witness_index = 3; + let circuit = &Circuit { current_witness_index, opcodes, ..Circuit::default() }; + + let debug_symbols = vec![]; + let file_map = BTreeMap::new(); + let warnings = vec![]; + let debug_artifact = &DebugArtifact { debug_symbols, file_map, warnings }; + + let initial_witness = BTreeMap::from([(Witness(1), fe_1), (Witness(2), fe_1)]).into(); + + let mut context = DebugContext::new( + blackbox_solver, + circuit, + debug_artifact, + initial_witness, + Box::new(DefaultForeignCallExecutor::new(true, None)), + ); + + // set breakpoint + let breakpoint_location = OpcodeLocation::Brillig { acir_index: 0, brillig_index: 1 }; + assert!(context.add_breakpoint(breakpoint_location)); + + // execute the first ACIR opcode (Brillig block) -> should reach the breakpoint instead + let result = context.step_acir_opcode(); + assert!(matches!(result, DebugCommandResult::BreakpointReached(_))); + assert_eq!(context.get_current_opcode_location(), Some(breakpoint_location)); + + // continue execution to the next ACIR opcode + let result = context.step_acir_opcode(); + assert!(matches!(result, DebugCommandResult::Ok)); + assert_eq!(context.get_current_opcode_location(), Some(OpcodeLocation::Acir(1))); + + // last ACIR opcode + let result = context.step_acir_opcode(); + assert!(matches!(result, DebugCommandResult::Done)); + assert_eq!(context.get_current_opcode_location(), None); + } + + #[test] + fn test_offset_opcode_location() { + let blackbox_solver = &StubbedSolver; + let opcodes = vec![ + Opcode::Brillig(Brillig { + inputs: vec![], + outputs: vec![], + bytecode: vec![BrilligOpcode::Stop, BrilligOpcode::Stop, BrilligOpcode::Stop], + predicate: None, + }), + Opcode::MemoryInit { block_id: BlockId(0), init: vec![] }, + Opcode::Brillig(Brillig { + inputs: vec![], + outputs: vec![], + bytecode: vec![BrilligOpcode::Stop, BrilligOpcode::Stop, BrilligOpcode::Stop], + predicate: None, + }), + Opcode::AssertZero(Expression::default()), + ]; + let circuit = Circuit { opcodes, ..Circuit::default() }; 
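+        // The opcodes above span 8 steppable locations: ACIR 0 is a Brillig block with 3 instructions, ACIR 1 is a MemoryInit, ACIR 2 is another 3-instruction Brillig block, and ACIR 3 is an AssertZero. The offset assertions below move back and forth across this layout.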
+ let debug_artifact = + DebugArtifact { debug_symbols: vec![], file_map: BTreeMap::new(), warnings: vec![] }; + let context = DebugContext::new( + blackbox_solver, + &circuit, + &debug_artifact, + WitnessMap::new(), + Box::new(DefaultForeignCallExecutor::new(true, None)), + ); + + assert_eq!(context.offset_opcode_location(&None, 0), (None, 0)); + assert_eq!(context.offset_opcode_location(&None, 2), (None, 2)); + assert_eq!(context.offset_opcode_location(&None, -2), (None, -2)); + assert_eq!( + context.offset_opcode_location(&Some(OpcodeLocation::Acir(0)), 0), + (Some(OpcodeLocation::Acir(0)), 0) + ); + assert_eq!( + context.offset_opcode_location(&Some(OpcodeLocation::Acir(0)), 1), + (Some(OpcodeLocation::Brillig { acir_index: 0, brillig_index: 1 }), 0) + ); + assert_eq!( + context.offset_opcode_location(&Some(OpcodeLocation::Acir(0)), 2), + (Some(OpcodeLocation::Brillig { acir_index: 0, brillig_index: 2 }), 0) + ); + assert_eq!( + context.offset_opcode_location(&Some(OpcodeLocation::Acir(0)), 3), + (Some(OpcodeLocation::Acir(1)), 0) + ); + assert_eq!( + context.offset_opcode_location(&Some(OpcodeLocation::Acir(0)), 4), + (Some(OpcodeLocation::Acir(2)), 0) + ); + assert_eq!( + context.offset_opcode_location(&Some(OpcodeLocation::Acir(0)), 5), + (Some(OpcodeLocation::Brillig { acir_index: 2, brillig_index: 1 }), 0) + ); + assert_eq!( + context.offset_opcode_location(&Some(OpcodeLocation::Acir(0)), 7), + (Some(OpcodeLocation::Acir(3)), 0) + ); + assert_eq!(context.offset_opcode_location(&Some(OpcodeLocation::Acir(0)), 8), (None, 0)); + assert_eq!(context.offset_opcode_location(&Some(OpcodeLocation::Acir(0)), 20), (None, 12)); + assert_eq!( + context.offset_opcode_location(&Some(OpcodeLocation::Acir(1)), 2), + (Some(OpcodeLocation::Brillig { acir_index: 2, brillig_index: 1 }), 0) + ); + assert_eq!(context.offset_opcode_location(&Some(OpcodeLocation::Acir(0)), -1), (None, -1)); + assert_eq!( + context.offset_opcode_location(&Some(OpcodeLocation::Acir(0)), -10), + (None, -10) + ); + + assert_eq!( + context.offset_opcode_location( + &Some(OpcodeLocation::Brillig { acir_index: 0, brillig_index: 1 }), + -1 + ), + (Some(OpcodeLocation::Acir(0)), 0) + ); + assert_eq!( + context.offset_opcode_location( + &Some(OpcodeLocation::Brillig { acir_index: 0, brillig_index: 2 }), + -2 + ), + (Some(OpcodeLocation::Acir(0)), 0) + ); + assert_eq!( + context.offset_opcode_location(&Some(OpcodeLocation::Acir(1)), -3), + (Some(OpcodeLocation::Acir(0)), 0) + ); + assert_eq!( + context.offset_opcode_location(&Some(OpcodeLocation::Acir(2)), -4), + (Some(OpcodeLocation::Acir(0)), 0) + ); + assert_eq!( + context.offset_opcode_location( + &Some(OpcodeLocation::Brillig { acir_index: 2, brillig_index: 1 }), + -5 + ), + (Some(OpcodeLocation::Acir(0)), 0) + ); + assert_eq!( + context.offset_opcode_location(&Some(OpcodeLocation::Acir(3)), -7), + (Some(OpcodeLocation::Acir(0)), 0) + ); + assert_eq!( + context.offset_opcode_location(&Some(OpcodeLocation::Acir(2)), -2), + (Some(OpcodeLocation::Brillig { acir_index: 0, brillig_index: 2 }), 0) + ); + } +} diff --git a/tooling/debugger/src/dap.rs b/tooling/debugger/src/dap.rs new file mode 100644 index 00000000000..803f9f108db --- /dev/null +++ b/tooling/debugger/src/dap.rs @@ -0,0 +1,568 @@ +use std::collections::BTreeMap; +use std::io::{Read, Write}; +use std::str::FromStr; + +use acvm::acir::circuit::{Circuit, OpcodeLocation}; +use acvm::acir::native_types::WitnessMap; +use acvm::BlackBoxFunctionSolver; +use codespan_reporting::files::{Files, SimpleFile}; + +use 
crate::context::DebugCommandResult; +use crate::context::DebugContext; + +use dap::errors::ServerError; +use dap::events::StoppedEventBody; +use dap::prelude::Event; +use dap::requests::{Command, Request, SetBreakpointsArguments}; +use dap::responses::{ + ContinueResponse, DisassembleResponse, ResponseBody, ScopesResponse, SetBreakpointsResponse, + SetExceptionBreakpointsResponse, SetInstructionBreakpointsResponse, StackTraceResponse, + ThreadsResponse, +}; +use dap::server::Server; +use dap::types::{ + Breakpoint, DisassembledInstruction, Source, StackFrame, SteppingGranularity, + StoppedEventReason, Thread, +}; +use nargo::artifacts::debug::DebugArtifact; +use nargo::ops::DefaultForeignCallExecutor; + +use fm::FileId; +use noirc_driver::CompiledProgram; + +pub struct DapSession<'a, R: Read, W: Write, B: BlackBoxFunctionSolver> { + server: Server, + context: DebugContext<'a, B>, + debug_artifact: &'a DebugArtifact, + running: bool, + source_to_opcodes: BTreeMap>, + next_breakpoint_id: i64, + instruction_breakpoints: Vec<(OpcodeLocation, i64)>, + source_breakpoints: BTreeMap>, +} + +// BTreeMap + +impl<'a, R: Read, W: Write, B: BlackBoxFunctionSolver> DapSession<'a, R, W, B> { + pub fn new( + server: Server, + solver: &'a B, + circuit: &'a Circuit, + debug_artifact: &'a DebugArtifact, + initial_witness: WitnessMap, + ) -> Self { + let source_to_opcodes = Self::build_source_to_opcode_debug_mappings(debug_artifact); + let context = DebugContext::new( + solver, + circuit, + debug_artifact, + initial_witness, + Box::new(DefaultForeignCallExecutor::new(true, None)), + ); + Self { + server, + context, + debug_artifact, + source_to_opcodes, + running: false, + next_breakpoint_id: 1, + instruction_breakpoints: vec![], + source_breakpoints: BTreeMap::new(), + } + } + + /// Builds a map from FileId to an ordered vector of tuples with line + /// numbers and opcode locations corresponding to those line numbers + fn build_source_to_opcode_debug_mappings( + debug_artifact: &'a DebugArtifact, + ) -> BTreeMap> { + if debug_artifact.debug_symbols.is_empty() { + return BTreeMap::new(); + } + let locations = &debug_artifact.debug_symbols[0].locations; + let simple_files: BTreeMap<_, _> = debug_artifact + .file_map + .iter() + .map(|(file_id, debug_file)| { + ( + file_id, + SimpleFile::new(debug_file.path.to_str().unwrap(), debug_file.source.as_str()), + ) + }) + .collect(); + + let mut result: BTreeMap> = BTreeMap::new(); + locations.iter().for_each(|(opcode_location, source_locations)| { + if source_locations.is_empty() { + return; + } + let source_location = source_locations[0]; + let span = source_location.span; + let file_id = source_location.file; + let Ok(line_index) = &simple_files[&file_id].line_index((), span.start() as usize) else { + return; + }; + let line_number = line_index + 1; + + result.entry(file_id).or_default().push((line_number, *opcode_location)); + }); + result.iter_mut().for_each(|(_, file_locations)| file_locations.sort_by_key(|x| x.0)); + result + } + + fn send_stopped_event(&mut self, reason: StoppedEventReason) -> Result<(), ServerError> { + let description = format!("{:?}", &reason); + self.server.send_event(Event::Stopped(StoppedEventBody { + reason, + description: Some(description), + thread_id: Some(0), + preserve_focus_hint: Some(false), + text: None, + all_threads_stopped: Some(false), + hit_breakpoint_ids: None, + }))?; + Ok(()) + } + + pub fn run_loop(&mut self) -> Result<(), ServerError> { + self.running = true; + + if matches!(self.context.get_current_source_location(), 
None) { + // TODO: remove this? This is to ensure that the tool has a proper + // source location to show when first starting the debugger, but + // maybe the default behavior should be to start executing until the + // first breakpoint set. + _ = self.context.next(); + } + + self.server.send_event(Event::Initialized)?; + self.send_stopped_event(StoppedEventReason::Entry)?; + + while self.running { + let req = match self.server.poll_request()? { + Some(req) => req, + None => break, + }; + match req.command { + Command::Disconnect(_) => { + eprintln!("INFO: ending debugging session"); + self.server.respond(req.ack()?)?; + break; + } + Command::SetBreakpoints(_) => { + self.handle_set_source_breakpoints(req)?; + } + Command::SetExceptionBreakpoints(_) => { + self.server.respond(req.success(ResponseBody::SetExceptionBreakpoints( + SetExceptionBreakpointsResponse { breakpoints: None }, + )))?; + } + Command::SetInstructionBreakpoints(_) => { + self.handle_set_instruction_breakpoints(req)?; + } + Command::Threads => { + self.server.respond(req.success(ResponseBody::Threads(ThreadsResponse { + threads: vec![Thread { id: 0, name: "main".to_string() }], + })))?; + } + Command::StackTrace(_) => { + self.handle_stack_trace(req)?; + } + Command::Disassemble(_) => { + self.handle_disassemble(req)?; + } + Command::StepIn(ref args) => { + let granularity = + args.granularity.as_ref().unwrap_or(&SteppingGranularity::Statement); + match granularity { + SteppingGranularity::Instruction => self.handle_step(req)?, + _ => self.handle_next(req)?, + } + } + Command::StepOut(ref args) => { + let granularity = + args.granularity.as_ref().unwrap_or(&SteppingGranularity::Statement); + match granularity { + SteppingGranularity::Instruction => self.handle_step(req)?, + _ => self.handle_next(req)?, + } + } + Command::Next(ref args) => { + let granularity = + args.granularity.as_ref().unwrap_or(&SteppingGranularity::Statement); + match granularity { + SteppingGranularity::Instruction => self.handle_step(req)?, + _ => self.handle_next(req)?, + } + } + Command::Continue(_) => { + self.handle_continue(req)?; + } + Command::Scopes(_) => { + // FIXME: this needs a proper implementation when we can + // show the parameters and variables + self.server.respond( + req.success(ResponseBody::Scopes(ScopesResponse { scopes: vec![] })), + )?; + } + _ => { + eprintln!("ERROR: unhandled command: {:?}", req.command); + } + } + } + Ok(()) + } + + fn handle_stack_trace(&mut self, req: Request) -> Result<(), ServerError> { + let opcode_location = self.context.get_current_opcode_location(); + let source_location = self.context.get_current_source_location(); + let frames = match source_location { + None => vec![], + Some(locations) => locations + .iter() + .enumerate() + .map(|(index, location)| { + let line_number = self.debug_artifact.location_line_number(*location).unwrap(); + let column_number = + self.debug_artifact.location_column_number(*location).unwrap(); + let ip_reference = opcode_location.map(|location| location.to_string()); + StackFrame { + id: index as i64, + name: format!("frame #{index}"), + source: Some(Source { + path: self.debug_artifact.file_map[&location.file] + .path + .to_str() + .map(String::from), + ..Source::default() + }), + line: line_number as i64, + column: column_number as i64, + instruction_pointer_reference: ip_reference, + ..StackFrame::default() + } + }) + .collect(), + }; + let total_frames = Some(frames.len() as i64); + self.server.respond(req.success(ResponseBody::StackTrace(StackTraceResponse { + 
stack_frames: frames, + total_frames, + })))?; + Ok(()) + } + + fn handle_disassemble(&mut self, req: Request) -> Result<(), ServerError> { + let Command::Disassemble(ref args) = req.command else { + unreachable!("handle_disassemble called on a non disassemble request"); + }; + let starting_ip = OpcodeLocation::from_str(args.memory_reference.as_str()).ok(); + let instruction_offset = args.instruction_offset.unwrap_or(0); + let (mut opcode_location, mut invalid_count) = + self.context.offset_opcode_location(&starting_ip, instruction_offset); + let mut count = args.instruction_count; + + let mut instructions: Vec = vec![]; + + // leading invalid locations (when the request goes back + // beyond the start of the program) + if invalid_count < 0 { + while invalid_count < 0 { + instructions.push(DisassembledInstruction { + address: String::from("---"), + instruction: String::from("---"), + ..DisassembledInstruction::default() + }); + invalid_count += 1; + count -= 1; + } + if count > 0 { + opcode_location = Some(OpcodeLocation::Acir(0)); + } + } + // the actual opcodes + while count > 0 && !matches!(opcode_location, None) { + instructions.push(DisassembledInstruction { + address: format!("{}", opcode_location.unwrap()), + instruction: self.context.render_opcode_at_location(&opcode_location), + ..DisassembledInstruction::default() + }); + (opcode_location, _) = self.context.offset_opcode_location(&opcode_location, 1); + count -= 1; + } + // any remaining instruction count is beyond the valid opcode + // vector so return invalid placeholders + while count > 0 { + instructions.push(DisassembledInstruction { + address: String::from("---"), + instruction: String::from("---"), + ..DisassembledInstruction::default() + }); + invalid_count -= 1; + count -= 1; + } + + self.server.respond( + req.success(ResponseBody::Disassemble(DisassembleResponse { instructions })), + )?; + Ok(()) + } + + fn handle_step(&mut self, req: Request) -> Result<(), ServerError> { + let result = self.context.step_into_opcode(); + eprintln!("INFO: stepped by instruction with result {result:?}"); + self.server.respond(req.ack()?)?; + self.handle_execution_result(result) + } + + fn handle_next(&mut self, req: Request) -> Result<(), ServerError> { + let result = self.context.next(); + eprintln!("INFO: stepped by statement with result {result:?}"); + self.server.respond(req.ack()?)?; + self.handle_execution_result(result) + } + + fn handle_continue(&mut self, req: Request) -> Result<(), ServerError> { + let result = self.context.cont(); + eprintln!("INFO: continue with result {result:?}"); + self.server.respond(req.success(ResponseBody::Continue(ContinueResponse { + all_threads_continued: Some(true), + })))?; + self.handle_execution_result(result) + } + + fn find_breakpoints_at_location(&self, opcode_location: &OpcodeLocation) -> Vec { + let mut result = vec![]; + for (location, id) in &self.instruction_breakpoints { + if opcode_location == location { + result.push(*id); + } + } + for breakpoints in self.source_breakpoints.values() { + for (location, id) in breakpoints { + if opcode_location == location { + result.push(*id); + } + } + } + result + } + + fn handle_execution_result(&mut self, result: DebugCommandResult) -> Result<(), ServerError> { + match result { + DebugCommandResult::Done => { + self.running = false; + } + DebugCommandResult::Ok => { + self.server.send_event(Event::Stopped(StoppedEventBody { + reason: StoppedEventReason::Pause, + description: None, + thread_id: Some(0), + preserve_focus_hint: Some(false), + text: 
None, + all_threads_stopped: Some(false), + hit_breakpoint_ids: None, + }))?; + } + DebugCommandResult::BreakpointReached(location) => { + let breakpoint_ids = self.find_breakpoints_at_location(&location); + self.server.send_event(Event::Stopped(StoppedEventBody { + reason: StoppedEventReason::Breakpoint, + description: Some(String::from("Paused at breakpoint")), + thread_id: Some(0), + preserve_focus_hint: Some(false), + text: None, + all_threads_stopped: Some(false), + hit_breakpoint_ids: Some(breakpoint_ids), + }))?; + } + DebugCommandResult::Error(err) => { + self.server.send_event(Event::Stopped(StoppedEventBody { + reason: StoppedEventReason::Exception, + description: Some(format!("{err:?}")), + thread_id: Some(0), + preserve_focus_hint: Some(false), + text: None, + all_threads_stopped: Some(false), + hit_breakpoint_ids: None, + }))?; + } + } + Ok(()) + } + + fn get_next_breakpoint_id(&mut self) -> i64 { + let id = self.next_breakpoint_id; + self.next_breakpoint_id += 1; + id + } + + fn reinstall_breakpoints(&mut self) { + self.context.clear_breakpoints(); + for (location, _) in &self.instruction_breakpoints { + self.context.add_breakpoint(*location); + } + for breakpoints in self.source_breakpoints.values() { + for (location, _) in breakpoints { + self.context.add_breakpoint(*location); + } + } + } + + fn handle_set_instruction_breakpoints(&mut self, req: Request) -> Result<(), ServerError> { + let Command::SetInstructionBreakpoints(ref args) = req.command else { + unreachable!("handle_set_instruction_breakpoints called on a different request"); + }; + + // compute breakpoints to set and return + let mut breakpoints_to_set: Vec<(OpcodeLocation, i64)> = vec![]; + let breakpoints: Vec = args.breakpoints.iter().map(|breakpoint| { + let Ok(location) = OpcodeLocation::from_str(breakpoint.instruction_reference.as_str()) else { + return Breakpoint { + verified: false, + message: Some(String::from("Missing instruction reference")), + ..Breakpoint::default() + }; + }; + if !self.context.is_valid_opcode_location(&location) { + return Breakpoint { + verified: false, + message: Some(String::from("Invalid opcode location")), + ..Breakpoint::default() + }; + } + let id = self.get_next_breakpoint_id(); + breakpoints_to_set.push((location, id)); + Breakpoint { + id: Some(id), + verified: true, + ..Breakpoint::default() + } + }).collect(); + + // actually set the computed breakpoints + self.instruction_breakpoints = breakpoints_to_set; + self.reinstall_breakpoints(); + + // response to request + self.server.respond(req.success(ResponseBody::SetInstructionBreakpoints( + SetInstructionBreakpointsResponse { breakpoints }, + )))?; + Ok(()) + } + + fn find_file_id(&self, source_path: &str) -> Option { + let file_map = &self.debug_artifact.file_map; + let found = file_map.iter().find(|(_, debug_file)| match debug_file.path.to_str() { + Some(debug_file_path) => debug_file_path == source_path, + None => false, + }); + found.map(|iter| *iter.0) + } + + // TODO: there are four possibilities for the return value of this function: + // 1. the source location is not found -> None + // 2. an exact unique location is found -> Some(opcode_location) + // 3. an exact but not unique location is found (ie. a source location may + // be mapped to multiple opcodes, and those may be disjoint, for example for + // functions called multiple times throughout the program) + // 4. 
exact location is not found, so an opcode for a nearby source location + // is returned (this again could actually be more than one opcodes) + // Case 3 is not supported yet, and 4 is not correctly handled. + fn find_opcode_for_source_location( + &self, + file_id: &FileId, + line: i64, + ) -> Option { + let line = line as usize; + let Some(line_to_opcodes) = self.source_to_opcodes.get(file_id) else { + return None; + }; + let found_index = match line_to_opcodes.binary_search_by(|x| x.0.cmp(&line)) { + Ok(index) => line_to_opcodes[index].1, + Err(index) => line_to_opcodes[index].1, + }; + Some(found_index) + } + + fn map_source_breakpoints(&mut self, args: &SetBreakpointsArguments) -> Vec { + let Some(ref source) = &args.source.path else { + return vec![]; + }; + let Some(file_id) = self.find_file_id(source) else { + eprintln!("WARN: file ID for source {source} not found"); + return vec![]; + }; + let Some(ref breakpoints) = &args.breakpoints else { + return vec![]; + }; + let mut breakpoints_to_set: Vec<(OpcodeLocation, i64)> = vec![]; + let breakpoints = breakpoints + .iter() + .map(|breakpoint| { + let line = breakpoint.line; + let Some(location) = self.find_opcode_for_source_location(&file_id, line) else { + return Breakpoint { + verified: false, + message: Some(String::from("Source location cannot be matched to opcode location")), + ..Breakpoint::default() + }; + }; + // TODO: line will not necessarily be the one requested; we + // should do the reverse mapping and retrieve the actual source + // code line number + if !self.context.is_valid_opcode_location(&location) { + return Breakpoint { + verified: false, + message: Some(String::from("Invalid opcode location")), + ..Breakpoint::default() + }; + } + let instruction_reference = format!("{}", location); + let breakpoint_id = self.get_next_breakpoint_id(); + breakpoints_to_set.push((location, breakpoint_id)); + Breakpoint { + id: Some(breakpoint_id), + verified: true, + source: Some(args.source.clone()), + instruction_reference: Some(instruction_reference), + line: Some(line), + ..Breakpoint::default() + } + }) + .collect(); + + self.source_breakpoints.insert(file_id, breakpoints_to_set); + + breakpoints + } + + fn handle_set_source_breakpoints(&mut self, req: Request) -> Result<(), ServerError> { + let Command::SetBreakpoints(ref args) = req.command else { + unreachable!("handle_set_source_breakpoints called on a different request"); + }; + let breakpoints = self.map_source_breakpoints(args); + self.reinstall_breakpoints(); + self.server.respond( + req.success(ResponseBody::SetBreakpoints(SetBreakpointsResponse { breakpoints })), + )?; + Ok(()) + } +} + +pub fn run_session( + server: Server, + solver: &B, + program: CompiledProgram, + initial_witness: WitnessMap, +) -> Result<(), ServerError> { + let debug_artifact = DebugArtifact { + debug_symbols: vec![program.debug], + file_map: program.file_map, + warnings: program.warnings, + }; + let mut session = + DapSession::new(server, solver, &program.circuit, &debug_artifact, initial_witness); + + session.run_loop() +} diff --git a/tooling/debugger/src/lib.rs b/tooling/debugger/src/lib.rs index 7c6a9e9f618..21834e44f93 100644 --- a/tooling/debugger/src/lib.rs +++ b/tooling/debugger/src/lib.rs @@ -1,12 +1,19 @@ mod context; +mod dap; mod repl; +mod source_code_printer; +use std::io::{Read, Write}; + +use ::dap::errors::ServerError; +use ::dap::server::Server; use acvm::BlackBoxFunctionSolver; use acvm::{acir::circuit::Circuit, acir::native_types::WitnessMap}; use 
nargo::artifacts::debug::DebugArtifact; use nargo::NargoError; +use noirc_driver::CompiledProgram; pub fn debug_circuit( blackbox_solver: &B, @@ -16,3 +23,12 @@ pub fn debug_circuit( ) -> Result, NargoError> { repl::run(blackbox_solver, circuit, &debug_artifact, initial_witness) } + +pub fn run_dap_loop( + server: Server, + solver: &B, + program: CompiledProgram, + initial_witness: WitnessMap, +) -> Result<(), ServerError> { + dap::run_session(server, solver, program, initial_witness) +} diff --git a/tooling/debugger/src/repl.rs b/tooling/debugger/src/repl.rs index 320d8edf63a..b1af2bc2686 100644 --- a/tooling/debugger/src/repl.rs +++ b/tooling/debugger/src/repl.rs @@ -1,21 +1,15 @@ use crate::context::{DebugCommandResult, DebugContext}; -use acvm::acir::circuit::{Opcode, OpcodeLocation}; -use acvm::BlackBoxFunctionSolver; -use acvm::{acir::circuit::Circuit, acir::native_types::WitnessMap}; +use acvm::acir::circuit::{Circuit, Opcode, OpcodeLocation}; +use acvm::acir::native_types::{Witness, WitnessMap}; +use acvm::{BlackBoxFunctionSolver, FieldElement}; -use nargo::artifacts::debug::DebugArtifact; -use nargo::NargoError; +use nargo::{artifacts::debug::DebugArtifact, ops::DefaultForeignCallExecutor, NargoError}; use easy_repl::{command, CommandStatus, Repl}; use std::cell::RefCell; -use codespan_reporting::files::Files; -use noirc_errors::Location; - -use owo_colors::OwoColorize; - -use std::ops::Range; +use crate::source_code_printer::print_source_code_location; pub struct ReplDebugger<'a, B: BlackBoxFunctionSolver> { context: DebugContext<'a, B>, @@ -33,8 +27,13 @@ impl<'a, B: BlackBoxFunctionSolver> ReplDebugger<'a, B> { debug_artifact: &'a DebugArtifact, initial_witness: WitnessMap, ) -> Self { - let context = - DebugContext::new(blackbox_solver, circuit, debug_artifact, initial_witness.clone()); + let context = DebugContext::new( + blackbox_solver, + circuit, + debug_artifact, + initial_witness.clone(), + Box::new(DefaultForeignCallExecutor::new(true, None)), + ); Self { context, blackbox_solver, @@ -66,73 +65,8 @@ impl<'a, B: BlackBoxFunctionSolver> ReplDebugger<'a, B> { ); } } - self.show_source_code_location(&location); - } - } - } - - fn print_location_path(&self, loc: Location) { - let line_number = self.debug_artifact.location_line_number(loc).unwrap(); - let column_number = self.debug_artifact.location_column_number(loc).unwrap(); - - println!( - "At {}:{line_number}:{column_number}", - self.debug_artifact.name(loc.file).unwrap() - ); - } - - fn show_source_code_location(&self, location: &OpcodeLocation) { - let locations = self.debug_artifact.debug_symbols[0].opcode_location(location); - let Some(locations) = locations else { return }; - for loc in locations { - self.print_location_path(loc); - let loc_line_index = self.debug_artifact.location_line_index(loc).unwrap(); - - // How many lines before or after the location's line we - // print - let context_lines = 5; - - let first_line_to_print = - if loc_line_index < context_lines { 0 } else { loc_line_index - context_lines }; - - let last_line_index = self.debug_artifact.last_line_index(loc).unwrap(); - let last_line_to_print = std::cmp::min(loc_line_index + context_lines, last_line_index); - - let source = self.debug_artifact.location_source_code(loc).unwrap(); - for (current_line_index, line) in source.lines().enumerate() { - let current_line_number = current_line_index + 1; - - if current_line_index < first_line_to_print { - // Ignore lines before range starts - continue; - } else if current_line_index == first_line_to_print 
&& current_line_index > 0 { - // Denote that there's more lines before but we're not showing them - print_line_of_ellipsis(current_line_index); - } - - if current_line_index > last_line_to_print { - // Denote that there's more lines after but we're not showing them, - // and stop printing - print_line_of_ellipsis(current_line_number); - break; - } - - if current_line_index == loc_line_index { - // Highlight current location - let Range { start: loc_start, end: loc_end } = - self.debug_artifact.location_in_line(loc).unwrap(); - println!( - "{:>3} {:2} {}{}{}", - current_line_number, - "->", - &line[0..loc_start].to_string().dimmed(), - &line[loc_start..loc_end], - &line[loc_end..].to_string().dimmed() - ); - } else { - print_dimmed_line(current_line_number, line); - } + print_source_code_location(self.debug_artifact, &location); } } } @@ -274,6 +208,7 @@ impl<'a, B: BlackBoxFunctionSolver> ReplDebugger<'a, B> { self.circuit, self.debug_artifact, self.initial_witness.clone(), + Box::new(DefaultForeignCallExecutor::new(true, None)), ); for opcode_location in breakpoints { self.context.add_breakpoint(opcode_location); @@ -283,6 +218,93 @@ impl<'a, B: BlackBoxFunctionSolver> ReplDebugger<'a, B> { self.show_current_vm_status(); } + pub fn show_witness_map(&self) { + let witness_map = self.context.get_witness_map(); + // NOTE: we need to clone() here to get the iterator + for (witness, value) in witness_map.clone().into_iter() { + println!("_{} = {value}", witness.witness_index()); + } + } + + pub fn show_witness(&self, index: u32) { + if let Some(value) = self.context.get_witness_map().get_index(index) { + println!("_{} = {value}", index); + } + } + + pub fn update_witness(&mut self, index: u32, value: String) { + let Some(field_value) = FieldElement::try_from_str(&value) else { + println!("Invalid witness value: {value}"); + return; + }; + + let witness = Witness::from(index); + _ = self.context.overwrite_witness(witness, field_value); + println!("_{} = {value}", index); + } + + pub fn show_brillig_registers(&self) { + if !self.context.is_executing_brillig() { + println!("Not executing a Brillig block"); + return; + } + + let Some(registers) = self.context.get_brillig_registers() else { + // this can happen when just entering the Brillig block since ACVM + // would have not initialized the Brillig VM yet; in fact, the + // Brillig code may be skipped altogether + println!("Brillig VM registers not available"); + return; + }; + + for (index, value) in registers.inner.iter().enumerate() { + println!("{index} = {}", value.to_field()); + } + } + + pub fn set_brillig_register(&mut self, index: usize, value: String) { + let Some(field_value) = FieldElement::try_from_str(&value) else { + println!("Invalid value: {value}"); + return; + }; + if !self.context.is_executing_brillig() { + println!("Not executing a Brillig block"); + return; + } + self.context.set_brillig_register(index, field_value); + } + + pub fn show_brillig_memory(&self) { + if !self.context.is_executing_brillig() { + println!("Not executing a Brillig block"); + return; + } + + let Some(memory) = self.context.get_brillig_memory() else { + // this can happen when just entering the Brillig block since ACVM + // would have not initialized the Brillig VM yet; in fact, the + // Brillig code may be skipped altogether + println!("Brillig VM memory not available"); + return; + }; + + for (index, value) in memory.iter().enumerate() { + println!("{index} = {}", value.to_field()); + } + } + + pub fn write_brillig_memory(&mut self, index: usize, 
value: String) { + let Some(field_value) = FieldElement::try_from_str(&value) else { + println!("Invalid value: {value}"); + return; + }; + if !self.context.is_executing_brillig() { + println!("Not executing a Brillig block"); + return; + } + self.context.write_brillig_memory(index, field_value); + } + fn is_solved(&self) -> bool { self.context.is_solved() } @@ -292,14 +314,6 @@ impl<'a, B: BlackBoxFunctionSolver> ReplDebugger<'a, B> { } } -fn print_line_of_ellipsis(line_number: usize) { - println!("{}", format!("{:>3} {}", line_number, "...").dimmed()); -} - -fn print_dimmed_line(line_number: usize, line: &str) { - println!("{}", format!("{:>3} {:2} {}", line_number, "", line).dimmed()); -} - pub fn run( blackbox_solver: &B, circuit: &Circuit, @@ -393,6 +407,76 @@ pub fn run( } }, ) + .add( + "witness", + command! { + "show witness map", + () => || { + ref_context.borrow().show_witness_map(); + Ok(CommandStatus::Done) + } + }, + ) + .add( + "witness", + command! { + "display a single witness from the witness map", + (index: u32) => |index| { + ref_context.borrow().show_witness(index); + Ok(CommandStatus::Done) + } + }, + ) + .add( + "witness", + command! { + "update a witness with the given value", + (index: u32, value: String) => |index, value| { + ref_context.borrow_mut().update_witness(index, value); + Ok(CommandStatus::Done) + } + }, + ) + .add( + "registers", + command! { + "show Brillig registers (valid when executing a Brillig block)", + () => || { + ref_context.borrow().show_brillig_registers(); + Ok(CommandStatus::Done) + } + }, + ) + .add( + "regset", + command! { + "update a Brillig register with the given value", + (index: usize, value: String) => |index, value| { + ref_context.borrow_mut().set_brillig_register(index, value); + Ok(CommandStatus::Done) + } + }, + ) + .add( + "memory", + command! { + "show Brillig memory (valid when executing a Brillig block)", + () => || { + ref_context.borrow().show_brillig_memory(); + Ok(CommandStatus::Done) + } + }, + ) + .add( + "memset", + command! { + "update a Brillig memory cell with the given value", + (index: usize, value: String) => |index, value| { + ref_context.borrow_mut().write_brillig_memory(index, value); + Ok(CommandStatus::Done) + } + }, + ) .build() .expect("Failed to initialize debugger repl"); diff --git a/tooling/debugger/src/source_code_printer.rs b/tooling/debugger/src/source_code_printer.rs new file mode 100644 index 00000000000..1707f9066b7 --- /dev/null +++ b/tooling/debugger/src/source_code_printer.rs @@ -0,0 +1,317 @@ +use acvm::acir::circuit::OpcodeLocation; +use codespan_reporting::files::Files; +use nargo::artifacts::debug::DebugArtifact; +use noirc_errors::Location; +use owo_colors::OwoColorize; +use std::ops::Range; + +#[derive(Debug, PartialEq)] +enum PrintedLine<'a> { + Skip, + Ellipsis { + line_number: usize, + }, + Content { + line_number: usize, + cursor: &'a str, + content: &'a str, + highlight: Option>, + }, +} + +#[derive(Clone, Debug)] +struct LocationPrintContext { + file_lines: Range, + printed_lines: Range, + location_lines: Range, + location_offset_in_first_line: Range, + location_offset_in_last_line: Range, +} + +// Given a DebugArtifact and an OpcodeLocation, prints all the source code +// locations the OpcodeLocation maps to, with some surrounding context and +// visual aids to highlight the location itself. 
+pub(crate) fn print_source_code_location( + debug_artifact: &DebugArtifact, + location: &OpcodeLocation, +) { + let locations = debug_artifact.debug_symbols[0].opcode_location(location); + let Some(locations) = locations else { return; }; + + let locations = locations.iter(); + + for loc in locations { + print_location_path(debug_artifact, *loc); + + let lines = render_location(debug_artifact, loc); + + for line in lines { + match line { + PrintedLine::Skip => {} + PrintedLine::Ellipsis { line_number } => print_ellipsis(line_number), + PrintedLine::Content { line_number, cursor, content, highlight } => { + print_content(line_number, cursor, content, highlight) + } + } + } + } +} + +fn print_location_path(debug_artifact: &DebugArtifact, loc: Location) { + let line_number = debug_artifact.location_line_number(loc).unwrap(); + let column_number = debug_artifact.location_column_number(loc).unwrap(); + + println!("At {}:{line_number}:{column_number}", debug_artifact.name(loc.file).unwrap()); +} + +fn print_ellipsis(line_number: usize) { + println!("{:>3} {:2} {}", line_number.dimmed(), "", "...".dimmed()); +} + +fn print_content(line_number: usize, cursor: &str, content: &str, highlight: Option>) { + match highlight { + Some(highlight) => { + println!( + "{:>3} {:2} {}{}{}", + line_number, + cursor, + content[0..highlight.start].to_string().dimmed(), + &content[highlight.start..highlight.end], + content[highlight.end..].to_string().dimmed(), + ); + } + None => { + println!( + "{:>3} {:2} {}", + line_number.dimmed(), + cursor.dimmed(), + content.to_string().dimmed(), + ); + } + } +} + +fn render_line( + current: usize, + content: &str, + loc_context: LocationPrintContext, +) -> PrintedLine<'_> { + let file_lines = loc_context.file_lines; + let printed_lines = loc_context.printed_lines; + let location_lines = loc_context.location_lines; + let line_number = current + 1; + + if current < printed_lines.start { + // Ignore lines before the context window we choose to show + PrintedLine::Skip + } else if 0 < current && current == printed_lines.start && current < location_lines.start { + // Denote that there's more lines before but we're not showing them + PrintedLine::Ellipsis { line_number } + } else if current < location_lines.start { + // Print lines before the location start without highlighting + PrintedLine::Content { line_number, cursor: "", content, highlight: None } + } else if current == location_lines.start { + // Highlight current location from where it starts to the end of the current line + PrintedLine::Content { + line_number, + cursor: "->", + content, + highlight: Some(loc_context.location_offset_in_first_line), + } + } else if current < location_lines.end { + // Highlight current line if it's contained by the current location + PrintedLine::Content { + line_number, + cursor: "", + content, + highlight: Some(Range { start: 0, end: content.len() }), + } + } else if current == location_lines.end { + // Highlight current location from the beginning of the line until the location's own end + PrintedLine::Content { + line_number, + cursor: "", + content, + highlight: Some(loc_context.location_offset_in_last_line), + } + } else if current < printed_lines.end || printed_lines.end == file_lines.end { + // Print lines after the location end without highlighting + PrintedLine::Content { line_number, cursor: "", content, highlight: None } + } else if current == printed_lines.end && printed_lines.end < file_lines.end { + // Denote that there's more lines after but we're not showing them + 
PrintedLine::Ellipsis { line_number } + } else { + PrintedLine::Skip + } +} + +// Given a Location in a DebugArtifact, returns a line iterator that specifies how to +// print the location's file. +// +// Consider for example the file (line numbers added to facilitate this doc): +// ``` +// 1 use dep::std::hash::poseidon; +// 2 +// 3 fn main(x1: [Field; 2], y1: pub Field, x2: [Field; 4], y2: pub Field) { +// 4 let hash1 = poseidon::bn254::hash_2(x1); +// 5 assert(hash1 == y1); +// 6 +// 7 let hash2 = poseidon::bn254::hash_4(x2); +// 8 assert(hash2 == y2); +// 9 } +// 10 +// ``` +// +// If the location to render is `poseidon::bn254::hash_2(x1)`, we'll render the file as: +// ``` +// 1 use dep::std::hash::poseidon; +// 2 +// 3 fn main(x1: [Field; 2], y1: pub Field, x2: [Field; 4], y2: pub Field) { +// 4 let hash1 = poseidon::bn254::hash_2(x1); +// 5 -> assert(hash1 == y1); +// 6 +// 7 let hash2 = poseidon::bn254::hash_4(x2); +// 8 assert(hash2 == y2); +// 9 } +// 10 ... +// ``` +// +// This is the result of: +// 1. Limiting the amount of printed lines to 5 before and 5 after the location. +// 2. Using ellipsis (...) to denote when some file lines have been left out of the render. +// 3. Using an arrow cursor (->) to denote where the rendered location starts. +// 4. Highlighting the location (here expressed as a block for the sake of the explanation). +// +// Note that locations may span multiple lines, so this function deals with that too. +fn render_location<'a>( + debug_artifact: &'a DebugArtifact, + loc: &'a Location, +) -> impl Iterator> { + let loc = *loc; + + let file_lines = Range { start: 0, end: debug_artifact.last_line_index(loc).unwrap() }; + + // Sub-range of file lines that this location spans + let location_lines = Range { + start: debug_artifact.location_line_index(loc).unwrap(), + end: debug_artifact.location_end_line_index(loc).unwrap(), + }; + + // How many lines before or after the location's lines we print + let context_lines = 5; + + // Sub-range of lines that we'll print, which includes location + context lines + let first_line_to_print = + if location_lines.start < context_lines { 0 } else { location_lines.start - context_lines }; + let last_line_to_print = std::cmp::min(location_lines.end + context_lines, file_lines.end); + let printed_lines = Range { start: first_line_to_print, end: last_line_to_print }; + + // Range of the location relative to its starting and ending lines + let location_offset_in_first_line = debug_artifact.location_in_line(loc).unwrap(); + let location_offset_in_last_line = debug_artifact.location_in_end_line(loc).unwrap(); + + let context = LocationPrintContext { + file_lines, + printed_lines, + location_lines, + location_offset_in_first_line, + location_offset_in_last_line, + }; + + let source = debug_artifact.location_source_code(loc).unwrap(); + source + .lines() + .enumerate() + .map(move |(index, content)| render_line(index, content, context.clone())) +} + +#[cfg(test)] +mod tests { + use crate::source_code_printer::render_location; + use crate::source_code_printer::PrintedLine::Content; + use acvm::acir::circuit::OpcodeLocation; + use fm::FileManager; + use nargo::artifacts::debug::DebugArtifact; + use noirc_errors::{debug_info::DebugInfo, Location, Span}; + use std::collections::BTreeMap; + use std::ops::Range; + use std::path::Path; + use std::path::PathBuf; + use tempfile::{tempdir, TempDir}; + + // Returns the absolute path to the file + fn create_dummy_file(dir: &TempDir, file_name: &Path) -> PathBuf { + let file_path = 
dir.path().join(file_name); + let _file = std::fs::File::create(&file_path).unwrap(); + file_path + } + + #[test] + fn render_multiple_line_location() { + let source_code = r##"pub fn main(mut state: [Field; 2]) -> [Field; 2] { + state = permute( + consts::x5_2_config(), + state); + + state +}"##; + + let dir = tempdir().unwrap(); + let file_name = Path::new("main.nr"); + create_dummy_file(&dir, file_name); + + let mut fm = FileManager::new(dir.path()); + let file_id = fm.add_file_with_source(file_name, source_code.to_string()).unwrap(); + + // Location of + // ``` + // permute( + // consts::x5_2_config(), + // state) + // ``` + let loc = Location::new(Span::inclusive(63, 116), file_id); + + // We don't care about opcodes in this context, + // we just use a dummy to construct debug_symbols + let mut opcode_locations = BTreeMap::>::new(); + opcode_locations.insert(OpcodeLocation::Acir(42), vec![loc]); + + let debug_symbols = vec![DebugInfo::new(opcode_locations)]; + let debug_artifact = DebugArtifact::new(debug_symbols, &fm); + + let location_rendered: Vec<_> = render_location(&debug_artifact, &loc).collect(); + + assert_eq!( + location_rendered, + vec![ + Content { + line_number: 1, + cursor: "", + content: "pub fn main(mut state: [Field; 2]) -> [Field; 2] {", + highlight: None, + }, + Content { + line_number: 2, + cursor: "->", + content: " state = permute(", + highlight: Some(Range { start: 12, end: 20 }), + }, + Content { + line_number: 3, + cursor: "", + content: " consts::x5_2_config(),", + highlight: Some(Range { start: 0, end: 30 }), + }, + Content { + line_number: 4, + cursor: "", + content: " state);", + highlight: Some(Range { start: 0, end: 14 }), + }, + Content { line_number: 5, cursor: "", content: "", highlight: None }, + Content { line_number: 6, cursor: "", content: " state", highlight: None }, + Content { line_number: 7, cursor: "", content: "}", highlight: None }, + ] + ); + } +} diff --git a/tooling/debugger/tests/debug.rs b/tooling/debugger/tests/debug.rs new file mode 100644 index 00000000000..e8b17b8a7af --- /dev/null +++ b/tooling/debugger/tests/debug.rs @@ -0,0 +1,55 @@ +#[cfg(test)] +mod tests { + // Some of these imports are consumed by the injected tests + use assert_cmd::cargo::cargo_bin; + + use rexpect::spawn_bash; + + test_binary::build_test_binary_once!(mock_backend, "../backend_interface/test-binaries"); + + // include tests generated by `build.rs` + include!(concat!(env!("OUT_DIR"), "/debug.rs")); + + pub fn debugger_execution_success(test_program_dir: &str) { + let nargo_bin = + cargo_bin("nargo").into_os_string().into_string().expect("Cannot parse nargo path"); + + let mock_backend_path = + path_to_mock_backend().into_string().expect("Cannot parse mock_backend path"); + + let mut dbg_session = spawn_bash(Some(10000)).expect("Could not start bash session"); + + dbg_session + .send_line(&format!("export NARGO_BACKEND_PATH={}", mock_backend_path)) + .expect("Could not export NARGO_BACKEND_PATH."); + dbg_session.wait_for_prompt().expect("Could not export NARGO_BACKEND_PATH."); + + // Start debugger and test that it loads for the given program. + dbg_session + .execute( + &format!("{} debug --program-dir {}", nargo_bin, test_program_dir), + ".*\\Starting debugger.*", + ) + .expect("Could not start debugger"); + + // While running the debugger, issue a "continue" cmd, + // which should run to the program to end given + // we haven't set any breakpoints. 
+ // ">" is the debugger's prompt, so finding one + // after running "continue" indicates that the + // debugger has not panicked until the end of the program. + dbg_session + .send_line("c") + .expect("Debugger panicked while attempting to step through program."); + dbg_session + .exp_string(">") + .expect("Failed while waiting for debugger to step through program."); + + // Run the "quit" command, then check that the debugger confirms + // having successfully solved the circuit witness. + dbg_session.send_line("quit").expect("Failed to quit debugger"); + dbg_session + .exp_regex(".*Circuit witness successfully solved.*") + .expect("Expected circuit witness to be successfully solved."); + } +} diff --git a/tooling/lsp/Cargo.toml b/tooling/lsp/Cargo.toml index 69fec4b44d8..168ed38aa15 100644 --- a/tooling/lsp/Cargo.toml +++ b/tooling/lsp/Cargo.toml @@ -11,8 +11,8 @@ license.workspace = true [dependencies] acvm.workspace = true codespan-lsp.workspace = true -codespan-reporting.workspace = true nargo.workspace = true +nargo_fmt.workspace = true nargo_toml.workspace = true noirc_driver.workspace = true noirc_errors.workspace = true @@ -20,10 +20,10 @@ noirc_frontend.workspace = true serde.workspace = true serde_json.workspace = true tower.workspace = true -cfg-if.workspace = true lsp-types.workspace = true async-lsp = { workspace = true, features = ["omni-trait"] } serde_with = "3.2.0" +thiserror.workspace = true fm.workspace = true [target.'cfg(all(target_arch = "wasm32", not(target_os = "wasi")))'.dependencies] diff --git a/tooling/lsp/src/lib.rs b/tooling/lsp/src/lib.rs index 653bcd01a83..93195e203d5 100644 --- a/tooling/lsp/src/lib.rs +++ b/tooling/lsp/src/lib.rs @@ -4,6 +4,7 @@ #![cfg_attr(not(test), warn(unused_crate_dependencies, unused_extern_crates))] use std::{ + collections::HashMap, future::Future, ops::{self, ControlFlow}, path::{Path, PathBuf}, @@ -16,20 +17,27 @@ use async_lsp::{ router::Router, AnyEvent, AnyNotification, AnyRequest, ClientSocket, Error, LspService, ResponseError, }; -use codespan_reporting::files; +use fm::codespan_files as files; +use lsp_types::CodeLens; +use nargo::workspace::Workspace; +use nargo_toml::{find_file_manifest, resolve_workspace_from_toml, PackageSelection}; +use noirc_driver::{file_manager_with_stdlib, prepare_crate, NOIR_ARTIFACT_VERSION_STRING}; use noirc_frontend::{ graph::{CrateId, CrateName}, hir::{Context, FunctionNameMatch}, + node_interner::NodeInterner, }; + use notifications::{ on_did_change_configuration, on_did_change_text_document, on_did_close_text_document, on_did_open_text_document, on_did_save_text_document, on_exit, on_initialized, }; use requests::{ - on_code_lens_request, on_initialize, on_profile_run_request, on_shutdown, on_test_run_request, - on_tests_request, + on_code_lens_request, on_formatting, on_goto_declaration_request, on_goto_definition_request, + on_initialize, on_profile_run_request, on_shutdown, on_test_run_request, on_tests_request, }; use serde_json::Value as JsonValue; +use thiserror::Error; use tower::Service; mod notifications; @@ -40,16 +48,35 @@ mod types; use solver::WrapperSolver; use types::{notification, request, NargoTest, NargoTestId, Position, Range, Url}; +#[derive(Debug, Error)] +pub enum LspError { + /// Error while Resolving Workspace. 
+ #[error("Failed to Resolve Workspace - {0}")] + WorkspaceResolutionError(String), +} + // State for the LSP gets implemented on this struct and is internal to the implementation pub struct LspState { root_path: Option, client: ClientSocket, solver: WrapperSolver, + open_documents_count: usize, + input_files: HashMap, + cached_lenses: HashMap>, + cached_definitions: HashMap, } impl LspState { fn new(client: &ClientSocket, solver: impl BlackBoxFunctionSolver + 'static) -> Self { - Self { client: client.clone(), root_path: None, solver: WrapperSolver(Box::new(solver)) } + Self { + client: client.clone(), + root_path: None, + solver: WrapperSolver(Box::new(solver)), + input_files: HashMap::new(), + cached_lenses: HashMap::new(), + cached_definitions: HashMap::new(), + open_documents_count: 0, + } } } @@ -63,11 +90,14 @@ impl NargoLspService { let mut router = Router::new(state); router .request::(on_initialize) + .request::(on_formatting) .request::(on_shutdown) .request::(on_code_lens_request) .request::(on_tests_request) .request::(on_test_run_request) .request::(on_profile_run_request) + .request::(on_goto_definition_request) + .request::(on_goto_declaration_request) .notification::(on_initialized) .notification::(on_did_change_configuration) .notification::(on_did_open_text_document) @@ -179,28 +209,59 @@ fn byte_span_to_range<'a, F: files::Files<'a> + ?Sized>( } } -cfg_if::cfg_if! { - if #[cfg(all(target_arch = "wasm32", not(target_os = "wasi")))] { - use wasm_bindgen::{prelude::*, JsValue}; +pub(crate) fn resolve_workspace_for_source_path(file_path: &Path) -> Result { + let package_root = find_file_manifest(file_path); - #[wasm_bindgen(module = "@noir-lang/source-resolver")] - extern "C" { + let toml_path = package_root.ok_or_else(|| { + LspError::WorkspaceResolutionError(format!( + "Nargo.toml not found for file: {:?}", + file_path + )) + })?; - #[wasm_bindgen(catch)] - fn read_file(path: &str) -> Result; + let workspace = resolve_workspace_from_toml( + &toml_path, + PackageSelection::All, + Some(NOIR_ARTIFACT_VERSION_STRING.to_string()), + ) + .map_err(|err| LspError::WorkspaceResolutionError(err.to_string()))?; - } + Ok(workspace) +} - fn get_non_stdlib_asset(path_to_file: &Path) -> std::io::Result { - let path_str = path_to_file.to_str().unwrap(); - match read_file(path_str) { - Ok(buffer) => Ok(buffer), - Err(_) => Err(Error::new(ErrorKind::Other, "could not read file using wasm")), - } - } - } else { - fn get_non_stdlib_asset(path_to_file: &Path) -> std::io::Result { - std::fs::read_to_string(path_to_file) - } +/// Prepares a package from a source string +/// This is useful for situations when we don't need dependencies +/// and just need to operate on single file. 
+/// +/// Use case for this is the LSP server and code lenses +/// which operate on single file and need to understand this file +/// in order to offer code lenses to the user +fn prepare_source(source: String) -> (Context<'static>, CrateId) { + let root = Path::new(""); + let file_name = Path::new("main.nr"); + let mut file_manager = file_manager_with_stdlib(root); + file_manager.add_file_with_source(file_name, source).expect( + "Adding source buffer to file manager should never fail when file manager is empty", + ); + + let mut context = Context::new(file_manager); + let root_crate_id = prepare_crate(&mut context, file_name); + + (context, root_crate_id) +} + +#[test] +fn prepare_package_from_source_string() { + let source = r#" + fn main() { + let x = 1; + let y = 2; + let z = x + y; } + "#; + + let (mut context, crate_id) = crate::prepare_source(source.to_string()); + let _check_result = noirc_driver::check_crate(&mut context, crate_id, false, false); + let main_func_id = context.get_main_function(&crate_id); + assert!(main_func_id.is_some()); } diff --git a/tooling/lsp/src/notifications/mod.rs b/tooling/lsp/src/notifications/mod.rs index a7d8bf3e2ec..f47ceff04d9 100644 --- a/tooling/lsp/src/notifications/mod.rs +++ b/tooling/lsp/src/notifications/mod.rs @@ -1,21 +1,20 @@ use std::ops::ControlFlow; use async_lsp::{ErrorCode, LanguageClient, ResponseError}; -use nargo::prepare_package; -use nargo_toml::{find_package_manifest, resolve_workspace_from_toml, PackageSelection}; -use noirc_driver::{check_crate, NOIR_ARTIFACT_VERSION_STRING}; +use nargo::{insert_all_files_for_workspace_into_file_manager, prepare_package}; +use noirc_driver::{check_crate, file_manager_with_stdlib}; use noirc_errors::{DiagnosticKind, FileDiagnostic}; +use crate::requests::collect_lenses_for_package; use crate::types::{ notification, Diagnostic, DiagnosticSeverity, DidChangeConfigurationParams, DidChangeTextDocumentParams, DidCloseTextDocumentParams, DidOpenTextDocumentParams, - DidSaveTextDocumentParams, InitializedParams, LogMessageParams, MessageType, NargoPackageTests, - PublishDiagnosticsParams, + DidSaveTextDocumentParams, InitializedParams, NargoPackageTests, PublishDiagnosticsParams, }; use crate::{ - byte_span_to_range, get_non_stdlib_asset, get_package_tests_in_crate, uri_to_file_path, - LspState, + byte_span_to_range, get_package_tests_in_crate, prepare_source, + resolve_workspace_for_source_path, LspState, }; pub(super) fn on_initialized( @@ -33,90 +32,117 @@ pub(super) fn on_did_change_configuration( } pub(super) fn on_did_open_text_document( - _state: &mut LspState, - _params: DidOpenTextDocumentParams, + state: &mut LspState, + params: DidOpenTextDocumentParams, ) -> ControlFlow> { - ControlFlow::Continue(()) -} + state.input_files.insert(params.text_document.uri.to_string(), params.text_document.text); -pub(super) fn on_did_change_text_document( - _state: &mut LspState, - _params: DidChangeTextDocumentParams, -) -> ControlFlow> { - ControlFlow::Continue(()) -} + let document_uri = params.text_document.uri; -pub(super) fn on_did_close_text_document( - _state: &mut LspState, - _params: DidCloseTextDocumentParams, -) -> ControlFlow> { - ControlFlow::Continue(()) + match process_noir_document(document_uri, state) { + Ok(_) => { + state.open_documents_count += 1; + ControlFlow::Continue(()) + } + Err(err) => ControlFlow::Break(Err(err)), + } } -pub(super) fn on_did_save_text_document( +pub(super) fn on_did_change_text_document( state: &mut LspState, - params: DidSaveTextDocumentParams, + params: 
DidChangeTextDocumentParams, ) -> ControlFlow> { - let file_path = match uri_to_file_path(¶ms.text_document.uri) { - Ok(file_path) => file_path, - Err(()) => { + let text = params.content_changes.into_iter().next().unwrap().text; + state.input_files.insert(params.text_document.uri.to_string(), text.clone()); + + let (mut context, crate_id) = prepare_source(text); + let _ = check_crate(&mut context, crate_id, false, false); + + let workspace = match resolve_workspace_for_source_path( + ¶ms.text_document.uri.to_file_path().unwrap(), + ) { + Ok(workspace) => workspace, + Err(lsp_error) => { return ControlFlow::Break(Err(ResponseError::new( ErrorCode::REQUEST_FAILED, - "URI is not a valid file path", + lsp_error, ) .into())) } }; - - let root_path = match &state.root_path { - Some(root) => root, + let package = match workspace.members.first() { + Some(package) => package, None => { return ControlFlow::Break(Err(ResponseError::new( ErrorCode::REQUEST_FAILED, - "Could not find project root", + "Selected workspace has no members", ) - .into())); + .into())) } }; - let toml_path = match find_package_manifest(root_path, &file_path) { - Ok(toml_path) => toml_path, - Err(err) => { - // If we cannot find a manifest, we log a warning but return no diagnostics - // We can reconsider this when we can build a file without the need for a Nargo.toml file to resolve deps - let _ = state.client.log_message(LogMessageParams { - typ: MessageType::WARNING, - message: format!("{err}"), - }); - return ControlFlow::Continue(()); - } - }; - let workspace = match resolve_workspace_from_toml( - &toml_path, - PackageSelection::All, - Some(NOIR_ARTIFACT_VERSION_STRING.to_string()), - ) { - Ok(workspace) => workspace, - Err(err) => { - // If we found a manifest, but the workspace is invalid, we raise an error about it - return ControlFlow::Break(Err(ResponseError::new( - ErrorCode::REQUEST_FAILED, - format!("{err}"), - ) - .into())); - } - }; + let lenses = collect_lenses_for_package(&context, crate_id, &workspace, package, None); + + state.cached_lenses.insert(params.text_document.uri.to_string(), lenses); + + ControlFlow::Continue(()) +} + +pub(super) fn on_did_close_text_document( + state: &mut LspState, + params: DidCloseTextDocumentParams, +) -> ControlFlow> { + state.input_files.remove(¶ms.text_document.uri.to_string()); + state.cached_lenses.remove(¶ms.text_document.uri.to_string()); + + state.open_documents_count -= 1; + + if state.open_documents_count == 0 { + state.cached_definitions.clear(); + } + + ControlFlow::Continue(()) +} + +pub(super) fn on_did_save_text_document( + state: &mut LspState, + params: DidSaveTextDocumentParams, +) -> ControlFlow> { + let document_uri = params.text_document.uri; + + match process_noir_document(document_uri, state) { + Ok(_) => ControlFlow::Continue(()), + Err(err) => ControlFlow::Break(Err(err)), + } +} + +fn process_noir_document( + document_uri: lsp_types::Url, + state: &mut LspState, +) -> Result<(), async_lsp::Error> { + let file_path = document_uri.to_file_path().map_err(|_| { + ResponseError::new(ErrorCode::REQUEST_FAILED, "URI is not a valid file path") + })?; + + let workspace = resolve_workspace_for_source_path(&file_path).map_err(|lsp_error| { + ResponseError::new(ErrorCode::REQUEST_FAILED, lsp_error.to_string()) + })?; + + let mut workspace_file_manager = file_manager_with_stdlib(&workspace.root_dir); + insert_all_files_for_workspace_into_file_manager(&workspace, &mut workspace_file_manager); let diagnostics: Vec<_> = workspace .into_iter() .flat_map(|package| -> 
Vec { - let (mut context, crate_id) = prepare_package(package, Box::new(get_non_stdlib_asset)); + let (mut context, crate_id) = prepare_package(&workspace_file_manager, package); - let file_diagnostics = match check_crate(&mut context, crate_id, false) { + let file_diagnostics = match check_crate(&mut context, crate_id, false, false) { Ok(((), warnings)) => warnings, Err(errors_and_warnings) => errors_and_warnings, }; + let package_root_dir: String = package.root_dir.as_os_str().to_string_lossy().into(); + // We don't add test headings for a package if it contains no `#[test]` functions if let Some(tests) = get_package_tests_in_crate(&context, &crate_id, &package.name) { let _ = state.client.notify::(NargoPackageTests { @@ -125,6 +151,17 @@ pub(super) fn on_did_save_text_document( }); } + let collected_lenses = crate::requests::collect_lenses_for_package( + &context, + crate_id, + &workspace, + package, + Some(&file_path), + ); + state.cached_lenses.insert(document_uri.to_string(), collected_lenses); + + state.cached_definitions.insert(package_root_dir, context.def_interner); + let fm = &context.file_manager; let files = fm.as_file_map(); @@ -159,17 +196,13 @@ pub(super) fn on_did_save_text_document( .collect() }) .collect(); - - // We need to refresh lenses when we compile since that's the only time they can be accurately reflected - std::mem::drop(state.client.code_lens_refresh(())); - let _ = state.client.publish_diagnostics(PublishDiagnosticsParams { - uri: params.text_document.uri, + uri: document_uri, version: None, diagnostics, }); - ControlFlow::Continue(()) + Ok(()) } pub(super) fn on_exit( diff --git a/tooling/lsp/src/requests/code_lens_request.rs b/tooling/lsp/src/requests/code_lens_request.rs index 349c7382cdc..b360c28a050 100644 --- a/tooling/lsp/src/requests/code_lens_request.rs +++ b/tooling/lsp/src/requests/code_lens_request.rs @@ -1,15 +1,14 @@ use std::future::{self, Future}; -use async_lsp::{ErrorCode, LanguageClient, ResponseError}; +use async_lsp::{ErrorCode, ResponseError}; -use nargo::{package::Package, prepare_package, workspace::Workspace}; -use nargo_toml::{find_package_manifest, resolve_workspace_from_toml, PackageSelection}; -use noirc_driver::{check_crate, NOIR_ARTIFACT_VERSION_STRING}; +use nargo::{package::Package, workspace::Workspace}; +use noirc_driver::check_crate; use noirc_frontend::hir::FunctionNameMatch; use crate::{ - byte_span_to_range, get_non_stdlib_asset, - types::{CodeLens, CodeLensParams, CodeLensResult, Command, LogMessageParams, MessageType}, + byte_span_to_range, prepare_source, resolve_workspace_for_source_path, + types::{CodeLens, CodeLensParams, CodeLensResult, Command}, uri_to_file_path, LspState, }; @@ -54,182 +53,184 @@ fn on_code_lens_request_inner( ResponseError::new(ErrorCode::REQUEST_FAILED, "URI is not a valid file path") })?; - let root_path = state.root_path.as_deref().ok_or_else(|| { - ResponseError::new(ErrorCode::REQUEST_FAILED, "Could not find project root") - })?; + if let Some(collected_lenses) = state.cached_lenses.get(¶ms.text_document.uri.to_string()) { + return Ok(Some(collected_lenses.clone())); + } - let toml_path = match find_package_manifest(root_path, &file_path) { - Ok(toml_path) => toml_path, - Err(err) => { - // If we cannot find a manifest, we log a warning but return no code lenses - // We can reconsider this when we can build a file without the need for a Nargo.toml file to resolve deps - let _ = state.client.log_message(LogMessageParams { - typ: MessageType::WARNING, - message: err.to_string(), - }); - 
return Ok(None); - } - }; - let workspace = resolve_workspace_from_toml( - &toml_path, - PackageSelection::All, - Some(NOIR_ARTIFACT_VERSION_STRING.to_string()), - ) - .map_err(|err| { - // If we found a manifest, but the workspace is invalid, we raise an error about it - ResponseError::new(ErrorCode::REQUEST_FAILED, err) + let source_string = std::fs::read_to_string(&file_path).map_err(|_| { + ResponseError::new(ErrorCode::REQUEST_FAILED, "Could not read file from disk") })?; - let mut lenses: Vec = vec![]; + let workspace = resolve_workspace_for_source_path(file_path.as_path()).unwrap(); + let package = workspace.members.first().unwrap(); - for package in &workspace { - let (mut context, crate_id) = prepare_package(package, Box::new(get_non_stdlib_asset)); - // We ignore the warnings and errors produced by compilation for producing code lenses - // because we can still get the test functions even if compilation fails - let _ = check_crate(&mut context, crate_id, false); + let (mut context, crate_id) = prepare_source(source_string); + // We ignore the warnings and errors produced by compilation for producing code lenses + // because we can still get the test functions even if compilation fails + let _ = check_crate(&mut context, crate_id, false, false); - let fm = &context.file_manager; - let files = fm.as_file_map(); - let tests = context - .get_all_test_functions_in_crate_matching(&crate_id, FunctionNameMatch::Anything); + let collected_lenses = + collect_lenses_for_package(&context, crate_id, &workspace, package, None); - for (func_name, test_function) in tests { - let location = context.function_meta(&test_function.get_id()).name.location; - let file_id = location.file; + if collected_lenses.is_empty() { + state.cached_lenses.remove(¶ms.text_document.uri.to_string()); + Ok(None) + } else { + state + .cached_lenses + .insert(params.text_document.uri.to_string().clone(), collected_lenses.clone()); + Ok(Some(collected_lenses)) + } +} - // Ignore diagnostics for any file that wasn't the file we saved - // TODO: In the future, we could create "related" diagnostics for these files - if fm.path(file_id) != file_path { +pub(crate) fn collect_lenses_for_package( + context: &noirc_frontend::macros_api::HirContext, + crate_id: noirc_frontend::macros_api::CrateId, + workspace: &Workspace, + package: &Package, + file_path: Option<&std::path::PathBuf>, +) -> Vec { + let mut lenses: Vec = vec![]; + let fm = &context.file_manager; + let files = fm.as_file_map(); + let tests = + context.get_all_test_functions_in_crate_matching(&crate_id, FunctionNameMatch::Anything); + for (func_name, test_function) in tests { + let location = context.function_meta(&test_function.get_id()).name.location; + let file_id = location.file; + + // Ignore diagnostics for any file that wasn't the file we saved + // TODO: In the future, we could create "related" diagnostics for these files + if let Some(file_path) = file_path { + if fm.path(file_id) != *file_path { continue; } + } - let range = - byte_span_to_range(files, file_id, location.span.into()).unwrap_or_default(); - - let test_command = Command { - title: with_arrow(TEST_CODELENS_TITLE), - command: TEST_COMMAND.into(), - arguments: Some( - [ - package_selection_args(&workspace, package), - vec!["--exact".into(), func_name.into()], - ] - .concat(), - ), - }; + let range = byte_span_to_range(files, file_id, location.span.into()).unwrap_or_default(); - let test_lens = CodeLens { range, command: Some(test_command), data: None }; + let test_command = Command { + title: 
with_arrow(TEST_CODELENS_TITLE), + command: TEST_COMMAND.into(), + arguments: Some( + [ + package_selection_args(workspace, package), + vec!["--exact".into(), func_name.into()], + ] + .concat(), + ), + }; - lenses.push(test_lens); - } + let test_lens = CodeLens { range, command: Some(test_command), data: None }; - if package.is_binary() { - if let Some(main_func_id) = context.get_main_function(&crate_id) { - let location = context.function_meta(&main_func_id).name.location; - let file_id = location.file; + lenses.push(test_lens); + } + if package.is_binary() { + if let Some(main_func_id) = context.get_main_function(&crate_id) { + let location = context.function_meta(&main_func_id).name.location; + let file_id = location.file; - // Ignore diagnostics for any file that wasn't the file we saved - // TODO: In the future, we could create "related" diagnostics for these files - if fm.path(file_id) != file_path { - continue; + // Ignore diagnostics for any file that wasn't the file we saved + // TODO: In the future, we could create "related" diagnostics for these files + if let Some(file_path) = file_path { + if fm.path(file_id) != *file_path { + return lenses; } + } - let range = - byte_span_to_range(files, file_id, location.span.into()).unwrap_or_default(); + let range = + byte_span_to_range(files, file_id, location.span.into()).unwrap_or_default(); - let compile_command = Command { - title: with_arrow(COMPILE_CODELENS_TITLE), - command: COMPILE_COMMAND.into(), - arguments: Some(package_selection_args(&workspace, package)), - }; + let compile_command = Command { + title: with_arrow(COMPILE_CODELENS_TITLE), + command: COMPILE_COMMAND.into(), + arguments: Some(package_selection_args(workspace, package)), + }; - let compile_lens = CodeLens { range, command: Some(compile_command), data: None }; + let compile_lens = CodeLens { range, command: Some(compile_command), data: None }; - lenses.push(compile_lens); + lenses.push(compile_lens); - let info_command = Command { - title: INFO_CODELENS_TITLE.to_string(), - command: INFO_COMMAND.into(), - arguments: Some(package_selection_args(&workspace, package)), - }; + let info_command = Command { + title: INFO_CODELENS_TITLE.to_string(), + command: INFO_COMMAND.into(), + arguments: Some(package_selection_args(workspace, package)), + }; - let info_lens = CodeLens { range, command: Some(info_command), data: None }; + let info_lens = CodeLens { range, command: Some(info_command), data: None }; - lenses.push(info_lens); + lenses.push(info_lens); - let execute_command = Command { - title: EXECUTE_CODELENS_TITLE.to_string(), - command: EXECUTE_COMMAND.into(), - arguments: Some(package_selection_args(&workspace, package)), - }; + let execute_command = Command { + title: EXECUTE_CODELENS_TITLE.to_string(), + command: EXECUTE_COMMAND.into(), + arguments: Some(package_selection_args(workspace, package)), + }; - let execute_lens = CodeLens { range, command: Some(execute_command), data: None }; + let execute_lens = CodeLens { range, command: Some(execute_command), data: None }; - lenses.push(execute_lens); + lenses.push(execute_lens); - let profile_command = Command { - title: PROFILE_CODELENS_TITLE.to_string(), - command: PROFILE_COMMAND.into(), - arguments: Some(package_selection_args(&workspace, package)), - }; + let profile_command = Command { + title: PROFILE_CODELENS_TITLE.to_string(), + command: PROFILE_COMMAND.into(), + arguments: Some(package_selection_args(workspace, package)), + }; - let profile_lens = CodeLens { range, command: Some(profile_command), data: 
None }; + let profile_lens = CodeLens { range, command: Some(profile_command), data: None }; - lenses.push(profile_lens); - } + lenses.push(profile_lens); } + } - if package.is_contract() { - // Currently not looking to deduplicate this since we don't have a clear decision on if the Contract stuff is staying - for contract in context.get_all_contracts(&crate_id) { - let location = contract.location; - let file_id = location.file; + if package.is_contract() { + // Currently not looking to deduplicate this since we don't have a clear decision on if the Contract stuff is staying + for contract in context.get_all_contracts(&crate_id) { + let location = contract.location; + let file_id = location.file; - // Ignore diagnostics for any file that wasn't the file we saved - // TODO: In the future, we could create "related" diagnostics for these files - if fm.path(file_id) != file_path { + // Ignore diagnostics for any file that wasn't the file we saved + // TODO: In the future, we could create "related" diagnostics for these files + if let Some(file_path) = file_path { + if fm.path(file_id) != *file_path { continue; } + } - let range = - byte_span_to_range(files, file_id, location.span.into()).unwrap_or_default(); + let range = + byte_span_to_range(files, file_id, location.span.into()).unwrap_or_default(); - let compile_command = Command { - title: with_arrow(COMPILE_CODELENS_TITLE), - command: COMPILE_COMMAND.into(), - arguments: Some(package_selection_args(&workspace, package)), - }; + let compile_command = Command { + title: with_arrow(COMPILE_CODELENS_TITLE), + command: COMPILE_COMMAND.into(), + arguments: Some(package_selection_args(workspace, package)), + }; - let compile_lens = CodeLens { range, command: Some(compile_command), data: None }; + let compile_lens = CodeLens { range, command: Some(compile_command), data: None }; - lenses.push(compile_lens); + lenses.push(compile_lens); - let info_command = Command { - title: INFO_CODELENS_TITLE.to_string(), - command: INFO_COMMAND.into(), - arguments: Some(package_selection_args(&workspace, package)), - }; + let info_command = Command { + title: INFO_CODELENS_TITLE.to_string(), + command: INFO_COMMAND.into(), + arguments: Some(package_selection_args(workspace, package)), + }; - let info_lens = CodeLens { range, command: Some(info_command), data: None }; + let info_lens = CodeLens { range, command: Some(info_command), data: None }; - lenses.push(info_lens); + lenses.push(info_lens); - let profile_command = Command { - title: PROFILE_CODELENS_TITLE.to_string(), - command: PROFILE_COMMAND.into(), - arguments: Some(package_selection_args(&workspace, package)), - }; + let profile_command = Command { + title: PROFILE_CODELENS_TITLE.to_string(), + command: PROFILE_COMMAND.into(), + arguments: Some(package_selection_args(workspace, package)), + }; - let profile_lens = CodeLens { range, command: Some(profile_command), data: None }; + let profile_lens = CodeLens { range, command: Some(profile_command), data: None }; - lenses.push(profile_lens); - } + lenses.push(profile_lens); } } - if lenses.is_empty() { - Ok(None) - } else { - Ok(Some(lenses)) - } + lenses } diff --git a/tooling/lsp/src/requests/goto_declaration.rs b/tooling/lsp/src/requests/goto_declaration.rs new file mode 100644 index 00000000000..6e3664804f6 --- /dev/null +++ b/tooling/lsp/src/requests/goto_declaration.rs @@ -0,0 +1,80 @@ +use std::future::{self, Future}; + +use crate::resolve_workspace_for_source_path; +use crate::types::GotoDeclarationResult; +use crate::LspState; +use 
async_lsp::{ErrorCode, ResponseError}; + +use lsp_types::request::{GotoDeclarationParams, GotoDeclarationResponse}; + +use nargo::insert_all_files_for_workspace_into_file_manager; +use noirc_driver::file_manager_with_stdlib; + +use super::{position_to_byte_index, to_lsp_location}; + +pub(crate) fn on_goto_declaration_request( + state: &mut LspState, + params: GotoDeclarationParams, +) -> impl Future> { + let result = on_goto_definition_inner(state, params); + future::ready(result) +} + +fn on_goto_definition_inner( + _state: &mut LspState, + params: GotoDeclarationParams, +) -> Result { + let file_path = + params.text_document_position_params.text_document.uri.to_file_path().map_err(|_| { + ResponseError::new(ErrorCode::REQUEST_FAILED, "URI is not a valid file path") + })?; + + let workspace = resolve_workspace_for_source_path(file_path.as_path()).unwrap(); + let package = workspace.members.first().unwrap(); + + let package_root_path: String = package.root_dir.as_os_str().to_string_lossy().into(); + + let mut workspace_file_manager = file_manager_with_stdlib(&workspace.root_dir); + insert_all_files_for_workspace_into_file_manager(&workspace, &mut workspace_file_manager); + + let (mut context, crate_id) = nargo::prepare_package(&workspace_file_manager, package); + + let interner; + if let Some(def_interner) = _state.cached_definitions.get(&package_root_path) { + interner = def_interner; + } else { + // We ignore the warnings and errors produced by compilation while resolving the definition + let _ = noirc_driver::check_crate(&mut context, crate_id, false, false); + interner = &context.def_interner; + } + + let files = context.file_manager.as_file_map(); + let file_id = context.file_manager.name_to_id(file_path.clone()).ok_or(ResponseError::new( + ErrorCode::REQUEST_FAILED, + format!("Could not find file in file manager. File path: {:?}", file_path), + ))?; + let byte_index = + position_to_byte_index(files, file_id, &params.text_document_position_params.position) + .map_err(|err| { + ResponseError::new( + ErrorCode::REQUEST_FAILED, + format!("Could not convert position to byte index. 
Error: {:?}", err), + ) + })?; + + let search_for_location = noirc_errors::Location { + file: file_id, + span: noirc_errors::Span::single_char(byte_index as u32), + }; + + let goto_declaration_response = + interner.get_declaration_location_from(search_for_location).and_then(|found_location| { + let file_id = found_location.file; + let definition_position = to_lsp_location(files, file_id, found_location.span)?; + let response: GotoDeclarationResponse = + GotoDeclarationResponse::from(definition_position).to_owned(); + Some(response) + }); + + Ok(goto_declaration_response) +} diff --git a/tooling/lsp/src/requests/goto_definition.rs b/tooling/lsp/src/requests/goto_definition.rs new file mode 100644 index 00000000000..6d44761de94 --- /dev/null +++ b/tooling/lsp/src/requests/goto_definition.rs @@ -0,0 +1,139 @@ +use std::future::{self, Future}; + +use crate::resolve_workspace_for_source_path; +use crate::{types::GotoDefinitionResult, LspState}; +use async_lsp::{ErrorCode, ResponseError}; + +use lsp_types::{GotoDefinitionParams, GotoDefinitionResponse}; +use nargo::insert_all_files_for_workspace_into_file_manager; +use noirc_driver::file_manager_with_stdlib; + +use super::{position_to_byte_index, to_lsp_location}; + +pub(crate) fn on_goto_definition_request( + state: &mut LspState, + params: GotoDefinitionParams, +) -> impl Future> { + let result = on_goto_definition_inner(state, params); + future::ready(result) +} + +fn on_goto_definition_inner( + _state: &mut LspState, + params: GotoDefinitionParams, +) -> Result { + let file_path = + params.text_document_position_params.text_document.uri.to_file_path().map_err(|_| { + ResponseError::new(ErrorCode::REQUEST_FAILED, "URI is not a valid file path") + })?; + + let workspace = resolve_workspace_for_source_path(file_path.as_path()).unwrap(); + let package = workspace.members.first().unwrap(); + + let package_root_path: String = package.root_dir.as_os_str().to_string_lossy().into(); + + let mut workspace_file_manager = file_manager_with_stdlib(&workspace.root_dir); + insert_all_files_for_workspace_into_file_manager(&workspace, &mut workspace_file_manager); + + let (mut context, crate_id) = nargo::prepare_package(&workspace_file_manager, package); + + let interner; + if let Some(def_interner) = _state.cached_definitions.get(&package_root_path) { + interner = def_interner; + } else { + // We ignore the warnings and errors produced by compilation while resolving the definition + let _ = noirc_driver::check_crate(&mut context, crate_id, false, false); + interner = &context.def_interner; + } + + let files = context.file_manager.as_file_map(); + let file_id = context.file_manager.name_to_id(file_path.clone()).ok_or(ResponseError::new( + ErrorCode::REQUEST_FAILED, + format!("Could not find file in file manager. File path: {:?}", file_path), + ))?; + let byte_index = + position_to_byte_index(files, file_id, ¶ms.text_document_position_params.position) + .map_err(|err| { + ResponseError::new( + ErrorCode::REQUEST_FAILED, + format!("Could not convert position to byte index. 
Error: {:?}", err), + ) + })?; + + let search_for_location = noirc_errors::Location { + file: file_id, + span: noirc_errors::Span::single_char(byte_index as u32), + }; + + let goto_definition_response = + interner.get_definition_location_from(search_for_location).and_then(|found_location| { + let file_id = found_location.file; + let definition_position = to_lsp_location(files, file_id, found_location.span)?; + let response: GotoDefinitionResponse = + GotoDefinitionResponse::from(definition_position).to_owned(); + Some(response) + }); + + Ok(goto_definition_response) +} + +#[cfg(test)] +mod goto_definition_tests { + + use async_lsp::ClientSocket; + use lsp_types::{Position, Url}; + use tokio::test; + + use crate::solver::MockBackend; + + use super::*; + + #[test] + async fn test_on_goto_definition() { + let client = ClientSocket::new_closed(); + let solver = MockBackend; + let mut state = LspState::new(&client, solver); + + let root_path = std::env::current_dir() + .unwrap() + .join("../../test_programs/execution_success/7_function") + .canonicalize() + .expect("Could not resolve root path"); + let noir_text_document = Url::from_file_path(root_path.join("src/main.nr").as_path()) + .expect("Could not convert text document path to URI"); + let root_uri = Some( + Url::from_file_path(root_path.as_path()).expect("Could not convert root path to URI"), + ); + + #[allow(deprecated)] + let initialize_params = lsp_types::InitializeParams { + process_id: Default::default(), + root_path: None, + root_uri, + initialization_options: None, + capabilities: Default::default(), + trace: Some(lsp_types::TraceValue::Verbose), + workspace_folders: None, + client_info: None, + locale: None, + }; + let _initialize_response = crate::requests::on_initialize(&mut state, initialize_params) + .await + .expect("Could not initialize LSP server"); + + let params = GotoDefinitionParams { + text_document_position_params: lsp_types::TextDocumentPositionParams { + text_document: lsp_types::TextDocumentIdentifier { uri: noir_text_document }, + position: Position { line: 95, character: 5 }, + }, + work_done_progress_params: Default::default(), + partial_result_params: Default::default(), + }; + + let response = on_goto_definition_request(&mut state, params) + .await + .expect("Could execute on_goto_definition_request"); + + assert!(&response.is_some()); + } +} diff --git a/tooling/lsp/src/requests/mod.rs b/tooling/lsp/src/requests/mod.rs index b2be24e1187..ba2ef870d88 100644 --- a/tooling/lsp/src/requests/mod.rs +++ b/tooling/lsp/src/requests/mod.rs @@ -1,7 +1,14 @@ use std::future::Future; -use crate::types::{CodeLensOptions, InitializeParams, TextDocumentSyncOptions}; +use crate::types::{CodeLensOptions, InitializeParams}; use async_lsp::ResponseError; +use fm::codespan_files::Error; +use lsp_types::{ + DeclarationCapability, Location, Position, TextDocumentSyncCapability, TextDocumentSyncKind, + Url, +}; +use nargo_fmt::Config; +use serde::{Deserialize, Serialize}; use crate::{ types::{InitializeResult, NargoCapability, NargoTestsOptions, ServerCapabilities}, @@ -19,26 +26,57 @@ use crate::{ // and params passed in. 
mod code_lens_request; +mod goto_declaration; +mod goto_definition; mod profile_run; mod test_run; mod tests; pub(crate) use { - code_lens_request::on_code_lens_request, profile_run::on_profile_run_request, - test_run::on_test_run_request, tests::on_tests_request, + code_lens_request::collect_lenses_for_package, code_lens_request::on_code_lens_request, + goto_declaration::on_goto_declaration_request, goto_definition::on_goto_definition_request, + profile_run::on_profile_run_request, test_run::on_test_run_request, tests::on_tests_request, }; +/// LSP client will send initialization request after the server has started. +/// [InitializeParams].`initialization_options` will contain the options sent from the client. +#[derive(Debug, Deserialize, Serialize)] +struct LspInitializationOptions { + /// Controls whether code lens is enabled by the server + /// By default this will be set to true (enabled). + #[serde(rename = "enableCodeLens", default = "default_enable_code_lens")] + enable_code_lens: bool, +} + +fn default_enable_code_lens() -> bool { + true +} + +impl Default for LspInitializationOptions { + fn default() -> Self { + Self { enable_code_lens: default_enable_code_lens() } + } +} + pub(crate) fn on_initialize( state: &mut LspState, params: InitializeParams, ) -> impl Future> { state.root_path = params.root_uri.and_then(|root_uri| root_uri.to_file_path().ok()); - async { - let text_document_sync = - TextDocumentSyncOptions { save: Some(true.into()), ..Default::default() }; + let initialization_options: LspInitializationOptions = params + .initialization_options + .and_then(|value| serde_json::from_value(value).ok()) + .unwrap_or_default(); + + async move { + let text_document_sync = TextDocumentSyncCapability::Kind(TextDocumentSyncKind::FULL); - let code_lens = CodeLensOptions { resolve_provider: Some(false) }; + let code_lens = if initialization_options.enable_code_lens { + Some(CodeLensOptions { resolve_provider: Some(false) }) + } else { + None + }; let nargo = NargoCapability { tests: Some(NargoTestsOptions { @@ -50,15 +88,118 @@ pub(crate) fn on_initialize( Ok(InitializeResult { capabilities: ServerCapabilities { - text_document_sync: Some(text_document_sync.into()), - code_lens_provider: Some(code_lens), + text_document_sync: Some(text_document_sync), + code_lens_provider: code_lens, + document_formatting_provider: true, nargo: Some(nargo), + definition_provider: Some(lsp_types::OneOf::Left(true)), + declaration_provider: Some(DeclarationCapability::Simple(true)), }, server_info: None, }) } } +pub(crate) fn on_formatting( + state: &mut LspState, + params: lsp_types::DocumentFormattingParams, +) -> impl Future>, ResponseError>> { + std::future::ready(on_formatting_inner(state, params)) +} + +fn on_formatting_inner( + state: &LspState, + params: lsp_types::DocumentFormattingParams, +) -> Result>, ResponseError> { + let path = params.text_document.uri.to_string(); + + if let Some(source) = state.input_files.get(&path) { + let (module, errors) = noirc_frontend::parse_program(source); + if !errors.is_empty() { + return Ok(None); + } + + let new_text = nargo_fmt::format(source, module, &Config::default()); + + let start_position = Position { line: 0, character: 0 }; + let end_position = Position { + line: source.lines().count() as u32, + character: source.chars().count() as u32, + }; + + Ok(Some(vec![lsp_types::TextEdit { + range: lsp_types::Range::new(start_position, end_position), + new_text, + }])) + } else { + Ok(None) + } +} + +pub(crate) fn position_to_byte_index<'a, F>( + files: 
&'a F, + file_id: F::FileId, + position: &Position, +) -> Result +where + F: fm::codespan_files::Files<'a> + ?Sized, +{ + let source = files.source(file_id)?; + let source = source.as_ref(); + + let line_span = files.line_range(file_id, position.line as usize)?; + + let line_str = source.get(line_span.clone()); + + if let Some(line_str) = line_str { + let byte_offset = character_to_line_offset(line_str, position.character)?; + Ok(line_span.start + byte_offset) + } else { + Err(Error::InvalidCharBoundary { given: position.line as usize }) + } +} + +fn character_to_line_offset(line: &str, character: u32) -> Result { + let line_len = line.len(); + let mut character_offset = 0; + + let mut chars = line.chars(); + while let Some(ch) = chars.next() { + if character_offset == character { + let chars_off = chars.as_str().len(); + let ch_off = ch.len_utf8(); + + return Ok(line_len - chars_off - ch_off); + } + + character_offset += ch.len_utf16() as u32; + } + + // Handle positions after the last character on the line + if character_offset == character { + Ok(line_len) + } else { + Err(Error::ColumnTooLarge { given: character_offset as usize, max: line.len() }) + } +} + +fn to_lsp_location<'a, F>( + files: &'a F, + file_id: F::FileId, + definition_span: noirc_errors::Span, +) -> Option +where + F: fm::codespan_files::Files<'a> + ?Sized, +{ + let range = crate::byte_span_to_range(files, file_id, definition_span.into())?; + let file_name = files.name(file_id).ok()?; + + let path = file_name.to_string(); + let uri = Url::from_file_path(path).ok()?; + + Some(Location { uri, range }) +} + pub(crate) fn on_shutdown( _state: &mut LspState, _params: (), @@ -70,7 +211,7 @@ pub(crate) fn on_shutdown( mod initialization { use async_lsp::ClientSocket; use lsp_types::{ - CodeLensOptions, InitializeParams, TextDocumentSyncCapability, TextDocumentSyncOptions, + CodeLensOptions, InitializeParams, TextDocumentSyncCapability, TextDocumentSyncKind, }; use tokio::test; @@ -88,13 +229,36 @@ mod initialization { assert!(matches!( response.capabilities, ServerCapabilities { - text_document_sync: Some(TextDocumentSyncCapability::Options( - TextDocumentSyncOptions { save: Some(_), .. } + text_document_sync: Some(TextDocumentSyncCapability::Kind( + TextDocumentSyncKind::FULL )), code_lens_provider: Some(CodeLensOptions { resolve_provider: Some(false) }), + document_formatting_provider: true, .. 
} )); assert!(response.server_info.is_none()); } } + +#[cfg(test)] +mod character_to_line_offset_tests { + use super::*; + + #[test] + fn test_character_to_line_offset() { + let line = "Hello, dark!"; + let character = 8; + + let result = character_to_line_offset(line, character).unwrap(); + assert_eq!(result, 8); + + // In the case of a multi-byte character, the offset should be the byte index of the character + // byte offset for 8 character (黑) is expected to be 10 + let line = "Hello, 黑!"; + let character = 8; + + let result = character_to_line_offset(line, character).unwrap(); + assert_eq!(result, 10); + } +} diff --git a/tooling/lsp/src/requests/profile_run.rs b/tooling/lsp/src/requests/profile_run.rs index 84888d30ba5..6664475a68c 100644 --- a/tooling/lsp/src/requests/profile_run.rs +++ b/tooling/lsp/src/requests/profile_run.rs @@ -3,11 +3,13 @@ use std::{ future::{self, Future}, }; -use acvm::{acir::circuit::Opcode, Language}; +use acvm::ExpressionWidth; use async_lsp::{ErrorCode, ResponseError}; -use nargo::artifacts::debug::DebugArtifact; +use nargo::{artifacts::debug::DebugArtifact, insert_all_files_for_workspace_into_file_manager}; use nargo_toml::{find_package_manifest, resolve_workspace_from_toml, PackageSelection}; -use noirc_driver::{CompileOptions, DebugFile, NOIR_ARTIFACT_VERSION_STRING}; +use noirc_driver::{ + file_manager_with_stdlib, CompileOptions, DebugFile, NOIR_ARTIFACT_VERSION_STRING, +}; use noirc_errors::{debug_info::OpCodesCount, Location}; use crate::{ @@ -48,6 +50,9 @@ fn on_profile_run_request_inner( ResponseError::new(ErrorCode::REQUEST_FAILED, err) })?; + let mut workspace_file_manager = file_manager_with_stdlib(&workspace.root_dir); + insert_all_files_for_workspace_into_file_manager(&workspace, &mut workspace_file_manager); + // Since we filtered on crate name, this should be the only item in the iterator match workspace.into_iter().next() { Some(_package) => { @@ -57,16 +62,14 @@ fn on_profile_run_request_inner( .cloned() .partition(|package| package.is_binary()); - // # TODO(#3504): Consider how to incorporate Backend relevant information in wider context. 
- let is_opcode_supported = |_opcode: &Opcode| true; - let np_language = Language::PLONKCSat { width: 3 }; + let expression_width = ExpressionWidth::Bounded { width: 3 }; let (compiled_programs, compiled_contracts) = nargo::ops::compile_workspace( + &workspace_file_manager, &workspace, &binary_packages, &contract_packages, - np_language, - is_opcode_supported, + expression_width, &CompileOptions::default(), ) .map_err(|err| ResponseError::new(ErrorCode::REQUEST_FAILED, err))?; diff --git a/tooling/lsp/src/requests/test_run.rs b/tooling/lsp/src/requests/test_run.rs index 962fe99a49b..c2181d7839d 100644 --- a/tooling/lsp/src/requests/test_run.rs +++ b/tooling/lsp/src/requests/test_run.rs @@ -2,15 +2,17 @@ use std::future::{self, Future}; use async_lsp::{ErrorCode, ResponseError}; use nargo::{ + insert_all_files_for_workspace_into_file_manager, ops::{run_test, TestStatus}, prepare_package, }; use nargo_toml::{find_package_manifest, resolve_workspace_from_toml, PackageSelection}; -use noirc_driver::{check_crate, CompileOptions, NOIR_ARTIFACT_VERSION_STRING}; +use noirc_driver::{ + check_crate, file_manager_with_stdlib, CompileOptions, NOIR_ARTIFACT_VERSION_STRING, +}; use noirc_frontend::hir::FunctionNameMatch; use crate::{ - get_non_stdlib_asset, types::{NargoTestRunParams, NargoTestRunResult}, LspState, }; @@ -48,11 +50,14 @@ fn on_test_run_request_inner( ResponseError::new(ErrorCode::REQUEST_FAILED, err) })?; + let mut workspace_file_manager = file_manager_with_stdlib(&workspace.root_dir); + insert_all_files_for_workspace_into_file_manager(&workspace, &mut workspace_file_manager); + // Since we filtered on crate name, this should be the only item in the iterator match workspace.into_iter().next() { Some(package) => { - let (mut context, crate_id) = prepare_package(package, Box::new(get_non_stdlib_asset)); - if check_crate(&mut context, crate_id, false).is_err() { + let (mut context, crate_id) = prepare_package(&workspace_file_manager, package); + if check_crate(&mut context, crate_id, false, false).is_err() { let result = NargoTestRunResult { id: params.id.clone(), result: "error".to_string(), @@ -73,8 +78,14 @@ fn on_test_run_request_inner( ) })?; - let test_result = - run_test(&state.solver, &context, test_function, false, &CompileOptions::default()); + let test_result = run_test( + &state.solver, + &context, + test_function, + false, + None, + &CompileOptions::default(), + ); let result = match test_result { TestStatus::Pass => NargoTestRunResult { id: params.id.clone(), diff --git a/tooling/lsp/src/requests/tests.rs b/tooling/lsp/src/requests/tests.rs index 6b94b921a06..0f717b9fb9e 100644 --- a/tooling/lsp/src/requests/tests.rs +++ b/tooling/lsp/src/requests/tests.rs @@ -2,12 +2,12 @@ use std::future::{self, Future}; use async_lsp::{ErrorCode, LanguageClient, ResponseError}; use lsp_types::{LogMessageParams, MessageType}; -use nargo::prepare_package; +use nargo::{insert_all_files_for_workspace_into_file_manager, prepare_package}; use nargo_toml::{find_package_manifest, resolve_workspace_from_toml, PackageSelection}; -use noirc_driver::{check_crate, NOIR_ARTIFACT_VERSION_STRING}; +use noirc_driver::{check_crate, file_manager_with_stdlib, NOIR_ARTIFACT_VERSION_STRING}; use crate::{ - get_non_stdlib_asset, get_package_tests_in_crate, + get_package_tests_in_crate, types::{NargoPackageTests, NargoTestsParams, NargoTestsResult}, LspState, }; @@ -50,13 +50,16 @@ fn on_tests_request_inner( ResponseError::new(ErrorCode::REQUEST_FAILED, err) })?; + let mut workspace_file_manager = 
file_manager_with_stdlib(&workspace.root_dir); + insert_all_files_for_workspace_into_file_manager(&workspace, &mut workspace_file_manager); + let package_tests: Vec<_> = workspace .into_iter() .filter_map(|package| { - let (mut context, crate_id) = prepare_package(package, Box::new(get_non_stdlib_asset)); + let (mut context, crate_id) = prepare_package(&workspace_file_manager, package); // We ignore the warnings and errors produced by compilation for producing tests // because we can still get the test functions even if compilation fails - let _ = check_crate(&mut context, crate_id, false); + let _ = check_crate(&mut context, crate_id, false, false); // We don't add test headings for a package if it contains no `#[test]` functions get_package_tests_in_crate(&context, &crate_id, &package.name) diff --git a/tooling/lsp/src/solver.rs b/tooling/lsp/src/solver.rs index 4b37c204175..ce9302d280a 100644 --- a/tooling/lsp/src/solver.rs +++ b/tooling/lsp/src/solver.rs @@ -55,7 +55,10 @@ impl BlackBoxFunctionSolver for MockBackend { _signature: &[u8], _message: &[u8], ) -> Result { - Err(acvm::BlackBoxResolutionError::Unsupported(acvm::acir::BlackBoxFunc::SchnorrVerify)) + Err(acvm::BlackBoxResolutionError::Failed( + acvm::acir::BlackBoxFunc::SchnorrVerify, + "Unsupported opcode".to_owned(), + )) } fn pedersen_commitment( @@ -63,8 +66,9 @@ impl BlackBoxFunctionSolver for MockBackend { _inputs: &[acvm::FieldElement], _domain_separator: u32, ) -> Result<(acvm::FieldElement, acvm::FieldElement), acvm::BlackBoxResolutionError> { - Err(acvm::BlackBoxResolutionError::Unsupported( + Err(acvm::BlackBoxResolutionError::Failed( acvm::acir::BlackBoxFunc::PedersenCommitment, + "Unsupported opcode".to_owned(), )) } @@ -73,8 +77,9 @@ impl BlackBoxFunctionSolver for MockBackend { _low: &acvm::FieldElement, _high: &acvm::FieldElement, ) -> Result<(acvm::FieldElement, acvm::FieldElement), acvm::BlackBoxResolutionError> { - Err(acvm::BlackBoxResolutionError::Unsupported( + Err(acvm::BlackBoxResolutionError::Failed( acvm::acir::BlackBoxFunc::FixedBaseScalarMul, + "Unsupported opcode".to_owned(), )) } @@ -83,6 +88,9 @@ impl BlackBoxFunctionSolver for MockBackend { _inputs: &[acvm::FieldElement], _domain_separator: u32, ) -> Result { - Err(acvm::BlackBoxResolutionError::Unsupported(acvm::acir::BlackBoxFunc::PedersenHash)) + Err(acvm::BlackBoxResolutionError::Failed( + acvm::acir::BlackBoxFunc::PedersenHash, + "Unsupported opcode".to_owned(), + )) } } diff --git a/tooling/lsp/src/types.rs b/tooling/lsp/src/types.rs index 7a50c538051..8dbc51ec83c 100644 --- a/tooling/lsp/src/types.rs +++ b/tooling/lsp/src/types.rs @@ -1,4 +1,5 @@ use fm::FileId; +use lsp_types::{DeclarationCapability, DefinitionOptions, OneOf}; use noirc_driver::DebugFile; use noirc_errors::{debug_info::OpCodesCount, Location}; use noirc_frontend::graph::CrateName; @@ -11,8 +12,7 @@ pub(crate) use lsp_types::{ CodeLens, CodeLensOptions, CodeLensParams, Command, Diagnostic, DiagnosticSeverity, DidChangeConfigurationParams, DidChangeTextDocumentParams, DidCloseTextDocumentParams, DidOpenTextDocumentParams, DidSaveTextDocumentParams, InitializeParams, InitializedParams, - LogMessageParams, MessageType, Position, PublishDiagnosticsParams, Range, ServerInfo, - TextDocumentSyncCapability, TextDocumentSyncOptions, Url, + Position, PublishDiagnosticsParams, Range, ServerInfo, TextDocumentSyncCapability, Url, }; pub(crate) mod request { @@ -24,7 +24,9 @@ pub(crate) mod request { }; // Re-providing lsp_types that we don't need to override - pub(crate) use 
lsp_types::request::{CodeLensRequest as CodeLens, Shutdown}; + pub(crate) use lsp_types::request::{ + CodeLensRequest as CodeLens, Formatting, GotoDeclaration, GotoDefinition, Shutdown, + }; #[derive(Debug)] pub(crate) struct Initialize; @@ -108,10 +110,21 @@ pub(crate) struct ServerCapabilities { #[serde(skip_serializing_if = "Option::is_none")] pub(crate) text_document_sync: Option, + /// The server provides go to declaration support. + #[serde(skip_serializing_if = "Option::is_none")] + pub(crate) declaration_provider: Option, + + /// The server provides goto definition support. + #[serde(skip_serializing_if = "Option::is_none")] + pub(crate) definition_provider: Option>, + /// The server provides code lens. #[serde(skip_serializing_if = "Option::is_none")] pub(crate) code_lens_provider: Option, + /// The server provides document formatting. + pub(crate) document_formatting_provider: bool, + /// The server handles and provides custom nargo messages. #[serde(skip_serializing_if = "Option::is_none")] pub(crate) nargo: Option, @@ -212,3 +225,5 @@ pub(crate) struct NargoProfileRunResult { } pub(crate) type CodeLensResult = Option>; +pub(crate) type GotoDefinitionResult = Option; +pub(crate) type GotoDeclarationResult = Option; diff --git a/tooling/nargo/Cargo.toml b/tooling/nargo/Cargo.toml index f8269459968..cd97980b9e0 100644 --- a/tooling/nargo/Cargo.toml +++ b/tooling/nargo/Cargo.toml @@ -24,4 +24,16 @@ iter-extended.workspace = true serde.workspace = true thiserror.workspace = true codespan-reporting.workspace = true -rayon = "1.8.0" \ No newline at end of file +tracing.workspace = true +rayon = "1.8.0" +jsonrpc.workspace = true + +[dev-dependencies] +# TODO: This dependency is used to generate unit tests for `get_all_paths_in_dir` +# TODO: once that method is moved to nargo_cli, we can move this dependency to nargo_cli +tempfile.workspace = true +jsonrpc-http-server = "18.0" +jsonrpc-core-client = "18.0" +jsonrpc-derive = "18.0" +jsonrpc-core = "18.0" +serial_test = "2.0" diff --git a/tooling/nargo/src/artifacts/contract.rs b/tooling/nargo/src/artifacts/contract.rs index f9e8d45b02e..04699126762 100644 --- a/tooling/nargo/src/artifacts/contract.rs +++ b/tooling/nargo/src/artifacts/contract.rs @@ -1,23 +1,16 @@ use acvm::acir::circuit::Circuit; use noirc_abi::{Abi, ContractEvent}; -use noirc_driver::ContractFunctionType; +use noirc_driver::{ContractFunction, ContractFunctionType}; use serde::{Deserialize, Serialize}; -/// `PreprocessedContract` represents a Noir contract which has been preprocessed by a particular backend proving system. -/// -/// This differs from a generic Noir contract artifact in that: -/// - The ACIR bytecode has had an optimization pass applied to tailor it for the backend. -/// - Proving and verification keys have been pregenerated based on this ACIR. #[derive(Serialize, Deserialize)] -pub struct PreprocessedContract { +pub struct ContractArtifact { /// Version of noir used to compile this contract pub noir_version: String, /// The name of the contract. pub name: String, - /// The identifier of the proving backend which this contract has been compiled for. - pub backend: String, /// Each of the contract's functions are compiled into a separate program stored in this `Vec`. - pub functions: Vec, + pub functions: Vec, /// All the events defined inside the contract scope. pub events: Vec, } @@ -27,7 +20,7 @@ pub struct PreprocessedContract { /// A contract function unlike a regular Noir program however can have additional properties. 
/// One of these being a function type. #[derive(Debug, Serialize, Deserialize)] -pub struct PreprocessedContractFunction { +pub struct ContractFunctionArtifact { pub name: String, pub function_type: ContractFunctionType, @@ -42,3 +35,15 @@ pub struct PreprocessedContractFunction { )] pub bytecode: Circuit, } + +impl From for ContractFunctionArtifact { + fn from(func: ContractFunction) -> Self { + ContractFunctionArtifact { + name: func.name, + function_type: func.function_type, + is_internal: func.is_internal, + abi: func.abi, + bytecode: func.bytecode, + } + } +} diff --git a/tooling/nargo/src/artifacts/debug.rs b/tooling/nargo/src/artifacts/debug.rs index 40acc7db8f8..633fc7a8ded 100644 --- a/tooling/nargo/src/artifacts/debug.rs +++ b/tooling/nargo/src/artifacts/debug.rs @@ -34,7 +34,7 @@ impl DebugArtifact { .collect(); for file_id in files_with_debug_symbols { - let file_source = file_manager.fetch_file(file_id).source(); + let file_source = file_manager.fetch_file(file_id); file_map.insert( file_id, @@ -59,6 +59,12 @@ impl DebugArtifact { self.line_index(location.file, location_start) } + /// Given a location, returns the index of the line it ends at + pub fn location_end_line_index(&self, location: Location) -> Result { + let location_end = location.span.end() as usize; + self.line_index(location.file, location_end) + } + /// Given a location, returns the line number it starts at pub fn location_line_number(&self, location: Location) -> Result { let location_start = location.span.start() as usize; @@ -82,12 +88,28 @@ impl DebugArtifact { let line_index = self.line_index(location.file, location_start)?; let line_span = self.line_range(location.file, line_index)?; + let line_length = line_span.end - (line_span.start + 1); let start_in_line = location_start - line_span.start; + + // The location might continue beyond the line, + // so we need a bounds check let end_in_line = location_end - line_span.start; + let end_in_line = std::cmp::min(end_in_line, line_length); Ok(Range { start: start_in_line, end: end_in_line }) } + /// Given a location, returns a Span relative to its last line's + /// position in the file. This is useful when processing a file's + /// contents on a per-line-basis. + pub fn location_in_end_line(&self, location: Location) -> Result, Error> { + let end_line_index = self.location_end_line_index(location)?; + let line_span = self.line_range(location.file, end_line_index)?; + let location_end = location.span.end() as usize; + let end_in_line = location_end - line_span.start; + Ok(Range { start: 0, end: end_in_line }) + } + /// Given a location, returns the last line index /// of its file pub fn last_line_index(&self, location: Location) -> Result { @@ -149,3 +171,70 @@ impl<'a> Files<'a> for DebugArtifact { }) } } + +#[cfg(test)] +mod tests { + use crate::artifacts::debug::DebugArtifact; + use acvm::acir::circuit::OpcodeLocation; + use fm::FileManager; + use noirc_errors::{debug_info::DebugInfo, Location, Span}; + use std::collections::BTreeMap; + use std::ops::Range; + use std::path::Path; + use std::path::PathBuf; + use tempfile::{tempdir, TempDir}; + + // Returns the absolute path to the file + fn create_dummy_file(dir: &TempDir, file_name: &Path) -> PathBuf { + let file_path = dir.path().join(file_name); + let _file = std::fs::File::create(&file_path).unwrap(); + file_path + } + + // Tests that location_in_line correctly handles + // locations spanning multiple lines. 
+ // For example, given the snippet: + // ``` + // permute( + // consts::x5_2_config(), + // state); + // ``` + // We want location_in_line to return the range + // containing `permute(` + #[test] + fn location_in_line_stops_at_end_of_line() { + let source_code = r##"pub fn main(mut state: [Field; 2]) -> [Field; 2] { + state = permute( + consts::x5_2_config(), + state); + + state +}"##; + + let dir = tempdir().unwrap(); + let file_name = Path::new("main.nr"); + create_dummy_file(&dir, file_name); + + let mut fm = FileManager::new(dir.path()); + let file_id = fm.add_file_with_source(file_name, source_code.to_string()).unwrap(); + + // Location of + // ``` + // permute( + // consts::x5_2_config(), + // state) + // ``` + let loc = Location::new(Span::inclusive(63, 116), file_id); + + // We don't care about opcodes in this context, + // we just use a dummy to construct debug_symbols + let mut opcode_locations = BTreeMap::>::new(); + opcode_locations.insert(OpcodeLocation::Acir(42), vec![loc]); + + let debug_symbols = vec![DebugInfo::new(opcode_locations)]; + let debug_artifact = DebugArtifact::new(debug_symbols, &fm); + + let location_in_line = debug_artifact.location_in_line(loc).expect("Expected a range"); + assert_eq!(location_in_line, Range { start: 12, end: 20 }); + } +} diff --git a/tooling/nargo/src/artifacts/program.rs b/tooling/nargo/src/artifacts/program.rs index 890b6c55f7d..96e63e6fe50 100644 --- a/tooling/nargo/src/artifacts/program.rs +++ b/tooling/nargo/src/artifacts/program.rs @@ -1,23 +1,18 @@ use acvm::acir::circuit::Circuit; use noirc_abi::Abi; +use noirc_driver::CompiledProgram; use serde::{Deserialize, Serialize}; -/// `PreprocessedProgram` represents a Noir program which has been preprocessed by a particular backend proving system. -/// -/// This differs from a generic Noir program artifact in that: -/// - The ACIR bytecode has had an optimization pass applied to tailor it for the backend. -/// - Proving and verification keys have been pregenerated based on this ACIR. #[derive(Serialize, Deserialize, Debug)] -pub struct PreprocessedProgram { +pub struct ProgramArtifact { pub noir_version: String, - /// Hash of the [`Program`][noirc_frontend::monomorphization::ast::Program] from which this [`PreprocessedProgram`] + /// Hash of the [`Program`][noirc_frontend::monomorphization::ast::Program] from which this [`ProgramArtifact`] /// was compiled. /// /// Used to short-circuit compilation in the case of the source code not changing since the last compilation. pub hash: u64, - pub backend: String, pub abi: Abi, #[serde( @@ -26,3 +21,14 @@ pub struct PreprocessedProgram { )] pub bytecode: Circuit, } + +impl From for ProgramArtifact { + fn from(program: CompiledProgram) -> Self { + ProgramArtifact { + hash: program.hash, + abi: program.abi, + noir_version: program.noir_version, + bytecode: program.circuit, + } + } +} diff --git a/tooling/nargo/src/constants.rs b/tooling/nargo/src/constants.rs index 5e448277694..0b50d61fe37 100644 --- a/tooling/nargo/src/constants.rs +++ b/tooling/nargo/src/constants.rs @@ -7,6 +7,8 @@ pub const PROOFS_DIR: &str = "proofs"; pub const SRC_DIR: &str = "src"; /// The directory to store circuits' serialized ACIR representations. pub const TARGET_DIR: &str = "target"; +/// The directory to store serialized ACIR representations of exported library functions. 
+pub const EXPORT_DIR: &str = "export"; // Files /// The file from which Nargo pulls prover inputs @@ -20,4 +22,4 @@ pub const PKG_FILE: &str = "Nargo.toml"; /// The extension for files containing circuit proofs. pub const PROOF_EXT: &str = "proof"; /// The extension for files containing proof witnesses. -pub const WITNESS_EXT: &str = "tr"; +pub const WITNESS_EXT: &str = "gz"; diff --git a/tooling/nargo/src/errors.rs b/tooling/nargo/src/errors.rs index bca8ca24767..c743768bee2 100644 --- a/tooling/nargo/src/errors.rs +++ b/tooling/nargo/src/errors.rs @@ -47,12 +47,6 @@ pub enum NargoError { ForeignCallError(#[from] ForeignCallError), } -impl From for NargoError { - fn from(_: acvm::compiler::CompileError) -> Self { - NargoError::CompilationError - } -} - impl NargoError { /// Extracts the user defined failure message from the ExecutionError /// If one exists. @@ -69,7 +63,6 @@ impl NargoError { ExecutionError::AssertionFailed(message, _) => Some(message), ExecutionError::SolvingError(error) => match error { OpcodeResolutionError::IndexOutOfBounds { .. } - | OpcodeResolutionError::UnsupportedBlackBoxFunc(_) | OpcodeResolutionError::OpcodeNotSolvable(_) | OpcodeResolutionError::UnsatisfiedConstrain { .. } => None, OpcodeResolutionError::BrilligFunctionFailed { message, .. } => Some(message), diff --git a/tooling/nargo/src/lib.rs b/tooling/nargo/src/lib.rs index ef014fb436b..62ff4325a23 100644 --- a/tooling/nargo/src/lib.rs +++ b/tooling/nargo/src/lib.rs @@ -16,10 +16,10 @@ pub mod workspace; use std::collections::BTreeMap; -use fm::{FileManager, FileReader}; +use fm::FileManager; use noirc_driver::{add_dep, prepare_crate, prepare_dependency}; use noirc_frontend::{ - graph::{CrateGraph, CrateId, CrateName}, + graph::{CrateId, CrateName}, hir::Context, }; use package::{Dependency, Package}; @@ -42,11 +42,64 @@ pub fn prepare_dependencies( } } -pub fn prepare_package(package: &Package, file_reader: Box) -> (Context, CrateId) { - // TODO: FileManager continues to leak into various crates - let fm = FileManager::new(&package.root_dir, file_reader); - let graph = CrateGraph::default(); - let mut context = Context::new(fm, graph); +pub fn insert_all_files_for_workspace_into_file_manager( + workspace: &workspace::Workspace, + file_manager: &mut FileManager, +) { + for package in workspace.clone().into_iter() { + insert_all_files_for_package_into_file_manager(package, file_manager); + } +} +// We will pre-populate the file manager with all the files in the package +// This is so that we can avoid having to read from disk when we are compiling +// +// This does not require parsing because we are interested in the files under the src directory +// it may turn out that we do not need to include some Noir files that we add to the file +// manager +fn insert_all_files_for_package_into_file_manager( + package: &Package, + file_manager: &mut FileManager, +) { + // Start off at the entry path and read all files in the parent directory. 
+ let entry_path_parent = package + .entry_path + .parent() + .unwrap_or_else(|| panic!("The entry path is expected to be a single file within a directory and so should have a parent {:?}", package.entry_path)) + .clone(); + + // Get all files in the package and add them to the file manager + let paths = + get_all_paths_in_dir(entry_path_parent).expect("could not get all paths in the package"); + for path in paths { + let source = std::fs::read_to_string(path.as_path()) + .unwrap_or_else(|_| panic!("could not read file {:?} into string", path)); + file_manager.add_file_with_source(path.as_path(), source); + } + + insert_all_files_for_packages_dependencies_into_file_manager(package, file_manager); +} + +// Inserts all files for the dependencies of the package into the file manager +// too +fn insert_all_files_for_packages_dependencies_into_file_manager( + package: &Package, + file_manager: &mut FileManager, +) { + for (_, dep) in package.dependencies.iter() { + match dep { + Dependency::Local { package } | Dependency::Remote { package } => { + insert_all_files_for_package_into_file_manager(package, file_manager); + insert_all_files_for_packages_dependencies_into_file_manager(package, file_manager); + } + } + } +} + +pub fn prepare_package<'file_manager>( + file_manager: &'file_manager FileManager, + package: &Package, +) -> (Context<'file_manager>, CrateId) { + let mut context = Context::from_ref_file_manager(file_manager); let crate_id = prepare_crate(&mut context, &package.entry_path); @@ -54,3 +107,72 @@ pub fn prepare_package(package: &Package, file_reader: Box) -> (Cont (context, crate_id) } + +// Get all paths in the directory and subdirectories. +// +// Panics: If the path is not a path to a directory. +// +// TODO: Along with prepare_package, this function is an abstraction leak +// TODO: given that this crate should not know about the file manager. +// TODO: We can clean this up in a future refactor +fn get_all_paths_in_dir(dir: &std::path::Path) -> std::io::Result> { + assert!(dir.is_dir(), "directory {dir:?} is not a path to a directory"); + + let mut paths = Vec::new(); + + if dir.is_dir() { + for entry in std::fs::read_dir(dir)? 
{ + let entry = entry?; + let path = entry.path(); + if path.is_dir() { + let mut sub_paths = get_all_paths_in_dir(&path)?; + paths.append(&mut sub_paths); + } else { + paths.push(path); + } + } + } + + Ok(paths) +} + +#[cfg(test)] +mod tests { + use crate::get_all_paths_in_dir; + use std::{ + fs::{self, File}, + path::Path, + }; + use tempfile::tempdir; + + fn create_test_dir_structure(temp_dir: &Path) -> std::io::Result<()> { + fs::create_dir(temp_dir.join("sub_dir1"))?; + File::create(temp_dir.join("sub_dir1/file1.txt"))?; + fs::create_dir(temp_dir.join("sub_dir2"))?; + File::create(temp_dir.join("sub_dir2/file2.txt"))?; + File::create(temp_dir.join("file3.txt"))?; + Ok(()) + } + + #[test] + fn test_get_all_paths_in_dir() { + let temp_dir = tempdir().expect("could not create a temporary directory"); + create_test_dir_structure(temp_dir.path()) + .expect("could not create test directory structure"); + + let paths = get_all_paths_in_dir(temp_dir.path()) + .expect("could not get all paths in the test directory"); + + // This should be the paths to all of the files in the directory and the subdirectory + let expected_paths = vec![ + temp_dir.path().join("file3.txt"), + temp_dir.path().join("sub_dir1/file1.txt"), + temp_dir.path().join("sub_dir2/file2.txt"), + ]; + + assert_eq!(paths.len(), expected_paths.len()); + for path in expected_paths { + assert!(paths.contains(&path)); + } + } +} diff --git a/tooling/nargo/src/ops/compile.rs b/tooling/nargo/src/ops/compile.rs index d4164eaa865..bd395d03f67 100644 --- a/tooling/nargo/src/ops/compile.rs +++ b/tooling/nargo/src/ops/compile.rs @@ -1,4 +1,4 @@ -use acvm::{acir::circuit::Opcode, Language}; +use acvm::ExpressionWidth; use fm::FileManager; use noirc_driver::{CompilationResult, CompileOptions, CompiledContract, CompiledProgram}; @@ -14,35 +14,32 @@ use rayon::prelude::*; /// /// This function will return an error if there are any compilations errors reported. pub fn compile_workspace( + file_manager: &FileManager, workspace: &Workspace, binary_packages: &[Package], contract_packages: &[Package], - np_language: Language, - is_opcode_supported: impl Fn(&Opcode) -> bool + std::marker::Sync, + expression_width: ExpressionWidth, compile_options: &CompileOptions, ) -> Result<(Vec, Vec), CompileError> { // Compile all of the packages in parallel. - let program_results: Vec<(FileManager, CompilationResult)> = binary_packages + let program_results: Vec> = binary_packages .par_iter() .map(|package| { - compile_program(workspace, package, compile_options, np_language, &is_opcode_supported) + compile_program(file_manager, workspace, package, compile_options, expression_width) }) .collect(); - let contract_results: Vec<(FileManager, CompilationResult)> = - contract_packages - .par_iter() - .map(|package| { - compile_contract(package, compile_options, np_language, &is_opcode_supported) - }) - .collect(); + let contract_results: Vec> = contract_packages + .par_iter() + .map(|package| compile_contract(file_manager, package, compile_options, expression_width)) + .collect(); // Report any warnings/errors which were encountered during compilation. 
let compiled_programs: Vec = program_results .into_iter() - .map(|(file_manager, compilation_result)| { + .map(|compilation_result| { report_errors( compilation_result, - &file_manager, + file_manager, compile_options.deny_warnings, compile_options.silence_warnings, ) @@ -50,10 +47,10 @@ pub fn compile_workspace( .collect::>()?; let compiled_contracts: Vec = contract_results .into_iter() - .map(|(file_manager, compilation_result)| { + .map(|compilation_result| { report_errors( compilation_result, - &file_manager, + file_manager, compile_options.deny_warnings, compile_options.silence_warnings, ) @@ -64,14 +61,13 @@ pub fn compile_workspace( } pub fn compile_program( + file_manager: &FileManager, workspace: &Workspace, package: &Package, compile_options: &CompileOptions, - np_language: Language, - is_opcode_supported: &impl Fn(&Opcode) -> bool, -) -> (FileManager, CompilationResult) { - let (mut context, crate_id) = - prepare_package(package, Box::new(|path| std::fs::read_to_string(path))); + expression_width: ExpressionWidth, +) -> CompilationResult { + let (mut context, crate_id) = prepare_package(file_manager, package); let program_artifact_path = workspace.package_build_path(package); let mut debug_artifact_path = program_artifact_path.clone(); @@ -81,51 +77,34 @@ pub fn compile_program( match noirc_driver::compile_main(&mut context, crate_id, compile_options, None, true) { Ok(program_and_warnings) => program_and_warnings, Err(errors) => { - return (context.file_manager, Err(errors)); + return Err(errors); } }; - // TODO: we say that pedersen hashing is supported by all backends for now - let is_opcode_supported_pedersen_hash = |opcode: &Opcode| -> bool { - if let Opcode::BlackBoxFuncCall( - acvm::acir::circuit::opcodes::BlackBoxFuncCall::PedersenHash { .. }, - ) = opcode - { - true - } else { - is_opcode_supported(opcode) - } - }; - // Apply backend specific optimizations. 
- let optimized_program = - crate::ops::optimize_program(program, np_language, &is_opcode_supported_pedersen_hash) - .expect("Backend does not support an opcode that is in the IR"); + let optimized_program = crate::ops::optimize_program(program, expression_width); - (context.file_manager, Ok((optimized_program, warnings))) + Ok((optimized_program, warnings)) } fn compile_contract( + file_manager: &FileManager, package: &Package, compile_options: &CompileOptions, - np_language: Language, - is_opcode_supported: &impl Fn(&Opcode) -> bool, -) -> (FileManager, CompilationResult) { - let (mut context, crate_id) = - prepare_package(package, Box::new(|path| std::fs::read_to_string(path))); + expression_width: ExpressionWidth, +) -> CompilationResult { + let (mut context, crate_id) = prepare_package(file_manager, package); let (contract, warnings) = match noirc_driver::compile_contract(&mut context, crate_id, compile_options) { Ok(contracts_and_warnings) => contracts_and_warnings, Err(errors) => { - return (context.file_manager, Err(errors)); + return Err(errors); } }; - let optimized_contract = - crate::ops::optimize_contract(contract, np_language, &is_opcode_supported) - .expect("Backend does not support an opcode that is in the IR"); + let optimized_contract = crate::ops::optimize_contract(contract, expression_width); - (context.file_manager, Ok((optimized_contract, warnings))) + Ok((optimized_contract, warnings)) } pub(crate) fn report_errors( diff --git a/tooling/nargo/src/ops/execute.rs b/tooling/nargo/src/ops/execute.rs index cf14934d61e..4fc7f7b599f 100644 --- a/tooling/nargo/src/ops/execute.rs +++ b/tooling/nargo/src/ops/execute.rs @@ -7,16 +7,15 @@ use crate::NargoError; use super::foreign_calls::ForeignCallExecutor; -pub fn execute_circuit( - blackbox_solver: &B, +#[tracing::instrument(level = "trace", skip_all)] +pub fn execute_circuit( circuit: &Circuit, initial_witness: WitnessMap, - show_output: bool, + blackbox_solver: &B, + foreign_call_executor: &mut F, ) -> Result { let mut acvm = ACVM::new(blackbox_solver, &circuit.opcodes, initial_witness); - let mut foreign_call_executor = ForeignCallExecutor::default(); - loop { let solver_status = acvm.solve(); @@ -50,13 +49,11 @@ pub fn execute_circuit( })); } ACVMStatus::RequiresForeignCall(foreign_call) => { - let foreign_call_result = - foreign_call_executor.execute(&foreign_call, show_output)?; + let foreign_call_result = foreign_call_executor.execute(&foreign_call)?; acvm.resolve_pending_foreign_call(foreign_call_result); } } } - let solved_witness = acvm.finalize(); - Ok(solved_witness) + Ok(acvm.finalize()) } diff --git a/tooling/nargo/src/ops/foreign_calls.rs b/tooling/nargo/src/ops/foreign_calls.rs index 4d20a0bd4f0..cbe40c92b4e 100644 --- a/tooling/nargo/src/ops/foreign_calls.rs +++ b/tooling/nargo/src/ops/foreign_calls.rs @@ -2,17 +2,20 @@ use acvm::{ acir::brillig::{ForeignCallParam, ForeignCallResult, Value}, pwg::ForeignCallWaitInfo, }; -use iter_extended::vecmap; +use jsonrpc::{arg as build_json_rpc_arg, minreq_http::Builder, Client}; use noirc_printable_type::{decode_string_value, ForeignCallError, PrintableValueDisplay}; -use crate::NargoError; +pub trait ForeignCallExecutor { + fn execute( + &mut self, + foreign_call: &ForeignCallWaitInfo, + ) -> Result; +} /// This enumeration represents the Brillig foreign calls that are natively supported by nargo. 
/// After resolution of a foreign call, nargo will restart execution of the ACVM pub(crate) enum ForeignCall { - Println, - Sequence, - ReverseSequence, + Print, CreateMock, SetMockParams, SetMockReturns, @@ -29,9 +32,7 @@ impl std::fmt::Display for ForeignCall { impl ForeignCall { pub(crate) fn name(&self) -> &'static str { match self { - ForeignCall::Println => "println", - ForeignCall::Sequence => "get_number_sequence", - ForeignCall::ReverseSequence => "get_reverse_number_sequence", + ForeignCall::Print => "print", ForeignCall::CreateMock => "create_mock", ForeignCall::SetMockParams => "set_mock_params", ForeignCall::SetMockReturns => "set_mock_returns", @@ -42,9 +43,7 @@ impl ForeignCall { pub(crate) fn lookup(op_name: &str) -> Option { match op_name { - "println" => Some(ForeignCall::Println), - "get_number_sequence" => Some(ForeignCall::Sequence), - "get_reverse_number_sequence" => Some(ForeignCall::ReverseSequence), + "print" => Some(ForeignCall::Print), "create_mock" => Some(ForeignCall::CreateMock), "set_mock_params" => Some(ForeignCall::SetMockParams), "set_mock_returns" => Some(ForeignCall::SetMockReturns), @@ -89,51 +88,75 @@ impl MockedCall { } #[derive(Debug, Default)] -pub struct ForeignCallExecutor { +pub struct DefaultForeignCallExecutor { /// Mocks have unique ids used to identify them in Noir, allowing to update or remove them. last_mock_id: usize, /// The registered mocks mocked_responses: Vec, + /// Whether to print [`ForeignCall::Print`] output. + show_output: bool, + /// JSON RPC client to resolve foreign calls + external_resolver: Option, +} + +impl DefaultForeignCallExecutor { + pub fn new(show_output: bool, resolver_url: Option<&str>) -> Self { + let oracle_resolver = resolver_url.map(|resolver_url| { + let transport_builder = + Builder::new().url(resolver_url).expect("Invalid oracle resolver URL"); + Client::with_transport(transport_builder.build()) + }); + DefaultForeignCallExecutor { + show_output, + external_resolver: oracle_resolver, + ..DefaultForeignCallExecutor::default() + } + } } -impl ForeignCallExecutor { - pub fn execute( +impl DefaultForeignCallExecutor { + fn extract_mock_id( + foreign_call_inputs: &[ForeignCallParam], + ) -> Result<(usize, &[ForeignCallParam]), ForeignCallError> { + let (id, params) = + foreign_call_inputs.split_first().ok_or(ForeignCallError::MissingForeignCallInputs)?; + Ok((id.unwrap_value().to_usize(), params)) + } + + fn find_mock_by_id(&mut self, id: usize) -> Option<&mut MockedCall> { + self.mocked_responses.iter_mut().find(|response| response.id == id) + } + + fn parse_string(param: &ForeignCallParam) -> String { + let fields: Vec<_> = param.values().into_iter().map(|value| value.to_field()).collect(); + decode_string_value(&fields) + } + + fn execute_print(foreign_call_inputs: &[ForeignCallParam]) -> Result<(), ForeignCallError> { + let skip_newline = foreign_call_inputs[0].unwrap_value().is_zero(); + let display_values: PrintableValueDisplay = foreign_call_inputs + .split_first() + .ok_or(ForeignCallError::MissingForeignCallInputs)? 
+ .1 + .try_into()?; + print!("{display_values}{}", if skip_newline { "" } else { "\n" }); + Ok(()) + } +} + +impl ForeignCallExecutor for DefaultForeignCallExecutor { + fn execute( &mut self, foreign_call: &ForeignCallWaitInfo, - show_output: bool, - ) -> Result { + ) -> Result { let foreign_call_name = foreign_call.function.as_str(); match ForeignCall::lookup(foreign_call_name) { - Some(ForeignCall::Println) => { - if show_output { - Self::execute_println(&foreign_call.inputs)?; + Some(ForeignCall::Print) => { + if self.show_output { + Self::execute_print(&foreign_call.inputs)?; } Ok(ForeignCallResult { values: vec![] }) } - Some(ForeignCall::Sequence) => { - let sequence_length: u128 = - foreign_call.inputs[0].unwrap_value().to_field().to_u128(); - let sequence = vecmap(0..sequence_length, Value::from); - - Ok(ForeignCallResult { - values: vec![ - ForeignCallParam::Single(sequence_length.into()), - ForeignCallParam::Array(sequence), - ], - }) - } - Some(ForeignCall::ReverseSequence) => { - let sequence_length: u128 = - foreign_call.inputs[0].unwrap_value().to_field().to_u128(); - let sequence = vecmap((0..sequence_length).rev(), Value::from); - - Ok(ForeignCallResult { - values: vec![ - ForeignCallParam::Single(sequence_length.into()), - ForeignCallParam::Array(sequence), - ], - }) - } Some(ForeignCall::CreateMock) => { let mock_oracle_name = Self::parse_string(&foreign_call.inputs[0]); assert!(ForeignCall::lookup(&mock_oracle_name).is_none()); @@ -179,50 +202,136 @@ impl ForeignCallExecutor { Ok(ForeignCallResult { values: vec![] }) } None => { - let response_position = self + let mock_response_position = self .mocked_responses .iter() - .position(|response| response.matches(foreign_call_name, &foreign_call.inputs)) - .unwrap_or_else(|| panic!("Unknown foreign call {}", foreign_call_name)); + .position(|response| response.matches(foreign_call_name, &foreign_call.inputs)); - let mock = self - .mocked_responses - .get_mut(response_position) - .expect("Invalid position of mocked response"); - let result = mock.result.values.clone(); - - if let Some(times_left) = &mut mock.times_left { - *times_left -= 1; - if *times_left == 0 { - self.mocked_responses.remove(response_position); + match (mock_response_position, &self.external_resolver) { + (Some(response_position), _) => { + let mock = self + .mocked_responses + .get_mut(response_position) + .expect("Invalid position of mocked response"); + let result = mock.result.values.clone(); + + if let Some(times_left) = &mut mock.times_left { + *times_left -= 1; + if *times_left == 0 { + self.mocked_responses.remove(response_position); + } + } + + Ok(ForeignCallResult { values: result }) } - } + (None, Some(external_resolver)) => { + let encoded_params: Vec<_> = + foreign_call.inputs.iter().map(build_json_rpc_arg).collect(); + + let req = + external_resolver.build_request(foreign_call_name, &encoded_params); + + let response = external_resolver.send_request(req)?; + + let parsed_response: ForeignCallResult = response.result()?; - Ok(ForeignCallResult { values: result }) + Ok(parsed_response) + } + (None, None) => panic!("Unknown foreign call {}", foreign_call_name), + } } } } +} - fn extract_mock_id( - foreign_call_inputs: &[ForeignCallParam], - ) -> Result<(usize, &[ForeignCallParam]), ForeignCallError> { - let (id, params) = - foreign_call_inputs.split_first().ok_or(ForeignCallError::MissingForeignCallInputs)?; - Ok((id.unwrap_value().to_usize(), params)) +#[cfg(test)] +mod tests { + use acvm::{ + acir::brillig::ForeignCallParam, + 
brillig_vm::brillig::{ForeignCallResult, Value}, + pwg::ForeignCallWaitInfo, + FieldElement, + }; + use jsonrpc_core::Result as RpcResult; + use jsonrpc_derive::rpc; + use jsonrpc_http_server::{Server, ServerBuilder}; + use serial_test::serial; + + use crate::ops::{DefaultForeignCallExecutor, ForeignCallExecutor}; + + #[allow(unreachable_pub)] + #[rpc] + pub trait OracleResolver { + #[rpc(name = "echo")] + fn echo(&self, param: ForeignCallParam) -> RpcResult; + + #[rpc(name = "sum")] + fn sum(&self, array: ForeignCallParam) -> RpcResult; } - fn find_mock_by_id(&mut self, id: usize) -> Option<&mut MockedCall> { - self.mocked_responses.iter_mut().find(|response| response.id == id) + struct OracleResolverImpl; + + impl OracleResolver for OracleResolverImpl { + fn echo(&self, param: ForeignCallParam) -> RpcResult { + Ok(vec![param].into()) + } + + fn sum(&self, array: ForeignCallParam) -> RpcResult { + let mut res: FieldElement = 0_usize.into(); + + for value in array.values() { + res += value.to_field(); + } + + Ok(Value::from(res).into()) + } } - fn parse_string(param: &ForeignCallParam) -> String { - let fields: Vec<_> = param.values().into_iter().map(|value| value.to_field()).collect(); - decode_string_value(&fields) + fn build_oracle_server() -> (Server, String) { + let mut io = jsonrpc_core::IoHandler::new(); + io.extend_with(OracleResolverImpl.to_delegate()); + + let server = ServerBuilder::new(io) + .start_http(&"127.0.0.1:5555".parse().expect("Invalid address")) + .expect("Could not start server"); + + let url = format!("http://{}", server.address()); + (server, url) } - fn execute_println(foreign_call_inputs: &[ForeignCallParam]) -> Result<(), NargoError> { - let display_values: PrintableValueDisplay = foreign_call_inputs.try_into()?; - println!("{display_values}"); - Ok(()) + #[serial] + #[test] + fn test_oracle_resolver_echo() { + let (server, url) = build_oracle_server(); + + let mut executor = DefaultForeignCallExecutor::new(false, Some(&url)); + + let foreign_call = ForeignCallWaitInfo { + function: "echo".to_string(), + inputs: vec![ForeignCallParam::Single(1_u128.into())], + }; + + let result = executor.execute(&foreign_call); + assert_eq!(result.unwrap(), ForeignCallResult { values: foreign_call.inputs }); + + server.close(); + } + + #[serial] + #[test] + fn test_oracle_resolver_sum() { + let (server, url) = build_oracle_server(); + + let mut executor = DefaultForeignCallExecutor::new(false, Some(&url)); + + let foreign_call = ForeignCallWaitInfo { + function: "sum".to_string(), + inputs: vec![ForeignCallParam::Array(vec![1_usize.into(), 2_usize.into()])], + }; + + let result = executor.execute(&foreign_call); + assert_eq!(result.unwrap(), Value::from(3_usize).into()); + + server.close(); } } diff --git a/tooling/nargo/src/ops/mod.rs b/tooling/nargo/src/ops/mod.rs index 9a523c59841..34487ed9770 100644 --- a/tooling/nargo/src/ops/mod.rs +++ b/tooling/nargo/src/ops/mod.rs @@ -1,6 +1,6 @@ pub use self::compile::{compile_program, compile_workspace}; pub use self::execute::execute_circuit; -pub use self::foreign_calls::ForeignCallExecutor; +pub use self::foreign_calls::{DefaultForeignCallExecutor, ForeignCallExecutor}; pub use self::optimize::{optimize_contract, optimize_program}; pub use self::test::{run_test, TestStatus}; diff --git a/tooling/nargo/src/ops/optimize.rs b/tooling/nargo/src/ops/optimize.rs index 54e2432aa40..d3a36dd65ac 100644 --- a/tooling/nargo/src/ops/optimize.rs +++ b/tooling/nargo/src/ops/optimize.rs @@ -1,34 +1,30 @@ -use acvm::{acir::circuit::Opcode, 
Language}; -use iter_extended::try_vecmap; +use acvm::ExpressionWidth; +use iter_extended::vecmap; use noirc_driver::{CompiledContract, CompiledProgram}; -use crate::NargoError; - pub fn optimize_program( mut program: CompiledProgram, - np_language: Language, - is_opcode_supported: &impl Fn(&Opcode) -> bool, -) -> Result { + expression_width: ExpressionWidth, +) -> CompiledProgram { let (optimized_circuit, location_map) = - acvm::compiler::compile(program.circuit, np_language, is_opcode_supported)?; + acvm::compiler::compile(program.circuit, expression_width); program.circuit = optimized_circuit; program.debug.update_acir(location_map); - Ok(program) + program } pub fn optimize_contract( contract: CompiledContract, - np_language: Language, - is_opcode_supported: &impl Fn(&Opcode) -> bool, -) -> Result { - let functions = try_vecmap(contract.functions, |mut func| { + expression_width: ExpressionWidth, +) -> CompiledContract { + let functions = vecmap(contract.functions, |mut func| { let (optimized_bytecode, location_map) = - acvm::compiler::compile(func.bytecode, np_language, is_opcode_supported)?; + acvm::compiler::compile(func.bytecode, expression_width); func.bytecode = optimized_bytecode; func.debug.update_acir(location_map); - Ok::<_, NargoError>(func) - })?; + func + }); - Ok(CompiledContract { functions, ..contract }) + CompiledContract { functions, ..contract } } diff --git a/tooling/nargo/src/ops/test.rs b/tooling/nargo/src/ops/test.rs index d2ef2659e4d..f38dcad0c2f 100644 --- a/tooling/nargo/src/ops/test.rs +++ b/tooling/nargo/src/ops/test.rs @@ -6,7 +6,7 @@ use noirc_frontend::hir::{def_map::TestFunction, Context}; use crate::{errors::try_to_diagnose_runtime_error, NargoError}; -use super::execute_circuit; +use super::{execute_circuit, DefaultForeignCallExecutor}; pub enum TestStatus { Pass, @@ -19,6 +19,7 @@ pub fn run_test( context: &Context, test_function: TestFunction, show_output: bool, + foreign_call_resolver_url: Option<&str>, config: &CompileOptions, ) -> TestStatus { let program = compile_no_check(context, config, test_function.get_id(), None, false); @@ -26,8 +27,12 @@ pub fn run_test( Ok(program) => { // Run the backend to ensure the PWG evaluates functions like std::hash::pedersen, // otherwise constraints involving these expressions will not error. 
- let circuit_execution = - execute_circuit(blackbox_solver, &program.circuit, WitnessMap::new(), show_output); + let circuit_execution = execute_circuit( + &program.circuit, + WitnessMap::new(), + blackbox_solver, + &mut DefaultForeignCallExecutor::new(show_output, foreign_call_resolver_url), + ); test_status_program_compile_pass(test_function, program.debug, circuit_execution) } Err(err) => test_status_program_compile_fail(err, test_function), diff --git a/tooling/nargo/src/package.rs b/tooling/nargo/src/package.rs index 94c7c5b9c98..ecbf3585210 100644 --- a/tooling/nargo/src/package.rs +++ b/tooling/nargo/src/package.rs @@ -43,6 +43,7 @@ impl Dependency { #[derive(Clone)] pub struct Package { + pub version: Option, // A semver string which specifies the compiler version required to compile this package pub compiler_required_version: Option, pub root_dir: PathBuf, diff --git a/tooling/nargo/src/workspace.rs b/tooling/nargo/src/workspace.rs index 65f9ab7e0d9..5696a758531 100644 --- a/tooling/nargo/src/workspace.rs +++ b/tooling/nargo/src/workspace.rs @@ -10,7 +10,7 @@ use std::{ }; use crate::{ - constants::{CONTRACT_DIR, PROOFS_DIR, TARGET_DIR}, + constants::{CONTRACT_DIR, EXPORT_DIR, PROOFS_DIR, TARGET_DIR}, package::Package, }; @@ -40,6 +40,10 @@ impl Workspace { pub fn target_directory_path(&self) -> PathBuf { self.root_dir.join(TARGET_DIR) } + + pub fn export_directory_path(&self) -> PathBuf { + self.root_dir.join(EXPORT_DIR) + } } pub enum IntoIter<'a, T> { diff --git a/tooling/nargo_cli/Cargo.toml b/tooling/nargo_cli/Cargo.toml index 1a08514dc5f..2652adaf327 100644 --- a/tooling/nargo_cli/Cargo.toml +++ b/tooling/nargo_cli/Cargo.toml @@ -32,7 +32,7 @@ noirc_frontend.workspace = true noirc_abi.workspace = true noirc_errors.workspace = true acvm.workspace = true -barretenberg_blackbox_solver.workspace = true +bn254_blackbox_solver.workspace = true toml.workspace = true serde.workspace = true serde_json.workspace = true @@ -43,19 +43,24 @@ tower.workspace = true async-lsp = { workspace = true, features = ["client-monitor", "stdio", "tracing", "tokio"] } const_format.workspace = true hex.workspace = true +similar-asserts.workspace = true termcolor = "1.1.2" color-eyre = "0.6.2" tokio = { version = "1.0", features = ["io-std"] } +dap.workspace = true # Backends backend-interface = { path = "../backend_interface" } -bb_abstraction_leaks.workspace = true + +# Logs +tracing-subscriber.workspace = true +tracing-appender = "0.2.3" [target.'cfg(not(unix))'.dependencies] tokio-util = { version = "0.7.8", features = ["compat"] } [dev-dependencies] -tempfile = "3.6.0" +tempfile.workspace = true dirs.workspace = true assert_cmd = "2.0.8" assert_fs = "1.0.10" diff --git a/tooling/nargo_cli/build.rs b/tooling/nargo_cli/build.rs index 0d1acca6ee9..9a0492c99ad 100644 --- a/tooling/nargo_cli/build.rs +++ b/tooling/nargo_cli/build.rs @@ -14,9 +14,6 @@ fn check_rustc_version() { const GIT_COMMIT: &&str = &"GIT_COMMIT"; fn main() { - // Rebuild if the tests have changed - println!("cargo:rerun-if-changed=tests"); - check_rustc_version(); // Only use build_data if the environment variable isn't set @@ -33,11 +30,15 @@ fn main() { // Try to find the directory that Cargo sets when it is running; otherwise fallback to assuming the CWD // is the root of the repository and append the crate path - let manifest_dir = match std::env::var("CARGO_MANIFEST_DIR") { - Ok(dir) => PathBuf::from(dir), - Err(_) => std::env::current_dir().unwrap().join("crates").join("nargo_cli"), + let root_dir = match 
std::env::var("CARGO_MANIFEST_DIR") { + Ok(dir) => PathBuf::from(dir).parent().unwrap().parent().unwrap().to_path_buf(), + Err(_) => std::env::current_dir().unwrap(), }; - let test_dir = manifest_dir.join("tests"); + let test_dir = root_dir.join("test_programs"); + + // Rebuild if the tests have changed + println!("cargo:rerun-if-changed=tests"); + println!("cargo:rerun-if-changed={}", test_dir.as_os_str().to_str().unwrap()); generate_execution_success_tests(&mut test_file, &test_dir); generate_noir_test_success_tests(&mut test_file, &test_dir); @@ -201,7 +202,9 @@ fn compile_success_empty_{test_name}() {{ }} // `compile_success_empty` tests should be able to compile down to an empty circuit. - let json: serde_json::Value = serde_json::from_slice(&output.stdout).expect("JSON was not well-formatted"); + let json: serde_json::Value = serde_json::from_slice(&output.stdout).unwrap_or_else(|_| {{ + panic!("JSON was not well-formatted {{:?}}",output.stdout) + }}); let num_opcodes = &json["programs"][0]["acir_opcodes"]; assert_eq!(num_opcodes.as_u64().unwrap(), 0); }} diff --git a/tooling/nargo_cli/src/backends.rs b/tooling/nargo_cli/src/backends.rs index 8b1da2cd118..2b3e9d8861f 100644 --- a/tooling/nargo_cli/src/backends.rs +++ b/tooling/nargo_cli/src/backends.rs @@ -7,7 +7,7 @@ fn active_backend_file_path() -> PathBuf { backends_directory().join(".selected_backend") } -pub(crate) const ACVM_BACKEND_BARRETENBERG: &str = "acvm-backend-barretenberg"; +pub(crate) use backend_interface::ACVM_BACKEND_BARRETENBERG; pub(crate) fn clear_active_backend() { let active_backend_file = active_backend_file_path(); diff --git a/tooling/nargo_cli/src/cli/check_cmd.rs b/tooling/nargo_cli/src/cli/check_cmd.rs index 57b36b8932b..e2db492fe9c 100644 --- a/tooling/nargo_cli/src/cli/check_cmd.rs +++ b/tooling/nargo_cli/src/cli/check_cmd.rs @@ -2,12 +2,17 @@ use crate::backends::Backend; use crate::errors::CliError; use clap::Args; +use fm::FileManager; use iter_extended::btree_map; -use nargo::{errors::CompileError, package::Package, prepare_package}; +use nargo::{ + errors::CompileError, insert_all_files_for_workspace_into_file_manager, package::Package, + prepare_package, +}; use nargo_toml::{get_package_manifest, resolve_workspace_from_toml, PackageSelection}; use noirc_abi::{AbiParameter, AbiType, MAIN_RETURN_NAME}; use noirc_driver::{ - check_crate, compute_function_abi, CompileOptions, NOIR_ARTIFACT_VERSION_STRING, + check_crate, compute_function_abi, file_manager_with_stdlib, CompileOptions, + NOIR_ARTIFACT_VERSION_STRING, }; use noirc_frontend::{ graph::{CrateId, CrateName}, @@ -47,20 +52,27 @@ pub(crate) fn run( Some(NOIR_ARTIFACT_VERSION_STRING.to_string()), )?; + let mut workspace_file_manager = file_manager_with_stdlib(&workspace.root_dir); + insert_all_files_for_workspace_into_file_manager(&workspace, &mut workspace_file_manager); + for package in &workspace { - check_package(package, &args.compile_options)?; + check_package(&workspace_file_manager, package, &args.compile_options)?; println!("[{}] Constraint system successfully built!", package.name); } Ok(()) } -fn check_package(package: &Package, compile_options: &CompileOptions) -> Result<(), CompileError> { - let (mut context, crate_id) = - prepare_package(package, Box::new(|path| std::fs::read_to_string(path))); +fn check_package( + file_manager: &FileManager, + package: &Package, + compile_options: &CompileOptions, +) -> Result<(), CompileError> { + let (mut context, crate_id) = prepare_package(file_manager, package); check_crate_and_report_errors( 
&mut context, crate_id, compile_options.deny_warnings, + compile_options.disable_macros, compile_options.silence_warnings, )?; @@ -182,9 +194,10 @@ pub(crate) fn check_crate_and_report_errors( context: &mut Context, crate_id: CrateId, deny_warnings: bool, + disable_macros: bool, silence_warnings: bool, ) -> Result<(), CompileError> { - let result = check_crate(context, crate_id, deny_warnings); + let result = check_crate(context, crate_id, deny_warnings, disable_macros); super::compile_cmd::report_errors( result, &context.file_manager, diff --git a/tooling/nargo_cli/src/cli/codegen_verifier_cmd.rs b/tooling/nargo_cli/src/cli/codegen_verifier_cmd.rs index 59143526b24..1eb8153ce9b 100644 --- a/tooling/nargo_cli/src/cli/codegen_verifier_cmd.rs +++ b/tooling/nargo_cli/src/cli/codegen_verifier_cmd.rs @@ -6,14 +6,14 @@ use super::{ use crate::backends::Backend; use crate::errors::CliError; -use acvm::acir::circuit::Opcode; -use acvm::Language; -use bb_abstraction_leaks::ACVM_BACKEND_BARRETENBERG; +use acvm::ExpressionWidth; use clap::Args; +use fm::FileManager; +use nargo::insert_all_files_for_workspace_into_file_manager; use nargo::package::Package; use nargo::workspace::Workspace; use nargo_toml::{get_package_manifest, resolve_workspace_from_toml, PackageSelection}; -use noirc_driver::{CompileOptions, NOIR_ARTIFACT_VERSION_STRING}; +use noirc_driver::{file_manager_with_stdlib, CompileOptions, NOIR_ARTIFACT_VERSION_STRING}; use noirc_frontend::graph::CrateName; /// Generates a Solidity verifier smart contract for the program @@ -46,15 +46,18 @@ pub(crate) fn run( Some(NOIR_ARTIFACT_VERSION_STRING.to_string()), )?; - let (np_language, opcode_support) = backend.get_backend_info()?; + let mut workspace_file_manager = file_manager_with_stdlib(&workspace.root_dir); + insert_all_files_for_workspace_into_file_manager(&workspace, &mut workspace_file_manager); + + let expression_width = backend.get_backend_info()?; for package in &workspace { let smart_contract_string = smart_contract_for_package( + &workspace_file_manager, &workspace, backend, package, &args.compile_options, - np_language, - &|opcode| opcode_support.is_opcode_supported(opcode), + expression_width, )?; let contract_dir = workspace.contracts_directory_path(package); @@ -69,27 +72,15 @@ pub(crate) fn run( } fn smart_contract_for_package( + file_manager: &FileManager, workspace: &Workspace, backend: &Backend, package: &Package, compile_options: &CompileOptions, - np_language: Language, - is_opcode_supported: &impl Fn(&Opcode) -> bool, + expression_width: ExpressionWidth, ) -> Result { - let program = compile_bin_package( - workspace, - package, - compile_options, - np_language, - &is_opcode_supported, - )?; - - let mut smart_contract_string = backend.eth_contract(&program.circuit)?; - - if backend.name() == ACVM_BACKEND_BARRETENBERG { - smart_contract_string = - bb_abstraction_leaks::complete_barretenberg_verifier_contract(smart_contract_string); - } + let program = + compile_bin_package(file_manager, workspace, package, compile_options, expression_width)?; - Ok(smart_contract_string) + Ok(backend.eth_contract(&program.circuit)?) 
} diff --git a/tooling/nargo_cli/src/cli/compile_cmd.rs b/tooling/nargo_cli/src/cli/compile_cmd.rs index 9ffbc26828e..31105ebe68f 100644 --- a/tooling/nargo_cli/src/cli/compile_cmd.rs +++ b/tooling/nargo_cli/src/cli/compile_cmd.rs @@ -1,20 +1,18 @@ use std::path::Path; -use acvm::acir::circuit::opcodes::BlackBoxFuncCall; -use acvm::acir::circuit::Opcode; -use acvm::Language; -use backend_interface::BackendOpcodeSupport; +use acvm::ExpressionWidth; use fm::FileManager; use iter_extended::vecmap; -use nargo::artifacts::contract::PreprocessedContract; -use nargo::artifacts::contract::PreprocessedContractFunction; +use nargo::artifacts::contract::{ContractArtifact, ContractFunctionArtifact}; use nargo::artifacts::debug::DebugArtifact; -use nargo::artifacts::program::PreprocessedProgram; +use nargo::artifacts::program::ProgramArtifact; use nargo::errors::CompileError; +use nargo::insert_all_files_for_workspace_into_file_manager; use nargo::package::Package; use nargo::prepare_package; use nargo::workspace::Workspace; use nargo_toml::{get_package_manifest, resolve_workspace_from_toml, PackageSelection}; +use noirc_driver::file_manager_with_stdlib; use noirc_driver::NOIR_ARTIFACT_VERSION_STRING; use noirc_driver::{CompilationResult, CompileOptions, CompiledContract, CompiledProgram}; use noirc_frontend::graph::CrateName; @@ -24,6 +22,7 @@ use clap::Args; use crate::backends::Backend; use crate::errors::CliError; +use super::fs::program::only_acir; use super::fs::program::{ read_debug_artifact_from_file, read_program_from_file, save_contract_to_file, save_debug_artifact_to_file, save_program_to_file, @@ -31,16 +30,9 @@ use super::fs::program::{ use super::NargoConfig; use rayon::prelude::*; -// TODO(#1388): pull this from backend. -const BACKEND_IDENTIFIER: &str = "acvm-backend-barretenberg"; - /// Compile the program and its secret execution trace into ACIR format #[derive(Debug, Clone, Args)] pub(crate) struct CompileCommand { - /// Include Proving and Verification keys in the build artifacts. - #[arg(long)] - include_keys: bool, - /// The name of the package to compile #[clap(long, conflicts_with = "workspace")] package: Option, @@ -70,19 +62,22 @@ pub(crate) fn run( )?; let circuit_dir = workspace.target_directory_path(); + let mut workspace_file_manager = file_manager_with_stdlib(&workspace.root_dir); + insert_all_files_for_workspace_into_file_manager(&workspace, &mut workspace_file_manager); + let (binary_packages, contract_packages): (Vec<_>, Vec<_>) = workspace .into_iter() .filter(|package| !package.is_library()) .cloned() .partition(|package| package.is_binary()); - let (np_language, opcode_support) = backend.get_backend_info()?; + let expression_width = backend.get_backend_info_or_default(); let (_, compiled_contracts) = compile_workspace( + &workspace_file_manager, &workspace, &binary_packages, &contract_packages, - np_language, - &opcode_support, + expression_width, &args.compile_options, )?; @@ -95,37 +90,32 @@ pub(crate) fn run( } pub(super) fn compile_workspace( + file_manager: &FileManager, workspace: &Workspace, binary_packages: &[Package], contract_packages: &[Package], - np_language: Language, - opcode_support: &BackendOpcodeSupport, + expression_width: ExpressionWidth, compile_options: &CompileOptions, ) -> Result<(Vec, Vec), CliError> { - let is_opcode_supported = |opcode: &_| opcode_support.is_opcode_supported(opcode); - // Compile all of the packages in parallel. 
- let program_results: Vec<(FileManager, CompilationResult)> = binary_packages + let program_results: Vec> = binary_packages .par_iter() .map(|package| { - compile_program(workspace, package, compile_options, np_language, &is_opcode_supported) + compile_program(file_manager, workspace, package, compile_options, expression_width) }) .collect(); - let contract_results: Vec<(FileManager, CompilationResult)> = - contract_packages - .par_iter() - .map(|package| { - compile_contract(package, compile_options, np_language, &is_opcode_supported) - }) - .collect(); + let contract_results: Vec> = contract_packages + .par_iter() + .map(|package| compile_contract(file_manager, package, compile_options, expression_width)) + .collect(); // Report any warnings/errors which were encountered during compilation. let compiled_programs: Vec = program_results .into_iter() - .map(|(file_manager, compilation_result)| { + .map(|compilation_result| { report_errors( compilation_result, - &file_manager, + file_manager, compile_options.deny_warnings, compile_options.silence_warnings, ) @@ -133,10 +123,10 @@ pub(super) fn compile_workspace( .collect::>()?; let compiled_contracts: Vec = contract_results .into_iter() - .map(|(file_manager, compilation_result)| { + .map(|compilation_result| { report_errors( compilation_result, - &file_manager, + file_manager, compile_options.deny_warnings, compile_options.silence_warnings, ) @@ -147,22 +137,22 @@ pub(super) fn compile_workspace( } pub(crate) fn compile_bin_package( + file_manager: &FileManager, workspace: &Workspace, package: &Package, compile_options: &CompileOptions, - np_language: Language, - is_opcode_supported: &impl Fn(&Opcode) -> bool, + expression_width: ExpressionWidth, ) -> Result { if package.is_library() { return Err(CompileError::LibraryCrate(package.name.clone()).into()); } - let (file_manager, compilation_result) = - compile_program(workspace, package, compile_options, np_language, &is_opcode_supported); + let compilation_result = + compile_program(file_manager, workspace, package, compile_options, expression_width); let program = report_errors( compilation_result, - &file_manager, + file_manager, compile_options.deny_warnings, compile_options.silence_warnings, )?; @@ -171,27 +161,26 @@ pub(crate) fn compile_bin_package( } fn compile_program( + file_manager: &FileManager, workspace: &Workspace, package: &Package, compile_options: &CompileOptions, - np_language: Language, - is_opcode_supported: &impl Fn(&Opcode) -> bool, -) -> (FileManager, CompilationResult) { - let (mut context, crate_id) = - prepare_package(package, Box::new(|path| std::fs::read_to_string(path))); + expression_width: ExpressionWidth, +) -> CompilationResult { + let (mut context, crate_id) = prepare_package(file_manager, package); let program_artifact_path = workspace.package_build_path(package); let mut debug_artifact_path = program_artifact_path.clone(); debug_artifact_path.set_file_name(format!("debug_{}.json", package.name)); - let cached_program = if let (Ok(preprocessed_program), Ok(mut debug_artifact)) = ( + let cached_program = if let (Ok(program_artifact), Ok(mut debug_artifact)) = ( read_program_from_file(program_artifact_path), read_debug_artifact_from_file(debug_artifact_path), ) { Some(CompiledProgram { - hash: preprocessed_program.hash, - circuit: preprocessed_program.bytecode, - abi: preprocessed_program.abi, - noir_version: preprocessed_program.noir_version, + hash: program_artifact.hash, + circuit: program_artifact.bytecode, + abi: program_artifact.abi, + noir_version: 
program_artifact.noir_version, debug: debug_artifact.debug_symbols.remove(0), file_map: debug_artifact.file_map, warnings: debug_artifact.warnings, @@ -211,62 +200,50 @@ fn compile_program( ) { Ok(program_and_warnings) => program_and_warnings, Err(errors) => { - return (context.file_manager, Err(errors)); - } - }; - - // TODO: we say that pedersen hashing is supported by all backends for now - let is_opcode_supported_pedersen_hash = |opcode: &Opcode| -> bool { - if let Opcode::BlackBoxFuncCall(BlackBoxFuncCall::PedersenHash { .. }) = opcode { - true - } else { - is_opcode_supported(opcode) + return Err(errors); } }; // Apply backend specific optimizations. - let optimized_program = - nargo::ops::optimize_program(program, np_language, &is_opcode_supported_pedersen_hash) - .expect("Backend does not support an opcode that is in the IR"); - - save_program(optimized_program.clone(), package, &workspace.target_directory_path()); + let optimized_program = nargo::ops::optimize_program(program, expression_width); + let only_acir = compile_options.only_acir; + save_program(optimized_program.clone(), package, &workspace.target_directory_path(), only_acir); - (context.file_manager, Ok((optimized_program, warnings))) + Ok((optimized_program, warnings)) } fn compile_contract( + file_manager: &FileManager, package: &Package, compile_options: &CompileOptions, - np_language: Language, - is_opcode_supported: &impl Fn(&Opcode) -> bool, -) -> (FileManager, CompilationResult) { - let (mut context, crate_id) = - prepare_package(package, Box::new(|path| std::fs::read_to_string(path))); + expression_width: ExpressionWidth, +) -> CompilationResult { + let (mut context, crate_id) = prepare_package(file_manager, package); let (contract, warnings) = match noirc_driver::compile_contract(&mut context, crate_id, compile_options) { Ok(contracts_and_warnings) => contracts_and_warnings, Err(errors) => { - return (context.file_manager, Err(errors)); + return Err(errors); } }; - let optimized_contract = - nargo::ops::optimize_contract(contract, np_language, &is_opcode_supported) - .expect("Backend does not support an opcode that is in the IR"); + let optimized_contract = nargo::ops::optimize_contract(contract, expression_width); - (context.file_manager, Ok((optimized_contract, warnings))) + Ok((optimized_contract, warnings)) } -fn save_program(program: CompiledProgram, package: &Package, circuit_dir: &Path) { - let preprocessed_program = PreprocessedProgram { - hash: program.hash, - backend: String::from(BACKEND_IDENTIFIER), - abi: program.abi, - noir_version: program.noir_version, - bytecode: program.circuit, - }; - - save_program_to_file(&preprocessed_program, &package.name, circuit_dir); +pub(super) fn save_program( + program: CompiledProgram, + package: &Package, + circuit_dir: &Path, + only_acir_opt: bool, +) { + let program_artifact = ProgramArtifact::from(program.clone()); + if only_acir_opt { + only_acir(&program_artifact, circuit_dir); + } else { + save_program_to_file(&program_artifact, &package.name, circuit_dir); + } let debug_artifact = DebugArtifact { debug_symbols: vec![program.debug], @@ -280,7 +257,7 @@ fn save_program(program: CompiledProgram, package: &Package, circuit_dir: &Path) fn save_contract(contract: CompiledContract, package: &Package, circuit_dir: &Path) { // TODO(#1389): I wonder if it is incorrect for nargo-core to know anything about contracts. 
// As can be seen here, It seems like a leaky abstraction where ContractFunctions (essentially CompiledPrograms) - // are compiled via nargo-core and then the PreprocessedContract is constructed here. + // are compiled via nargo-core and then the ContractArtifact is constructed here. // This is due to EACH function needing it's own CRS, PKey, and VKey from the backend. let debug_artifact = DebugArtifact { debug_symbols: contract.functions.iter().map(|function| function.debug.clone()).collect(), @@ -288,7 +265,7 @@ fn save_contract(contract: CompiledContract, package: &Package, circuit_dir: &Pa warnings: contract.warnings, }; - let preprocessed_functions = vecmap(contract.functions, |func| PreprocessedContractFunction { + let functions = vecmap(contract.functions, |func| ContractFunctionArtifact { name: func.name, function_type: func.function_type, is_internal: func.is_internal, @@ -296,23 +273,22 @@ fn save_contract(contract: CompiledContract, package: &Package, circuit_dir: &Pa bytecode: func.bytecode, }); - let preprocessed_contract = PreprocessedContract { + let contract_artifact = ContractArtifact { noir_version: contract.noir_version, name: contract.name, - backend: String::from(BACKEND_IDENTIFIER), - functions: preprocessed_functions, + functions, events: contract.events, }; save_contract_to_file( - &preprocessed_contract, - &format!("{}-{}", package.name, preprocessed_contract.name), + &contract_artifact, + &format!("{}-{}", package.name, contract_artifact.name), circuit_dir, ); save_debug_artifact_to_file( &debug_artifact, - &format!("{}-{}", package.name, preprocessed_contract.name), + &format!("{}-{}", package.name, contract_artifact.name), circuit_dir, ); } diff --git a/tooling/nargo_cli/src/cli/dap_cmd.rs b/tooling/nargo_cli/src/cli/dap_cmd.rs new file mode 100644 index 00000000000..29e696ea608 --- /dev/null +++ b/tooling/nargo_cli/src/cli/dap_cmd.rs @@ -0,0 +1,180 @@ +use acvm::acir::native_types::WitnessMap; +use backend_interface::Backend; +use clap::Args; +use nargo::constants::PROVER_INPUT_FILE; +use nargo::insert_all_files_for_workspace_into_file_manager; +use nargo::workspace::Workspace; +use nargo_toml::{get_package_manifest, resolve_workspace_from_toml, PackageSelection}; +use noirc_abi::input_parser::Format; +use noirc_driver::{ + file_manager_with_stdlib, CompileOptions, CompiledProgram, NOIR_ARTIFACT_VERSION_STRING, +}; +use noirc_frontend::graph::CrateName; + +use std::io::{BufReader, BufWriter, Read, Write}; +use std::path::Path; + +use dap::errors::ServerError; +use dap::requests::Command; +use dap::responses::ResponseBody; +use dap::server::Server; +use dap::types::Capabilities; +use serde_json::Value; + +use super::compile_cmd::compile_bin_package; +use super::fs::inputs::read_inputs_from_file; +use crate::errors::CliError; + +use super::NargoConfig; + +#[derive(Debug, Clone, Args)] +pub(crate) struct DapCommand; + +struct LoadError(&'static str); + +fn find_workspace(project_folder: &str, package: Option<&str>) -> Option { + let Ok(toml_path) = get_package_manifest(Path::new(project_folder)) else { + eprintln!("ERROR: Failed to get package manifest"); + return None; + }; + let package = package.and_then(|p| serde_json::from_str::(p).ok()); + let selection = package.map_or(PackageSelection::DefaultOrAll, PackageSelection::Selected); + match resolve_workspace_from_toml( + &toml_path, + selection, + Some(NOIR_ARTIFACT_VERSION_STRING.to_string()), + ) { + Ok(workspace) => Some(workspace), + Err(err) => { + eprintln!("ERROR: Failed to resolve workspace: {err}"); + 
None + } + } +} + +fn load_and_compile_project( + backend: &Backend, + project_folder: &str, + package: Option<&str>, + prover_name: &str, +) -> Result<(CompiledProgram, WitnessMap), LoadError> { + let workspace = + find_workspace(project_folder, package).ok_or(LoadError("Cannot open workspace"))?; + + let expression_width = + backend.get_backend_info().map_err(|_| LoadError("Failed to get backend info"))?; + let package = workspace + .into_iter() + .find(|p| p.is_binary()) + .ok_or(LoadError("No matching binary packages found in workspace"))?; + + let mut workspace_file_manager = file_manager_with_stdlib(std::path::Path::new("")); + insert_all_files_for_workspace_into_file_manager(&workspace, &mut workspace_file_manager); + + let compiled_program = compile_bin_package( + &workspace_file_manager, + &workspace, + package, + &CompileOptions::default(), + expression_width, + ) + .map_err(|_| LoadError("Failed to compile project"))?; + + let (inputs_map, _) = + read_inputs_from_file(&package.root_dir, prover_name, Format::Toml, &compiled_program.abi) + .map_err(|_| LoadError("Failed to read program inputs"))?; + let initial_witness = compiled_program + .abi + .encode(&inputs_map, None) + .map_err(|_| LoadError("Failed to encode inputs"))?; + + Ok((compiled_program, initial_witness)) +} + +fn loop_uninitialized_dap( + mut server: Server, + backend: &Backend, +) -> Result<(), ServerError> { + loop { + let req = match server.poll_request()? { + Some(req) => req, + None => break, + }; + + match req.command { + Command::Initialize(_) => { + let rsp = req.success(ResponseBody::Initialize(Capabilities { + supports_disassemble_request: Some(true), + supports_instruction_breakpoints: Some(true), + supports_stepping_granularity: Some(true), + ..Default::default() + })); + server.respond(rsp)?; + } + + Command::Launch(ref arguments) => { + let Some(Value::Object(ref additional_data)) = arguments.additional_data else { + server.respond(req.error("Missing launch arguments"))?; + continue; + }; + let Some(Value::String(ref project_folder)) = additional_data.get("projectFolder") else { + server.respond(req.error("Missing project folder argument"))?; + continue; + }; + + let project_folder = project_folder.as_str(); + let package = additional_data.get("package").and_then(|v| v.as_str()); + let prover_name = additional_data + .get("proverName") + .and_then(|v| v.as_str()) + .unwrap_or(PROVER_INPUT_FILE); + + eprintln!("Project folder: {}", project_folder); + eprintln!("Package: {}", package.unwrap_or("(default)")); + eprintln!("Prover name: {}", prover_name); + + match load_and_compile_project(backend, project_folder, package, prover_name) { + Ok((compiled_program, initial_witness)) => { + server.respond(req.ack()?)?; + + let blackbox_solver = bn254_blackbox_solver::Bn254BlackBoxSolver::new(); + + noir_debugger::run_dap_loop( + server, + &blackbox_solver, + compiled_program, + initial_witness, + )?; + break; + } + Err(LoadError(message)) => { + server.respond(req.error(message))?; + } + } + } + + Command::Disconnect(_) => { + server.respond(req.ack()?)?; + break; + } + + _ => { + let command = req.command; + eprintln!("ERROR: unhandled command: {command:?}"); + } + } + } + Ok(()) +} + +pub(crate) fn run( + backend: &Backend, + _args: DapCommand, + _config: NargoConfig, +) -> Result<(), CliError> { + let output = BufWriter::new(std::io::stdout()); + let input = BufReader::new(std::io::stdin()); + let server = Server::new(input, output); + + loop_uninitialized_dap(server, backend).map_err(CliError::DapError) +} 
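
For orientation, a minimal sketch (not part of the patch) of how a caller drives the execution API reworked above: `execute_circuit` now takes a black-box solver plus a `ForeignCallExecutor` instead of a bare `show_output` flag, and `DefaultForeignCallExecutor::new(show_output, resolver_url)` optionally forwards foreign calls with no registered mock to an external JSON-RPC oracle resolver. The import paths, helper name, and the `NargoError` return type below are assumptions read off the diff hunks, not verified against the crates.

    use acvm::acir::native_types::WitnessMap;
    use bn254_blackbox_solver::Bn254BlackBoxSolver;
    use nargo::ops::{execute_circuit, DefaultForeignCallExecutor};
    use nargo::NargoError;
    use noirc_driver::CompiledProgram;

    /// Hypothetical helper mirroring what `execute_cmd.rs` does after this patch:
    /// the caller supplies both the black-box solver and a foreign-call executor.
    fn solve_program(
        program: &CompiledProgram,
        initial_witness: WitnessMap,
        oracle_resolver_url: Option<&str>,
    ) -> Result<WitnessMap, NargoError> {
        let blackbox_solver = Bn254BlackBoxSolver::new();
        // `true` enables `print` foreign-call output; the optional URL points at a
        // JSON-RPC oracle resolver used when a foreign call has no mocked response.
        let mut foreign_calls = DefaultForeignCallExecutor::new(true, oracle_resolver_url);
        execute_circuit(&program.circuit, initial_witness, &blackbox_solver, &mut foreign_calls)
    }

The JSON-RPC path is exercised by the `echo`/`sum` resolver tests added in `foreign_calls.rs` above, which stand up a local `jsonrpc_http_server` and point the executor at it via `resolver_url`.
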
diff --git a/tooling/nargo_cli/src/cli/debug_cmd.rs b/tooling/nargo_cli/src/cli/debug_cmd.rs index 1d344058312..f78a683aa8f 100644 --- a/tooling/nargo_cli/src/cli/debug_cmd.rs +++ b/tooling/nargo_cli/src/cli/debug_cmd.rs @@ -1,13 +1,19 @@ +use std::path::PathBuf; + use acvm::acir::native_types::WitnessMap; +use bn254_blackbox_solver::Bn254BlackBoxSolver; use clap::Args; use nargo::artifacts::debug::DebugArtifact; use nargo::constants::PROVER_INPUT_FILE; +use nargo::insert_all_files_for_workspace_into_file_manager; use nargo::package::Package; use nargo_toml::{get_package_manifest, resolve_workspace_from_toml, PackageSelection}; use noirc_abi::input_parser::{Format, InputValue}; use noirc_abi::InputMap; -use noirc_driver::{CompileOptions, CompiledProgram, NOIR_ARTIFACT_VERSION_STRING}; +use noirc_driver::{ + file_manager_with_stdlib, CompileOptions, CompiledProgram, NOIR_ARTIFACT_VERSION_STRING, +}; use noirc_frontend::graph::CrateName; use super::compile_cmd::compile_bin_package; @@ -47,7 +53,10 @@ pub(crate) fn run( Some(NOIR_ARTIFACT_VERSION_STRING.to_string()), )?; let target_dir = &workspace.target_directory_path(); - let (np_language, opcode_support) = backend.get_backend_info()?; + let expression_width = backend.get_backend_info()?; + + let mut workspace_file_manager = file_manager_with_stdlib(std::path::Path::new("")); + insert_all_files_for_workspace_into_file_manager(&workspace, &mut workspace_file_manager); let Some(package) = workspace.into_iter().find(|p| p.is_binary()) else { println!( @@ -56,32 +65,50 @@ pub(crate) fn run( return Ok(()); }; - let compiled_program = - compile_bin_package(&workspace, package, &args.compile_options, np_language, &|opcode| { - opcode_support.is_opcode_supported(opcode) - })?; + let compiled_program = compile_bin_package( + &workspace_file_manager, + &workspace, + package, + &args.compile_options, + expression_width, + )?; + + run_async(package, compiled_program, &args.prover_name, &args.witness_name, target_dir) +} + +fn run_async( + package: &Package, + program: CompiledProgram, + prover_name: &str, + witness_name: &Option, + target_dir: &PathBuf, +) -> Result<(), CliError> { + use tokio::runtime::Builder; + let runtime = Builder::new_current_thread().enable_all().build().unwrap(); - println!("[{}] Starting debugger", package.name); - let (return_value, solved_witness) = - debug_program_and_decode(compiled_program, package, &args.prover_name)?; + runtime.block_on(async { + println!("[{}] Starting debugger", package.name); + let (return_value, solved_witness) = + debug_program_and_decode(program, package, prover_name)?; - if let Some(solved_witness) = solved_witness { - println!("[{}] Circuit witness successfully solved", package.name); + if let Some(solved_witness) = solved_witness { + println!("[{}] Circuit witness successfully solved", package.name); - if let Some(return_value) = return_value { - println!("[{}] Circuit output: {return_value:?}", package.name); - } + if let Some(return_value) = return_value { + println!("[{}] Circuit output: {return_value:?}", package.name); + } - if let Some(witness_name) = &args.witness_name { - let witness_path = save_witness_to_dir(solved_witness, witness_name, target_dir)?; + if let Some(witness_name) = witness_name { + let witness_path = save_witness_to_dir(solved_witness, witness_name, target_dir)?; - println!("[{}] Witness saved to {}", package.name, witness_path.display()); + println!("[{}] Witness saved to {}", package.name, witness_path.display()); + } + } else { + println!("Debugger execution 
halted."); } - } else { - println!("Debugger execution halted."); - } - Ok(()) + Ok(()) + }) } fn debug_program_and_decode( @@ -108,8 +135,7 @@ pub(crate) fn debug_program( compiled_program: &CompiledProgram, inputs_map: &InputMap, ) -> Result, CliError> { - #[allow(deprecated)] - let blackbox_solver = barretenberg_blackbox_solver::BarretenbergSolver::new(); + let blackbox_solver = Bn254BlackBoxSolver::new(); let initial_witness = compiled_program.abi.encode(inputs_map, None)?; diff --git a/tooling/nargo_cli/src/cli/execute_cmd.rs b/tooling/nargo_cli/src/cli/execute_cmd.rs index 3a0da2b1134..7f695c42fa4 100644 --- a/tooling/nargo_cli/src/cli/execute_cmd.rs +++ b/tooling/nargo_cli/src/cli/execute_cmd.rs @@ -1,14 +1,19 @@ use acvm::acir::native_types::WitnessMap; +use bn254_blackbox_solver::Bn254BlackBoxSolver; use clap::Args; use nargo::artifacts::debug::DebugArtifact; use nargo::constants::PROVER_INPUT_FILE; use nargo::errors::try_to_diagnose_runtime_error; +use nargo::insert_all_files_for_workspace_into_file_manager; +use nargo::ops::DefaultForeignCallExecutor; use nargo::package::Package; use nargo_toml::{get_package_manifest, resolve_workspace_from_toml, PackageSelection}; use noirc_abi::input_parser::{Format, InputValue}; use noirc_abi::InputMap; -use noirc_driver::{CompileOptions, CompiledProgram, NOIR_ARTIFACT_VERSION_STRING}; +use noirc_driver::{ + file_manager_with_stdlib, CompileOptions, CompiledProgram, NOIR_ARTIFACT_VERSION_STRING, +}; use noirc_frontend::graph::CrateName; use super::compile_cmd::compile_bin_package; @@ -37,6 +42,10 @@ pub(crate) struct ExecuteCommand { #[clap(flatten)] compile_options: CompileOptions, + + /// JSON RPC url to solve oracle calls + #[clap(long)] + oracle_resolver: Option, } pub(crate) fn run( @@ -55,18 +64,25 @@ pub(crate) fn run( )?; let target_dir = &workspace.target_directory_path(); - let (np_language, opcode_support) = backend.get_backend_info()?; + let mut workspace_file_manager = file_manager_with_stdlib(&workspace.root_dir); + insert_all_files_for_workspace_into_file_manager(&workspace, &mut workspace_file_manager); + + let expression_width = backend.get_backend_info_or_default(); for package in &workspace { let compiled_program = compile_bin_package( + &workspace_file_manager, &workspace, package, &args.compile_options, - np_language, - &|opcode| opcode_support.is_opcode_supported(opcode), + expression_width, )?; - let (return_value, solved_witness) = - execute_program_and_decode(compiled_program, package, &args.prover_name)?; + let (return_value, solved_witness) = execute_program_and_decode( + compiled_program, + package, + &args.prover_name, + args.oracle_resolver.as_deref(), + )?; println!("[{}] Circuit witness successfully solved", package.name); if let Some(return_value) = return_value { @@ -85,11 +101,12 @@ fn execute_program_and_decode( program: CompiledProgram, package: &Package, prover_name: &str, + foreign_call_resolver_url: Option<&str>, ) -> Result<(Option, WitnessMap), CliError> { // Parse the initial witness values from Prover.toml let (inputs_map, _) = read_inputs_from_file(&package.root_dir, prover_name, Format::Toml, &program.abi)?; - let solved_witness = execute_program(&program, &inputs_map)?; + let solved_witness = execute_program(&program, &inputs_map, foreign_call_resolver_url)?; let public_abi = program.abi.public_abi(); let (_, return_value) = public_abi.decode(&solved_witness)?; @@ -99,17 +116,17 @@ fn execute_program_and_decode( pub(crate) fn execute_program( compiled_program: &CompiledProgram, inputs_map: 
&InputMap, + foreign_call_resolver_url: Option<&str>, ) -> Result { - #[allow(deprecated)] - let blackbox_solver = barretenberg_blackbox_solver::BarretenbergSolver::new(); + let blackbox_solver = Bn254BlackBoxSolver::new(); let initial_witness = compiled_program.abi.encode(inputs_map, None)?; let solved_witness_err = nargo::ops::execute_circuit( - &blackbox_solver, &compiled_program.circuit, initial_witness, - true, + &blackbox_solver, + &mut DefaultForeignCallExecutor::new(true, foreign_call_resolver_url), ); match solved_witness_err { Ok(solved_witness) => Ok(solved_witness), diff --git a/tooling/nargo_cli/src/cli/export_cmd.rs b/tooling/nargo_cli/src/cli/export_cmd.rs new file mode 100644 index 00000000000..ac3e93e09b7 --- /dev/null +++ b/tooling/nargo_cli/src/cli/export_cmd.rs @@ -0,0 +1,120 @@ +use nargo::errors::CompileError; +use noirc_errors::FileDiagnostic; +use rayon::prelude::*; + +use fm::FileManager; +use iter_extended::try_vecmap; +use nargo::insert_all_files_for_workspace_into_file_manager; +use nargo::package::Package; +use nargo::prepare_package; +use nargo::workspace::Workspace; +use nargo_toml::{get_package_manifest, resolve_workspace_from_toml, PackageSelection}; +use noirc_driver::{ + compile_no_check, file_manager_with_stdlib, CompileOptions, CompiledProgram, + NOIR_ARTIFACT_VERSION_STRING, +}; + +use noirc_frontend::graph::CrateName; + +use clap::Args; + +use crate::backends::Backend; +use crate::errors::CliError; + +use super::check_cmd::check_crate_and_report_errors; + +use super::compile_cmd::report_errors; +use super::fs::program::save_program_to_file; +use super::NargoConfig; + +/// Exports functions marked with #[export] attribute +#[derive(Debug, Clone, Args)] +pub(crate) struct ExportCommand { + /// The name of the package to compile + #[clap(long, conflicts_with = "workspace")] + package: Option, + + /// Compile all packages in the workspace + #[clap(long, conflicts_with = "package")] + workspace: bool, + + #[clap(flatten)] + compile_options: CompileOptions, +} + +pub(crate) fn run( + _backend: &Backend, + args: ExportCommand, + config: NargoConfig, +) -> Result<(), CliError> { + let toml_path = get_package_manifest(&config.program_dir)?; + let default_selection = + if args.workspace { PackageSelection::All } else { PackageSelection::DefaultOrAll }; + let selection = args.package.map_or(default_selection, PackageSelection::Selected); + + let workspace = resolve_workspace_from_toml( + &toml_path, + selection, + Some(NOIR_ARTIFACT_VERSION_STRING.to_owned()), + )?; + + let mut workspace_file_manager = file_manager_with_stdlib(&workspace.root_dir); + insert_all_files_for_workspace_into_file_manager(&workspace, &mut workspace_file_manager); + + let library_packages: Vec<_> = + workspace.into_iter().filter(|package| package.is_library()).collect(); + + library_packages + .par_iter() + .map(|package| { + compile_exported_functions( + &workspace_file_manager, + &workspace, + package, + &args.compile_options, + ) + }) + .collect() +} + +fn compile_exported_functions( + file_manager: &FileManager, + workspace: &Workspace, + package: &Package, + compile_options: &CompileOptions, +) -> Result<(), CliError> { + let (mut context, crate_id) = prepare_package(file_manager, package); + check_crate_and_report_errors( + &mut context, + crate_id, + compile_options.deny_warnings, + compile_options.disable_macros, + compile_options.silence_warnings, + )?; + + let exported_functions = context.get_all_exported_functions_in_crate(&crate_id); + + let exported_programs = try_vecmap( + 
exported_functions, + |(function_name, function_id)| -> Result<(String, CompiledProgram), CompileError> { + // TODO: We should to refactor how to deal with compilation errors to avoid this. + let program = compile_no_check(&context, compile_options, function_id, None, false) + .map_err(|error| vec![FileDiagnostic::from(error)]); + + let program = report_errors( + program.map(|program| (program, Vec::new())), + file_manager, + compile_options.deny_warnings, + compile_options.silence_warnings, + )?; + + Ok((function_name, program)) + }, + )?; + + let export_dir = workspace.export_directory_path(); + for (function_name, program) in exported_programs { + save_program_to_file(&program.into(), &function_name.parse().unwrap(), &export_dir); + } + Ok(()) +} diff --git a/tooling/nargo_cli/src/cli/fmt_cmd.rs b/tooling/nargo_cli/src/cli/fmt_cmd.rs index eaa66fff02b..78678559547 100644 --- a/tooling/nargo_cli/src/cli/fmt_cmd.rs +++ b/tooling/nargo_cli/src/cli/fmt_cmd.rs @@ -1,9 +1,9 @@ use std::{fs::DirEntry, path::Path}; use clap::Args; -use fm::FileManager; +use nargo::insert_all_files_for_workspace_into_file_manager; use nargo_toml::{get_package_manifest, resolve_workspace_from_toml, PackageSelection}; -use noirc_driver::NOIR_ARTIFACT_VERSION_STRING; +use noirc_driver::{file_manager_with_stdlib, NOIR_ARTIFACT_VERSION_STRING}; use noirc_errors::CustomDiagnostic; use noirc_frontend::{hir::def_map::parse_file, parser::ParserError}; @@ -11,10 +11,17 @@ use crate::errors::CliError; use super::NargoConfig; +/// Format the Noir files in a workspace #[derive(Debug, Clone, Args)] -pub(crate) struct FormatCommand {} +pub(crate) struct FormatCommand { + /// Run noirfmt in check mode + #[arg(long)] + check: bool, +} + +pub(crate) fn run(args: FormatCommand, config: NargoConfig) -> Result<(), CliError> { + let check_mode = args.check; -pub(crate) fn run(_args: FormatCommand, config: NargoConfig) -> Result<(), CliError> { let toml_path = get_package_manifest(&config.program_dir)?; let workspace = resolve_workspace_from_toml( &toml_path, @@ -22,16 +29,18 @@ pub(crate) fn run(_args: FormatCommand, config: NargoConfig) -> Result<(), CliEr Some(NOIR_ARTIFACT_VERSION_STRING.to_string()), )?; + let mut workspace_file_manager = file_manager_with_stdlib(&workspace.root_dir); + insert_all_files_for_workspace_into_file_manager(&workspace, &mut workspace_file_manager); + let config = nargo_fmt::Config::read(&config.program_dir) .map_err(|err| CliError::Generic(err.to_string()))?; - for package in &workspace { - let mut file_manager = - FileManager::new(&package.root_dir, Box::new(|path| std::fs::read_to_string(path))); + let mut check_exit_code_one = false; + for package in &workspace { visit_noir_files(&package.root_dir.join("src"), &mut |entry| { - let file_id = file_manager.add_file(&entry.path()).expect("file exists"); - let (parsed_module, errors) = parse_file(&file_manager, file_id); + let file_id = workspace_file_manager.name_to_id(entry.path().to_path_buf()).expect("The file should exist since we added all files in the package into the file manager"); + let (parsed_module, errors) = parse_file(&workspace_file_manager, file_id); let is_all_warnings = errors.iter().all(ParserError::is_warning); if !is_all_warnings { @@ -45,23 +54,47 @@ pub(crate) fn run(_args: FormatCommand, config: NargoConfig) -> Result<(), CliEr let _ = super::compile_cmd::report_errors::<()>( Err(errors), - &file_manager, + &workspace_file_manager, false, false, ); return Ok(()); } - let source = nargo_fmt::format( - 
file_manager.fetch_file(file_id).source(), - parsed_module, - &config, - ); + let original = workspace_file_manager.fetch_file(file_id); + let formatted = nargo_fmt::format(original, parsed_module, &config); + + if check_mode { + let diff = similar_asserts::SimpleDiff::from_str( + original, + &formatted, + "original", + "formatted", + ) + .to_string(); - std::fs::write(entry.path(), source) + if !diff.lines().next().is_some_and(|line| line.contains("Invisible differences")) { + if !check_exit_code_one { + check_exit_code_one = true; + } + + println!("{diff}"); + } + + Ok(()) + } else { + std::fs::write(entry.path(), formatted) + } }) .map_err(|error| CliError::Generic(error.to_string()))?; } + + if check_exit_code_one { + std::process::exit(1); + } else if check_mode { + println!("No formatting changes were detected"); + } + Ok(()) } diff --git a/tooling/nargo_cli/src/cli/fs/inputs.rs b/tooling/nargo_cli/src/cli/fs/inputs.rs index f3f0baf10f4..023195010ac 100644 --- a/tooling/nargo_cli/src/cli/fs/inputs.rs +++ b/tooling/nargo_cli/src/cli/fs/inputs.rs @@ -73,7 +73,7 @@ mod tests { use nargo::constants::VERIFIER_INPUT_FILE; use noirc_abi::{ input_parser::{Format, InputValue}, - Abi, AbiParameter, AbiType, AbiVisibility, + Abi, AbiParameter, AbiReturnType, AbiType, AbiVisibility, }; use tempfile::TempDir; @@ -98,7 +98,10 @@ mod tests { visibility: AbiVisibility::Private, }, ], - return_type: Some(AbiType::Field), + return_type: Some(AbiReturnType { + abi_type: AbiType::Field, + visibility: AbiVisibility::Public, + }), // Input serialization is only dependent on types, not position in witness map. // Neither of these should be relevant so we leave them empty. diff --git a/tooling/nargo_cli/src/cli/fs/program.rs b/tooling/nargo_cli/src/cli/fs/program.rs index e82f2d55264..1d2f012736e 100644 --- a/tooling/nargo_cli/src/cli/fs/program.rs +++ b/tooling/nargo_cli/src/cli/fs/program.rs @@ -1,7 +1,8 @@ use std::path::{Path, PathBuf}; +use acvm::acir::circuit::Circuit; use nargo::artifacts::{ - contract::PreprocessedContract, debug::DebugArtifact, program::PreprocessedProgram, + contract::ContractArtifact, debug::DebugArtifact, program::ProgramArtifact, }; use noirc_frontend::graph::CrateName; @@ -10,16 +11,29 @@ use crate::errors::FilesystemError; use super::{create_named_dir, write_to_file}; pub(crate) fn save_program_to_file>( - compiled_program: &PreprocessedProgram, + program_artifact: &ProgramArtifact, crate_name: &CrateName, circuit_dir: P, ) -> PathBuf { let circuit_name: String = crate_name.into(); - save_build_artifact_to_file(compiled_program, &circuit_name, circuit_dir) + save_build_artifact_to_file(program_artifact, &circuit_name, circuit_dir) +} + +/// Writes the bytecode as acir.gz +pub(crate) fn only_acir>( + program_artifact: &ProgramArtifact, + circuit_dir: P, +) -> PathBuf { + create_named_dir(circuit_dir.as_ref(), "target"); + let circuit_path = circuit_dir.as_ref().join("acir").with_extension("gz"); + let bytes = Circuit::serialize_circuit(&program_artifact.bytecode); + write_to_file(&bytes, &circuit_path); + + circuit_path } pub(crate) fn save_contract_to_file>( - compiled_contract: &PreprocessedContract, + compiled_contract: &ContractArtifact, circuit_name: &str, circuit_dir: P, ) -> PathBuf { @@ -50,7 +64,7 @@ fn save_build_artifact_to_file, T: ?Sized + serde::Serialize>( pub(crate) fn read_program_from_file>( circuit_path: P, -) -> Result { +) -> Result { let file_path = circuit_path.as_ref().with_extension("json"); let input_string = diff --git 
a/tooling/nargo_cli/src/cli/info_cmd.rs b/tooling/nargo_cli/src/cli/info_cmd.rs index b1cd5f0b64f..f983a19c0fd 100644 --- a/tooling/nargo_cli/src/cli/info_cmd.rs +++ b/tooling/nargo_cli/src/cli/info_cmd.rs @@ -1,13 +1,17 @@ use std::collections::HashMap; -use acvm::Language; +use acvm::ExpressionWidth; use backend_interface::BackendError; use clap::Args; use iter_extended::vecmap; -use nargo::{artifacts::debug::DebugArtifact, package::Package}; +use nargo::{ + artifacts::debug::DebugArtifact, insert_all_files_for_workspace_into_file_manager, + package::Package, +}; use nargo_toml::{get_package_manifest, resolve_workspace_from_toml, PackageSelection}; use noirc_driver::{ - CompileOptions, CompiledContract, CompiledProgram, NOIR_ARTIFACT_VERSION_STRING, + file_manager_with_stdlib, CompileOptions, CompiledContract, CompiledProgram, + NOIR_ARTIFACT_VERSION_STRING, }; use noirc_errors::{debug_info::OpCodesCount, Location}; use noirc_frontend::graph::CrateName; @@ -61,19 +65,22 @@ pub(crate) fn run( Some(NOIR_ARTIFACT_VERSION_STRING.to_string()), )?; + let mut workspace_file_manager = file_manager_with_stdlib(&workspace.root_dir); + insert_all_files_for_workspace_into_file_manager(&workspace, &mut workspace_file_manager); + let (binary_packages, contract_packages): (Vec<_>, Vec<_>) = workspace .into_iter() .filter(|package| !package.is_library()) .cloned() .partition(|package| package.is_binary()); - let (np_language, opcode_support) = backend.get_backend_info()?; + let expression_width = backend.get_backend_info_or_default(); let (compiled_programs, compiled_contracts) = compile_workspace( + &workspace_file_manager, &workspace, &binary_packages, &contract_packages, - np_language, - &opcode_support, + expression_width, &args.compile_options, )?; @@ -98,13 +105,13 @@ pub(crate) fn run( .into_par_iter() .zip(compiled_programs) .map(|(package, program)| { - count_opcodes_and_gates_in_program(backend, program, &package, np_language) + count_opcodes_and_gates_in_program(backend, program, &package, expression_width) }) .collect::>()?; let contract_info = compiled_contracts .into_par_iter() - .map(|contract| count_opcodes_and_gates_in_contract(backend, contract, np_language)) + .map(|contract| count_opcodes_and_gates_in_contract(backend, contract, expression_width)) .collect::>()?; let info_report = InfoReport { programs: program_info, contracts: contract_info }; @@ -115,7 +122,7 @@ pub(crate) fn run( } else { // Otherwise print human-readable table. 
if !info_report.programs.is_empty() { - let mut program_table = table!([Fm->"Package", Fm->"Language", Fm->"ACIR Opcodes", Fm->"Backend Circuit Size"]); + let mut program_table = table!([Fm->"Package", Fm->"Expression Width", Fm->"ACIR Opcodes", Fm->"Backend Circuit Size"]); for program in info_report.programs { program_table.add_row(program.into()); @@ -126,7 +133,7 @@ pub(crate) fn run( let mut contract_table = table!([ Fm->"Contract", Fm->"Function", - Fm->"Language", + Fm->"Expression Width", Fm->"ACIR Opcodes", Fm->"Backend Circuit Size" ]); @@ -203,7 +210,7 @@ struct InfoReport { struct ProgramInfo { name: String, #[serde(skip)] - language: Language, + expression_width: ExpressionWidth, acir_opcodes: usize, circuit_size: u32, } @@ -212,7 +219,7 @@ impl From for Row { fn from(program_info: ProgramInfo) -> Self { row![ Fm->format!("{}", program_info.name), - format!("{:?}", program_info.language), + format!("{:?}", program_info.expression_width), Fc->format!("{}", program_info.acir_opcodes), Fc->format!("{}", program_info.circuit_size), ] @@ -223,7 +230,7 @@ impl From for Row { struct ContractInfo { name: String, #[serde(skip)] - language: Language, + expression_width: ExpressionWidth, functions: Vec, } @@ -240,7 +247,7 @@ impl From for Vec { row![ Fm->format!("{}", contract_info.name), Fc->format!("{}", function.name), - format!("{:?}", contract_info.language), + format!("{:?}", contract_info.expression_width), Fc->format!("{}", function.acir_opcodes), Fc->format!("{}", function.circuit_size), ] @@ -252,11 +259,11 @@ fn count_opcodes_and_gates_in_program( backend: &Backend, compiled_program: CompiledProgram, package: &Package, - language: Language, + expression_width: ExpressionWidth, ) -> Result { Ok(ProgramInfo { name: package.name.to_string(), - language, + expression_width, acir_opcodes: compiled_program.circuit.opcodes.len(), circuit_size: backend.get_exact_circuit_size(&compiled_program.circuit)?, }) @@ -265,7 +272,7 @@ fn count_opcodes_and_gates_in_program( fn count_opcodes_and_gates_in_contract( backend: &Backend, contract: CompiledContract, - language: Language, + expression_width: ExpressionWidth, ) -> Result { let functions = contract .functions @@ -279,5 +286,5 @@ fn count_opcodes_and_gates_in_contract( }) .collect::>()?; - Ok(ContractInfo { name: contract.name, language, functions }) + Ok(ContractInfo { name: contract.name, expression_width, functions }) } diff --git a/tooling/nargo_cli/src/cli/init_cmd.rs b/tooling/nargo_cli/src/cli/init_cmd.rs index e53c2e4cdc9..dd3af97ecd6 100644 --- a/tooling/nargo_cli/src/cli/init_cmd.rs +++ b/tooling/nargo_cli/src/cli/init_cmd.rs @@ -1,4 +1,3 @@ -use crate::backends::Backend; use crate::errors::CliError; use super::fs::{create_named_dir, write_to_file}; @@ -34,12 +33,7 @@ const BIN_EXAMPLE: &str = include_str!("./noir_template_files/binary.nr"); const CONTRACT_EXAMPLE: &str = include_str!("./noir_template_files/contract.nr"); const LIB_EXAMPLE: &str = include_str!("./noir_template_files/library.nr"); -pub(crate) fn run( - // Backend is currently unused, but we might want to use it to inform the "new" template in the future - _backend: &Backend, - args: InitCommand, - config: NargoConfig, -) -> Result<(), CliError> { +pub(crate) fn run(args: InitCommand, config: NargoConfig) -> Result<(), CliError> { let package_name = match args.name { Some(name) => name, None => { diff --git a/tooling/nargo_cli/src/cli/lsp_cmd.rs b/tooling/nargo_cli/src/cli/lsp_cmd.rs index a41bb877991..1428b8070c8 100644 --- a/tooling/nargo_cli/src/cli/lsp_cmd.rs 
+++ b/tooling/nargo_cli/src/cli/lsp_cmd.rs @@ -1,7 +1,8 @@ use async_lsp::{ - client_monitor::ClientProcessMonitorLayer, concurrency::ConcurrencyLayer, - panic::CatchUnwindLayer, server::LifecycleLayer, tracing::TracingLayer, + concurrency::ConcurrencyLayer, panic::CatchUnwindLayer, server::LifecycleLayer, + tracing::TracingLayer, }; +use bn254_blackbox_solver::Bn254BlackBoxSolver; use clap::Args; use noir_lsp::NargoLspService; use tower::ServiceBuilder; @@ -30,8 +31,7 @@ pub(crate) fn run( runtime.block_on(async { let (server, _) = async_lsp::MainLoop::new_server(|client| { - #[allow(deprecated)] - let blackbox_solver = barretenberg_blackbox_solver::BarretenbergSolver::new(); + let blackbox_solver = Bn254BlackBoxSolver::new(); let router = NargoLspService::new(&client, blackbox_solver); ServiceBuilder::new() @@ -39,10 +39,11 @@ pub(crate) fn run( .layer(LifecycleLayer::default()) .layer(CatchUnwindLayer::default()) .layer(ConcurrencyLayer::default()) - .layer(ClientProcessMonitorLayer::new(client)) .service(router) }); + eprintln!("LSP starting..."); + // Prefer truly asynchronous piped stdin/stdout without blocking tasks. #[cfg(unix)] let (stdin, stdout) = ( diff --git a/tooling/nargo_cli/src/cli/mod.rs b/tooling/nargo_cli/src/cli/mod.rs index 8d22fb1b204..01adbe9da98 100644 --- a/tooling/nargo_cli/src/cli/mod.rs +++ b/tooling/nargo_cli/src/cli/mod.rs @@ -14,8 +14,10 @@ mod backend_cmd; mod check_cmd; mod codegen_verifier_cmd; mod compile_cmd; +mod dap_cmd; mod debug_cmd; mod execute_cmd; +mod export_cmd; mod fmt_cmd; mod info_cmd; mod init_cmd; @@ -60,7 +62,6 @@ pub(crate) struct NargoConfig { enum NargoCommand { Backend(backend_cmd::BackendCommand), Check(check_cmd::CheckCommand), - #[command(hide = true)] // Hidden while the feature has not been extensively tested Fmt(fmt_cmd::FormatCommand), CodegenVerifier(codegen_verifier_cmd::CodegenVerifierCommand), #[command(alias = "build")] @@ -69,12 +70,16 @@ enum NargoCommand { Init(init_cmd::InitCommand), Execute(execute_cmd::ExecuteCommand), #[command(hide = true)] // Hidden while the feature is being built out + Export(export_cmd::ExportCommand), + #[command(hide = true)] // Hidden while the feature is being built out Debug(debug_cmd::DebugCommand), Prove(prove_cmd::ProveCommand), Verify(verify_cmd::VerifyCommand), Test(test_cmd::TestCommand), Info(info_cmd::InfoCommand), Lsp(lsp_cmd::LspCommand), + #[command(hide = true)] + Dap(dap_cmd::DapCommand), } pub(crate) fn start_cli() -> eyre::Result<()> { @@ -92,6 +97,7 @@ pub(crate) fn start_cli() -> eyre::Result<()> { | NargoCommand::Init(_) | NargoCommand::Lsp(_) | NargoCommand::Backend(_) + | NargoCommand::Dap(_) ) { config.program_dir = find_package_root(&config.program_dir)?; } @@ -101,11 +107,12 @@ pub(crate) fn start_cli() -> eyre::Result<()> { match command { NargoCommand::New(args) => new_cmd::run(&backend, args, config), - NargoCommand::Init(args) => init_cmd::run(&backend, args, config), + NargoCommand::Init(args) => init_cmd::run(args, config), NargoCommand::Check(args) => check_cmd::run(&backend, args, config), NargoCommand::Compile(args) => compile_cmd::run(&backend, args, config), NargoCommand::Debug(args) => debug_cmd::run(&backend, args, config), NargoCommand::Execute(args) => execute_cmd::run(&backend, args, config), + NargoCommand::Export(args) => export_cmd::run(&backend, args, config), NargoCommand::Prove(args) => prove_cmd::run(&backend, args, config), NargoCommand::Verify(args) => verify_cmd::run(&backend, args, config), NargoCommand::Test(args) => 
test_cmd::run(&backend, args, config), @@ -113,6 +120,7 @@ pub(crate) fn start_cli() -> eyre::Result<()> { NargoCommand::CodegenVerifier(args) => codegen_verifier_cmd::run(&backend, args, config), NargoCommand::Backend(args) => backend_cmd::run(args), NargoCommand::Lsp(args) => lsp_cmd::run(&backend, args, config), + NargoCommand::Dap(args) => dap_cmd::run(&backend, args, config), NargoCommand::Fmt(args) => fmt_cmd::run(args, config), }?; diff --git a/tooling/nargo_cli/src/cli/prove_cmd.rs b/tooling/nargo_cli/src/cli/prove_cmd.rs index 3586e73ff2e..167ab541bc5 100644 --- a/tooling/nargo_cli/src/cli/prove_cmd.rs +++ b/tooling/nargo_cli/src/cli/prove_cmd.rs @@ -1,10 +1,13 @@ use clap::Args; use nargo::constants::{PROVER_INPUT_FILE, VERIFIER_INPUT_FILE}; +use nargo::insert_all_files_for_workspace_into_file_manager; use nargo::package::Package; use nargo::workspace::Workspace; use nargo_toml::{get_package_manifest, resolve_workspace_from_toml, PackageSelection}; use noirc_abi::input_parser::Format; -use noirc_driver::{CompileOptions, CompiledProgram, NOIR_ARTIFACT_VERSION_STRING}; +use noirc_driver::{ + file_manager_with_stdlib, CompileOptions, CompiledProgram, NOIR_ARTIFACT_VERSION_STRING, +}; use noirc_frontend::graph::CrateName; use super::compile_cmd::compile_bin_package; @@ -40,6 +43,10 @@ pub(crate) struct ProveCommand { #[clap(flatten)] compile_options: CompileOptions, + + /// JSON RPC url to solve oracle calls + #[clap(long)] + oracle_resolver: Option, } pub(crate) fn run( @@ -57,14 +64,17 @@ pub(crate) fn run( Some(NOIR_ARTIFACT_VERSION_STRING.to_string()), )?; - let (np_language, opcode_support) = backend.get_backend_info()?; + let mut workspace_file_manager = file_manager_with_stdlib(&workspace.root_dir); + insert_all_files_for_workspace_into_file_manager(&workspace, &mut workspace_file_manager); + + let expression_width = backend.get_backend_info()?; for package in &workspace { let program = compile_bin_package( + &workspace_file_manager, &workspace, package, &args.compile_options, - np_language, - &|opcode| opcode_support.is_opcode_supported(opcode), + expression_width, )?; prove_package( @@ -75,12 +85,14 @@ pub(crate) fn run( &args.prover_name, &args.verifier_name, args.verify, + args.oracle_resolver.as_deref(), )?; } Ok(()) } +#[allow(clippy::too_many_arguments)] pub(crate) fn prove_package( backend: &Backend, workspace: &Workspace, @@ -89,12 +101,14 @@ pub(crate) fn prove_package( prover_name: &str, verifier_name: &str, check_proof: bool, + foreign_call_resolver_url: Option<&str>, ) -> Result<(), CliError> { // Parse the initial witness values from Prover.toml let (inputs_map, _) = read_inputs_from_file(&package.root_dir, prover_name, Format::Toml, &compiled_program.abi)?; - let solved_witness = execute_program(&compiled_program, &inputs_map)?; + let solved_witness = + execute_program(&compiled_program, &inputs_map, foreign_call_resolver_url)?; // Write public inputs into Verifier.toml let public_abi = compiled_program.abi.public_abi(); diff --git a/tooling/nargo_cli/src/cli/test_cmd.rs b/tooling/nargo_cli/src/cli/test_cmd.rs index e117d8555a5..69f03b49cbd 100644 --- a/tooling/nargo_cli/src/cli/test_cmd.rs +++ b/tooling/nargo_cli/src/cli/test_cmd.rs @@ -1,14 +1,17 @@ use std::io::Write; use acvm::BlackBoxFunctionSolver; +use bn254_blackbox_solver::Bn254BlackBoxSolver; use clap::Args; +use fm::FileManager; use nargo::{ + insert_all_files_for_workspace_into_file_manager, ops::{run_test, TestStatus}, package::Package, prepare_package, }; use nargo_toml::{get_package_manifest, 
resolve_workspace_from_toml, PackageSelection}; -use noirc_driver::{CompileOptions, NOIR_ARTIFACT_VERSION_STRING}; +use noirc_driver::{file_manager_with_stdlib, CompileOptions, NOIR_ARTIFACT_VERSION_STRING}; use noirc_frontend::{graph::CrateName, hir::FunctionNameMatch}; use termcolor::{Color, ColorChoice, ColorSpec, StandardStream, WriteColor}; @@ -40,6 +43,10 @@ pub(crate) struct TestCommand { #[clap(flatten)] compile_options: CompileOptions, + + /// JSON RPC url to solve oracle calls + #[clap(long)] + oracle_resolver: Option, } pub(crate) fn run( @@ -57,6 +64,9 @@ pub(crate) fn run( Some(NOIR_ARTIFACT_VERSION_STRING.to_string()), )?; + let mut workspace_file_manager = file_manager_with_stdlib(&workspace.root_dir); + insert_all_files_for_workspace_into_file_manager(&workspace, &mut workspace_file_manager); + let pattern = match &args.test_name { Some(name) => { if args.exact { @@ -68,61 +78,94 @@ pub(crate) fn run( None => FunctionNameMatch::Anything, }; - #[allow(deprecated)] - let blackbox_solver = barretenberg_blackbox_solver::BarretenbergSolver::new(); + let blackbox_solver = Bn254BlackBoxSolver::new(); for package in &workspace { // By unwrapping here with `?`, we stop the test runner upon a package failing // TODO: We should run the whole suite even if there are failures in a package - run_tests(&blackbox_solver, package, pattern, args.show_output, &args.compile_options)?; + run_tests( + &workspace_file_manager, + &blackbox_solver, + package, + pattern, + args.show_output, + args.oracle_resolver.as_deref(), + &args.compile_options, + )?; } Ok(()) } fn run_tests( + file_manager: &FileManager, blackbox_solver: &S, package: &Package, - test_name: FunctionNameMatch, + fn_name: FunctionNameMatch, show_output: bool, + foreign_call_resolver_url: Option<&str>, compile_options: &CompileOptions, ) -> Result<(), CliError> { - let (mut context, crate_id) = - prepare_package(package, Box::new(|path| std::fs::read_to_string(path))); + let (mut context, crate_id) = prepare_package(file_manager, package); check_crate_and_report_errors( &mut context, crate_id, compile_options.deny_warnings, + compile_options.disable_macros, compile_options.silence_warnings, )?; - let test_functions = context.get_all_test_functions_in_crate_matching(&crate_id, test_name); + let test_functions = context.get_all_test_functions_in_crate_matching(&crate_id, fn_name); + let count_all = test_functions.len(); + if count_all == 0 { + match &fn_name { + FunctionNameMatch::Exact(pattern) => { + return Err(CliError::Generic(format!( + "[{}] Found 0 tests matching input '{pattern}'.", + package.name + ))) + } + FunctionNameMatch::Contains(pattern) => { + return Err(CliError::Generic(format!( + "[{}] Found 0 tests containing '{pattern}'.", + package.name + ))) + } + // If we are running all tests in a crate, having none is not an error + FunctionNameMatch::Anything => {} + }; + } - println!("[{}] Running {} test functions", package.name, test_functions.len()); - let mut failing = 0; + let plural = if count_all == 1 { "" } else { "s" }; + println!("[{}] Running {count_all} test function{plural}", package.name); + let mut count_failed = 0; let writer = StandardStream::stderr(ColorChoice::Always); let mut writer = writer.lock(); for (test_name, test_function) in test_functions { write!(writer, "[{}] Testing {test_name}... 
", package.name) - .expect("Failed to write to stdout"); + .expect("Failed to write to stderr"); writer.flush().expect("Failed to flush writer"); - match run_test(blackbox_solver, &context, test_function, show_output, compile_options) { + match run_test( + blackbox_solver, + &context, + test_function, + show_output, + foreign_call_resolver_url, + compile_options, + ) { TestStatus::Pass { .. } => { writer .set_color(ColorSpec::new().set_fg(Some(Color::Green))) .expect("Failed to set color"); - writeln!(writer, "ok").expect("Failed to write to stdout"); + writeln!(writer, "ok").expect("Failed to write to stderr"); } TestStatus::Fail { message, error_diagnostic } => { - let writer = StandardStream::stderr(ColorChoice::Always); - let mut writer = writer.lock(); writer .set_color(ColorSpec::new().set_fg(Some(Color::Red))) .expect("Failed to set color"); - writeln!(writer, "{message}").expect("Failed to write to stdout"); - writer.reset().expect("Failed to reset writer"); + writeln!(writer, "FAIL\n{message}\n").expect("Failed to write to stderr"); if let Some(diag) = error_diagnostic { noirc_errors::reporter::report_all( context.file_manager.as_file_map(), @@ -131,7 +174,7 @@ fn run_tests( compile_options.silence_warnings, ); } - failing += 1; + count_failed += 1; } TestStatus::CompileError(err) => { noirc_errors::reporter::report_all( @@ -140,21 +183,40 @@ fn run_tests( compile_options.deny_warnings, compile_options.silence_warnings, ); - failing += 1; + count_failed += 1; } } writer.reset().expect("Failed to reset writer"); } - if failing == 0 { - write!(writer, "[{}] ", package.name).expect("Failed to write to stdout"); + write!(writer, "[{}] ", package.name).expect("Failed to write to stderr"); + + if count_failed == 0 { writer.set_color(ColorSpec::new().set_fg(Some(Color::Green))).expect("Failed to set color"); - writeln!(writer, "All tests passed").expect("Failed to write to stdout"); + write!(writer, "{count_all} test{plural} passed").expect("Failed to write to stderr"); + writer.reset().expect("Failed to reset writer"); + writeln!(writer).expect("Failed to write to stderr"); + + Ok(()) } else { - let plural = if failing == 1 { "" } else { "s" }; - return Err(CliError::Generic(format!("[{}] {failing} test{plural} failed", package.name))); - } + let count_passed = count_all - count_failed; + let plural_failed = if count_failed == 1 { "" } else { "s" }; + let plural_passed = if count_passed == 1 { "" } else { "s" }; + + if count_passed != 0 { + writer + .set_color(ColorSpec::new().set_fg(Some(Color::Green))) + .expect("Failed to set color"); + write!(writer, "{count_passed} test{plural_passed} passed, ",) + .expect("Failed to write to stderr"); + } - writer.reset().expect("Failed to reset writer"); - Ok(()) + writer.set_color(ColorSpec::new().set_fg(Some(Color::Red))).expect("Failed to set color"); + write!(writer, "{count_failed} test{plural_failed} failed") + .expect("Failed to write to stderr"); + writer.reset().expect("Failed to reset writer"); + + // Writes final newline. 
+ Err(CliError::Generic(String::new())) + } } diff --git a/tooling/nargo_cli/src/cli/verify_cmd.rs b/tooling/nargo_cli/src/cli/verify_cmd.rs index 8c6d92b3d2f..86d5e774cbe 100644 --- a/tooling/nargo_cli/src/cli/verify_cmd.rs +++ b/tooling/nargo_cli/src/cli/verify_cmd.rs @@ -7,11 +7,14 @@ use crate::{backends::Backend, errors::CliError}; use clap::Args; use nargo::constants::{PROOF_EXT, VERIFIER_INPUT_FILE}; +use nargo::insert_all_files_for_workspace_into_file_manager; use nargo::package::Package; use nargo::workspace::Workspace; use nargo_toml::{get_package_manifest, resolve_workspace_from_toml, PackageSelection}; use noirc_abi::input_parser::Format; -use noirc_driver::{CompileOptions, CompiledProgram, NOIR_ARTIFACT_VERSION_STRING}; +use noirc_driver::{ + file_manager_with_stdlib, CompileOptions, CompiledProgram, NOIR_ARTIFACT_VERSION_STRING, +}; use noirc_frontend::graph::CrateName; /// Given a proof and a program, verify whether the proof is valid @@ -48,14 +51,17 @@ pub(crate) fn run( Some(NOIR_ARTIFACT_VERSION_STRING.to_string()), )?; - let (np_language, opcode_support) = backend.get_backend_info()?; + let mut workspace_file_manager = file_manager_with_stdlib(&workspace.root_dir); + insert_all_files_for_workspace_into_file_manager(&workspace, &mut workspace_file_manager); + + let expression_width = backend.get_backend_info()?; for package in &workspace { let program = compile_bin_package( + &workspace_file_manager, &workspace, package, &args.compile_options, - np_language, - &|opcode| opcode_support.is_opcode_supported(opcode), + expression_width, )?; verify_package(backend, &workspace, package, program, &args.verifier_name)?; diff --git a/tooling/nargo_cli/src/errors.rs b/tooling/nargo_cli/src/errors.rs index 92da74c71d4..4636772231b 100644 --- a/tooling/nargo_cli/src/errors.rs +++ b/tooling/nargo_cli/src/errors.rs @@ -53,6 +53,9 @@ pub(crate) enum CliError { #[error(transparent)] LspError(#[from] async_lsp::Error), + #[error(transparent)] + DapError(#[from] dap::errors::ServerError), + /// Error from Nargo #[error(transparent)] NargoError(#[from] NargoError), diff --git a/tooling/nargo_cli/src/main.rs b/tooling/nargo_cli/src/main.rs index 92bd7b94988..3f797b0bf0c 100644 --- a/tooling/nargo_cli/src/main.rs +++ b/tooling/nargo_cli/src/main.rs @@ -11,11 +11,33 @@ mod backends; mod cli; mod errors; +use std::env; + use color_eyre::config::HookBuilder; +use tracing_appender::rolling; +use tracing_subscriber::{fmt::format::FmtSpan, EnvFilter}; + const PANIC_MESSAGE: &str = "This is a bug. 
We may have already fixed this in newer versions of Nargo so try searching for similar issues at https://github.com/noir-lang/noir/issues/.\nIf there isn't an open issue for this bug, consider opening one at https://github.com/noir-lang/noir/issues/new?labels=bug&template=bug_report.yml"; fn main() { + // Setup tracing + if let Ok(log_dir) = env::var("NARGO_LOG_DIR") { + let debug_file = rolling::daily(log_dir, "nargo-log"); + tracing_subscriber::fmt() + .with_span_events(FmtSpan::ACTIVE) + .with_writer(debug_file) + .with_ansi(false) + .with_env_filter(EnvFilter::from_default_env()) + .init(); + } else { + tracing_subscriber::fmt() + .with_span_events(FmtSpan::ACTIVE) + .with_ansi(true) + .with_env_filter(EnvFilter::from_env("NOIR_LOG")) + .init(); + } + // Register a panic hook to display more readable panic messages to end-users let (panic_hook, _) = HookBuilder::default().display_env_section(false).panic_section(PANIC_MESSAGE).into_hooks(); diff --git a/tooling/nargo_cli/tests/acir_artifacts/1327_concrete_in_generic/target/acir.gz b/tooling/nargo_cli/tests/acir_artifacts/1327_concrete_in_generic/target/acir.gz deleted file mode 100644 index cc92863a4a8..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/1327_concrete_in_generic/target/acir.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/1327_concrete_in_generic/target/witness.gz b/tooling/nargo_cli/tests/acir_artifacts/1327_concrete_in_generic/target/witness.gz deleted file mode 100644 index 454a9b75e04..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/1327_concrete_in_generic/target/witness.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/1_mul/target/acir.gz b/tooling/nargo_cli/tests/acir_artifacts/1_mul/target/acir.gz deleted file mode 100644 index 7572c9ac2cf..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/1_mul/target/acir.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/1_mul/target/witness.gz b/tooling/nargo_cli/tests/acir_artifacts/1_mul/target/witness.gz deleted file mode 100644 index 76f5c8a2fe2..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/1_mul/target/witness.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/2_div/target/acir.gz b/tooling/nargo_cli/tests/acir_artifacts/2_div/target/acir.gz deleted file mode 100644 index 46405fc2029..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/2_div/target/acir.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/2_div/target/witness.gz b/tooling/nargo_cli/tests/acir_artifacts/2_div/target/witness.gz deleted file mode 100644 index 3145b77d957..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/2_div/target/witness.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/3_add/target/acir.gz b/tooling/nargo_cli/tests/acir_artifacts/3_add/target/acir.gz deleted file mode 100644 index 42e66d90f73..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/3_add/target/acir.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/3_add/target/witness.gz b/tooling/nargo_cli/tests/acir_artifacts/3_add/target/witness.gz deleted file mode 100644 index 0cfc48c525e..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/3_add/target/witness.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/4_sub/target/acir.gz b/tooling/nargo_cli/tests/acir_artifacts/4_sub/target/acir.gz deleted file 
mode 100644 index 633bec13563..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/4_sub/target/acir.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/4_sub/target/witness.gz b/tooling/nargo_cli/tests/acir_artifacts/4_sub/target/witness.gz deleted file mode 100644 index 68e9df80789..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/4_sub/target/witness.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/5_over/target/acir.gz b/tooling/nargo_cli/tests/acir_artifacts/5_over/target/acir.gz deleted file mode 100644 index 681a0290f75..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/5_over/target/acir.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/5_over/target/witness.gz b/tooling/nargo_cli/tests/acir_artifacts/5_over/target/witness.gz deleted file mode 100644 index b0a38188cab..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/5_over/target/witness.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/6/target/acir.gz b/tooling/nargo_cli/tests/acir_artifacts/6/target/acir.gz deleted file mode 100644 index 0cc489d8932..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/6/target/acir.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/6/target/witness.gz b/tooling/nargo_cli/tests/acir_artifacts/6/target/witness.gz deleted file mode 100644 index 5c060e1b469..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/6/target/witness.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/6_array/target/acir.gz b/tooling/nargo_cli/tests/acir_artifacts/6_array/target/acir.gz deleted file mode 100644 index 787db190b49..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/6_array/target/acir.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/6_array/target/witness.gz b/tooling/nargo_cli/tests/acir_artifacts/6_array/target/witness.gz deleted file mode 100644 index cc96fd18e00..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/6_array/target/witness.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/7/target/acir.gz b/tooling/nargo_cli/tests/acir_artifacts/7/target/acir.gz deleted file mode 100644 index 7f14d2a932c..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/7/target/acir.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/7/target/witness.gz b/tooling/nargo_cli/tests/acir_artifacts/7/target/witness.gz deleted file mode 100644 index d51356eb6c1..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/7/target/witness.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/7_function/target/acir.gz b/tooling/nargo_cli/tests/acir_artifacts/7_function/target/acir.gz deleted file mode 100644 index 5ddc1ba38e6..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/7_function/target/acir.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/7_function/target/witness.gz b/tooling/nargo_cli/tests/acir_artifacts/7_function/target/witness.gz deleted file mode 100644 index 0bb522d210e..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/7_function/target/witness.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/arithmetic_binary_operations/target/acir.gz b/tooling/nargo_cli/tests/acir_artifacts/arithmetic_binary_operations/target/acir.gz deleted file mode 
100644 index fd31cc3bfa6..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/arithmetic_binary_operations/target/acir.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/arithmetic_binary_operations/target/witness.gz b/tooling/nargo_cli/tests/acir_artifacts/arithmetic_binary_operations/target/witness.gz deleted file mode 100644 index 450a83edc9c..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/arithmetic_binary_operations/target/witness.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/array_dynamic/target/acir.gz b/tooling/nargo_cli/tests/acir_artifacts/array_dynamic/target/acir.gz deleted file mode 100644 index 376ae46ff4f..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/array_dynamic/target/acir.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/array_dynamic/target/witness.gz b/tooling/nargo_cli/tests/acir_artifacts/array_dynamic/target/witness.gz deleted file mode 100644 index 706a5784953..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/array_dynamic/target/witness.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/array_eq/target/acir.gz b/tooling/nargo_cli/tests/acir_artifacts/array_eq/target/acir.gz deleted file mode 100644 index b274cd72649..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/array_eq/target/acir.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/array_eq/target/witness.gz b/tooling/nargo_cli/tests/acir_artifacts/array_eq/target/witness.gz deleted file mode 100644 index f000e986c3d..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/array_eq/target/witness.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/array_len/target/acir.gz b/tooling/nargo_cli/tests/acir_artifacts/array_len/target/acir.gz deleted file mode 100644 index 795d22712b2..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/array_len/target/acir.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/array_len/target/witness.gz b/tooling/nargo_cli/tests/acir_artifacts/array_len/target/witness.gz deleted file mode 100644 index c3763958eeb..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/array_len/target/witness.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/array_neq/target/acir.gz b/tooling/nargo_cli/tests/acir_artifacts/array_neq/target/acir.gz deleted file mode 100644 index 8d87f8bc575..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/array_neq/target/acir.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/array_neq/target/witness.gz b/tooling/nargo_cli/tests/acir_artifacts/array_neq/target/witness.gz deleted file mode 100644 index c56b373217d..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/array_neq/target/witness.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/array_sort/target/acir.gz b/tooling/nargo_cli/tests/acir_artifacts/array_sort/target/acir.gz deleted file mode 100644 index 42d701ede8a..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/array_sort/target/acir.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/array_sort/target/witness.gz b/tooling/nargo_cli/tests/acir_artifacts/array_sort/target/witness.gz deleted file mode 100644 index 8229809cc95..00000000000 Binary files 
a/tooling/nargo_cli/tests/acir_artifacts/array_sort/target/witness.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/assert/target/acir.gz b/tooling/nargo_cli/tests/acir_artifacts/assert/target/acir.gz deleted file mode 100644 index c4e7f86f219..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/assert/target/acir.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/assert/target/witness.gz b/tooling/nargo_cli/tests/acir_artifacts/assert/target/witness.gz deleted file mode 100644 index 16880cedea2..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/assert/target/witness.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/assert_statement/target/acir.gz b/tooling/nargo_cli/tests/acir_artifacts/assert_statement/target/acir.gz deleted file mode 100644 index d71ac1b6b0e..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/assert_statement/target/acir.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/assert_statement/target/witness.gz b/tooling/nargo_cli/tests/acir_artifacts/assert_statement/target/witness.gz deleted file mode 100644 index 3e073aac635..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/assert_statement/target/witness.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/assign_ex/target/acir.gz b/tooling/nargo_cli/tests/acir_artifacts/assign_ex/target/acir.gz deleted file mode 100644 index a682df0b963..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/assign_ex/target/acir.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/assign_ex/target/witness.gz b/tooling/nargo_cli/tests/acir_artifacts/assign_ex/target/witness.gz deleted file mode 100644 index 35e05b7622b..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/assign_ex/target/witness.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/bit_and/target/acir.gz b/tooling/nargo_cli/tests/acir_artifacts/bit_and/target/acir.gz deleted file mode 100644 index 5fb7041cdf1..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/bit_and/target/acir.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/bit_and/target/witness.gz b/tooling/nargo_cli/tests/acir_artifacts/bit_and/target/witness.gz deleted file mode 100644 index 0c5dc12cf1c..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/bit_and/target/witness.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/bit_shifts_comptime/target/acir.gz b/tooling/nargo_cli/tests/acir_artifacts/bit_shifts_comptime/target/acir.gz deleted file mode 100644 index 4756ea6b632..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/bit_shifts_comptime/target/acir.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/bit_shifts_comptime/target/witness.gz b/tooling/nargo_cli/tests/acir_artifacts/bit_shifts_comptime/target/witness.gz deleted file mode 100644 index 890e987dd74..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/bit_shifts_comptime/target/witness.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/bit_shifts_runtime/target/acir.gz b/tooling/nargo_cli/tests/acir_artifacts/bit_shifts_runtime/target/acir.gz deleted file mode 100644 index 45c1cc44c8e..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/bit_shifts_runtime/target/acir.gz and /dev/null differ diff --git 
a/tooling/nargo_cli/tests/acir_artifacts/bit_shifts_runtime/target/witness.gz b/tooling/nargo_cli/tests/acir_artifacts/bit_shifts_runtime/target/witness.gz deleted file mode 100644 index bdc4e70ba09..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/bit_shifts_runtime/target/witness.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/bool_not/target/acir.gz b/tooling/nargo_cli/tests/acir_artifacts/bool_not/target/acir.gz deleted file mode 100644 index 233a1e25f33..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/bool_not/target/acir.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/bool_not/target/witness.gz b/tooling/nargo_cli/tests/acir_artifacts/bool_not/target/witness.gz deleted file mode 100644 index 16880cedea2..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/bool_not/target/witness.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/bool_or/target/acir.gz b/tooling/nargo_cli/tests/acir_artifacts/bool_or/target/acir.gz deleted file mode 100644 index 697832be207..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/bool_or/target/acir.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/bool_or/target/witness.gz b/tooling/nargo_cli/tests/acir_artifacts/bool_or/target/witness.gz deleted file mode 100644 index 10cffba7141..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/bool_or/target/witness.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/brillig_acir_as_brillig/target/acir.gz b/tooling/nargo_cli/tests/acir_artifacts/brillig_acir_as_brillig/target/acir.gz deleted file mode 100644 index 69cbde31d9d..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/brillig_acir_as_brillig/target/acir.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/brillig_acir_as_brillig/target/witness.gz b/tooling/nargo_cli/tests/acir_artifacts/brillig_acir_as_brillig/target/witness.gz deleted file mode 100644 index 844178f0430..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/brillig_acir_as_brillig/target/witness.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/brillig_arrays/target/acir.gz b/tooling/nargo_cli/tests/acir_artifacts/brillig_arrays/target/acir.gz deleted file mode 100644 index a093703d4b3..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/brillig_arrays/target/acir.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/brillig_arrays/target/witness.gz b/tooling/nargo_cli/tests/acir_artifacts/brillig_arrays/target/witness.gz deleted file mode 100644 index 2d9b4cf245b..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/brillig_arrays/target/witness.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/brillig_assert/target/acir.gz b/tooling/nargo_cli/tests/acir_artifacts/brillig_assert/target/acir.gz deleted file mode 100644 index 2cf3b7251e6..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/brillig_assert/target/acir.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/brillig_assert/target/witness.gz b/tooling/nargo_cli/tests/acir_artifacts/brillig_assert/target/witness.gz deleted file mode 100644 index 628e5fbc6d8..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/brillig_assert/target/witness.gz and /dev/null differ diff --git 
a/tooling/nargo_cli/tests/acir_artifacts/brillig_blake2s/target/acir.gz b/tooling/nargo_cli/tests/acir_artifacts/brillig_blake2s/target/acir.gz deleted file mode 100644 index be47506c42f..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/brillig_blake2s/target/acir.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/brillig_blake2s/target/witness.gz b/tooling/nargo_cli/tests/acir_artifacts/brillig_blake2s/target/witness.gz deleted file mode 100644 index d51356eb6c1..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/brillig_blake2s/target/witness.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/brillig_calls/target/acir.gz b/tooling/nargo_cli/tests/acir_artifacts/brillig_calls/target/acir.gz deleted file mode 100644 index b69e231774b..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/brillig_calls/target/acir.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/brillig_calls/target/witness.gz b/tooling/nargo_cli/tests/acir_artifacts/brillig_calls/target/witness.gz deleted file mode 100644 index 844178f0430..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/brillig_calls/target/witness.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/brillig_calls_array/target/acir.gz b/tooling/nargo_cli/tests/acir_artifacts/brillig_calls_array/target/acir.gz deleted file mode 100644 index 59b89c22bc3..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/brillig_calls_array/target/acir.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/brillig_calls_array/target/witness.gz b/tooling/nargo_cli/tests/acir_artifacts/brillig_calls_array/target/witness.gz deleted file mode 100644 index 266c94d043a..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/brillig_calls_array/target/witness.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/brillig_calls_conditionals/target/acir.gz b/tooling/nargo_cli/tests/acir_artifacts/brillig_calls_conditionals/target/acir.gz deleted file mode 100644 index e4155f58ead..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/brillig_calls_conditionals/target/acir.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/brillig_calls_conditionals/target/witness.gz b/tooling/nargo_cli/tests/acir_artifacts/brillig_calls_conditionals/target/witness.gz deleted file mode 100644 index 3e7c051ffc4..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/brillig_calls_conditionals/target/witness.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/brillig_conditional/target/acir.gz b/tooling/nargo_cli/tests/acir_artifacts/brillig_conditional/target/acir.gz deleted file mode 100644 index 9117511d800..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/brillig_conditional/target/acir.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/brillig_conditional/target/witness.gz b/tooling/nargo_cli/tests/acir_artifacts/brillig_conditional/target/witness.gz deleted file mode 100644 index 162d33a5fd3..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/brillig_conditional/target/witness.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/brillig_ecdsa/target/acir.gz b/tooling/nargo_cli/tests/acir_artifacts/brillig_ecdsa/target/acir.gz deleted file mode 100644 index cdc28517544..00000000000 Binary files 
a/tooling/nargo_cli/tests/acir_artifacts/brillig_ecdsa/target/acir.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/brillig_ecdsa/target/witness.gz b/tooling/nargo_cli/tests/acir_artifacts/brillig_ecdsa/target/witness.gz deleted file mode 100644 index 5fe202b72f0..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/brillig_ecdsa/target/witness.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/brillig_fns_as_values/target/acir.gz b/tooling/nargo_cli/tests/acir_artifacts/brillig_fns_as_values/target/acir.gz deleted file mode 100644 index d1819212993..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/brillig_fns_as_values/target/acir.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/brillig_fns_as_values/target/witness.gz b/tooling/nargo_cli/tests/acir_artifacts/brillig_fns_as_values/target/witness.gz deleted file mode 100644 index f4a9c9f6dda..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/brillig_fns_as_values/target/witness.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/brillig_hash_to_field/target/acir.gz b/tooling/nargo_cli/tests/acir_artifacts/brillig_hash_to_field/target/acir.gz deleted file mode 100644 index 73c742a2dd1..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/brillig_hash_to_field/target/acir.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/brillig_hash_to_field/target/witness.gz b/tooling/nargo_cli/tests/acir_artifacts/brillig_hash_to_field/target/witness.gz deleted file mode 100644 index 1529254d597..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/brillig_hash_to_field/target/witness.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/brillig_identity_function/target/acir.gz b/tooling/nargo_cli/tests/acir_artifacts/brillig_identity_function/target/acir.gz deleted file mode 100644 index 4e17ecc5d7b..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/brillig_identity_function/target/acir.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/brillig_identity_function/target/witness.gz b/tooling/nargo_cli/tests/acir_artifacts/brillig_identity_function/target/witness.gz deleted file mode 100644 index 9a911d62512..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/brillig_identity_function/target/witness.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/brillig_keccak/target/acir.gz b/tooling/nargo_cli/tests/acir_artifacts/brillig_keccak/target/acir.gz deleted file mode 100644 index 1a64fd03980..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/brillig_keccak/target/acir.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/brillig_keccak/target/witness.gz b/tooling/nargo_cli/tests/acir_artifacts/brillig_keccak/target/witness.gz deleted file mode 100644 index 95b5064a5a7..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/brillig_keccak/target/witness.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/brillig_loop/target/acir.gz b/tooling/nargo_cli/tests/acir_artifacts/brillig_loop/target/acir.gz deleted file mode 100644 index 04baae8d290..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/brillig_loop/target/acir.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/brillig_loop/target/witness.gz 
b/tooling/nargo_cli/tests/acir_artifacts/brillig_loop/target/witness.gz deleted file mode 100644 index 6e9e8ecd1d0..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/brillig_loop/target/witness.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/brillig_nested_arrays/target/acir.gz b/tooling/nargo_cli/tests/acir_artifacts/brillig_nested_arrays/target/acir.gz deleted file mode 100644 index f69df4781ec..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/brillig_nested_arrays/target/acir.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/brillig_nested_arrays/target/witness.gz b/tooling/nargo_cli/tests/acir_artifacts/brillig_nested_arrays/target/witness.gz deleted file mode 100644 index 87cf83430f7..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/brillig_nested_arrays/target/witness.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/brillig_nested_slices/target/acir.gz b/tooling/nargo_cli/tests/acir_artifacts/brillig_nested_slices/target/acir.gz deleted file mode 100644 index 100a208bcd8..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/brillig_nested_slices/target/acir.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/brillig_nested_slices/target/witness.gz b/tooling/nargo_cli/tests/acir_artifacts/brillig_nested_slices/target/witness.gz deleted file mode 100644 index 3530c6f59c1..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/brillig_nested_slices/target/witness.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/brillig_not/target/acir.gz b/tooling/nargo_cli/tests/acir_artifacts/brillig_not/target/acir.gz deleted file mode 100644 index 9702ca340a5..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/brillig_not/target/acir.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/brillig_not/target/witness.gz b/tooling/nargo_cli/tests/acir_artifacts/brillig_not/target/witness.gz deleted file mode 100644 index 3fbf07be37e..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/brillig_not/target/witness.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/brillig_oracle/target/acir.gz b/tooling/nargo_cli/tests/acir_artifacts/brillig_oracle/target/acir.gz deleted file mode 100644 index db158f61882..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/brillig_oracle/target/acir.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/brillig_oracle/target/witness.gz b/tooling/nargo_cli/tests/acir_artifacts/brillig_oracle/target/witness.gz deleted file mode 100644 index 3fead7f6b2e..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/brillig_oracle/target/witness.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/brillig_pedersen/target/acir.gz b/tooling/nargo_cli/tests/acir_artifacts/brillig_pedersen/target/acir.gz deleted file mode 100644 index 27f6f353d25..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/brillig_pedersen/target/acir.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/brillig_pedersen/target/witness.gz b/tooling/nargo_cli/tests/acir_artifacts/brillig_pedersen/target/witness.gz deleted file mode 100644 index b26110156a0..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/brillig_pedersen/target/witness.gz and /dev/null differ diff --git 
a/tooling/nargo_cli/tests/acir_artifacts/brillig_recursion/target/acir.gz b/tooling/nargo_cli/tests/acir_artifacts/brillig_recursion/target/acir.gz deleted file mode 100644 index c0c91d81546..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/brillig_recursion/target/acir.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/brillig_recursion/target/witness.gz b/tooling/nargo_cli/tests/acir_artifacts/brillig_recursion/target/witness.gz deleted file mode 100644 index 46e192995f3..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/brillig_recursion/target/witness.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/brillig_references/target/acir.gz b/tooling/nargo_cli/tests/acir_artifacts/brillig_references/target/acir.gz deleted file mode 100644 index 4069ca2cb5e..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/brillig_references/target/acir.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/brillig_references/target/witness.gz b/tooling/nargo_cli/tests/acir_artifacts/brillig_references/target/witness.gz deleted file mode 100644 index bf62ea672eb..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/brillig_references/target/witness.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/brillig_scalar_mul/target/acir.gz b/tooling/nargo_cli/tests/acir_artifacts/brillig_scalar_mul/target/acir.gz deleted file mode 100644 index 9f5f787c655..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/brillig_scalar_mul/target/acir.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/brillig_scalar_mul/target/witness.gz b/tooling/nargo_cli/tests/acir_artifacts/brillig_scalar_mul/target/witness.gz deleted file mode 100644 index 3204207ec63..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/brillig_scalar_mul/target/witness.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/brillig_schnorr/target/acir.gz b/tooling/nargo_cli/tests/acir_artifacts/brillig_schnorr/target/acir.gz deleted file mode 100644 index 625ae64a11d..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/brillig_schnorr/target/acir.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/brillig_schnorr/target/witness.gz b/tooling/nargo_cli/tests/acir_artifacts/brillig_schnorr/target/witness.gz deleted file mode 100644 index 17d93cc4d19..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/brillig_schnorr/target/witness.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/brillig_sha256/target/acir.gz b/tooling/nargo_cli/tests/acir_artifacts/brillig_sha256/target/acir.gz deleted file mode 100644 index 49f7e6afcf4..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/brillig_sha256/target/acir.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/brillig_sha256/target/witness.gz b/tooling/nargo_cli/tests/acir_artifacts/brillig_sha256/target/witness.gz deleted file mode 100644 index 118042d5841..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/brillig_sha256/target/witness.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/brillig_slices/target/acir.gz b/tooling/nargo_cli/tests/acir_artifacts/brillig_slices/target/acir.gz deleted file mode 100644 index 57ca1d59fed..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/brillig_slices/target/acir.gz and /dev/null 
differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/brillig_slices/target/witness.gz b/tooling/nargo_cli/tests/acir_artifacts/brillig_slices/target/witness.gz deleted file mode 100644 index 3530c6f59c1..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/brillig_slices/target/witness.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/brillig_to_be_bytes/target/acir.gz b/tooling/nargo_cli/tests/acir_artifacts/brillig_to_be_bytes/target/acir.gz deleted file mode 100644 index 1249975b27c..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/brillig_to_be_bytes/target/acir.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/brillig_to_be_bytes/target/witness.gz b/tooling/nargo_cli/tests/acir_artifacts/brillig_to_be_bytes/target/witness.gz deleted file mode 100644 index 0d48d549824..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/brillig_to_be_bytes/target/witness.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/brillig_to_bytes_integration/target/acir.gz b/tooling/nargo_cli/tests/acir_artifacts/brillig_to_bytes_integration/target/acir.gz deleted file mode 100644 index fe9ac34ed17..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/brillig_to_bytes_integration/target/acir.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/brillig_to_bytes_integration/target/witness.gz b/tooling/nargo_cli/tests/acir_artifacts/brillig_to_bytes_integration/target/witness.gz deleted file mode 100644 index b3813a1f976..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/brillig_to_bytes_integration/target/witness.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/brillig_to_le_bytes/target/acir.gz b/tooling/nargo_cli/tests/acir_artifacts/brillig_to_le_bytes/target/acir.gz deleted file mode 100644 index 834f57e1ee7..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/brillig_to_le_bytes/target/acir.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/brillig_to_le_bytes/target/witness.gz b/tooling/nargo_cli/tests/acir_artifacts/brillig_to_le_bytes/target/witness.gz deleted file mode 100644 index d12168c557b..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/brillig_to_le_bytes/target/witness.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/brillig_top_level/target/acir.gz b/tooling/nargo_cli/tests/acir_artifacts/brillig_top_level/target/acir.gz deleted file mode 100644 index 4b2fbcd3462..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/brillig_top_level/target/acir.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/brillig_top_level/target/witness.gz b/tooling/nargo_cli/tests/acir_artifacts/brillig_top_level/target/witness.gz deleted file mode 100644 index 38bdf1f7263..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/brillig_top_level/target/witness.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/brillig_unitialised_arrays/target/acir.gz b/tooling/nargo_cli/tests/acir_artifacts/brillig_unitialised_arrays/target/acir.gz deleted file mode 100644 index ac18684a07e..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/brillig_unitialised_arrays/target/acir.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/brillig_unitialised_arrays/target/witness.gz 
b/tooling/nargo_cli/tests/acir_artifacts/brillig_unitialised_arrays/target/witness.gz deleted file mode 100644 index ceaf7ad008e..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/brillig_unitialised_arrays/target/witness.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/cast_bool/target/acir.gz b/tooling/nargo_cli/tests/acir_artifacts/cast_bool/target/acir.gz deleted file mode 100644 index 032b36d1629..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/cast_bool/target/acir.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/cast_bool/target/witness.gz b/tooling/nargo_cli/tests/acir_artifacts/cast_bool/target/witness.gz deleted file mode 100644 index fa79236ad55..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/cast_bool/target/witness.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/closures_mut_ref/target/acir.gz b/tooling/nargo_cli/tests/acir_artifacts/closures_mut_ref/target/acir.gz deleted file mode 100644 index 271b0ddd649..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/closures_mut_ref/target/acir.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/closures_mut_ref/target/witness.gz b/tooling/nargo_cli/tests/acir_artifacts/closures_mut_ref/target/witness.gz deleted file mode 100644 index 37c6d67fada..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/closures_mut_ref/target/witness.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/conditional_1/target/acir.gz b/tooling/nargo_cli/tests/acir_artifacts/conditional_1/target/acir.gz deleted file mode 100644 index dff5ebe0a87..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/conditional_1/target/acir.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/conditional_1/target/witness.gz b/tooling/nargo_cli/tests/acir_artifacts/conditional_1/target/witness.gz deleted file mode 100644 index acc00ece890..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/conditional_1/target/witness.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/conditional_2/target/acir.gz b/tooling/nargo_cli/tests/acir_artifacts/conditional_2/target/acir.gz deleted file mode 100644 index 8b56f25b2cf..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/conditional_2/target/acir.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/conditional_2/target/witness.gz b/tooling/nargo_cli/tests/acir_artifacts/conditional_2/target/witness.gz deleted file mode 100644 index 310c2cba8c4..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/conditional_2/target/witness.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/conditional_regression_421/target/acir.gz b/tooling/nargo_cli/tests/acir_artifacts/conditional_regression_421/target/acir.gz deleted file mode 100644 index bb060b5ebcc..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/conditional_regression_421/target/acir.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/conditional_regression_421/target/witness.gz b/tooling/nargo_cli/tests/acir_artifacts/conditional_regression_421/target/witness.gz deleted file mode 100644 index 025b2d9ea44..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/conditional_regression_421/target/witness.gz and /dev/null differ diff --git 
a/tooling/nargo_cli/tests/acir_artifacts/conditional_regression_661/target/acir.gz b/tooling/nargo_cli/tests/acir_artifacts/conditional_regression_661/target/acir.gz deleted file mode 100644 index 7c336747f92..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/conditional_regression_661/target/acir.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/conditional_regression_661/target/witness.gz b/tooling/nargo_cli/tests/acir_artifacts/conditional_regression_661/target/witness.gz deleted file mode 100644 index 1efbfebfaad..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/conditional_regression_661/target/witness.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/conditional_regression_short_circuit/target/acir.gz b/tooling/nargo_cli/tests/acir_artifacts/conditional_regression_short_circuit/target/acir.gz deleted file mode 100644 index 75f2bcfdb0b..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/conditional_regression_short_circuit/target/acir.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/conditional_regression_short_circuit/target/witness.gz b/tooling/nargo_cli/tests/acir_artifacts/conditional_regression_short_circuit/target/witness.gz deleted file mode 100644 index b2aa4a0b23f..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/conditional_regression_short_circuit/target/witness.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/custom_entry/target/acir.gz b/tooling/nargo_cli/tests/acir_artifacts/custom_entry/target/acir.gz deleted file mode 100644 index c4e7f86f219..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/custom_entry/target/acir.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/custom_entry/target/witness.gz b/tooling/nargo_cli/tests/acir_artifacts/custom_entry/target/witness.gz deleted file mode 100644 index 16880cedea2..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/custom_entry/target/witness.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/debug_logs/target/acir.gz b/tooling/nargo_cli/tests/acir_artifacts/debug_logs/target/acir.gz deleted file mode 100644 index ea9187f4084..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/debug_logs/target/acir.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/debug_logs/target/witness.gz b/tooling/nargo_cli/tests/acir_artifacts/debug_logs/target/witness.gz deleted file mode 100644 index 3199dac0924..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/debug_logs/target/witness.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/diamond_deps_0/target/acir.gz b/tooling/nargo_cli/tests/acir_artifacts/diamond_deps_0/target/acir.gz deleted file mode 100644 index e73668fd86c..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/diamond_deps_0/target/acir.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/diamond_deps_0/target/witness.gz b/tooling/nargo_cli/tests/acir_artifacts/diamond_deps_0/target/witness.gz deleted file mode 100644 index d2a6bdba5c8..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/diamond_deps_0/target/witness.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/distinct_keyword/target/acir.gz b/tooling/nargo_cli/tests/acir_artifacts/distinct_keyword/target/acir.gz deleted file mode 100644 index b3411dc96a7..00000000000 
Binary files a/tooling/nargo_cli/tests/acir_artifacts/distinct_keyword/target/acir.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/distinct_keyword/target/witness.gz b/tooling/nargo_cli/tests/acir_artifacts/distinct_keyword/target/witness.gz deleted file mode 100644 index d79dfba9359..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/distinct_keyword/target/witness.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/double_verify_proof/target/acir.gz b/tooling/nargo_cli/tests/acir_artifacts/double_verify_proof/target/acir.gz deleted file mode 100644 index a2faad65143..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/double_verify_proof/target/acir.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/double_verify_proof/target/witness.gz b/tooling/nargo_cli/tests/acir_artifacts/double_verify_proof/target/witness.gz deleted file mode 100644 index 251984d6292..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/double_verify_proof/target/witness.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/ecdsa_secp256k1/target/acir.gz b/tooling/nargo_cli/tests/acir_artifacts/ecdsa_secp256k1/target/acir.gz deleted file mode 100644 index 9108d663e86..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/ecdsa_secp256k1/target/acir.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/ecdsa_secp256k1/target/witness.gz b/tooling/nargo_cli/tests/acir_artifacts/ecdsa_secp256k1/target/witness.gz deleted file mode 100644 index a094ba3246b..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/ecdsa_secp256k1/target/witness.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/ecdsa_secp256r1/target/acir.gz b/tooling/nargo_cli/tests/acir_artifacts/ecdsa_secp256r1/target/acir.gz deleted file mode 100644 index ec6bc2c73a0..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/ecdsa_secp256r1/target/acir.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/ecdsa_secp256r1/target/witness.gz b/tooling/nargo_cli/tests/acir_artifacts/ecdsa_secp256r1/target/witness.gz deleted file mode 100644 index 79d009caea4..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/ecdsa_secp256r1/target/witness.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/eddsa/target/acir.gz b/tooling/nargo_cli/tests/acir_artifacts/eddsa/target/acir.gz deleted file mode 100644 index b8577dbd3ac..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/eddsa/target/acir.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/eddsa/target/witness.gz b/tooling/nargo_cli/tests/acir_artifacts/eddsa/target/witness.gz deleted file mode 100644 index 7c125021d96..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/eddsa/target/witness.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/field_attribute/target/acir.gz b/tooling/nargo_cli/tests/acir_artifacts/field_attribute/target/acir.gz deleted file mode 100644 index 9401237fd8c..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/field_attribute/target/acir.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/field_attribute/target/witness.gz b/tooling/nargo_cli/tests/acir_artifacts/field_attribute/target/witness.gz deleted file mode 100644 index 5f3c241de56..00000000000 Binary files 
a/tooling/nargo_cli/tests/acir_artifacts/field_attribute/target/witness.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/generics/target/acir.gz b/tooling/nargo_cli/tests/acir_artifacts/generics/target/acir.gz deleted file mode 100644 index c9462cfa87f..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/generics/target/acir.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/generics/target/witness.gz b/tooling/nargo_cli/tests/acir_artifacts/generics/target/witness.gz deleted file mode 100644 index 4d120219b14..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/generics/target/witness.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/global_consts/target/acir.gz b/tooling/nargo_cli/tests/acir_artifacts/global_consts/target/acir.gz deleted file mode 100644 index 8b6a0d9db65..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/global_consts/target/acir.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/global_consts/target/witness.gz b/tooling/nargo_cli/tests/acir_artifacts/global_consts/target/witness.gz deleted file mode 100644 index 41fe927e809..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/global_consts/target/witness.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/hash_to_field/target/acir.gz b/tooling/nargo_cli/tests/acir_artifacts/hash_to_field/target/acir.gz deleted file mode 100644 index 9be98aef491..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/hash_to_field/target/acir.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/hash_to_field/target/witness.gz b/tooling/nargo_cli/tests/acir_artifacts/hash_to_field/target/witness.gz deleted file mode 100644 index 743d797096b..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/hash_to_field/target/witness.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/higher_order_functions/target/acir.gz b/tooling/nargo_cli/tests/acir_artifacts/higher_order_functions/target/acir.gz deleted file mode 100644 index eab354be13d..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/higher_order_functions/target/acir.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/higher_order_functions/target/witness.gz b/tooling/nargo_cli/tests/acir_artifacts/higher_order_functions/target/witness.gz deleted file mode 100644 index 329d15dfb17..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/higher_order_functions/target/witness.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/if_else_chain/target/acir.gz b/tooling/nargo_cli/tests/acir_artifacts/if_else_chain/target/acir.gz deleted file mode 100644 index 21cb3898a2d..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/if_else_chain/target/acir.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/if_else_chain/target/witness.gz b/tooling/nargo_cli/tests/acir_artifacts/if_else_chain/target/witness.gz deleted file mode 100644 index 4ab0b124e70..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/if_else_chain/target/witness.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/import/target/acir.gz b/tooling/nargo_cli/tests/acir_artifacts/import/target/acir.gz deleted file mode 100644 index ff6e6f6c394..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/import/target/acir.gz and 
/dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/import/target/witness.gz b/tooling/nargo_cli/tests/acir_artifacts/import/target/witness.gz deleted file mode 100644 index 93c5b96bdf3..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/import/target/witness.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/integer_array_indexing/target/acir.gz b/tooling/nargo_cli/tests/acir_artifacts/integer_array_indexing/target/acir.gz deleted file mode 100644 index 1c4c50039eb..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/integer_array_indexing/target/acir.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/integer_array_indexing/target/witness.gz b/tooling/nargo_cli/tests/acir_artifacts/integer_array_indexing/target/witness.gz deleted file mode 100644 index b3d60e315ec..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/integer_array_indexing/target/witness.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/keccak256/target/acir.gz b/tooling/nargo_cli/tests/acir_artifacts/keccak256/target/acir.gz deleted file mode 100644 index cb74273e4d7..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/keccak256/target/acir.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/keccak256/target/witness.gz b/tooling/nargo_cli/tests/acir_artifacts/keccak256/target/witness.gz deleted file mode 100644 index 42bcc0ccbd1..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/keccak256/target/witness.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/main_bool_arg/target/acir.gz b/tooling/nargo_cli/tests/acir_artifacts/main_bool_arg/target/acir.gz deleted file mode 100644 index d054abe1df0..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/main_bool_arg/target/acir.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/main_bool_arg/target/witness.gz b/tooling/nargo_cli/tests/acir_artifacts/main_bool_arg/target/witness.gz deleted file mode 100644 index 80a779d4464..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/main_bool_arg/target/witness.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/merkle_insert/target/acir.gz b/tooling/nargo_cli/tests/acir_artifacts/merkle_insert/target/acir.gz deleted file mode 100644 index 75b1cbb5072..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/merkle_insert/target/acir.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/merkle_insert/target/witness.gz b/tooling/nargo_cli/tests/acir_artifacts/merkle_insert/target/witness.gz deleted file mode 100644 index 6351d29dd11..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/merkle_insert/target/witness.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/mock_oracle/target/acir.gz b/tooling/nargo_cli/tests/acir_artifacts/mock_oracle/target/acir.gz deleted file mode 100644 index c45cd40f28e..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/mock_oracle/target/acir.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/mock_oracle/target/witness.gz b/tooling/nargo_cli/tests/acir_artifacts/mock_oracle/target/witness.gz deleted file mode 100644 index 4e90289d5e1..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/mock_oracle/target/witness.gz and /dev/null differ diff --git 
a/tooling/nargo_cli/tests/acir_artifacts/modules/target/acir.gz b/tooling/nargo_cli/tests/acir_artifacts/modules/target/acir.gz deleted file mode 100644 index 05b5e23ae3e..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/modules/target/acir.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/modules/target/witness.gz b/tooling/nargo_cli/tests/acir_artifacts/modules/target/witness.gz deleted file mode 100644 index 58c7b52ef85..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/modules/target/witness.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/modules_more/target/acir.gz b/tooling/nargo_cli/tests/acir_artifacts/modules_more/target/acir.gz deleted file mode 100644 index ff6e6f6c394..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/modules_more/target/acir.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/modules_more/target/witness.gz b/tooling/nargo_cli/tests/acir_artifacts/modules_more/target/witness.gz deleted file mode 100644 index 6f12eac202f..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/modules_more/target/witness.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/modulus/target/acir.gz b/tooling/nargo_cli/tests/acir_artifacts/modulus/target/acir.gz deleted file mode 100644 index a99124a5e3a..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/modulus/target/acir.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/modulus/target/witness.gz b/tooling/nargo_cli/tests/acir_artifacts/modulus/target/witness.gz deleted file mode 100644 index 02931c632ff..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/modulus/target/witness.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/nested_array_dynamic/target/acir.gz b/tooling/nargo_cli/tests/acir_artifacts/nested_array_dynamic/target/acir.gz deleted file mode 100644 index 9f518522755..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/nested_array_dynamic/target/acir.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/nested_array_dynamic/target/witness.gz b/tooling/nargo_cli/tests/acir_artifacts/nested_array_dynamic/target/witness.gz deleted file mode 100644 index 62a1378dece..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/nested_array_dynamic/target/witness.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/nested_arrays_from_brillig/target/acir.gz b/tooling/nargo_cli/tests/acir_artifacts/nested_arrays_from_brillig/target/acir.gz deleted file mode 100644 index 270cfcaf53c..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/nested_arrays_from_brillig/target/acir.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/nested_arrays_from_brillig/target/witness.gz b/tooling/nargo_cli/tests/acir_artifacts/nested_arrays_from_brillig/target/witness.gz deleted file mode 100644 index a3161ac44f9..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/nested_arrays_from_brillig/target/witness.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/nested_slice_dynamic/target/acir.gz b/tooling/nargo_cli/tests/acir_artifacts/nested_slice_dynamic/target/acir.gz deleted file mode 100644 index 3db0a495a9d..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/nested_slice_dynamic/target/acir.gz and /dev/null differ diff --git 
a/tooling/nargo_cli/tests/acir_artifacts/nested_slice_dynamic/target/witness.gz b/tooling/nargo_cli/tests/acir_artifacts/nested_slice_dynamic/target/witness.gz deleted file mode 100644 index 9c9e80efe8f..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/nested_slice_dynamic/target/witness.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/pedersen_check/target/acir.gz b/tooling/nargo_cli/tests/acir_artifacts/pedersen_check/target/acir.gz deleted file mode 100644 index 02c9f32e3c5..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/pedersen_check/target/acir.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/pedersen_check/target/witness.gz b/tooling/nargo_cli/tests/acir_artifacts/pedersen_check/target/witness.gz deleted file mode 100644 index caf34e2b734..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/pedersen_check/target/witness.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/poseidon_bn254_hash/target/acir.gz b/tooling/nargo_cli/tests/acir_artifacts/poseidon_bn254_hash/target/acir.gz deleted file mode 100644 index 0327f600884..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/poseidon_bn254_hash/target/acir.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/poseidon_bn254_hash/target/witness.gz b/tooling/nargo_cli/tests/acir_artifacts/poseidon_bn254_hash/target/witness.gz deleted file mode 100644 index b3f3f1a3b24..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/poseidon_bn254_hash/target/witness.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/poseidonsponge_x5_254/target/acir.gz b/tooling/nargo_cli/tests/acir_artifacts/poseidonsponge_x5_254/target/acir.gz deleted file mode 100644 index dc260ce2aa7..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/poseidonsponge_x5_254/target/acir.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/poseidonsponge_x5_254/target/witness.gz b/tooling/nargo_cli/tests/acir_artifacts/poseidonsponge_x5_254/target/witness.gz deleted file mode 100644 index f61ba4ec0cf..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/poseidonsponge_x5_254/target/witness.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/pred_eq/target/acir.gz b/tooling/nargo_cli/tests/acir_artifacts/pred_eq/target/acir.gz deleted file mode 100644 index 032b36d1629..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/pred_eq/target/acir.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/pred_eq/target/witness.gz b/tooling/nargo_cli/tests/acir_artifacts/pred_eq/target/witness.gz deleted file mode 100644 index f1ea0249fe9..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/pred_eq/target/witness.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/references/target/acir.gz b/tooling/nargo_cli/tests/acir_artifacts/references/target/acir.gz deleted file mode 100644 index 0668e2eca25..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/references/target/acir.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/references/target/witness.gz b/tooling/nargo_cli/tests/acir_artifacts/references/target/witness.gz deleted file mode 100644 index bf62ea672eb..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/references/target/witness.gz and /dev/null differ diff --git 
a/tooling/nargo_cli/tests/acir_artifacts/regression/target/acir.gz b/tooling/nargo_cli/tests/acir_artifacts/regression/target/acir.gz deleted file mode 100644 index 66b4abc42dc..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/regression/target/acir.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/regression/target/witness.gz b/tooling/nargo_cli/tests/acir_artifacts/regression/target/witness.gz deleted file mode 100644 index ea06e69e18e..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/regression/target/witness.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/regression_2854/target/acir.gz b/tooling/nargo_cli/tests/acir_artifacts/regression_2854/target/acir.gz deleted file mode 100644 index 6f4ffaa488f..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/regression_2854/target/acir.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/regression_2854/target/witness.gz b/tooling/nargo_cli/tests/acir_artifacts/regression_2854/target/witness.gz deleted file mode 100644 index c0b900e8119..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/regression_2854/target/witness.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/regression_mem_op_predicate/target/acir.gz b/tooling/nargo_cli/tests/acir_artifacts/regression_mem_op_predicate/target/acir.gz deleted file mode 100644 index 5c0339446c5..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/regression_mem_op_predicate/target/acir.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/regression_mem_op_predicate/target/witness.gz b/tooling/nargo_cli/tests/acir_artifacts/regression_mem_op_predicate/target/witness.gz deleted file mode 100644 index 095aef252ee..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/regression_mem_op_predicate/target/witness.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/regression_method_cannot_be_found/target/acir.gz b/tooling/nargo_cli/tests/acir_artifacts/regression_method_cannot_be_found/target/acir.gz deleted file mode 100644 index f29fbef8d3f..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/regression_method_cannot_be_found/target/acir.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/regression_method_cannot_be_found/target/witness.gz b/tooling/nargo_cli/tests/acir_artifacts/regression_method_cannot_be_found/target/witness.gz deleted file mode 100644 index 4e90289d5e1..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/regression_method_cannot_be_found/target/witness.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/scalar_mul/target/acir.gz b/tooling/nargo_cli/tests/acir_artifacts/scalar_mul/target/acir.gz deleted file mode 100644 index 0bf8db7df70..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/scalar_mul/target/acir.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/scalar_mul/target/witness.gz b/tooling/nargo_cli/tests/acir_artifacts/scalar_mul/target/witness.gz deleted file mode 100644 index 637e61f60de..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/scalar_mul/target/witness.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/schnorr/target/acir.gz b/tooling/nargo_cli/tests/acir_artifacts/schnorr/target/acir.gz deleted file mode 100644 index 047e59422ee..00000000000 Binary files 
a/tooling/nargo_cli/tests/acir_artifacts/schnorr/target/acir.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/schnorr/target/witness.gz b/tooling/nargo_cli/tests/acir_artifacts/schnorr/target/witness.gz deleted file mode 100644 index 91bf1aeb7ad..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/schnorr/target/witness.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/sha256/target/acir.gz b/tooling/nargo_cli/tests/acir_artifacts/sha256/target/acir.gz deleted file mode 100644 index a2de8064bb5..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/sha256/target/acir.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/sha256/target/witness.gz b/tooling/nargo_cli/tests/acir_artifacts/sha256/target/witness.gz deleted file mode 100644 index d5762dfc7d5..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/sha256/target/witness.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/sha2_byte/target/acir.gz b/tooling/nargo_cli/tests/acir_artifacts/sha2_byte/target/acir.gz deleted file mode 100644 index 7f6715a12af..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/sha2_byte/target/acir.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/sha2_byte/target/witness.gz b/tooling/nargo_cli/tests/acir_artifacts/sha2_byte/target/witness.gz deleted file mode 100644 index c7be868682e..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/sha2_byte/target/witness.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/signed_arithmetic/target/acir.gz b/tooling/nargo_cli/tests/acir_artifacts/signed_arithmetic/target/acir.gz deleted file mode 100644 index 82747c17417..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/signed_arithmetic/target/acir.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/signed_arithmetic/target/witness.gz b/tooling/nargo_cli/tests/acir_artifacts/signed_arithmetic/target/witness.gz deleted file mode 100644 index 6627fd7d53f..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/signed_arithmetic/target/witness.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/signed_division/target/acir.gz b/tooling/nargo_cli/tests/acir_artifacts/signed_division/target/acir.gz deleted file mode 100644 index 39a17a5a529..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/signed_division/target/acir.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/signed_division/target/witness.gz b/tooling/nargo_cli/tests/acir_artifacts/signed_division/target/witness.gz deleted file mode 100644 index a35e3011ee6..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/signed_division/target/witness.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/simple_2d_array/target/acir.gz b/tooling/nargo_cli/tests/acir_artifacts/simple_2d_array/target/acir.gz deleted file mode 100644 index 59b62b9c99d..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/simple_2d_array/target/acir.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/simple_2d_array/target/witness.gz b/tooling/nargo_cli/tests/acir_artifacts/simple_2d_array/target/witness.gz deleted file mode 100644 index 321a76492da..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/simple_2d_array/target/witness.gz and /dev/null differ diff --git 
a/tooling/nargo_cli/tests/acir_artifacts/simple_add_and_ret_arr/target/acir.gz b/tooling/nargo_cli/tests/acir_artifacts/simple_add_and_ret_arr/target/acir.gz deleted file mode 100644 index d7ec1ebc477..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/simple_add_and_ret_arr/target/acir.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/simple_add_and_ret_arr/target/witness.gz b/tooling/nargo_cli/tests/acir_artifacts/simple_add_and_ret_arr/target/witness.gz deleted file mode 100644 index 35e05b7622b..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/simple_add_and_ret_arr/target/witness.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/simple_bitwise/target/acir.gz b/tooling/nargo_cli/tests/acir_artifacts/simple_bitwise/target/acir.gz deleted file mode 100644 index 84fc5cc5de2..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/simple_bitwise/target/acir.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/simple_bitwise/target/witness.gz b/tooling/nargo_cli/tests/acir_artifacts/simple_bitwise/target/witness.gz deleted file mode 100644 index 2afa317a120..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/simple_bitwise/target/witness.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/simple_comparison/target/acir.gz b/tooling/nargo_cli/tests/acir_artifacts/simple_comparison/target/acir.gz deleted file mode 100644 index 452780c4d30..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/simple_comparison/target/acir.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/simple_comparison/target/witness.gz b/tooling/nargo_cli/tests/acir_artifacts/simple_comparison/target/witness.gz deleted file mode 100644 index 5896584fa8a..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/simple_comparison/target/witness.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/simple_mut/target/acir.gz b/tooling/nargo_cli/tests/acir_artifacts/simple_mut/target/acir.gz deleted file mode 100644 index 9338b74aabd..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/simple_mut/target/acir.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/simple_mut/target/witness.gz b/tooling/nargo_cli/tests/acir_artifacts/simple_mut/target/witness.gz deleted file mode 100644 index 9e7641cea1e..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/simple_mut/target/witness.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/simple_not/target/acir.gz b/tooling/nargo_cli/tests/acir_artifacts/simple_not/target/acir.gz deleted file mode 100644 index a47defb9fe6..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/simple_not/target/acir.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/simple_not/target/witness.gz b/tooling/nargo_cli/tests/acir_artifacts/simple_not/target/witness.gz deleted file mode 100644 index a8e277ea795..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/simple_not/target/witness.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/simple_print/target/acir.gz b/tooling/nargo_cli/tests/acir_artifacts/simple_print/target/acir.gz deleted file mode 100644 index 5a2c524f26b..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/simple_print/target/acir.gz and /dev/null differ diff --git 
a/tooling/nargo_cli/tests/acir_artifacts/simple_print/target/witness.gz b/tooling/nargo_cli/tests/acir_artifacts/simple_print/target/witness.gz deleted file mode 100644 index 35e05b7622b..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/simple_print/target/witness.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/simple_program_addition/target/acir.gz b/tooling/nargo_cli/tests/acir_artifacts/simple_program_addition/target/acir.gz deleted file mode 100644 index d7ec1ebc477..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/simple_program_addition/target/acir.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/simple_program_addition/target/witness.gz b/tooling/nargo_cli/tests/acir_artifacts/simple_program_addition/target/witness.gz deleted file mode 100644 index 94ea8c8f2b1..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/simple_program_addition/target/witness.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/simple_radix/target/acir.gz b/tooling/nargo_cli/tests/acir_artifacts/simple_radix/target/acir.gz deleted file mode 100644 index 8c5fd3c0bd4..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/simple_radix/target/acir.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/simple_radix/target/witness.gz b/tooling/nargo_cli/tests/acir_artifacts/simple_radix/target/witness.gz deleted file mode 100644 index 4b051d62ee2..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/simple_radix/target/witness.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/simple_shield/target/acir.gz b/tooling/nargo_cli/tests/acir_artifacts/simple_shield/target/acir.gz deleted file mode 100644 index 1916c475919..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/simple_shield/target/acir.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/simple_shield/target/witness.gz b/tooling/nargo_cli/tests/acir_artifacts/simple_shield/target/witness.gz deleted file mode 100644 index 171330f5142..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/simple_shield/target/witness.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/simple_shift_left_right/target/acir.gz b/tooling/nargo_cli/tests/acir_artifacts/simple_shift_left_right/target/acir.gz deleted file mode 100644 index 6bbdaef4a9b..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/simple_shift_left_right/target/acir.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/simple_shift_left_right/target/witness.gz b/tooling/nargo_cli/tests/acir_artifacts/simple_shift_left_right/target/witness.gz deleted file mode 100644 index 6f15adc6525..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/simple_shift_left_right/target/witness.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/slice_dynamic_index/target/acir.gz b/tooling/nargo_cli/tests/acir_artifacts/slice_dynamic_index/target/acir.gz deleted file mode 100644 index 7b605a2b87b..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/slice_dynamic_index/target/acir.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/slice_dynamic_index/target/witness.gz b/tooling/nargo_cli/tests/acir_artifacts/slice_dynamic_index/target/witness.gz deleted file mode 100644 index 148355f9335..00000000000 Binary files 
a/tooling/nargo_cli/tests/acir_artifacts/slice_dynamic_index/target/witness.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/slice_struct_field/target/acir.gz b/tooling/nargo_cli/tests/acir_artifacts/slice_struct_field/target/acir.gz deleted file mode 100644 index 6b1f189a331..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/slice_struct_field/target/acir.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/slice_struct_field/target/witness.gz b/tooling/nargo_cli/tests/acir_artifacts/slice_struct_field/target/witness.gz deleted file mode 100644 index f404e58ade3..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/slice_struct_field/target/witness.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/slices/target/acir.gz b/tooling/nargo_cli/tests/acir_artifacts/slices/target/acir.gz deleted file mode 100644 index 7a053fcb196..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/slices/target/acir.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/slices/target/witness.gz b/tooling/nargo_cli/tests/acir_artifacts/slices/target/witness.gz deleted file mode 100644 index 359b2f75601..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/slices/target/witness.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/strings/target/acir.gz b/tooling/nargo_cli/tests/acir_artifacts/strings/target/acir.gz deleted file mode 100644 index 424f4bd2d0f..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/strings/target/acir.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/strings/target/witness.gz b/tooling/nargo_cli/tests/acir_artifacts/strings/target/witness.gz deleted file mode 100644 index 72a93aabbfe..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/strings/target/witness.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/struct/target/acir.gz b/tooling/nargo_cli/tests/acir_artifacts/struct/target/acir.gz deleted file mode 100644 index e9de8adcb38..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/struct/target/acir.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/struct/target/witness.gz b/tooling/nargo_cli/tests/acir_artifacts/struct/target/witness.gz deleted file mode 100644 index a8e277ea795..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/struct/target/witness.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/struct_array_inputs/target/acir.gz b/tooling/nargo_cli/tests/acir_artifacts/struct_array_inputs/target/acir.gz deleted file mode 100644 index f66ed17a0cf..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/struct_array_inputs/target/acir.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/struct_array_inputs/target/witness.gz b/tooling/nargo_cli/tests/acir_artifacts/struct_array_inputs/target/witness.gz deleted file mode 100644 index 82307dcb96e..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/struct_array_inputs/target/witness.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/struct_fields_ordering/target/acir.gz b/tooling/nargo_cli/tests/acir_artifacts/struct_fields_ordering/target/acir.gz deleted file mode 100644 index 8ddb62e8799..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/struct_fields_ordering/target/acir.gz and /dev/null differ diff --git 
a/tooling/nargo_cli/tests/acir_artifacts/struct_fields_ordering/target/witness.gz b/tooling/nargo_cli/tests/acir_artifacts/struct_fields_ordering/target/witness.gz deleted file mode 100644 index e2eb3145306..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/struct_fields_ordering/target/witness.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/struct_inputs/target/acir.gz b/tooling/nargo_cli/tests/acir_artifacts/struct_inputs/target/acir.gz deleted file mode 100644 index b658b4111f6..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/struct_inputs/target/acir.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/struct_inputs/target/witness.gz b/tooling/nargo_cli/tests/acir_artifacts/struct_inputs/target/witness.gz deleted file mode 100644 index b52a8644265..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/struct_inputs/target/witness.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/submodules/target/acir.gz b/tooling/nargo_cli/tests/acir_artifacts/submodules/target/acir.gz deleted file mode 100644 index 697832be207..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/submodules/target/acir.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/submodules/target/witness.gz b/tooling/nargo_cli/tests/acir_artifacts/submodules/target/witness.gz deleted file mode 100644 index 10cffba7141..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/submodules/target/witness.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/to_be_bytes/target/acir.gz b/tooling/nargo_cli/tests/acir_artifacts/to_be_bytes/target/acir.gz deleted file mode 100644 index df6294bc970..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/to_be_bytes/target/acir.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/to_be_bytes/target/witness.gz b/tooling/nargo_cli/tests/acir_artifacts/to_be_bytes/target/witness.gz deleted file mode 100644 index b2ac9601bae..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/to_be_bytes/target/witness.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/to_bytes_consistent/target/acir.gz b/tooling/nargo_cli/tests/acir_artifacts/to_bytes_consistent/target/acir.gz deleted file mode 100644 index 2371186e8fc..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/to_bytes_consistent/target/acir.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/to_bytes_consistent/target/witness.gz b/tooling/nargo_cli/tests/acir_artifacts/to_bytes_consistent/target/witness.gz deleted file mode 100644 index 610802628c6..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/to_bytes_consistent/target/witness.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/to_bytes_integration/target/acir.gz b/tooling/nargo_cli/tests/acir_artifacts/to_bytes_integration/target/acir.gz deleted file mode 100644 index 4deef489b9c..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/to_bytes_integration/target/acir.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/to_bytes_integration/target/witness.gz b/tooling/nargo_cli/tests/acir_artifacts/to_bytes_integration/target/witness.gz deleted file mode 100644 index 71d29209eba..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/to_bytes_integration/target/witness.gz and /dev/null differ diff --git 
a/tooling/nargo_cli/tests/acir_artifacts/to_le_bytes/target/acir.gz b/tooling/nargo_cli/tests/acir_artifacts/to_le_bytes/target/acir.gz deleted file mode 100644 index 02d2bd105f1..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/to_le_bytes/target/acir.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/to_le_bytes/target/witness.gz b/tooling/nargo_cli/tests/acir_artifacts/to_le_bytes/target/witness.gz deleted file mode 100644 index 610802628c6..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/to_le_bytes/target/witness.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/trait_as_return_type/target/acir.gz b/tooling/nargo_cli/tests/acir_artifacts/trait_as_return_type/target/acir.gz deleted file mode 100644 index 1d34f5becaa..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/trait_as_return_type/target/acir.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/trait_as_return_type/target/witness.gz b/tooling/nargo_cli/tests/acir_artifacts/trait_as_return_type/target/witness.gz deleted file mode 100644 index c3b8e758662..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/trait_as_return_type/target/witness.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/trait_impl_base_type/target/acir.gz b/tooling/nargo_cli/tests/acir_artifacts/trait_impl_base_type/target/acir.gz deleted file mode 100644 index 531a1baf42e..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/trait_impl_base_type/target/acir.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/trait_impl_base_type/target/witness.gz b/tooling/nargo_cli/tests/acir_artifacts/trait_impl_base_type/target/witness.gz deleted file mode 100644 index c3b8e758662..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/trait_impl_base_type/target/witness.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/traits_in_crates_1/target/acir.gz b/tooling/nargo_cli/tests/acir_artifacts/traits_in_crates_1/target/acir.gz deleted file mode 100644 index 6eb630ce2ff..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/traits_in_crates_1/target/acir.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/traits_in_crates_1/target/witness.gz b/tooling/nargo_cli/tests/acir_artifacts/traits_in_crates_1/target/witness.gz deleted file mode 100644 index 60fc9526465..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/traits_in_crates_1/target/witness.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/traits_in_crates_2/target/acir.gz b/tooling/nargo_cli/tests/acir_artifacts/traits_in_crates_2/target/acir.gz deleted file mode 100644 index 6eb630ce2ff..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/traits_in_crates_2/target/acir.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/traits_in_crates_2/target/witness.gz b/tooling/nargo_cli/tests/acir_artifacts/traits_in_crates_2/target/witness.gz deleted file mode 100644 index 60fc9526465..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/traits_in_crates_2/target/witness.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/tuple_inputs/target/acir.gz b/tooling/nargo_cli/tests/acir_artifacts/tuple_inputs/target/acir.gz deleted file mode 100644 index 79ae7dccb3d..00000000000 Binary files 
a/tooling/nargo_cli/tests/acir_artifacts/tuple_inputs/target/acir.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/tuple_inputs/target/witness.gz b/tooling/nargo_cli/tests/acir_artifacts/tuple_inputs/target/witness.gz deleted file mode 100644 index 0eb0d6f09f6..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/tuple_inputs/target/witness.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/tuples/target/acir.gz b/tooling/nargo_cli/tests/acir_artifacts/tuples/target/acir.gz deleted file mode 100644 index a053f565e5b..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/tuples/target/acir.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/tuples/target/witness.gz b/tooling/nargo_cli/tests/acir_artifacts/tuples/target/witness.gz deleted file mode 100644 index 10cffba7141..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/tuples/target/witness.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/type_aliases/target/acir.gz b/tooling/nargo_cli/tests/acir_artifacts/type_aliases/target/acir.gz deleted file mode 100644 index 7855747826f..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/type_aliases/target/acir.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/type_aliases/target/witness.gz b/tooling/nargo_cli/tests/acir_artifacts/type_aliases/target/witness.gz deleted file mode 100644 index 8137a9d31c5..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/type_aliases/target/witness.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/xor/target/acir.gz b/tooling/nargo_cli/tests/acir_artifacts/xor/target/acir.gz deleted file mode 100644 index eda28c748c5..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/xor/target/acir.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/acir_artifacts/xor/target/witness.gz b/tooling/nargo_cli/tests/acir_artifacts/xor/target/witness.gz deleted file mode 100644 index 444c8e01cd1..00000000000 Binary files a/tooling/nargo_cli/tests/acir_artifacts/xor/target/witness.gz and /dev/null differ diff --git a/tooling/nargo_cli/tests/compile_failure/no_nested_impl/src/main.nr b/tooling/nargo_cli/tests/compile_failure/no_nested_impl/src/main.nr deleted file mode 100644 index 916567a7c04..00000000000 --- a/tooling/nargo_cli/tests/compile_failure/no_nested_impl/src/main.nr +++ /dev/null @@ -1,21 +0,0 @@ -fn main() { - let a: [[[[Field; 2]; 2]; 2]; 2] = [[[[1, 2], [3, 4]], [[5, 6], [7, 8]]], [[[1, 2], [3, 4]], [[5, 6], [7, 8]]]]; - assert(a.eq(a)); -} - -trait Eq { - fn eq(self, other: Self) -> bool; -} - -impl Eq for [T; 2] where T: Eq { - fn eq(self, other: Self) -> bool { - self[0].eq(other[0]) - & self[0].eq(other[0]) - } -} -// Impl for u32 but not Field -impl Eq for u32 { - fn eq(self, other: Self) -> bool { - self == other - } -} diff --git a/tooling/nargo_cli/tests/compile_failure/workspace_missing_toml/crates/a/src/main.nr b/tooling/nargo_cli/tests/compile_failure/workspace_missing_toml/crates/a/src/main.nr deleted file mode 100644 index 550e5034a7b..00000000000 --- a/tooling/nargo_cli/tests/compile_failure/workspace_missing_toml/crates/a/src/main.nr +++ /dev/null @@ -1,3 +0,0 @@ -fn main(x : Field, y : pub Field) { - assert(x == y); -} diff --git a/tooling/nargo_cli/tests/compile_success_contract/simple_contract/src/main.nr b/tooling/nargo_cli/tests/compile_success_contract/simple_contract/src/main.nr deleted file mode 100644 
index 88edd4ac2c3..00000000000 --- a/tooling/nargo_cli/tests/compile_success_contract/simple_contract/src/main.nr +++ /dev/null @@ -1,14 +0,0 @@ -contract Foo { - fn double(x: Field) -> pub Field { - x * 2 - } - fn triple(x: Field) -> pub Field { - x * 3 - } - internal fn quadruple(x: Field) -> pub Field { - x * 4 - } - open internal fn skibbidy(x: Field) -> pub Field { - x * 5 - } -} diff --git a/tooling/nargo_cli/tests/compile_success_empty/ec_baby_jubjub/src/main.nr b/tooling/nargo_cli/tests/compile_success_empty/ec_baby_jubjub/src/main.nr deleted file mode 100644 index e7f32999454..00000000000 --- a/tooling/nargo_cli/tests/compile_success_empty/ec_baby_jubjub/src/main.nr +++ /dev/null @@ -1,174 +0,0 @@ -// Tests may be checked against https://github.com/cfrg/draft-irtf-cfrg-hash-to-curve/tree/main/poc -use dep::std::ec::tecurve::affine::Curve as AffineCurve; -use dep::std::ec::tecurve::affine::Point as Gaffine; -use dep::std::ec::tecurve::curvegroup::Curve; -use dep::std::ec::tecurve::curvegroup::Point as G; - -use dep::std::ec::swcurve::affine::Point as SWGaffine; -use dep::std::ec::swcurve::curvegroup::Point as SWG; - -use dep::std::ec::montcurve::affine::Point as MGaffine; -use dep::std::ec::montcurve::curvegroup::Point as MG; - -fn main() { - // This test only makes sense if Field is the right prime field. - if 21888242871839275222246405745257275088548364400416034343698204186575808495617 == 0 { - // Define Baby Jubjub (ERC-2494) parameters in affine representation - let bjj_affine = AffineCurve::new(168700, - 168696, - Gaffine::new(995203441582195749578291179787384436505546430278305826713579947235728471134, - 5472060717959818805561601436314318772137091100104008585924551046643952123905)); - // Test addition - let p1_affine = Gaffine::new(17777552123799933955779906779655732241715742912184938656739573121738514868268, - 2626589144620713026669568689430873010625803728049924121243784502389097019475); - let p2_affine = Gaffine::new(16540640123574156134436876038791482806971768689494387082833631921987005038935, - 20819045374670962167435360035096875258406992893633759881276124905556507972311); - - let p3_affine = bjj_affine.add(p1_affine, p2_affine); - assert(p3_affine.eq(Gaffine::new(7916061937171219682591368294088513039687205273691143098332585753343424131937, - 14035240266687799601661095864649209771790948434046947201833777492504781204499))); - // Test scalar multiplication - let p4_affine = bjj_affine.mul(2, p1_affine); - assert(p4_affine.eq(Gaffine::new(6890855772600357754907169075114257697580319025794532037257385534741338397365, - 4338620300185947561074059802482547481416142213883829469920100239455078257889))); - assert(p4_affine.eq(bjj_affine.bit_mul([0, 1], p1_affine))); - // Test subtraction - let p5_affine = bjj_affine.subtract(p3_affine, p3_affine); - assert(p5_affine.eq(Gaffine::zero())); - // Check that these points are on the curve - assert(bjj_affine.contains(bjj_affine.gen) - & bjj_affine.contains(p1_affine) - & bjj_affine.contains(p2_affine) - & bjj_affine.contains(p3_affine) - & bjj_affine.contains(p4_affine) - & bjj_affine.contains(p5_affine)); - // Test CurveGroup equivalents - let bjj = bjj_affine.into_group(); // Baby Jubjub - let p1 = p1_affine.into_group(); - let p2 = p2_affine.into_group(); - let p3 = p3_affine.into_group(); - let p4 = p4_affine.into_group(); - let p5 = p5_affine.into_group(); - // Test addition - assert(p3.eq(bjj.add(p1, p2))); - // Test scalar multiplication - assert(p4.eq(bjj.mul(2, p1))); - assert(p4.eq(bjj.bit_mul([0, 1], p1))); - // Test subtraction 
- assert(G::zero().eq(bjj.subtract(p3, p3))); - assert(p5.eq(G::zero())); - // Check that these points are on the curve - assert(bjj.contains(bjj.gen) - & bjj.contains(p1) - & bjj.contains(p2) - & bjj.contains(p3) - & bjj.contains(p4) - & bjj.contains(p5)); - // Test SWCurve equivalents of the above - // First the affine representation - let bjj_swcurve_affine = bjj_affine.into_swcurve(); - - let p1_swcurve_affine = bjj_affine.map_into_swcurve(p1_affine); - let p2_swcurve_affine = bjj_affine.map_into_swcurve(p2_affine); - let p3_swcurve_affine = bjj_affine.map_into_swcurve(p3_affine); - let p4_swcurve_affine = bjj_affine.map_into_swcurve(p4_affine); - let p5_swcurve_affine = bjj_affine.map_into_swcurve(p5_affine); - // Addition - assert(p3_swcurve_affine.eq(bjj_swcurve_affine.add(p1_swcurve_affine, p2_swcurve_affine))); - // Doubling - assert(p4_swcurve_affine.eq(bjj_swcurve_affine.mul(2, p1_swcurve_affine))); - assert(p4_swcurve_affine.eq(bjj_swcurve_affine.bit_mul([0, 1], p1_swcurve_affine))); - // Subtraction - assert(SWGaffine::zero().eq(bjj_swcurve_affine.subtract(p3_swcurve_affine, p3_swcurve_affine))); - assert(p5_swcurve_affine.eq(SWGaffine::zero())); - // Check that these points are on the curve - assert(bjj_swcurve_affine.contains(bjj_swcurve_affine.gen) - & bjj_swcurve_affine.contains(p1_swcurve_affine) - & bjj_swcurve_affine.contains(p2_swcurve_affine) - & bjj_swcurve_affine.contains(p3_swcurve_affine) - & bjj_swcurve_affine.contains(p4_swcurve_affine) - & bjj_swcurve_affine.contains(p5_swcurve_affine)); - // Then the CurveGroup representation - let bjj_swcurve = bjj.into_swcurve(); - - let p1_swcurve = bjj.map_into_swcurve(p1); - let p2_swcurve = bjj.map_into_swcurve(p2); - let p3_swcurve = bjj.map_into_swcurve(p3); - let p4_swcurve = bjj.map_into_swcurve(p4); - let p5_swcurve = bjj.map_into_swcurve(p5); - // Addition - assert(p3_swcurve.eq(bjj_swcurve.add(p1_swcurve, p2_swcurve))); - // Doubling - assert(p4_swcurve.eq(bjj_swcurve.mul(2, p1_swcurve))); - assert(p4_swcurve.eq(bjj_swcurve.bit_mul([0, 1], p1_swcurve))); - // Subtraction - assert(SWG::zero().eq(bjj_swcurve.subtract(p3_swcurve, p3_swcurve))); - assert(p5_swcurve.eq(SWG::zero())); - // Check that these points are on the curve - assert(bjj_swcurve.contains(bjj_swcurve.gen) - & bjj_swcurve.contains(p1_swcurve) - & bjj_swcurve.contains(p2_swcurve) - & bjj_swcurve.contains(p3_swcurve) - & bjj_swcurve.contains(p4_swcurve) - & bjj_swcurve.contains(p5_swcurve)); - // Test MontCurve conversions - // First the affine representation - let bjj_montcurve_affine = bjj_affine.into_montcurve(); - - let p1_montcurve_affine = p1_affine.into_montcurve(); - let p2_montcurve_affine = p2_affine.into_montcurve(); - let p3_montcurve_affine = p3_affine.into_montcurve(); - let p4_montcurve_affine = p4_affine.into_montcurve(); - let p5_montcurve_affine = p5_affine.into_montcurve(); - // Addition - assert(p3_montcurve_affine.eq(bjj_montcurve_affine.add(p1_montcurve_affine, p2_montcurve_affine))); - // Doubling - assert(p4_montcurve_affine.eq(bjj_montcurve_affine.mul(2, p1_montcurve_affine))); - assert(p4_montcurve_affine.eq(bjj_montcurve_affine.bit_mul([0, 1], p1_montcurve_affine))); - // Subtraction - assert(MGaffine::zero().eq(bjj_montcurve_affine.subtract(p3_montcurve_affine, p3_montcurve_affine))); - assert(p5_montcurve_affine.eq(MGaffine::zero())); - // Check that these points are on the curve - assert(bjj_montcurve_affine.contains(bjj_montcurve_affine.gen) - & bjj_montcurve_affine.contains(p1_montcurve_affine) - & 
bjj_montcurve_affine.contains(p2_montcurve_affine) - & bjj_montcurve_affine.contains(p3_montcurve_affine) - & bjj_montcurve_affine.contains(p4_montcurve_affine) - & bjj_montcurve_affine.contains(p5_montcurve_affine)); - // Then the CurveGroup representation - let bjj_montcurve = bjj.into_montcurve(); - - let p1_montcurve = p1_montcurve_affine.into_group(); - let p2_montcurve = p2_montcurve_affine.into_group(); - let p3_montcurve = p3_montcurve_affine.into_group(); - let p4_montcurve = p4_montcurve_affine.into_group(); - let p5_montcurve = p5_montcurve_affine.into_group(); - // Addition - assert(p3_montcurve.eq(bjj_montcurve.add(p1_montcurve, p2_montcurve))); - // Doubling - assert(p4_montcurve.eq(bjj_montcurve.mul(2, p1_montcurve))); - assert(p4_montcurve.eq(bjj_montcurve.bit_mul([0, 1], p1_montcurve))); - // Subtraction - assert(MG::zero().eq(bjj_montcurve.subtract(p3_montcurve, p3_montcurve))); - assert(p5_montcurve.eq(MG::zero())); - // Check that these points are on the curve - assert(bjj_montcurve.contains(bjj_montcurve.gen) - & bjj_montcurve.contains(p1_montcurve) - & bjj_montcurve.contains(p2_montcurve) - & bjj_montcurve.contains(p3_montcurve) - & bjj_montcurve.contains(p4_montcurve) - & bjj_montcurve.contains(p5_montcurve)); - // Elligator 2 map-to-curve - let ell2_pt_map = bjj_affine.elligator2_map(27); - - assert(ell2_pt_map.eq(MGaffine::new(7972459279704486422145701269802978968072470631857513331988813812334797879121, - 8142420778878030219043334189293412482212146646099536952861607542822144507872).into_tecurve())); - // SWU map-to-curve - let swu_pt_map = bjj_affine.swu_map(5, 27); - - assert(swu_pt_map.eq(bjj_affine.map_from_swcurve( - SWGaffine::new(2162719247815120009132293839392097468339661471129795280520343931405114293888, - 5341392251743377373758788728206293080122949448990104760111875914082289313973) - ))); - } -} diff --git a/tooling/nargo_cli/tests/compile_success_empty/impl_with_where_clause/src/main.nr b/tooling/nargo_cli/tests/compile_success_empty/impl_with_where_clause/src/main.nr deleted file mode 100644 index de3078be8ba..00000000000 --- a/tooling/nargo_cli/tests/compile_success_empty/impl_with_where_clause/src/main.nr +++ /dev/null @@ -1,27 +0,0 @@ -fn main() { - let array: [Field; 3] = [1, 2, 3]; - assert(array.eq(array)); - // Ensure this still works if we have to infer the type of the integer literals - let array = [1, 2, 3]; - assert(array.eq(array)); -} - -trait Eq { - fn eq(self, other: Self) -> bool; -} - -impl Eq for [T; 3] where T: Eq { - fn eq(self, other: Self) -> bool { - let mut ret = true; - for i in 0 .. 
self.len() { - ret &= self[i].eq(other[i]); - } - ret - } -} - -impl Eq for Field { - fn eq(self, other: Field) -> bool { - self == other - } -} diff --git a/tooling/nargo_cli/tests/compile_success_empty/regression_2099/src/main.nr b/tooling/nargo_cli/tests/compile_success_empty/regression_2099/src/main.nr deleted file mode 100644 index 185a6426cb3..00000000000 --- a/tooling/nargo_cli/tests/compile_success_empty/regression_2099/src/main.nr +++ /dev/null @@ -1,34 +0,0 @@ -use dep::std::ec::tecurve::affine::Curve as AffineCurve; -use dep::std::ec::tecurve::affine::Point as Gaffine; -use dep::std::ec::tecurve::curvegroup::Curve; -use dep::std::ec::tecurve::curvegroup::Point as G; - -use dep::std::ec::swcurve::affine::Point as SWGaffine; -use dep::std::ec::swcurve::curvegroup::Point as SWG; - -use dep::std::ec::montcurve::affine::Point as MGaffine; -use dep::std::ec::montcurve::curvegroup::Point as MG; - -fn main() { - // Define Baby Jubjub (ERC-2494) parameters in affine representation - let bjj_affine = AffineCurve::new(168700, - 168696, - Gaffine::new(995203441582195749578291179787384436505546430278305826713579947235728471134, - 5472060717959818805561601436314318772137091100104008585924551046643952123905)); - // Test addition - let p1_affine = Gaffine::new(17777552123799933955779906779655732241715742912184938656739573121738514868268, - 2626589144620713026669568689430873010625803728049924121243784502389097019475); - let p2_affine = Gaffine::new(16540640123574156134436876038791482806971768689494387082833631921987005038935, - 20819045374670962167435360035096875258406992893633759881276124905556507972311); - let _p3_affine = bjj_affine.add(p1_affine, p2_affine); - // Test SWCurve equivalents of the above - // First the affine representation - let bjj_swcurve_affine = bjj_affine.into_swcurve(); - - let p1_swcurve_affine = bjj_affine.map_into_swcurve(p1_affine); - let p2_swcurve_affine = bjj_affine.map_into_swcurve(p2_affine); - - let _p3_swcurve_affine_from_add = bjj_swcurve_affine.add(p1_swcurve_affine, p2_swcurve_affine); - // Check that these points are on the curve - assert(bjj_swcurve_affine.contains(p1_swcurve_affine)); -} diff --git a/tooling/nargo_cli/tests/compile_success_empty/trait_default_implementation/src/main.nr b/tooling/nargo_cli/tests/compile_success_empty/trait_default_implementation/src/main.nr deleted file mode 100644 index e1f29ce3f48..00000000000 --- a/tooling/nargo_cli/tests/compile_success_empty/trait_default_implementation/src/main.nr +++ /dev/null @@ -1,26 +0,0 @@ -use dep::std; - -trait Default { - fn default(x: Field, y: Field) -> Self; - - fn method2(x: Field) -> Field { - x - } - -} - -struct Foo { - bar: Field, - array: [Field; 2], -} - -impl Default for Foo { - fn default(x: Field,y: Field) -> Self { - Self { bar: x, array: [x,y] } - } -} - -fn main(x: Field) { - let first = Foo::method2(x); - assert(first == x); -} diff --git a/tooling/nargo_cli/tests/compile_success_empty/trait_override_implementation/src/main.nr b/tooling/nargo_cli/tests/compile_success_empty/trait_override_implementation/src/main.nr deleted file mode 100644 index a385efc63fd..00000000000 --- a/tooling/nargo_cli/tests/compile_success_empty/trait_override_implementation/src/main.nr +++ /dev/null @@ -1,71 +0,0 @@ -use dep::std; - -trait Default { - fn default(x: Field, y: Field) -> Self; - - fn method2(x: Field) -> Field { - x - } -} - -struct Foo { - bar: Field, - array: [Field; 2], -} - -impl Default for Foo { - fn default(x: Field,y: Field) -> Self { - Self { bar: x, array: [x,y] } - } - - fn 
method2(x: Field) -> Field { - x * 3 - } -} - -trait F { - fn f1(self) -> Field; - fn f2(self) -> Field { 2 } - fn f3(self) -> Field { 3 } - fn f4(self) -> Field { 4 } - fn f5(self) -> Field { 5 } -} - -struct Bar {} - -impl F for Bar { - fn f5(self) -> Field { 50 } - fn f1(self) -> Field { 10 } - fn f3(self) -> Field { 30 } -} -// Impls on mutable references are temporarily disabled -// impl F for &mut Bar { -// fn f1(self) -> Field { 101 } -// fn f5(self) -> Field { 505 } -// } -fn main(x: Field) { - let first = Foo::method2(x); - assert(first == 3 * x); - - let bar = Bar {}; - - assert(bar.f1() == 10, "1"); - assert(bar.f2() == 2, "2"); - assert(bar.f3() == 30, "3"); - assert(bar.f4() == 4, "4"); - assert(bar.f5() == 50, "5"); - - let mut bar_mut = Bar {}; - // Impls on mutable references are temporarily disabled - // assert_eq((&mut bar_mut).f1(), 101); - // assert((&mut bar_mut).f2() == 2, "7"); - // assert((&mut bar_mut).f3() == 3, "8"); - // assert((&mut bar_mut).f4() == 4, "9"); - // assert((&mut bar_mut).f5() == 505, "10"); - assert(bar_mut.f1() == 10, "10"); - assert(bar_mut.f2() == 2, "12"); - assert(bar_mut.f3() == 30, "13"); - assert(bar_mut.f4() == 4, "14"); - assert(bar_mut.f5() == 50, "15"); -} - diff --git a/tooling/nargo_cli/tests/compile_success_empty/trait_static_methods/src/main.nr b/tooling/nargo_cli/tests/compile_success_empty/trait_static_methods/src/main.nr deleted file mode 100644 index 0150da68315..00000000000 --- a/tooling/nargo_cli/tests/compile_success_empty/trait_static_methods/src/main.nr +++ /dev/null @@ -1,41 +0,0 @@ -trait ATrait { - fn asd() -> Self; - - fn static_method() -> Field { - Self::static_method_2() - } - - fn static_method_2() -> Field { - 100 - } -} - -struct Foo { - x: Field -} -impl ATrait for Foo { - fn asd() -> Self { - // This should pass as Self should be bound to Foo while typechecking this - Foo{x: 100} - } -} - -struct Bar { - x: Field -} -impl ATrait for Bar { - // The trait method is declared as returning `Self` - // but explicitly specifying the type in the impl should work - fn asd() -> Bar { - Bar{x: 100} - } - - fn static_method_2() -> Field { - 200 - } -} - -fn main() { - assert(Foo::static_method() == 100); - assert(Bar::static_method() == 200); -} diff --git a/tooling/nargo_cli/tests/compile_success_empty/traits/src/main.nr b/tooling/nargo_cli/tests/compile_success_empty/traits/src/main.nr deleted file mode 100644 index 784ff01a883..00000000000 --- a/tooling/nargo_cli/tests/compile_success_empty/traits/src/main.nr +++ /dev/null @@ -1,21 +0,0 @@ -use dep::std; - -trait Default { - fn default(x: Field, y: Field) -> Self; -} - -struct Foo { - bar: Field, - array: [Field; 2], -} - -impl Default for Foo { - fn default(x: Field,y: Field) -> Self { - Self { bar: x, array: [x,y] } - } -} - -fn main(x: Field, y: Field) { - let first = Foo::default(x, y); - assert(first.bar == x); -} diff --git a/tooling/nargo_cli/tests/execution_success/1_mul/Nargo.toml b/tooling/nargo_cli/tests/execution_success/1_mul/Nargo.toml deleted file mode 100644 index a0fd8d98027..00000000000 --- a/tooling/nargo_cli/tests/execution_success/1_mul/Nargo.toml +++ /dev/null @@ -1,6 +0,0 @@ -[package] -name = "1_mul" -type = "bin" -authors = [""] - -[dependencies] diff --git a/tooling/nargo_cli/tests/execution_success/array_dynamic/src/main.nr b/tooling/nargo_cli/tests/execution_success/array_dynamic/src/main.nr deleted file mode 100644 index 9e21334e510..00000000000 --- a/tooling/nargo_cli/tests/execution_success/array_dynamic/src/main.nr +++ /dev/null @@ -1,30 
+0,0 @@ -fn main(x: [u32; 5], mut z: u32, t: u32, index: [Field;5], index2: [Field;5], offset: Field, sublen: Field) { - let idx = (z - 5 * t - 5) as Field; - //dynamic array test - dyn_array(x, idx, idx - 3); - //regression for issue 1283 - let mut s = 0; - let x3 = [246, 159, 32, 176, 8]; - for i in 0..5 { - s += x3[index[i]]; - } - assert(s != 0); - - if 3 < (sublen as u32) { - assert(index[offset + 3] == index2[3]); - } -} - -fn dyn_array(mut x: [u32; 5], y: Field, z: Field) { - assert(x[y] == 111); - assert(x[z] == 101); - x[z] = 0; - assert(x[y] == 111); - assert(x[1] == 0); - if y as u32 < 10 { - x[y] = x[y] - 2; - } else { - x[y] = 0; - } - assert(x[4] == 109); -} diff --git a/tooling/nargo_cli/tests/execution_success/bit_shifts_comptime/src/main.nr b/tooling/nargo_cli/tests/execution_success/bit_shifts_comptime/src/main.nr deleted file mode 100644 index 8afed4c1e1e..00000000000 --- a/tooling/nargo_cli/tests/execution_success/bit_shifts_comptime/src/main.nr +++ /dev/null @@ -1,21 +0,0 @@ -fn main(x: u64) { - let two: u64 = 2; - let three: u64 = 3; - // shifts on constant values - assert(two << 2 == 8); - assert((two << 3) / 8 == two); - assert((three >> 1) == 1); - // shifts on runtime values - assert(x << 1 == 128); - assert(x >> 2 == 16); - - regression_2250(); -} - -fn regression_2250() { - let a: u1 = 1 >> 1; - assert(a == 0); - - let b: u32 = 1 >> 32; - assert(b == 0); -} diff --git a/tooling/nargo_cli/tests/execution_success/bit_shifts_runtime/src/main.nr b/tooling/nargo_cli/tests/execution_success/bit_shifts_runtime/src/main.nr deleted file mode 100644 index a2c873a7e7f..00000000000 --- a/tooling/nargo_cli/tests/execution_success/bit_shifts_runtime/src/main.nr +++ /dev/null @@ -1,8 +0,0 @@ -fn main(x: u64, y: u64) { - // runtime shifts on compile-time known values - assert(64 << y == 128); - assert(64 >> y == 32); - // runtime shifts on runtime values - assert(x << y == 128); - assert(x >> y == 32); -} diff --git a/tooling/nargo_cli/tests/execution_success/brillig_ecdsa/Nargo.toml b/tooling/nargo_cli/tests/execution_success/brillig_ecdsa/Nargo.toml deleted file mode 100644 index 972dd9ce93b..00000000000 --- a/tooling/nargo_cli/tests/execution_success/brillig_ecdsa/Nargo.toml +++ /dev/null @@ -1,6 +0,0 @@ -[package] -name = "brillig_ecdsa" -type = "bin" -authors = [""] - -[dependencies] diff --git a/tooling/nargo_cli/tests/execution_success/brillig_ecdsa/src/main.nr b/tooling/nargo_cli/tests/execution_success/brillig_ecdsa/src/main.nr deleted file mode 100644 index 23f017aa336..00000000000 --- a/tooling/nargo_cli/tests/execution_success/brillig_ecdsa/src/main.nr +++ /dev/null @@ -1,11 +0,0 @@ -use dep::std; -// Tests a very simple program. 
-// -// The features being tested is ecdsa in brillig -fn main(hashed_message: [u8;32], pub_key_x: [u8;32], pub_key_y: [u8;32], signature: [u8;64]) { - assert(ecdsa(hashed_message, pub_key_x, pub_key_y, signature)); -} - -unconstrained fn ecdsa(hashed_message: [u8;32], pub_key_x: [u8;32], pub_key_y: [u8;32], signature: [u8;64]) -> bool { - std::ecdsa_secp256k1::verify_signature(pub_key_x, pub_key_y, signature, hashed_message) -} diff --git a/tooling/nargo_cli/tests/execution_success/brillig_oracle/src/main.nr b/tooling/nargo_cli/tests/execution_success/brillig_oracle/src/main.nr deleted file mode 100644 index 86cf6ff1498..00000000000 --- a/tooling/nargo_cli/tests/execution_success/brillig_oracle/src/main.nr +++ /dev/null @@ -1,26 +0,0 @@ -use dep::std::slice; -// Tests oracle usage in brillig/unconstrained functions -fn main(x: Field) { - get_number_sequence_wrapper(20); -} -// TODO(#1911): This function does not need to be an oracle but acts -// as a useful test while we finalize code generation for slices in Brillig -#[oracle(get_number_sequence)] -unconstrained fn get_number_sequence(_size: Field) -> [Field] {} -// TODO(#1911) -#[oracle(get_reverse_number_sequence)] -unconstrained fn get_reverse_number_sequence(_size: Field) -> [Field] {} - -unconstrained fn get_number_sequence_wrapper(size: Field) { - let slice = get_number_sequence(size); - for i in 0..19 as u32 { - assert(slice[i] == i as Field); - } - - let reversed_slice = get_reverse_number_sequence(size); - // Regression test that we have not overwritten memory - for i in 0..19 as u32 { - assert(slice[i] == reversed_slice[19 - i]); - } -} - diff --git a/tooling/nargo_cli/tests/execution_success/brillig_scalar_mul/src/main.nr b/tooling/nargo_cli/tests/execution_success/brillig_scalar_mul/src/main.nr deleted file mode 100644 index 30c51ce0f50..00000000000 --- a/tooling/nargo_cli/tests/execution_success/brillig_scalar_mul/src/main.nr +++ /dev/null @@ -1,16 +0,0 @@ -use dep::std; - -unconstrained fn main(a: Field, a_pub_x: pub Field, a_pub_y: pub Field, b: Field, b_pub_x: pub Field, b_pub_y: pub Field) { - let mut priv_key = a; - let mut pub_x: Field = a_pub_x; - let mut pub_y: Field = a_pub_y; - if a != 1 { - // Change `a` in Prover.toml to test input `b` - priv_key = b; - pub_x = b_pub_x; - pub_y = b_pub_y; - } - let res = std::scalar_mul::fixed_base_embedded_curve(priv_key, 0); - assert(res[0] == pub_x); - assert(res[1] == pub_y); -} diff --git a/tooling/nargo_cli/tests/execution_success/brillig_schnorr/src/main.nr b/tooling/nargo_cli/tests/execution_success/brillig_schnorr/src/main.nr deleted file mode 100644 index 107b311f6ab..00000000000 --- a/tooling/nargo_cli/tests/execution_success/brillig_schnorr/src/main.nr +++ /dev/null @@ -1,19 +0,0 @@ -use dep::std; -// Note: If main has any unsized types, then the verifier will never be able -// to figure out the circuit instance -unconstrained fn main(message: [u8; 10], message_field: Field, pub_key_x: Field, pub_key_y: Field, signature: [u8; 64]) { - // Regression for issue #2421 - // We want to make sure that we can accurately verify a signature whose message is a slice vs. an array - let message_field_bytes = message_field.to_be_bytes(10); - for i in 0..10 { - assert(message[i] == message_field_bytes[i]); - } - // Is there ever a situation where someone would want - // to ensure that a signature was invalid? 
- // Check that passing a slice as the message is valid - let valid_signature = std::schnorr::verify_signature(pub_key_x, pub_key_y, signature, message_field_bytes); - assert(valid_signature); - // Check that passing an array as the message is valid - let valid_signature = std::schnorr::verify_signature(pub_key_x, pub_key_y, signature, message); - assert(valid_signature); -} diff --git a/tooling/nargo_cli/tests/execution_success/conditional_regression_underflow/Nargo.toml b/tooling/nargo_cli/tests/execution_success/conditional_regression_underflow/Nargo.toml deleted file mode 100644 index 54a082081f7..00000000000 --- a/tooling/nargo_cli/tests/execution_success/conditional_regression_underflow/Nargo.toml +++ /dev/null @@ -1,6 +0,0 @@ -[package] -name = "conditional_underflow" -type = "bin" -authors = [""] - -[dependencies] diff --git a/tooling/nargo_cli/tests/execution_success/conditional_regression_underflow/src/main.nr b/tooling/nargo_cli/tests/execution_success/conditional_regression_underflow/src/main.nr deleted file mode 100644 index be70b67be5e..00000000000 --- a/tooling/nargo_cli/tests/execution_success/conditional_regression_underflow/src/main.nr +++ /dev/null @@ -1,15 +0,0 @@ -// Regression test for https://github.com/noir-lang/noir/issues/3493 -fn main(x: u4) { - if x == 10 { - x + 15; - } - if x == 9 { - x << 3; - } - if x == 8 { - x * 3; - } - if x == 7 { - x - 8; - } -} \ No newline at end of file diff --git a/tooling/nargo_cli/tests/execution_success/double_verify_proof/Prover.toml b/tooling/nargo_cli/tests/execution_success/double_verify_proof/Prover.toml deleted file mode 100644 index 3e6d996d0e1..00000000000 --- a/tooling/nargo_cli/tests/execution_success/double_verify_proof/Prover.toml +++ /dev/null @@ -1,12 +0,0 @@ -input_aggregation_object = ["0", "0", "0", "0", "0", "0", "0", "0", "0", "0", "0", "0", "0", "0", "0", "0"] -# key_hash = "0x17a5d2b205c1bf45b015ba33bc2f0beb7fbb36682f31f953b8d4d093c8644be5" -# proof = 
["0x0000000000000000000000000000000000000000000000000000000000000003","0x0000000000000000000000000000008f66908323784e7c5259f4eefab77ca881","0x0000000000000000000000000000000000109cac7b943f9b737d7b023d4f5d8a","0x000000000000000000000000000000e991d3ac0a68a252bd3cd09cd1b43fe1b4","0x000000000000000000000000000000000014213d346a426777351fdabaa0fa26","0x000000000000000000000000000000e4603692a76af630015380b08d0e13c239","0x0000000000000000000000000000000000149e7046461203c1b72392bb93c262","0x000000000000000000000000000000c27ffc719f223ca6ef5476a5b66f03a4a8","0x000000000000000000000000000000000003718c62098243e3c1f38090e61753","0x000000000000000000000000000000749492aa98716ce5bf7c06e5c2a0a8a528","0x000000000000000000000000000000000018e4c7d33848bccdc3eed924bfaa15","0x0000000000000000000000000000004e10a37f41fd7c4fe32982daa498530d62","0x00000000000000000000000000000000001b76c8c59489c63f11280187109dd7","0x0000000000000000000000000000002a6cd84d3b8537a7c3cb0cb9461f02e4bb","0x0000000000000000000000000000000000197e524fd48ca5ccb30d6c5ffe689d","0x0000000000000000000000000000000013bf25498ce1f51078c06dac450c0325","0x000000000000000000000000000000000018d347b88a0c32e32571deb9b40466","0x00000000000000000000000000000060d496191298eb1b1c2ce18f9a4afcfc55","0x000000000000000000000000000000000024e11b8e8fcb45b8628cb9cc565513","0x00000000000000000000000000000004e976f6d12fff6250eea2d21c570d3d6a","0x00000000000000000000000000000000000967dbd89d2c7dc0121ea71ded7203","0x000000000000000000000000000000d96f810588c0daa43e88d765a3f82ea9b7","0x00000000000000000000000000000000001f69d7015fe6694bd1d4d61049dae9","0x000000000000000000000000000000c539910d0f81a890fa3d996a676db39640","0x000000000000000000000000000000000026d8b64020a669e24f740b4eba633a","0x000000000000000000000000000000c53cc90f99c40eb5d449f38180d9e9c8b6","0x00000000000000000000000000000000001071ddf2bacc2367dfb2c5084b7dd1","0x0000000000000000000000000000001b9791181eb174db1a50d903fa9fea9999","0x0000000000000000000000000000000000118c059d41a95311a5c361c6a9a00d","0x0000000000000000000000000000003caf4ad04551a3ffba19cc6a1fff457370","0x00000000000000000000000000000000001dc4d8be804c5289fbf54183f93149","0x00000000000000000000000000000050766764bb82799df5172043c515956263","0x00000000000000000000000000000000000a5849adbac9c33e53571b29aab672","0x0000000000000000000000000000002edb078e589d44ac93e283680b34adf574","0x000000000000000000000000000000000015e9e187c4fb683ca78d52a2a0301b","0x00000000000000000000000000000048ac0f1db3575ed0f84d61ab6cbdd53d9f","0x00000000000000000000000000000000002ddc4243fbc7104347d29a823194ae","0x00000000000000000000000000000070ad92aeea2bdea4277ffdfa3d3ed93443","0x000000000000000000000000000000000003bad3e3aae806c278094cb682a8e0","0x000000000000000000000000000000fb74b99eb44c80d8f7ba83d7e9e2efa5c0","0x00000000000000000000000000000000002819cc14e399c1dadc4f921e2a58fa","0x000000000000000000000000000000e3938bb3e7866c6499ec44fb72549efca0","0x00000000000000000000000000000000002d8264d5cdc2109da12e1864aca147","0x000000000000000000000000000000b12d7828cacbe86350f0b171b0cb0d1cd4","0x0000000000000000000000000000000000244155cecb315326f05641cac9475c","0x070b059f9471e22eed5a9ea08093dba3c59c941634611884c5f0f1a1a6b93e5c","0x118124ada70b46c7d23a6ca8b90d545f30e028b1689fe5c55c86bf55f42e0401","0x25dca6ad78c03ce1f7783cc39a5ea5ef90b318d5edf4f1367d4451c1db3c113e","0x0d9557b4e661b5c53b84fcb41f05d15c0ca112430db16f56d0ab54032fffe734","0x06aedf13a3b182784f4d64162f4706759f95e42fc8dc17d1b8b5f551dafdc007","0x132f97ab5f1f8817689b17e336125c5273d6970a1b3b0901fd26d193a4d2dce4","0x1b0c9980b5b09343e807d82bad307a06
d1bfadcd1fa50be666c2907d31ef43e1","0x1ce7000cb24ecc1f2ff9d9507b2290513fed574a84d893811cb54a3c0bc51ccc","0x2e1df58d36444c2dfda98991847422f56ef66f079d26eb7f8110d0d7c46b2c0c","0x166c2f821be7c3e3e4440980e73770840194f14d003778b7fbcdd2690776255c","0x1ae8390287e2eb117851a5011575ba3801e5ee5c66a8f7432e2a2fb13c276008","0x047c09806bfb272d940d9b802e3e49b40050fd0f66717e8b325c5d4834b13aac","0x08f81300d7f64e5b281b37005c7c936800a1fa1ecce2fd1664b8ba9069627558","0x2ed7260e623b68d580304751341bb72141314b881e9e8dade626bf5cde8a077c","0x23e04c035fd9396ca06cdc0171f24da00287e87b338bf45992e2ea41a589d560","0x285c5583cbd4609a347a7781a030975402d8e58a99fd72e4c53f4789da3b100c","0x2cd85f0437cf87c7c8881301ce6ee1080329e29a006ef16ff79ba4d20eec4ab8","0x12eb74da267caf98c494db16c87f90f510fdca1f8095b40156a6f0bb066e3400","0x2267004535c434df4cbee1a356e48b1f317cb849ac69c3eb94e377d2274f1e08","0x2c9d4ce9d1d8b8cf1cb90cbc3e121f570c8260c53b48ed2070d474d5a6f12c4e","0x2c6c83ffaad6f30da5aa696973ccfbd0337cb7a5e5f9e5fc8e826dce21e8f51c","0x056c23922e9435f93853044ba96a1c12db97f47053987df5187269ce884ec00f","0x09e82d129a8f5d26cc609fcbd7a865c6dbe8f17fba09fc804acec716bcfffabb","0x0e643693068a8454606e3b4c05e6af7adc39ee8f207b7b0b7d2b245ef1b13567","0x12e040137285ab81f47bd6cc3424f92edc8aeb9e86ecf996af8781a726627013","0x00f01a11c2990ecba44568cb7b2bd25edb46f760ed26ff69e6160c86539d8563","0x28a91699dfa4e85e18e8621d39a147a40930701d2d88546e01adb71a1f8e407f","0x000000000000000000000000000000009d7cc0b2d2bdef816f4fb17e7a6f6c08","0x00000000000000000000000000000000bcfc1a7030171f681f2c6e97c61f4e70","0x00000000000000000000000000000000dc7b742d8d704f4ecf092bb111cf30d8","0x13b099dc4869006fde9df04bf36f4c8f08d4491cc6229ac36a98f93214c79b6a","0x008fa95e0d431d617d8d3288fde7f8bbe36492019943e2018564633528575892","0x0fc66c06bdff20dba4dc491d5cd13cc209c4d2d9e29802db665bb397c2a4e754","0x0fe48ae6623efbaadce6d6b75b87be6caa19c2fd4d94a74149ceb6bcb88251e1","0x1bb41738028855cb5e0085edcd62cff208121427ea19a57425a0cf6bb68deb93","0x0fbc646333ddc21ab1a77b01a35973a56d5a617c482a21a231497fd3cc9b74c1","0x19ab9eaa1a902faff2dd9baa19ff00cea9086baa8c28bcdb95f7a3549eaf09b4","0x25e2b7a7643df4d964cd787b593888b00abfe3ce79e8deaa6d68fd1686b84bcb","0x2d134d7eea07414451e54854d61d5b71245434d0638bba9a1184914f65f2521c","0x03df94e38e9eed8586acd277d180d5d515b49d89d37525f871be2ff4552c586c","0x0b102abb146839f073c4a2514e65a8962f48ee8bbd1801e815d9c42d34665ebd","0x000000000000000000000000000000b7a4109cb92b514401fb63667454a9c892","0x0000000000000000000000000000000000016fce7f8ef56fef466636f3fbc3de","0x00000000000000000000000000000005f2d1c401a7aa14ac7e9fce7c21ec2e1a","0x00000000000000000000000000000000000621322c74c5d0da5eb71a4f2b046f","0x00000000000000000000000000000073d774ad7f61b1c1b93800f7838cca6bde","0x00000000000000000000000000000000002d603cc025e6af192394df113d4677","0x00000000000000000000000000000066a2a702b4d4b1a24af9c56cacb18ae4b8","0x00000000000000000000000000000000000124a3c25b427cfb6fca07525c5b8d"] -# public_inputs = ["0x0000000000000000000000000000000000000000000000000000000000000003"] -# verification_key = 
["0x2b337de1c8c14f22ec9b9e2f96afef3652627366f8170a0a948dad4ac1bd5e80","0x0000000000000000000000000000000000000000000000000000000000000008","0x0000000000000000000000000000000000000000000000000000000000000005","0x0000000000000000000000000000000000000000000000000000000000000008","0x0000000000000000000000000000000000000000000000000000000000000001","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x00000000000000000000000000000092139c61bae1a44f0fc7689507414be688","0x00000000000000000000000000000000000160ce4e279582f91bde4f03f5e9a2","0x0000000000000000000000000000005dc2d37f658c3b2d60f24740eb13b65d79","0x000000000000000000000000000000000007e3e8a5d98a1177ec85bf88f163a5","0x000000000000000000000000000000dc3035fbd7ff16412a8fd7da587a935298","0x000000000000000000000000000000000023d08e2817ac16990004ed11d8fc66","0x000000000000000000000000000000356a5ad59c646c746a8d09f5d154e47c4f","0x00000000000000000000000000000000000708529196af3c8e16ffa580c26182","0x0000000000000000000000000000002ddfe70eb7a1280596e8e4a804f118a6dd","0x000000000000000000000000000000000013757e15a0905f298303784a161b21","0x000000000000000000000000000000a23a729df796935c7824e3a26be794829b","0x000000000000000000000000000000000005775b6c146c4a59856e869fe5a70e","0x000000000000000000000000000000eef0c9e088fd2d45aa40311082d1f2809b","0x00000000000000000000000000000000001d539ccbfc556d0ad59307a218de65","0x000000000000000000000000000000a2c848beceb6ab7806fd3b88037b8410fc","0x0000000000000000000000000000000000177004deeb1f9d401fd7b1af1a5ac8","0x0000000000000000000000000000002508eb63672a733f20de1a97644be4f540","0x00000000000000000000000000000000000d82d51f2f75d806285fd248c819b8","0x000000000000000000000000000000d002f9100cbba8a29f13b11513c53c59d0","0x000000000000000000000000000000000006cd3b0e3460533b9e5ea2cdc0fcbb","0x000000000000000000000000000000f45ea38a93b2f810c5633ddb54927c1c96","0x000000000000000000000000000000000021791de65f9a28ec7024b1a87ab4f3","0x000000000000000000000000000000926511a0439502c86885a8c6f0327aa7ad","0x000000000000000000000000000000000029fa14a969c5d81ed3abbbfb11220a","0x000000000000000000000000000000b84c3258e8206f560e5b5b18cbeafef87e","0x00000000000000000000000000000000002a910445cd8fc895e5d235cd8ea185","0x000000000000000000000000000000887e67f15e84bcb8507a5064a363f6043b","0x000000000000000000000000000000000014dc6643d801c3ef27c2066b6e2bb4","0x000000000000000000000000000000e38e900b42c314ba803088e8fbf125203f","0x00000000000000000000000000000000
0020690fd4869db418306046b38161dc","0x0000000000000000000000000000001e2fa856bf7951b8292b1e88185993629c","0x0000000000000000000000000000000000048a85e0bbac7c60ad3d78f601f63c","0x0000000000000000000000000000006f457719495073d3666d77a625aeab0c51","0x00000000000000000000000000000000002623ad892dc62b1fa7d0a650f0d470","0x000000000000000000000000000000dbfcc8a467e021c03b13f74a9f79c3a10c","0x0000000000000000000000000000000000295f6f10976c37bd9c6f96bb7187d5","0x000000000000000000000000000000c13ef9a937cc12420fb38d9ab8e848e85e","0x000000000000000000000000000000000003560a3b334e887532f605c9cb7628","0x0000000000000000000000000000009bcebf08a4599cdda0fb96312d4dc0c7a9","0x000000000000000000000000000000000015adc8bb1e01c835f48959d1237bd6","0x00000000000000000000000000000047762ab839e4ff63c77605a9f383da37c2","0x000000000000000000000000000000000016a8c3c53d89660cf271522cd301fb","0x000000000000000000000000000000f0c8539a0b5f94420a513f9c305b932bfe","0x00000000000000000000000000000000002957ba01d9de5638f808f88a692533","0x000000000000000000000000000000ab17c6189d67d3bf5dd2f3885de0151b6f","0x0000000000000000000000000000000000060d8aa43fdc434d1942263f364d95","0x0000000000000000000000000000005d292333b3adb497f00b4bc32d45229060","0x00000000000000000000000000000000001a1018a66221883639f2898a66f345","0x00000000000000000000000000000006555a806b1993291deba0dc44e2abf431","0x00000000000000000000000000000000000cacff7099a9d5e35a21f4a00b2dc3","0x000000000000000000000000000000f50c11ba95d349c36d143eefd12e494950","0x00000000000000000000000000000000001022e8c5f02d639bc9dd8bc4407f99","0x000000000000000000000000000000c76828795098eda73d50b4b585c60afc60","0x00000000000000000000000000000000002bf09c0ec7011e93888962f2406630","0x00000000000000000000000000000049e5c83a8978d832fb8e144548e3ca1adb","0x00000000000000000000000000000000000e0ec242c2e160a984f61ca5adf5f5","0x0000000000000000000000000000009c5d6e08a6605ab4513748ac0fa017dd1c","0x00000000000000000000000000000000001f54baa07558e5fb055bd9ba49c067","0x0000000000000000000000000000001e1ee7ee29bbb5e4b080c6091c1433ce62","0x000000000000000000000000000000000024aec62a9d9763499267dc98c33428","0x0000000000000000000000000000001a96755946ff16f0d6632365f0eb0ab4d4","0x000000000000000000000000000000000028cf3e22bcd53782ebc3e0490e27e5","0x00000000000000000000000000000043148d7d8c9ba43f2133fab4201435a364","0x0000000000000000000000000000000000234ce541f1f5117dd404cfaf01a229","0x000000000000000000000000000000a7fb95ffb461d9514a1070e2d2403982ef","0x00000000000000000000000000000000003016955028b6390f446c3fd0c5b424","0x00000000000000000000000000000008863c3b7cd7cddc20ba79ce915051c56e","0x000000000000000000000000000000000013ef666111b0be56a235983d397d2a","0x000000000000000000000000000000e3993f465fc9f56e93ac769e597b752c1c","0x0000000000000000000000000000000000217f7c4235161e9a3c16c45b6ca499","0x0000000000000000000000000000008ffa4cd96bc67b0b7df5678271e1114075","0x0000000000000000000000000000000000256467bfcb63d9fdcb5dde397757ad","0x00000000000000000000000000000054e5eb270bb64bde6e6ececadfd8c3236c","0x00000000000000000000000000000000000e52d1bd75812c33c6f3d79ee4b94c","0x000000000000000000000000000000484a2c641dce55bc2dd64ef0cd790a7fea","0x00000000000000000000000000000000000ff417d256be43e73c8b1aa85bdda3","0x0000000000000000000000000000000b72e7b7713ab5da44e0f864182e748a23","0x00000000000000000000000000000000001a221055f1625ad833a44705f5f74e","0x00000000000000000000000000000067a99a34e9b81a17ad001db02e29bcb82a","0x000000000000000000000000000000000018a6c02e398389827568fa960e86e2","0x000000000000000000000000000000bb29f26f9890d6cc6401f4921d5884edca","0
x00000000000000000000000000000000000868357b28039385c5a5058b6d358e","0x00000000000000000000000000000036fb6e229dde8edf7ec858b12d7e8be485","0x00000000000000000000000000000000001060afe929554ca473103f5e68193c","0x00000000000000000000000000000015226e07e207744c0857074dcab883af4a","0x00000000000000000000000000000000000b1c02619282755533457230b19b4a","0x0000000000000000000000000000001f2a0277e4807e6e1cbabca21dde5eb5e1","0x00000000000000000000000000000000000d928deafed363659688ed4ccdef52","0x000000000000000000000000000000363f0c994e91cecad25835338edee2294f","0x00000000000000000000000000000000002eea648c8732596b1314fe2a4d2f05","0x000000000000000000000000000000b2671d2ae51d31c1210433c3972bb64578","0x00000000000000000000000000000000000ab49886c2b94bd0bd3f6ed1dbbe2c"] -# proof_b = ["0x0000000000000000000000000000000000000000000000000000000000000003","0x0000000000000000000000000000002ab91b132e624f2a408aa8c9bf31cca8d7","0x000000000000000000000000000000000015ad57528e0f065c820cc5ad4eab81","0x0000000000000000000000000000001acb78b1b6a5c9a6ec8bf2272b463014da","0x0000000000000000000000000000000000117fd65346e04bf3666d2ab3f24c90","0x000000000000000000000000000000aad0adaf9a768ba6a178f804edac5c8943","0x000000000000000000000000000000000004a11c7d31f25c20e3af16f9b01f71","0x0000000000000000000000000000001f0ae9bb921893ce2710148eb1fcd99e39","0x0000000000000000000000000000000000123fda5008d3709f5afeda01de1930","0x000000000000000000000000000000971c2a8d0119097fd82b7a8074a14853f8","0x000000000000000000000000000000000009965b998750710678da7891d8aba6","0x0000000000000000000000000000002d6ef3813ba14a5f5202afed6b1c41de1c","0x000000000000000000000000000000000020366bfdb2f9279c43d66f90dfdf4d","0x00000000000000000000000000000041389f221eadec33e1b87518668c3bc92e","0x00000000000000000000000000000000000d3858169bb0432ab761d4be8ef03e","0x000000000000000000000000000000c1dbfe670dc912cb0fa1a0f633f81a4cef","0x00000000000000000000000000000000000fc0c403e668b0f51e07089082c32f","0x0000000000000000000000000000009a4fba9bf1369f637fd295c8bf795c9d02","0x00000000000000000000000000000000001d6d1e7286ce52401e6ea79d2cfa3d","0x0000000000000000000000000000004762bf7702ffe7a2c147e704280cd50bba","0x0000000000000000000000000000000000205797cdeaeff9a8d5ea4b95d41b1a","0x000000000000000000000000000000b3d43cc863ba8d98f51118c0db70761079","0x00000000000000000000000000000000002d2a3d10381bc6b47a693c1692b1b6","0x000000000000000000000000000000d35a69fb0e68729f71e651799c0d19e9eb","0x00000000000000000000000000000000002ade1dc7741b7f397271c10e596557","0x0000000000000000000000000000001a67b44714687085004e4142f700043298","0x00000000000000000000000000000000001bb7bbb7f45876b1d72e5d20cee106","0x00000000000000000000000000000025f1f1cbf43fad70cba255b37a19e88b0c","0x00000000000000000000000000000000000cc46b215fbd8e4b233cc74aab250b","0x0000000000000000000000000000008168026f51135fc1670664bc50e629917f","0x000000000000000000000000000000000004d822d80ba0c1bcbd4b000573c6f9","0x000000000000000000000000000000d85756249b937277eba3f5dcb89c56e7bb","0x000000000000000000000000000000000019a3a7a5b20dac138d7ddb1d499134","0x0000000000000000000000000000007621614c7ebc31a2177011f9da01668eb3","0x000000000000000000000000000000000024e9beb5d616ab120073170fc431e8","0x00000000000000000000000000000031fbf901896e958fbbed3e5c57aebbdd04","0x0000000000000000000000000000000000005ac0f10fcc255e179a40518875d4","0x0000000000000000000000000000002dab820c019bcca563b7dbdd26974653e9","0x00000000000000000000000000000000001a5655ec1a67f722b14c65d5c2197f","0x0000000000000000000000000000008e277e490196db5c19d09a9034e10c6432","0x00000000000000000000
0000000000000003f13b1af07db07eec88698d0aaf2a","0x0000000000000000000000000000002d618452e2b4c790d0551ea5863ed62e76","0x00000000000000000000000000000000001a7171e790a433a972d80218fb482d","0x0000000000000000000000000000005669975cd5bf65a739c0a35a8ab9b7963b","0x00000000000000000000000000000000000d27ffb6f00c86a0ce76a8067d1bce","0x03a0054fe9f93ab96e7c7ed6ec1ac641dffd99a1c804ee5db52cf1efa1a12c15","0x059324381c89c12c87d0f6c27963c31647721fdb02c125961da1a21cbfb3ed1c","0x04a5ead891b7c3f30329e6abcf2ac6903c3c1d8e68874f6baf3a6fc00e84533a","0x03c02f6b862734acf9d0c5133f8141b3a008c5499336a588b376a5dd86d9c837","0x1dd26b35c21c584c410df89d1fd549e7f5da9bb4fd290b7c528d92fbd652f5ad","0x2c8e7ef6f7a130769ae74d0f47aeab5c443492ef4b1ed0b3a9d61dfca80cbdda","0x2b074486c21c62e6eccf3191b3ab3c8df0fb98f0c44b9f0e9e2c281b908b83a6","0x149a6d620be135bba6bbfe8ac826df37567c8be78007e47cdcf5d6e4683d339e","0x119fdfd330036bde31af71e43bd5e191460605e4760d08a6e0ebddbdb5abfeeb","0x1713efc63c00b2de4f68e696d9d30c5603963484f4829e716de2796640864b09","0x1bb1862114cda3712c177b1b6bca0ecd9de7723925698aee83dc91ade7078d3e","0x049d965ad8ccf092dcae948491f702779a513db430e6ec7d15fa1847a6814235","0x093b2cb5b199e125b95d290923ee04ef34a27b6861cdd8fa2bf4308f4d02846a","0x2710c6cd6f14f8071976509d1002e915bfc182b843a3967134de380302423c72","0x24ecb2d6c6678496e574a4248fb813bcd289eda1873763e8afd0c23d340a11a8","0x298a49319f347529c22338a921af16346cdb2b55b81e5065c5cada84da8b53dd","0x2e27df8c780165bc9ed1cd2db3a618ac072c6909e9053ce2dbc4f2cc810c9612","0x07350f3a2e23c1ccbde0d39370159060de5b8df40ae7c58d3f9852b371f1adac","0x2fdf8bf8e2fa2acad0f6d6a3f73e7dc516e8e2d167128bf3a560972339122835","0x0d3ec457703c228d4b6cd1635df9d9bde51997d0228edef64d667cbd16f3cb70","0x148320b9ceab1f3be840dc38b0344e7db0755283d1eacf2dd472e99ee0fb473f","0x06febdcf4869a6b89fdeb0805612c53e139afc29d119a54bc3d72dc7de0f1a7b","0x0b9c542a2136974b7c8d4504e809c7b5adec39de020091c8d9d1460f84905cb0","0x1039ea84fa0387de593bd9897a00ca2d483d779232e77e45efcb5e572b119ee5","0x14d780dfd2d0787135ea6e0e0bf7cca4e28eb54663ce6ac305c5769ed192e11a","0x026127746f9cb625c3301bfbc41bc2c67949be75a032b8ceaddd1580378dd846","0x123cf1180af5fdf09444de423947c9a71790f2c85468b51ecc25fb7bf075a0d5","0x000000000000000000000000000000008419a4f769ceb72c3ac28f559331a5df","0x000000000000000000000000000000009e852c5c1891a89b79b64599e3d52d72","0x00000000000000000000000000000000b8f0b3c0c7549a0ab8a9fbde3478b505","0x056af493dda97ae84cdbbf9ce379e35bdd66e1223eebacdc4a6c2c92553604f4","0x023624c49a722bc8dc5d945b4b10be8ed6c608020e65038a470b5a407375c8aa","0x0ed9f8dd445bda548ef08b7a2ff233867c41b72786f98054597833a68cc9b900","0x2cbf3d04669aa3a0dcda95e19da879f36029abe28317f1ee69be28ddef2a0b87","0x284ca7049611e293aa4535dd7841a540996609d541814373b387b00069636f14","0x246a69ce4030b1e8a675eec89960bfe188bd4073f07afe74f7a77c0698c80bc5","0x1bbdab5d007c4743fbcbf3cc89252baf0b0e1b645b977434ccd4e7560d124761","0x210427e70ee1b484bbb0b4e98263faf24a45325236eed618d51dcb1cb3a9f60d","0x1fbc24b0bd5b0b8c514e138317cc332962714dd306b34939768d723d6ea2ca8e","0x1e74217a6bd46293e6eb721cad346b607a9d6953d677bc5a17fd195e299b9f0f","0x1d2c1e441a4db99b7c88d0b6429ca39792c984d4a63c2f7ab96cc07ee4947390","0x00000000000000000000000000000005b1e3524625c466540f3f7468172403cb","0x000000000000000000000000000000000013bb985f9d5562699347b5dfbc441e","0x000000000000000000000000000000f4fb87d7f481bb198aa6237a0c9ffd3c22","0x0000000000000000000000000000000000254c5f1b76e278f4c71cf5e71533dd","0x0000000000000000000000000000005a72a28b51be9c538b4d28b5106b9239b8","0x00000000000000000000000000000000000d02d80e1a73c82cb0dd8a
f1aabb3f","0x000000000000000000000000000000434c46502fc1c425a72a4717a3e44c3415","0x00000000000000000000000000000000001c8d74d967b9b65ff2772592a15d0e"] - -key_hash = "0x096129b1c6e108252fc5c829c4cc9b7e8f0d1fd9f29c2532b563d6396645e08f" -proof = ["0x0000000000000000000000000000000000000000000000000000000000000003","0x000000000000000000000000000000d62b795bec274279129a71195796825fcc","0x00000000000000000000000000000000000793ab763140f20a68a6bd2721fd74","0x00000000000000000000000000000053141d06d3307b36153f321511199e579c","0x00000000000000000000000000000000000a4b55d6c21f98a9c434911dcb5c67","0x0000000000000000000000000000005f9d324c0abd22cec92d99dbec438e9491","0x0000000000000000000000000000000000240dfafe1b53dc27147cbab14ea893","0x000000000000000000000000000000044a61d3aac32c6931247cf334a19d9611","0x000000000000000000000000000000000003f0f8cf4207bfa85c23ec9f8d0c88","0x00000000000000000000000000000002168a470e39ba2ac266f6b474de12045f","0x000000000000000000000000000000000025791e7d3feab542345c00ec5a30df","0x000000000000000000000000000000dcafd76d4c3640969c80e017b951ef6397","0x00000000000000000000000000000000001d27f75a1256771e88e0c86fc42dbc","0x0000000000000000000000000000007347ae7d2d9d7fc2b8f0baa014ee1fed9f","0x000000000000000000000000000000000018bd927f42bf7caf9555f56f09000d","0x000000000000000000000000000000041f765f83cbe5904c8f453f70a4531d10","0x00000000000000000000000000000000001858aabeeb5331a221419f4fed1c19","0x000000000000000000000000000000d254a54caaedf8287b9af951b2f2611121","0x000000000000000000000000000000000005ab493623c9563cf2e55ba5f18200","0x00000000000000000000000000000014f24cddc1a02440dc63637df8032c8074","0x000000000000000000000000000000000011950c16cef98471b1d78b935195a4","0x000000000000000000000000000000b0340b459e6bd5cc8f031c8654a502897f","0x00000000000000000000000000000000000e1cf3968dac4545a76a2ae58e512c","0x0000000000000000000000000000002adf7218aa06ca0d2c2e600dcc39193a2d","0x00000000000000000000000000000000001302e7e4b0f14749bd885ca25588b6","0x00000000000000000000000000000092009ce4056e79ab815d8cdfd4491138ae","0x000000000000000000000000000000000018af11e853c6cf2f0f6274b0da8133","0x000000000000000000000000000000dd3dc6f49232141718527b3a0e4b26e21d","0x00000000000000000000000000000000001a877853348a8b695c4f9a9aa4ce68","0x000000000000000000000000000000aecfc56ba07155450b368140d6324023b5","0x000000000000000000000000000000000029c11052798c57ece614617d33fcc2","0x000000000000000000000000000000eb106ffc816d16fb84e84b0b61157b2603","0x000000000000000000000000000000000026c3cac16206899a21cb5126841446","0x000000000000000000000000000000a782ed54805fe845068b362b58e2fa34ec","0x00000000000000000000000000000000000cf046a1bfcc666b7f28b572676073","0x000000000000000000000000000000b931c8dda60bb4aca4cc817f5540f1209f","0x000000000000000000000000000000000024ad50c3936fafc3d190e6a4874223","0x000000000000000000000000000000cce90cfbaf5671c8c8652db28a3a9566f7","0x000000000000000000000000000000000003574db9d0f84380c9635660f86354","0x0000000000000000000000000000003eb3e1dc31846a90f721e7a08c6d6dc4f7","0x000000000000000000000000000000000028999a700cd1abae1a288eebb9a91c","0x000000000000000000000000000000c1be4d385b11387e14eb9817050d772f78","0x000000000000000000000000000000000003c56b5bad8b4484c66ac921f1f102","0x000000000000000000000000000000ace245cabf0f00dc7fd253dd8af0377a14","0x0000000000000000000000000000000000107f1731fcf34b364c813599fa1df7","0x035b937d404932b542b706eb810ef4a7dca4566d4dde1ad6a8717f46167ead7e","0x17608cef3dc7960f41cb1295706df663727d45ee598a61e05e989d111449fb65","0x054712a950ad67da3aa860e49e6891f99b586b7f37caff94eb013fdb374b61ee","
0x04b755083086c769b7f593e0e48d68dc54be808203351380ca5566a48149d8bb","0x17d7670b0915235f626fdc1d7e1134d2be906ef138d7843384b3ebc23b1d630f","0x064cf544ab5f4e3dab47960502cccc83321fb275068dfbdd3a2fcbc6dddcaa65","0x083338262712e2b66769ea40d9f412b18caa1bc81a51ff5a50b6c41f8c4b3d23","0x0cdd38958cab97defde00f4a5961b6fd676e29d9f2c352f6bb2c68b91f83f8af","0x02c8bdd005c2f43a0a8cbb2744916ce5c322dfa5b23367a829c12699f4036d32","0x25bac73c7e7b659fbea3135b7a0decf9db8dc3045bd2837dae337c64cc722546","0x19eb361aa419d37bce3d2e8b2b7692a02a9559e83d7f3d8fe9169970fbbc2cba","0x2494bd5106d00e05c7ea60e632e9fe03773b7f2c5b662aa37ec512a01f4a0775","0x18c52c2f2c6e7be1d7847c15e452a3a9c64316103d12e4b5b9a82fac4e940ee9","0x0e0342810456ef78f498c1bfa085a5f3cbc06db1f32fabd0ea9ad27dccac1680","0x024c13d6ef56af33ed7164ea8e47ddecc8a487b000d8b1b45edcd3895a503ba2","0x26e0d127f626bd39b55bc5d0c131dbf03fe006dc5c3edc57dda1e629799a4317","0x1b1140061bc52b15c4f5e100729a81968ee79dc03deb966a18850335a8e44a8b","0x1bb76f945199e71d531a89288912087a02dd0e83020e65d671485bf2e5e86e1a","0x29269900859c6d86e404185b415bf3b279cd100f38cfdb0077e8d6a299c4fd35","0x22b5e94bae2f6f0cdb424a3b12c4bf82cec3fb228e012c1974ed457827bbe012","0x18d3543a93249778e7a57936170dae85ffc47c2567f2d0076a32c0bb86fcf10a","0x03721dc2670206cde42a175fd56bcce32cf6cb8801450a8e8e4b3d4e07785973","0x2806db136dd214d3ac1478460855cae6a4324ab45cab35320d104fee26c260e8","0x1c3749f1937082afbbae9375b9be708cf339e1983e57ef4447f36cfa560c685c","0x1067b8cfb90ef08bcb48aea56b2716334241787c2004a95682d68a0685566fd0","0x0f41aee4416398f1d48ffc302403273cddef34a41f98507c53682041d82e51ff","0x10d854c9f0bfbdff7ca91a68f4978e9a79e7b14243d92f465f17bdf88d9f64f8","0x00000000000000000000000000000000018938b11099e0cdc05ddab84a153a97","0x0000000000000000000000000000000001d7dda1471f0dc3b3a3d3438c197982","0x00000000000000000000000000000000022682917da43ab9a6e9cbcece1db86d","0x2453913e6b0f36eab883ac4b0e0604d56aaeb9c55e641135173e63c342f1a660","0x05216c1b58dc43a49d01aaba3113b0e86be450fc17d28016e648e7162a1b67fb","0x152b34845a0222a2b41354c0d395a250d8363dc18748647d85acd89d6934ec56","0x1dfc6e971ce82b7dcda1f7f282713c6e22a8c79258a61209bda69719806da544","0x2968dd8b3af8e3953f1fbbd72f4c49b8270597bb27d4037adc157ac6083bee60","0x1b9425b88a4c7d39b3d75afe66917a9aa1d2055724392bc01fb918d84ff1410e","0x04ab571f236d8e750904dc307dd274003d9130f1a7110e4c1521cfb408877c73","0x2ad84f26fdc5831545272d02b806bb0e6dae44e71f73552c4eb9ff06030748c7","0x020e632b99d325db774b8630fb50b9a4e74d35b7f27d9fc02c65087ee747e42c","0x09a8c5a3171268cb61c02515c01c109889200ed13f415ae54df2078bbb887f92","0x1143281a9451abbb4c34c3fa84e7678c2af2e7ea8c05160a6f7f06988fc91af8","0x000000000000000000000000000000cbda736ca5cf6bc75413c2cc9e28ab0a68","0x00000000000000000000000000000000001ee78c9cc56aa5991062ae2e338587","0x000000000000000000000000000000bc9bfcdebb486f4cb314e681d2cc5f8df6","0x00000000000000000000000000000000000ad538431d04771bca7f633cb659ff","0x000000000000000000000000000000d45b317afcefa466a59bba9e171f1af70c","0x0000000000000000000000000000000000133c50180ea17932e4881124e7a7c6","0x000000000000000000000000000000fc9ed37f543775849f3e84eaa06f77f992","0x00000000000000000000000000000000001372873c9c051d1baff99248b8f70e"] -public_inputs = ["0x0000000000000000000000000000000000000000000000000000000000000003"] -verification_key = 
["0x2b337de1c8c14f22ec9b9e2f96afef3652627366f8170a0a948dad4ac1bd5e80","0x0000000000000000000000000000000000000000000000000000000000000008","0x0000000000000000000000000000000000000000000000000000000000000005","0x0000000000000000000000000000000000000000000000000000000000000008","0x0000000000000000000000000000000000000000000000000000000000000001","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x00000000000000000000000000000092139c61bae1a44f0fc7689507414be688","0x00000000000000000000000000000000000160ce4e279582f91bde4f03f5e9a2","0x0000000000000000000000000000005dc2d37f658c3b2d60f24740eb13b65d79","0x000000000000000000000000000000000007e3e8a5d98a1177ec85bf88f163a5","0x000000000000000000000000000000dc3035fbd7ff16412a8fd7da587a935298","0x000000000000000000000000000000000023d08e2817ac16990004ed11d8fc66","0x000000000000000000000000000000356a5ad59c646c746a8d09f5d154e47c4f","0x00000000000000000000000000000000000708529196af3c8e16ffa580c26182","0x0000000000000000000000000000002ddfe70eb7a1280596e8e4a804f118a6dd","0x000000000000000000000000000000000013757e15a0905f298303784a161b21","0x000000000000000000000000000000a23a729df796935c7824e3a26be794829b","0x000000000000000000000000000000000005775b6c146c4a59856e869fe5a70e","0x000000000000000000000000000000eef0c9e088fd2d45aa40311082d1f2809b","0x00000000000000000000000000000000001d539ccbfc556d0ad59307a218de65","0x000000000000000000000000000000a2c848beceb6ab7806fd3b88037b8410fc","0x0000000000000000000000000000000000177004deeb1f9d401fd7b1af1a5ac8","0x0000000000000000000000000000002508eb63672a733f20de1a97644be4f540","0x00000000000000000000000000000000000d82d51f2f75d806285fd248c819b8","0x000000000000000000000000000000d002f9100cbba8a29f13b11513c53c59d0","0x000000000000000000000000000000000006cd3b0e3460533b9e5ea2cdc0fcbb","0x000000000000000000000000000000f45ea38a93b2f810c5633ddb54927c1c96","0x000000000000000000000000000000000021791de65f9a28ec7024b1a87ab4f3","0x000000000000000000000000000000926511a0439502c86885a8c6f0327aa7ad","0x000000000000000000000000000000000029fa14a969c5d81ed3abbbfb11220a","0x000000000000000000000000000000b84c3258e8206f560e5b5b18cbeafef87e","0x00000000000000000000000000000000002a910445cd8fc895e5d235cd8ea185","0x000000000000000000000000000000887e67f15e84bcb8507a5064a363f6043b","0x000000000000000000000000000000000014dc6643d801c3ef27c2066b6e2bb4","0x000000000000000000000000000000e38e900b42c314ba803088e8fbf125203f","0x00000000000000000000000000000000
0020690fd4869db418306046b38161dc","0x0000000000000000000000000000001e2fa856bf7951b8292b1e88185993629c","0x0000000000000000000000000000000000048a85e0bbac7c60ad3d78f601f63c","0x0000000000000000000000000000006f457719495073d3666d77a625aeab0c51","0x00000000000000000000000000000000002623ad892dc62b1fa7d0a650f0d470","0x000000000000000000000000000000dbfcc8a467e021c03b13f74a9f79c3a10c","0x0000000000000000000000000000000000295f6f10976c37bd9c6f96bb7187d5","0x000000000000000000000000000000c13ef9a937cc12420fb38d9ab8e848e85e","0x000000000000000000000000000000000003560a3b334e887532f605c9cb7628","0x0000000000000000000000000000009bcebf08a4599cdda0fb96312d4dc0c7a9","0x000000000000000000000000000000000015adc8bb1e01c835f48959d1237bd6","0x00000000000000000000000000000047762ab839e4ff63c77605a9f383da37c2","0x000000000000000000000000000000000016a8c3c53d89660cf271522cd301fb","0x000000000000000000000000000000f0c8539a0b5f94420a513f9c305b932bfe","0x00000000000000000000000000000000002957ba01d9de5638f808f88a692533","0x000000000000000000000000000000ab17c6189d67d3bf5dd2f3885de0151b6f","0x0000000000000000000000000000000000060d8aa43fdc434d1942263f364d95","0x0000000000000000000000000000005d292333b3adb497f00b4bc32d45229060","0x00000000000000000000000000000000001a1018a66221883639f2898a66f345","0x00000000000000000000000000000006555a806b1993291deba0dc44e2abf431","0x00000000000000000000000000000000000cacff7099a9d5e35a21f4a00b2dc3","0x000000000000000000000000000000f50c11ba95d349c36d143eefd12e494950","0x00000000000000000000000000000000001022e8c5f02d639bc9dd8bc4407f99","0x000000000000000000000000000000c76828795098eda73d50b4b585c60afc60","0x00000000000000000000000000000000002bf09c0ec7011e93888962f2406630","0x00000000000000000000000000000049e5c83a8978d832fb8e144548e3ca1adb","0x00000000000000000000000000000000000e0ec242c2e160a984f61ca5adf5f5","0x0000000000000000000000000000009c5d6e08a6605ab4513748ac0fa017dd1c","0x00000000000000000000000000000000001f54baa07558e5fb055bd9ba49c067","0x0000000000000000000000000000001e1ee7ee29bbb5e4b080c6091c1433ce62","0x000000000000000000000000000000000024aec62a9d9763499267dc98c33428","0x0000000000000000000000000000001a96755946ff16f0d6632365f0eb0ab4d4","0x000000000000000000000000000000000028cf3e22bcd53782ebc3e0490e27e5","0x00000000000000000000000000000043148d7d8c9ba43f2133fab4201435a364","0x0000000000000000000000000000000000234ce541f1f5117dd404cfaf01a229","0x000000000000000000000000000000a7fb95ffb461d9514a1070e2d2403982ef","0x00000000000000000000000000000000003016955028b6390f446c3fd0c5b424","0x00000000000000000000000000000008863c3b7cd7cddc20ba79ce915051c56e","0x000000000000000000000000000000000013ef666111b0be56a235983d397d2a","0x000000000000000000000000000000e3993f465fc9f56e93ac769e597b752c1c","0x0000000000000000000000000000000000217f7c4235161e9a3c16c45b6ca499","0x0000000000000000000000000000008ffa4cd96bc67b0b7df5678271e1114075","0x0000000000000000000000000000000000256467bfcb63d9fdcb5dde397757ad","0x00000000000000000000000000000054e5eb270bb64bde6e6ececadfd8c3236c","0x00000000000000000000000000000000000e52d1bd75812c33c6f3d79ee4b94c","0x000000000000000000000000000000484a2c641dce55bc2dd64ef0cd790a7fea","0x00000000000000000000000000000000000ff417d256be43e73c8b1aa85bdda3","0x0000000000000000000000000000000b72e7b7713ab5da44e0f864182e748a23","0x00000000000000000000000000000000001a221055f1625ad833a44705f5f74e","0x00000000000000000000000000000067a99a34e9b81a17ad001db02e29bcb82a","0x000000000000000000000000000000000018a6c02e398389827568fa960e86e2","0x000000000000000000000000000000bb29f26f9890d6cc6401f4921d5884edca","0
x00000000000000000000000000000000000868357b28039385c5a5058b6d358e","0x00000000000000000000000000000036fb6e229dde8edf7ec858b12d7e8be485","0x00000000000000000000000000000000001060afe929554ca473103f5e68193c","0x00000000000000000000000000000015226e07e207744c0857074dcab883af4a","0x00000000000000000000000000000000000b1c02619282755533457230b19b4a","0x0000000000000000000000000000001f2a0277e4807e6e1cbabca21dde5eb5e1","0x00000000000000000000000000000000000d928deafed363659688ed4ccdef52","0x000000000000000000000000000000363f0c994e91cecad25835338edee2294f","0x00000000000000000000000000000000002eea648c8732596b1314fe2a4d2f05","0x000000000000000000000000000000b2671d2ae51d31c1210433c3972bb64578","0x00000000000000000000000000000000000ab49886c2b94bd0bd3f6ed1dbbe2c"] -proof_b = ["0x0000000000000000000000000000000000000000000000000000000000000003","0x000000000000000000000000000000f05c69448ca29bdf52076f9b073bb30fed","0x000000000000000000000000000000000028c86bb3e27b4aaaaef126f7df5349","0x00000000000000000000000000000026ae031fc93594375dfc7f3bbe027f97d5","0x000000000000000000000000000000000000dd12c7290fe7f775796a233b8590","0x000000000000000000000000000000c1ee6631704de424d010c5c4ac8293ac49","0x00000000000000000000000000000000002f41818c9aa83f5c8d9bdd128015b9","0x000000000000000000000000000000b50a5801482f7e3a5de8ab3cce0f10b0d3","0x000000000000000000000000000000000022a0bc69c293dbf293b25bc9eef7f8","0x0000000000000000000000000000003b02abf1967ef394154dc15d763135e903","0x00000000000000000000000000000000000d8a2ee46acc6d1ed8d517b56d47c8","0x00000000000000000000000000000039bf0d1b3d8cf9de898f101c626e978d78","0x0000000000000000000000000000000000008faa7df2451a24d291a9b584f1a5","0x000000000000000000000000000000c1dae329ed7adf63a2d89a5f16fb98b6d8","0x00000000000000000000000000000000001ff0bc16fc0bd4aa2d6255690453c2","0x000000000000000000000000000000d12d7589f853a9b472613efa56689beaf1","0x00000000000000000000000000000000002d6fbc798f4403751df6aeee8bedd3","0x0000000000000000000000000000007c1fa069cb17194fecf88db9dd54a4ee36","0x0000000000000000000000000000000000268e026f9814822a42b2d59eec5d24","0x000000000000000000000000000000c3fb56beab774218cd63498fc050a5fd9b","0x00000000000000000000000000000000000071c014d7b5063f005a0bc2ee1af4","0x000000000000000000000000000000ae12b25371c6af42bbe0a85cddd2eaebc7","0x000000000000000000000000000000000026d270e1ffc9c7c344c694dfadda83","0x00000000000000000000000000000080280858c6be461716921caa3c26f3f6f3","0x000000000000000000000000000000000001dcdd3f39e27d0ce6aa5d14dff4c1","0x000000000000000000000000000000080e1d2c913c834ebcf7e0600c076c08fd","0x00000000000000000000000000000000002df3d142217694e65fb7c355d62764","0x000000000000000000000000000000e5e336f3f59d77e500f49771bfbeb12e83","0x000000000000000000000000000000000028fffe08bdc4c0690643d2e1a1275f","0x000000000000000000000000000000db5618b32afc13e18f21b39f3fbede9d11","0x00000000000000000000000000000000001d244818370d43fb7e8bc67e03787b","0x0000000000000000000000000000006bcc1fd3f9f78449ad1df1bc11bc379edd","0x000000000000000000000000000000000009ac9cbb285edbf5b3a973f3f5f1cb","0x000000000000000000000000000000fd885905b6c0fc95bb4dd0b11f6797d4b3","0x000000000000000000000000000000000021f07995cdd835145e19c38127c562","0x000000000000000000000000000000bbbf2b975c2c97ae4b45c4a52059e53ee3","0x000000000000000000000000000000000024158163788841cf4590bbc1e89a90","0x0000000000000000000000000000009aca93d2b1386ea412d4b36ea5bb9894a8","0x00000000000000000000000000000000002532d1d210e8ed4c2f5c00cbaaa475","0x000000000000000000000000000000634a88caa1d77cb6b5fe77cac31458fc31","0x0000000000000000000000
0000000000000bdf18bae92fce7cfddab5520cac6e","0x000000000000000000000000000000622e9626255170ccec77602c755aa193e1","0x000000000000000000000000000000000001d4edba370e04436a988bad05dada","0x000000000000000000000000000000b52934323a0aec8f803cdaafee2ab7bfb2","0x0000000000000000000000000000000000155312af5e0e25ca9fd61aef9e58ed","0x06270b517855f6f6a608e432883d1d1030a12a1e33022dc142b7728691421da2","0x2af7c794d7b720b25eb1df0afd8c8e3c15b6e518194c3caea7966a5f8210ff04","0x073fe573aeb27d81a5713be93e1365390dcbc3c8e7439ff1d36a84cc014f5642","0x11351b961147431e54535248b58b35cf5cddb9b13827899167617d7a96794d64","0x297c9421c9c3db286770787c35b86bc41583386491b4ae55e5fa81aefa21efc4","0x0f4eeca3ff4a3495f859898937688652d33f9b4dd3e003e12adf15278e0997c3","0x133e3d8b82721d40d919f2326810ba6f07eff3f7d20d86b2bde692a811522019","0x2c502f53c9698b73bb8c8f9b9cf2d705d16a64a7040348b4b39c637a2064316c","0x0cbc1971e1c566cde9d9125c91cdc88e817db182692f836c1a5170a6246eaf73","0x12c47793e7db706c637cd4b4d96d227f569850176b852b1fe8ad522ddb38ef0e","0x0cd7b300e9309a135285be1aeb02b152f97931a7357ab6d609a2cb1970aab877","0x2a7789dfe286c9d0a7592f1c9316e730cb14c9d843aefc4764d76e7f8571c96a","0x248ac54ce3dbf37796621882a4ac76046df5ab680da487fd85cce76b1ae392d3","0x149d1d07cebe320f77b03533e34912545cedeae62bd9778d37724728762b5710","0x00fe29daebdaed61309790e70e2dcefa3f3af4c6c965ce424b8dbcf09b8e4b49","0x2b75b3bace61b731d7f0c003a144b62b0a4fbe9f0d14ca89b0652b70210014b3","0x2588ef27cfb6e0d8c6f9a969b2da44fead30a02ed70a563fd15aa45bb671de1c","0x2b74d7674b55642697b4a1e226eddb0e4918b2d57aa5b99093dc46cadcdea000","0x244c626845d3a5040f08f01e9611f968ad675ca857789149b13a0cfa83a2e064","0x2cb8d02f90cae33fd7bcfb80af4aff067c4f5fc4b3f9228d5b8f768bc8f6c971","0x1372f3d1f04e0c39a50e823d5da03d70bebe19a1b8e28f8c2ff601cc0bfc0095","0x19af6601d2613426a50b7c35d60562a5f2f2634e6af56dac13459632e15570ee","0x13c2a16ed3b65dcd9414659be79af17995d344de34eaf962343b0f1e76c73a57","0x0dd5dcdbd50b8774831d4f01f930804d38b4266dfee085185530880a0c3903c0","0x07e91848d660b11b722638680ac60f20db9507fdc8d610ce762600f5a1aacd29","0x1f9c2a94d10c0a7fb60292cfc46fd3d2501181bea0ffe1f5f2501d474be3a785","0x14edb9c5bd389eae08a5ea2a7a1662894e1e878c142084d966a625bef68cf7c3","0x00000000000000000000000000000000cecd01810814d175f0a533f0067618c4","0x00000000000000000000000000000000f82935013ce5c82720c63e533af41db8","0x000000000000000000000000000000012185688171b6bed850e748b66f7222ac","0x2dd7f5ff2150155c2ac86ebe28d9ecbca2eea812b0021ab2bceae111cfea8325","0x04ea6c2daf2b9e827d2213c3d03953410dcf1ed67ba34a3c00e772be92606a8b","0x163f2bd18dcde52f99b9867c944780fd718d1612927053b139b280fc55013d1b","0x05e388fd160ccac30a8f7b18a4bd042f705e92b5937e8c0e9478e2ff623907c6","0x00ba3f6f527d6ed3ff17a63b1d5be3c42bdfae88fdf63311fc7b871157939309","0x16187d9daa8c2e5a1a9ab15be7ca6a8feebfb31bea76f9a3ca69381881c70561","0x0f64522e4904edb7377b14a7b9dad848829167324ef5c016346b3ad8251191ee","0x273bbe6000a4001dce369e5a36cc0b0ca3fd351665b688238aa8c556a6ca6b8e","0x022d2232efb2faa8307846c9a4c697aabad1b7f1336b35ad72fa8922975b49d9","0x0d82d478bff3955c4b0a34ef94427ca5f9da23147ad953c89f2e428277ec2825","0x18d886be90343010659c231583be61a138e28e37c24771e3cb61fbe2587d0671","0x000000000000000000000000000000196ba6a58dbeb7c34cb1d6287e23d434de","0x00000000000000000000000000000000001df8ae8a1589590f8863c1fefd8dfd","0x000000000000000000000000000000f30e11b2c5fbefa166cbb9f58c5f8e1a4c","0x000000000000000000000000000000000026420ade7666bc0ab1cf1fd9d0c534","0x0000000000000000000000000000000feb5b7d8260d25a1ee1ce76ff461673fc","0x00000000000000000000000000000000002bd2ac6223a80671b777bf5d
ca70a4","0x000000000000000000000000000000690f757006d2fa1ddb0114c9f268783537","0x000000000000000000000000000000000023ad36feadd91e50118f32e97a0204"] \ No newline at end of file diff --git a/tooling/nargo_cli/tests/execution_success/double_verify_proof/src/main.nr b/tooling/nargo_cli/tests/execution_success/double_verify_proof/src/main.nr deleted file mode 100644 index ecc00bbd560..00000000000 --- a/tooling/nargo_cli/tests/execution_success/double_verify_proof/src/main.nr +++ /dev/null @@ -1,28 +0,0 @@ -use dep::std; - -fn main( - verification_key: [Field; 114], - proof: [Field; 94], - public_inputs: [Field; 1], - key_hash: Field, - input_aggregation_object: [Field; 16], - proof_b: [Field; 94] -) -> pub [Field; 16] { - let output_aggregation_object_a = std::verify_proof(verification_key.as_slice(), - proof.as_slice(), - public_inputs.as_slice(), - key_hash, - input_aggregation_object); - - let output_aggregation_object = std::verify_proof(verification_key.as_slice(), - proof_b.as_slice(), - public_inputs.as_slice(), - key_hash, - output_aggregation_object_a); - - let mut output = [0; 16]; - for i in 0..16 { - output[i] = output_aggregation_object[i]; - } - output -} diff --git a/tooling/nargo_cli/tests/execution_success/ecdsa_secp256k1/src/main.nr b/tooling/nargo_cli/tests/execution_success/ecdsa_secp256k1/src/main.nr deleted file mode 100644 index 552fcea9af7..00000000000 --- a/tooling/nargo_cli/tests/execution_success/ecdsa_secp256k1/src/main.nr +++ /dev/null @@ -1,10 +0,0 @@ -use dep::std; - -fn main(message: [u8;38], hashed_message: [u8;32], pub_key_x: [u8;32], pub_key_y: [u8;32], signature: [u8;64]) { - // Hash the message, since secp256k1 expects a hashed_message - let expected = std::hash::sha256(message); - assert(hashed_message == expected); - - let valid_signature = std::ecdsa_secp256k1::verify_signature(pub_key_x, pub_key_y, signature, hashed_message); - assert(valid_signature); -} diff --git a/tooling/nargo_cli/tests/execution_success/global_consts/src/main.nr b/tooling/nargo_cli/tests/execution_success/global_consts/src/main.nr deleted file mode 100644 index a749ec77da6..00000000000 --- a/tooling/nargo_cli/tests/execution_success/global_consts/src/main.nr +++ /dev/null @@ -1,96 +0,0 @@ -mod foo; -mod baz; - -global M: Field = 32; -global L: Field = 10; // Unused globals currently allowed -global N: Field = 5; -global T_LEN = 2; // Type inference is allowed on globals -//global N: Field = 5; // Uncomment to see duplicate globals error -struct Dummy { - x: [Field; N], - y: [Field; foo::MAGIC_NUMBER] -} - -struct Test { - v: Field, -} -global VALS: [Test; 1] = [Test { v: 100 }]; -global NESTED = [VALS, VALS]; - -fn main( - a: [Field; M + N - N], - b: [Field; 30 + N / 2], - c: pub [Field; foo::MAGIC_NUMBER], - d: [Field; foo::bar::N] -) { - let test_struct = Dummy { x: d, y: c }; - - for i in 0..foo::MAGIC_NUMBER { - assert(c[i] == foo::MAGIC_NUMBER); - assert(test_struct.y[i] == foo::MAGIC_NUMBER); - assert(test_struct.y[i] != NESTED[1][0].v); - } - - assert(N != M); - - let expected: u32 = 42; - assert(foo::TYPE_INFERRED == expected); - - let mut y = 5; - let mut x = M; - for i in 0..N * N { - let M: Field = 10; - x = M; - - y = i; - } - assert(y == 24); - assert(x == 10); - - let q = multiplyByM(3); - assert(q == 96); - - arrays_neq(a, b); - - let t: [Field; T_LEN] = [N, M]; - assert(t[1] == 32); - - assert(15 == mysubmodule::my_helper()); - - let add_submodules_N = mysubmodule::N + foo::bar::N; - assert(15 == add_submodules_N); - let add_from_bar_N = mysubmodule::N + 
foo::bar::from_bar(1); - assert(15 == add_from_bar_N); - // Example showing an array filled with (mysubmodule::N + 2) 0's - let sugared = [0; mysubmodule::N + 2]; - assert(sugared[mysubmodule::N + 1] == 0); - - let arr: [Field; mysubmodule::N] = [N; 10]; - assert((arr[0] == 5) & (arr[9] == 5)); - - foo::from_foo(d); - baz::from_baz(c); -} - -fn multiplyByM(x: Field) -> Field { - x * M -} - -fn arrays_neq(a: [Field; M], b: [Field; M]) { - assert(a != b); -} - -mod mysubmodule { - global N: Field = 10; - global L: Field = 50; - - fn my_bool_or(x: u1, y: u1) { - assert(x | y == 1); - } - - pub fn my_helper() -> Field { - let N: Field = 15; // Like in Rust, local variables override globals - let x = N; - x - } -} diff --git a/tooling/nargo_cli/tests/execution_success/higher_order_functions/src/main.nr b/tooling/nargo_cli/tests/execution_success/higher_order_functions/src/main.nr deleted file mode 100644 index 479cdbbe7bf..00000000000 --- a/tooling/nargo_cli/tests/execution_success/higher_order_functions/src/main.nr +++ /dev/null @@ -1,97 +0,0 @@ -fn main(w: Field) -> pub Field { - let f = if 3 * 7 > 200 as u32 { foo } else { bar }; - assert(f()[1] == 2); - // Lambdas: - assert(twice(|x| x * 2, 5) == 20); - assert((|x, y| x + y + 1)(2, 3) == 6); - // nested lambdas - assert((|a, b| { - a + (|c| c + 2)(b) - })(0, 1) - == 3); - // Closures: - let a = 42; - let g = || a; - assert(g() == 42); - // When you copy mutable variables, - // the capture of the copies shouldn't change: - let mut x = 2; - x = x + 1; - let z = x; - // Add extra mutations to ensure we can mutate x without the - // captured z changing. - x = x + 1; - assert((|y| y + z)(1) == 4); - // When you capture mutable variables, - // again, the captured variable doesn't change: - let closure_capturing_mutable = (|y| y + x); - assert(closure_capturing_mutable(1) == 5); - x += 1; - assert(closure_capturing_mutable(1) == 5); - - regression_2154(); - - let ret = twice(add1, 3); - - test_array_functions(); - w + ret -} -/// Test the array functions in std::array -fn test_array_functions() { - let two = 2; // giving this a name, to ensure that the Option functions work with closures - let myarray: [i32; 3] = [1, 2, 3]; - assert(myarray.any(|n| n > 2)); - assert(myarray.any(|n| n > two)); - - let evens: [i32; 3] = myarray.map(|n| n * two); // [2, 4, 6] - assert(evens.all(|n| n > 1)); - assert(evens.all(|n| n >= two)); - - assert(evens.fold(0, |a, b| a + b) == 12); - assert(evens.fold(0, |a, b| a + b + two) == 18); - assert(evens.reduce(|a, b| a + b) == 12); - assert(evens.reduce(|a, b| a + b + two) == 16); - // TODO: is this a sort_via issue with the new backend, - // or something more general? 
- // - // currently it fails only with `--experimental-ssa` with - // "not yet implemented: Cast into signed" - // but it worked with the original ssa backend - // (before dropping it) - // - // opened #2121 for it - // https://github.com/noir-lang/noir/issues/2121 - // let descending = myarray.sort_via(|a, b| a > b); - // assert(descending == [3, 2, 1]); - assert(evens.map(|n| n / 2) == myarray); - assert(evens.map(|n| n / two) == myarray); -} - -fn foo() -> [u32; 2] { - [1, 3] -} - -fn bar() -> [u32; 2] { - [3, 2] -} - -fn add1(x: Field) -> Field { - x + 1 -} - -fn twice(f: fn(Field) -> Field, x: Field) -> Field { - f(f(x)) -} -// Fixing an ICE, where rewriting the closures -// during monomorphization didn't correspond -// to an internal `if` type -// found by @jfecher: -// https://github.com/noir-lang/noir/pull/1959#issuecomment-1658992989 -// issue https://github.com/noir-lang/noir/issues/2154 -fn regression_2154() { - let x: u32 = 32; - - let closure_if_else = if x > 2 { || x } else { || x + 2342 }; - - assert(closure_if_else() == 32); -} diff --git a/tooling/nargo_cli/tests/execution_success/regression/src/main.nr b/tooling/nargo_cli/tests/execution_success/regression/src/main.nr deleted file mode 100644 index ed3dbf23a24..00000000000 --- a/tooling/nargo_cli/tests/execution_success/regression/src/main.nr +++ /dev/null @@ -1,107 +0,0 @@ -global NIBBLE_LENGTH: Field = 16; - -fn compact_decode(input: [u8; N], length: Field) -> ([u4; NIBBLE_LENGTH], Field) { - assert(2 * input.len() as u64 <= NIBBLE_LENGTH as u64); - assert(length as u64 <= input.len() as u64); - - let mut nibble = [0 as u4; NIBBLE_LENGTH]; - - let first_nibble = (input[0] >> 4) as u4; - let parity = first_nibble as u1; - - if parity == 1 { - nibble[0] = (input[0] & 0x0f) as u4; - for i in 1..input.len() { - if i as u64 < length as u64 { - let x = input[i]; - nibble[2*i - 1] = (x >> 4) as u4; - nibble[2*i] = (x & 0x0f) as u4; - } - } - } else { - for i in 0..2 { - if (i as u64) < length as u64 - 1 { - let x = input[i + 1]; - nibble[2*i] = (x >> 4) as u4; - nibble[2*i + 1] = (x & 0x0f) as u4; - } - } - } - - let out = (nibble, 2 * length + (parity as Field) - 2); - - out -} - -fn enc(value: [u8; N], value_length: Field) -> ([u8; 32], Field) { - assert(value.len() as u8 >= value_length as u8); - let mut out_value = [0; 32]; - if value_length == 0 { - let out = (out_value, value_length); - out - } else if value_length as u8 < 31 { - out_value[0] = 0x80 + value_length as u8; - - for i in 1..value.len() { - out_value[i] = value[i-1]; - } - - let out = (out_value, value_length + 1); - - out - } else { - let out = (out_value, 32); - out - } -} - -fn bitshift_literal_0() -> u64 { - let mut bits: u64 = 0; - bits |= 1 << 0; - - bits -} -fn bitshift_literal_4() -> u64 { - let mut bits: u64 = 0; - bits |= 1 << 4; - - bits -} -fn bitshift_variable(idx: u64) -> u64 { - let mut bits: u64 = 0; - bits |= 1 << idx; - - bits -} - -fn main(x: [u8; 5], z: Field) { - //Issue 1144 - let (nib, len) = compact_decode(x, z); - assert(len == 5); - assert([nib[0], nib[1], nib[2], nib[3], nib[4]] == [15, 1, 12, 11, 8]); - // Issue 1169 - let val1 = [ - 0xb8, 0x8f, 0x61, 0xe6, 0xfb, 0xda, 0x83, 0xfb, 0xff, 0xfa, 0xbe, 0x36, 0x41, 0x12, 0x13, - 0x74, 0x80, 0x39, 0x80, 0x18, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00 - ]; - let val1_length = 20; - - let enc_val1 = enc(val1, val1_length); - - assert(enc_val1.0 == [ - 0x94, 0xb8, 0x8f, 0x61, 0xe6, 0xfb, 0xda, 0x83, 0xfb, 0xff, 0xfa, 0xbe, 0x36, 0x41, - 0x12, 0x13, 0x74, 0x80, 
0x39, 0x80, 0x18, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00 - ]); - assert(enc_val1.1 == 21); - // Issue 2399 - let result_0 = bitshift_literal_0(); - assert(result_0 == 1); - let result_4 = bitshift_literal_4(); - assert(result_4 == 16); - let result_0 = bitshift_variable(0); - assert(result_0 == 1); - let result_4 = bitshift_variable(4); - assert(result_4 == 16); -} diff --git a/tooling/nargo_cli/tests/execution_success/scalar_mul/src/main.nr b/tooling/nargo_cli/tests/execution_success/scalar_mul/src/main.nr deleted file mode 100644 index 4f0a83ea9e5..00000000000 --- a/tooling/nargo_cli/tests/execution_success/scalar_mul/src/main.nr +++ /dev/null @@ -1,16 +0,0 @@ -use dep::std; - -fn main(a: Field, a_pub_x: pub Field, a_pub_y: pub Field, b: Field, b_pub_x: pub Field, b_pub_y: pub Field) { - let mut priv_key = a; - let mut pub_x: Field = a_pub_x; - let mut pub_y: Field = a_pub_y; - if a != 1 { - // Change `a` in Prover.toml to test input `b` - priv_key = b; - pub_x = b_pub_x; - pub_y = b_pub_y; - } - let res = std::scalar_mul::fixed_base_embedded_curve(priv_key, 0); - assert(res[0] == pub_x); - assert(res[1] == pub_y); -} diff --git a/tooling/nargo_cli/tests/execution_success/schnorr/src/main.nr b/tooling/nargo_cli/tests/execution_success/schnorr/src/main.nr deleted file mode 100644 index d5eb92fd5f8..00000000000 --- a/tooling/nargo_cli/tests/execution_success/schnorr/src/main.nr +++ /dev/null @@ -1,19 +0,0 @@ -use dep::std; -// Note: If main has any unsized types, then the verifier will never be able -// to figure out the circuit instance -fn main(message: [u8; 10], message_field: Field, pub_key_x: Field, pub_key_y: Field, signature: [u8; 64]) { - // Regression for issue #2421 - // We want to make sure that we can accurately verify a signature whose message is a slice vs. an array - let message_field_bytes = message_field.to_be_bytes(10); - for i in 0..10 { - assert(message[i] == message_field_bytes[i]); - } - // Is there ever a situation where someone would want - // to ensure that a signature was invalid? - // Check that passing a slice as the message is valid - let valid_signature = std::schnorr::verify_signature(pub_key_x, pub_key_y, signature, message_field_bytes); - assert(valid_signature); - // Check that passing an array as the message is valid - let valid_signature = std::schnorr::verify_signature(pub_key_x, pub_key_y, signature, message); - assert(valid_signature); -} diff --git a/tooling/nargo_cli/tests/execution_success/simple_print/Prover.toml b/tooling/nargo_cli/tests/execution_success/simple_print/Prover.toml deleted file mode 100644 index 2c1854573a4..00000000000 --- a/tooling/nargo_cli/tests/execution_success/simple_print/Prover.toml +++ /dev/null @@ -1,2 +0,0 @@ -x = 1 -y = 2 diff --git a/tooling/nargo_cli/tests/execution_success/slice_dynamic_index/src/main.nr b/tooling/nargo_cli/tests/execution_success/slice_dynamic_index/src/main.nr deleted file mode 100644 index 2e5c0122dfb..00000000000 --- a/tooling/nargo_cli/tests/execution_success/slice_dynamic_index/src/main.nr +++ /dev/null @@ -1,308 +0,0 @@ -fn main(x: Field) { - // The parameters to this function must come directly from witness values (inputs to main). 
- regression_dynamic_slice_index(x - 1, x - 4); -} - -fn regression_dynamic_slice_index(x: Field, y: Field) { - let mut slice = []; - for i in 0..5 { - slice = slice.push_back(i); - } - assert(slice.len() == 5); - - dynamic_slice_index_set_if(slice, x, y); - dynamic_slice_index_set_else(slice, x, y); - dynamic_slice_index_set_nested_if_else_else(slice, x, y); - dynamic_slice_index_set_nested_if_else_if(slice, x, y + 1); - dynamic_slice_index_if(slice, x); - dynamic_array_index_if([0, 1, 2, 3, 4], x); - dynamic_slice_index_else(slice, x); - - dynamic_slice_merge_if(slice, x); - dynamic_slice_merge_else(slice, x); - dynamic_slice_merge_two_ifs(slice, x); - dynamic_slice_merge_mutate_between_ifs(slice, x, y); - dynamic_slice_merge_push_then_pop(slice, x, y); -} - -fn dynamic_slice_index_set_if(mut slice: [Field], x: Field, y: Field) { - assert(slice[x] == 4); - assert(slice[y] == 1); - slice[y] = 0; - assert(slice[x] == 4); - assert(slice[1] == 0); - if x as u32 < 10 { - assert(slice[x] == 4); - slice[x] = slice[x] - 2; - slice[x - 1] = slice[x]; - } else { - slice[x] = 0; - } - assert(slice[3] == 2); - assert(slice[4] == 2); -} - -fn dynamic_slice_index_set_else(mut slice: [Field], x: Field, y: Field) { - assert(slice[x] == 4); - assert(slice[y] == 1); - slice[y] = 0; - assert(slice[x] == 4); - assert(slice[1] == 0); - if x as u32 > 10 { - assert(slice[x] == 4); - slice[x] = slice[x] - 2; - slice[x - 1] = slice[x]; - } else { - slice[x] = 0; - } - assert(slice[4] == 0); -} -// This tests the case of missing a store instruction in the else branch -// of merging slices -fn dynamic_slice_index_if(mut slice: [Field], x: Field) { - if x as u32 < 10 { - assert(slice[x] == 4); - slice[x] = slice[x] - 2; - } else { - assert(slice[x] == 0); - } - assert(slice[4] == 2); -} - -fn dynamic_array_index_if(mut array: [Field; 5], x: Field) { - if x as u32 < 10 { - assert(array[x] == 4); - array[x] = array[x] - 2; - } else { - assert(array[x] == 0); - } - assert(array[4] == 2); -} -// This tests the case of missing a store instruction in the then branch -// of merging slices -fn dynamic_slice_index_else(mut slice: [Field], x: Field) { - if x as u32 > 10 { - assert(slice[x] == 0); - } else { - assert(slice[x] == 4); - slice[x] = slice[x] - 2; - } - assert(slice[4] == 2); -} - -fn dynamic_slice_merge_if(mut slice: [Field], x: Field) { - if x as u32 < 10 { - assert(slice[x] == 4); - slice[x] = slice[x] - 2; - - slice = slice.push_back(10); - // Having an array set here checks whether we appropriately - // handle a slice length that is not yet resolving to a constant - // during flattening - slice[x] = 10; - assert(slice[slice.len() - 1] == 10); - assert(slice.len() == 6); - - slice[x] = 20; - slice[x] = slice[x] + 10; - - slice = slice.push_front(11); - assert(slice[0] == 11); - assert(slice.len() == 7); - assert(slice[5] == 30); - - slice = slice.push_front(12); - assert(slice[0] == 12); - assert(slice.len() == 8); - assert(slice[6] == 30); - - let (popped_slice, last_elem) = slice.pop_back(); - assert(last_elem == 10); - assert(popped_slice.len() == 7); - - let (first_elem, rest_of_slice) = popped_slice.pop_front(); - assert(first_elem == 12); - assert(rest_of_slice.len() == 6); - // TODO(#2462): SliceInsert and SliceRemove with a dynamic index are not yet implemented in ACIR gen - slice = rest_of_slice.insert(2, 20); - assert(slice[2] == 20); - assert(slice[6] == 30); - assert(slice.len() == 7); - // TODO(#2462): SliceInsert and SliceRemove with a dynamic index are not yet implemented in ACIR gen - let 
(removed_slice, removed_elem) = slice.remove(3); - // The deconstructed tuple assigns to the slice but is not seen outside of the if statement - // without a direct assignment - slice = removed_slice; - - assert(removed_elem == 1); - assert(slice.len() == 6); - } else { - assert(slice[x] == 0); - slice = slice.push_back(20); - } - - assert(slice.len() == 6); - assert(slice[slice.len() - 1] == 30); -} - -fn dynamic_slice_merge_else(mut slice: [Field], x: Field) { - if x as u32 > 10 { - assert(slice[x] == 0); - slice[x] = 2; - } else { - assert(slice[x] == 4); - slice[x] = slice[x] - 2; - slice = slice.push_back(10); - } - assert(slice.len() == 6); - assert(slice[slice.len() - 1] == 10); - - slice = slice.push_back(20); - assert(slice.len() == 7); - assert(slice[slice.len() - 1] == 20); -} - -fn dynamic_slice_index_set_nested_if_else_else(mut slice: [Field], x: Field, y: Field) { - assert(slice[x] == 4); - assert(slice[y] == 1); - slice[y] = 0; - assert(slice[x] == 4); - assert(slice[1] == 0); - if x as u32 < 10 { - slice[x] = slice[x] - 2; - if y != 1 { - slice[x] = slice[x] + 20; - } else { - if x == 5 { - // We should not hit this case - assert(slice[x] == 22); - } else { - slice[x] = 10; - slice = slice.push_back(15); - assert(slice.len() == 6); - } - assert(slice[4] == 10); - } - } else { - slice[x] = 0; - } - assert(slice[4] == 10); - assert(slice.len() == 6); - assert(slice[slice.len() - 1] == 15); - - slice = slice.push_back(20); - assert(slice.len() == 7); - assert(slice[slice.len() - 1] == 20); -} - -fn dynamic_slice_index_set_nested_if_else_if(mut slice: [Field], x: Field, y: Field) { - assert(slice[x] == 4); - assert(slice[y] == 2); - slice[y] = 0; - assert(slice[x] == 4); - assert(slice[2] == 0); - if x as u32 < 10 { - slice[x] = slice[x] - 2; - // TODO: this panics as we have a load for the slice in flattening - if y == 1 { - slice[x] = slice[x] + 20; - } else { - if x == 4 { - slice[x] = 5; - } - assert(slice[4] == 5); - } - } else { - slice[x] = 0; - } - assert(slice[4] == 5); -} - -fn dynamic_slice_merge_two_ifs(mut slice: [Field], x: Field) { - if x as u32 > 10 { - assert(slice[x] == 0); - slice[x] = 2; - } else { - assert(slice[x] == 4); - slice[x] = slice[x] - 2; - slice = slice.push_back(10); - } - - assert(slice.len() == 6); - assert(slice[slice.len() - 1] == 10); - - if x == 20 { - slice = slice.push_back(20); - } - - slice = slice.push_back(15); - - assert(slice.len() == 7); - assert(slice[slice.len() - 1] == 15); - - slice = slice.push_back(20); - assert(slice.len() == 8); - assert(slice[slice.len() - 1] == 20); -} - -fn dynamic_slice_merge_mutate_between_ifs(mut slice: [Field], x: Field, y: Field) { - if x != y { - slice[x] = 50; - slice = slice.push_back(y); - slice = slice.push_back(x); - } else { - slice[x] = slice[x] - 2; - slice = slice.push_back(x); - } - - slice = slice.push_back(30); - assert(slice.len() == 8); - - if x == 20 { - slice = slice.push_back(20); - } - - slice = slice.push_back(15); - - if x != 20 { - slice = slice.push_back(50); - } - - slice = slice.push_back(60); - assert(slice.len() == 11); - assert(slice[x] == 50); - assert(slice[slice.len() - 4] == 30); - assert(slice[slice.len() - 3] == 15); - assert(slice[slice.len() - 2] == 50); - assert(slice[slice.len() - 1] == 60); -} - -fn dynamic_slice_merge_push_then_pop(mut slice: [Field], x: Field, y: Field) { - if x != y { - slice[x] = 5; - slice = slice.push_back(y); - slice = slice.push_back(x); - assert(slice.len() == 7); - - let (popped_slice, elem) = slice.pop_back(); - assert(slice.len() 
== 7); - assert(elem == x); - slice = popped_slice; - } else { - slice = slice.push_back(x); - } - - slice = slice.push_back(30); - assert(slice.len() == 7); - - if x == 20 { - slice = slice.push_back(20); - } - - let (slice, elem) = slice.pop_back(); - assert(elem == 30); - - let (_, elem) = slice.pop_back(); - assert(elem == y); -} - diff --git a/tooling/nargo_cli/tests/execution_success/slice_struct_field/src/main.nr b/tooling/nargo_cli/tests/execution_success/slice_struct_field/src/main.nr deleted file mode 100644 index c00fdf85180..00000000000 --- a/tooling/nargo_cli/tests/execution_success/slice_struct_field/src/main.nr +++ /dev/null @@ -1,177 +0,0 @@ -struct FooParent { - parent_arr: [Field; 3], - foos: [Foo], -} - -struct Bar { - inner: [Field; 3], -} - -struct Foo { - a: Field, - b: [Field], - bar: Bar, -} - -fn main(y: pub Field) { - let mut b_one = [2, 3, 20]; - b_one = b_one.push_back(20); - let foo_one = Foo { a: 1, b: b_one, bar: Bar { inner: [100, 101, 102] } }; - - let mut b_two = [5, 6, 21]; - b_two = b_two.push_back(21); - let foo_two = Foo { a: 4, b: b_two, bar: Bar { inner: [103, 104, 105] } }; - - let foo_three = Foo { a: 7, b: [8, 9, 22], bar: Bar { inner: [106, 107, 108] } }; - let foo_four = Foo { a: 10, b: [11, 12, 23], bar: Bar { inner: [109, 110, 111] } }; - - let mut x = [foo_one, foo_two]; - x = x.push_back(foo_three); - x = x.push_back(foo_four); - - assert(x[y - 3].a == 1); - let struct_slice = x[y - 3].b; - for i in 0..4 { - assert(struct_slice[i] == b_one[i]); - } - - assert(x[y - 2].a == 4); - let struct_slice = x[y - 2].b; - for i in 0..4 { - assert(struct_slice[i] == b_two[i]); - } - - assert(x[y - 1].a == 7); - let struct_slice = x[y - 1].b; - assert(struct_slice[0] == 8); - assert(struct_slice[1] == 9); - assert(struct_slice[2] == 22); - - assert(x[y].a == 10); - let struct_slice = x[y].b; - assert(struct_slice[0] == 11); - assert(struct_slice[1] == 12); - assert(struct_slice[2] == 23); - assert(x[y].bar.inner == [109, 110, 111]); - - assert(x[y - 3].bar.inner == [100, 101, 102]); - assert(x[y - 2].bar.inner == [103, 104, 105]); - assert(x[y - 1].bar.inner == [106, 107, 108]); - assert(x[y].bar.inner == [109, 110, 111]); - // Check that switching the lhs and rhs is still valid - assert([109, 110, 111] == x[y].bar.inner); - // TODO: Enable merging nested slices - // if y != 2 { - // x[y].a = 50; - // } else { - // x[y].a = 100; - // } - // assert(x[3].a == 50); - // if y == 2 { - // x[y - 1].b = [50, 51, 52]; - // } else { - // x[y - 1].b = [100, 101, 102]; - // } - // assert(x[2].b[0] == 100); - // assert(x[2].b[1] == 101); - // assert(x[2].b[2] == 102); - let q = x.push_back(foo_four); - let foo_parent_one = FooParent { parent_arr: [0, 1, 2], foos: x }; - let foo_parent_two = FooParent { parent_arr: [3, 4, 5], foos: q }; - let mut foo_parents = [foo_parent_one]; - foo_parents = foo_parents.push_back(foo_parent_two); - // TODO: make a separate test for compile time - // foo_parents[1].foos.push_back(foo_four); - // TODO: Merging nested slices is broken - // if y == 3 { - // foo_parents[y - 2].foos[y - 1].b[y - 1] = 5000; - // } else { - // foo_parents[y - 2].foos[y - 1].b[y - 1] = 1000; - // } - assert(foo_parents[y - 2].foos[y - 2].b[y - 1] == 21); - foo_parents[y - 2].foos[y - 2].b[y - 1] = 5000; - assert(foo_parents[y - 2].foos[y - 2].b[y - 1] == 5000); - - let b_array = foo_parents[y - 2].foos[y - 3].b; - assert(foo_parents[y - 2].foos[y - 3].a == 1); - assert(b_array[0] == 2); - assert(b_array[1] == 3); - assert(b_array[2] == 20); - 
assert(b_array[3] == 20); - - let b_array = foo_parents[y - 2].foos[y - 2].b; - assert(foo_parents[y - 2].foos[y - 2].a == 4); - assert(b_array[0] == 5); - assert(b_array[1] == 6); - assert(b_array[2] == 5000); - assert(b_array[3] == 21); - - assert(foo_parents[y - 2].foos[y - 1].a == 7); - foo_parents[y - 2].foos[y - 1].a = 50; - - let b_array = foo_parents[y - 2].foos[y - 1].b; - assert(b_array[2] == 22); - assert(b_array.len() == 3); - // Test setting a nested array with non-dynamic - let x = [5, 6, 5000, 21, 100, 101].as_slice(); - foo_parents[y - 2].foos[y - 1].b = x; - - assert(foo_parents[y - 2].foos[y - 1].b.len() == 6); - assert(foo_parents[y - 2].foos[y - 1].b[4] == 100); - assert(foo_parents[y - 2].foos[y - 1].b[5] == 101); - - test_basic_intrinsics_nested_slices(foo_parents, y); - // TODO(#3364): still have to enable slice intrinsics on dynamic nested slices - // assert(foo_parents[y - 2].foos.len() == 5); - // foo_parents[y - 2].foos = foo_parents[y - 2].foos.push_back(foo_four); - // assert(foo_parents[y - 2].foos.len() == 6); - let b_array = foo_parents[y - 2].foos[y - 1].b; - assert(b_array[0] == 5); - assert(b_array[1] == 6); - assert(b_array[2] == 5000); - assert(b_array[3] == 21); - - let b_array = foo_parents[y - 2].foos[y].b; - assert(foo_parents[y - 2].foos[y].a == 10); - assert(b_array[0] == 11); - assert(b_array[1] == 12); - assert(b_array[2] == 23); - - assert(foo_parents[y - 2].foos[y - 3].bar.inner == [100, 101, 102]); - assert(foo_parents[y - 2].foos[y - 2].bar.inner == [103, 104, 105]); - assert(foo_parents[y - 2].foos[y - 1].bar.inner == [106, 107, 108]); - assert(foo_parents[y - 2].foos[y].bar.inner == [109, 110, 111]); -} - -fn test_basic_intrinsics_nested_slices(mut foo_parents: [FooParent], y: Field) { - foo_parents[y - 2].foos[y - 1].b = foo_parents[y - 2].foos[y - 1].b.push_back(500); - assert(foo_parents[y - 2].foos[y - 1].b.len() == 7); - assert(foo_parents[y - 2].foos[y - 1].b[6] == 500); - - let (popped_slice, last_elem) = foo_parents[y - 2].foos[y - 1].b.pop_back(); - foo_parents[y - 2].foos[y - 1].b = popped_slice; - assert(foo_parents[y - 2].foos[y - 1].b.len() == 6); - assert(last_elem == 500); - - foo_parents[y - 2].foos[y - 1].b = foo_parents[y - 2].foos[y - 1].b.push_front(11); - assert(foo_parents[y - 2].foos[y - 1].b.len() == 7); - assert(foo_parents[y - 2].foos[y - 1].b[0] == 11); - - let (first_elem, rest_of_slice) = foo_parents[y - 2].foos[y - 1].b.pop_front(); - foo_parents[y - 2].foos[y - 1].b = rest_of_slice; - assert(foo_parents[y - 2].foos[y - 1].b.len() == 6); - assert(first_elem == 11); - - foo_parents[y - 2].foos[y - 1].b = foo_parents[y - 2].foos[y - 1].b.insert(2, 20); - assert(foo_parents[y - 2].foos[y - 1].b.len() == 7); - assert(foo_parents[y - 2].foos[y - 1].b[y - 1] == 20); - assert(foo_parents[y - 2].foos[y - 1].b[y] == 5000); - assert(foo_parents[y - 2].foos[y - 1].b[6] == 101); - - let (rest_of_slice, removed_elem) = foo_parents[y - 2].foos[y - 1].b.remove(3); - foo_parents[y - 2].foos[y - 1].b = rest_of_slice; - assert(removed_elem == 5000); - assert(foo_parents[y - 2].foos[y - 1].b.len() == 6); - assert(foo_parents[y - 2].foos[y - 1].b[2] == 20); - assert(foo_parents[y - 2].foos[y - 1].b[3] == 21); -} diff --git a/tooling/nargo_cli/tests/execution_success/strings/src/main.nr b/tooling/nargo_cli/tests/execution_success/strings/src/main.nr deleted file mode 100644 index cb2218837f3..00000000000 --- a/tooling/nargo_cli/tests/execution_success/strings/src/main.nr +++ /dev/null @@ -1,73 +0,0 @@ -use dep::std; -// Test 
global string literals -global HELLO_WORLD = "hello world"; - -fn main(message: pub str<11>, y: Field, hex_as_string: str<4>, hex_as_field: Field) { - let mut bad_message = "hello world"; - - assert(message == "hello world"); - assert(message == HELLO_WORLD); - let x = 10; - let z = x * 5; - std::println(10); - - std::println(z); // x * 5 in println not yet supported - std::println(x); - - let array = [1, 2, 3, 5, 8]; - assert(y == 5); // Change to y != 5 to see how the later print statements are not called - std::println(array); - - bad_message = "hell\0\"world"; - std::println(bad_message); - assert(message != bad_message); - - let hash = std::hash::pedersen_commitment([x]); - std::println(hash); - - assert(hex_as_string == "0x41"); - // assert(hex_as_string != 0x41); This will fail with a type mismatch between str[4] and Field - assert(hex_as_field == 0x41); -} - -#[test] -fn test_prints_strings() { - let message = "hello world!"; - - std::println(message); - std::println("goodbye world"); -} - -#[test] -fn test_prints_array() { - let array = [1, 2, 3, 5, 8]; - - let s = Test { a: 1, b: 2, c: [3, 4] }; - std::println(s); - - std::println(array); - - let hash = std::hash::pedersen_commitment(array); - std::println(hash); -} - -fn failed_constraint(hex_as_field: Field) { - // TODO(#2116): Note that `println` will not work if a failed constraint can be - // evaluated at compile time. - // When this method is called from a test method or with constant values - // a `Failed constraint` compile error will be caught before this `println` - // is executed as the input will be a constant. - std::println(hex_as_field); - assert(hex_as_field != 0x41); -} - -#[test] -fn test_failed_constraint() { - failed_constraint(0x41); -} - -struct Test { - a: Field, - b: Field, - c: [Field; 2], -} diff --git a/tooling/nargo_cli/tests/execution_success/submodules/src/main.nr b/tooling/nargo_cli/tests/execution_success/submodules/src/main.nr deleted file mode 100644 index 813f3a26a20..00000000000 --- a/tooling/nargo_cli/tests/execution_success/submodules/src/main.nr +++ /dev/null @@ -1,15 +0,0 @@ -use mysubmodule::my_helper; - -fn main(x: u1, y: u1) { - my_helper(); - mysubmodule::my_bool_or(x, y); -} - -mod mysubmodule { - pub fn my_bool_or(x: u1, y: u1) { - assert(x | y == 1); - } - - pub fn my_helper() {} -} - diff --git a/tooling/nargo_cli/tests/execution_success/workspace_default_member/b/src/main.nr b/tooling/nargo_cli/tests/execution_success/workspace_default_member/b/src/main.nr deleted file mode 100644 index 6e170de75fc..00000000000 --- a/tooling/nargo_cli/tests/execution_success/workspace_default_member/b/src/main.nr +++ /dev/null @@ -1,3 +0,0 @@ -fn main(x : Field, y : pub Field) { - assert(x != y); -} diff --git a/tooling/nargo_cli/tests/noir_test_failure/should_fail_mismatch/src/main.nr b/tooling/nargo_cli/tests/noir_test_failure/should_fail_mismatch/src/main.nr deleted file mode 100644 index a677b10b0cd..00000000000 --- a/tooling/nargo_cli/tests/noir_test_failure/should_fail_mismatch/src/main.nr +++ /dev/null @@ -1,14 +0,0 @@ -#[test(should_fail_with = "Not equal")] -fn test_different_string() { - assert_eq(0, 1, "Different string"); -} -// The assert message has a space -#[test(should_fail_with = "Not equal")] -fn test_with_extra_space() { - assert_eq(0, 1, "Not equal "); -} -// The assert message has a space -#[test(should_fail_with = "Not equal")] -fn test_runtime_mismatch() { - assert_eq(dep::std::hash::pedersen_commitment([27])[0], 0, "Not equal "); -} diff --git 
a/tooling/nargo_cli/tests/rebuild.sh b/tooling/nargo_cli/tests/rebuild.sh deleted file mode 100755 index d89402fb1cf..00000000000 --- a/tooling/nargo_cli/tests/rebuild.sh +++ /dev/null @@ -1,56 +0,0 @@ -#!/bin/bash -set -e - -excluded_dirs=("workspace" "workspace_default_member") - -current_dir=$(pwd) -base_path="$current_dir/execution_success" - -# Clear the acir_artifacts directory of any existing artifacts -rm -rf $current_dir/acir_artifacts -mkdir -p $current_dir/acir_artifacts - -# Loop over every directory -for dir in $base_path/*; do - if [[ ! -d $dir ]]; then - continue - fi - - dir_name=$(basename "$dir") - - if [[ ! " ${excluded_dirs[@]} " =~ " ${dir_name} " ]]; then - if [[ ! -d "$current_dir/acir_artifacts/$dir_name" ]]; then - mkdir -p $current_dir/acir_artifacts/$dir_name - fi - - cd $dir - if [ -d ./target/ ]; then - rm -r ./target/ - fi - nargo compile && nargo execute witness - - # Rename witness.tr to witness.gz - if [ -f ./target/witness.tr ]; then - mv ./target/witness.tr ./target/witness.gz - fi - - # Extract bytecode field from JSON, base64 decode it, and save it to the target directory - if [ -f ./target/${dir_name}.json ]; then - jq -r '.bytecode' ./target/${dir_name}.json | base64 -d > ./target/acir.gz - fi - - # Delete the JSON file after extracting bytecode field - rm ./target/${dir_name}.json - - # Clear the target directory in acir_artifacts - if [ -d "$current_dir/acir_artifacts/$dir_name/target" ]; then - rm -r "$current_dir/acir_artifacts/$dir_name/target" - fi - mkdir $current_dir/acir_artifacts/$dir_name/target - - # Move the artifacts from the target directory to the corresponding directory in acir_artifacts - mv ./target/*.gz $current_dir/acir_artifacts/$dir_name/target/ - - cd $base_path - fi -done diff --git a/tooling/nargo_fmt/Cargo.toml b/tooling/nargo_fmt/Cargo.toml index 921c9893ab5..374413ac9f2 100644 --- a/tooling/nargo_fmt/Cargo.toml +++ b/tooling/nargo_fmt/Cargo.toml @@ -13,4 +13,4 @@ toml.workspace = true thiserror.workspace = true [dev-dependencies] -similar-asserts = "1.5.0" +similar-asserts.workspace = true diff --git a/tooling/nargo_fmt/build.rs b/tooling/nargo_fmt/build.rs index cd93866ece9..c356b403ae5 100644 --- a/tooling/nargo_fmt/build.rs +++ b/tooling/nargo_fmt/build.rs @@ -46,8 +46,8 @@ fn generate_formatter_tests(test_file: &mut File, test_data_dir: &Path) { .collect::>() .join("\n"); - let output_source_path = outputs_dir.join(file_name); - let output_source = std::fs::read_to_string(output_source_path).unwrap(); + let output_source_path = outputs_dir.join(file_name).display().to_string(); + let output_source = std::fs::read_to_string(output_source_path.clone()).unwrap(); write!( test_file, @@ -58,11 +58,14 @@ fn format_{test_name}() {{ let expected_output = r#"{output_source}"#; - let (parsed_module, errors) = noirc_frontend::parse_program(&input); + let (parsed_module, _errors) = noirc_frontend::parse_program(&input); let config = nargo_fmt::Config::of("{config}").unwrap(); let fmt_text = nargo_fmt::format(&input, parsed_module, &config); + if std::env::var("UPDATE_EXPECT").is_ok() {{ + std::fs::write("{output_source_path}", fmt_text.clone()).unwrap(); + }} similar_asserts::assert_eq!(fmt_text, expected_output); }} diff --git a/tooling/nargo_fmt/src/rewrite.rs b/tooling/nargo_fmt/src/rewrite.rs index c1ac585985e..6a95eba8759 100644 --- a/tooling/nargo_fmt/src/rewrite.rs +++ b/tooling/nargo_fmt/src/rewrite.rs @@ -1,5 +1,11 @@ mod array; +mod expr; mod infix; +mod parenthesized; +mod typ; pub(crate) use array::rewrite as array; 
+pub(crate) use expr::{rewrite as expr, rewrite_sub_expr as sub_expr}; pub(crate) use infix::rewrite as infix; +pub(crate) use parenthesized::rewrite as parenthesized; +pub(crate) use typ::rewrite as typ; diff --git a/tooling/nargo_fmt/src/rewrite/array.rs b/tooling/nargo_fmt/src/rewrite/array.rs index 9c49d827528..fc5b240f83e 100644 --- a/tooling/nargo_fmt/src/rewrite/array.rs +++ b/tooling/nargo_fmt/src/rewrite/array.rs @@ -2,7 +2,7 @@ use noirc_frontend::{hir::resolution::errors::Span, token::Token, Expression}; use crate::{ utils::{Expr, FindToken}, - visitor::FmtVisitor, + visitor::{expr::NewlineMode, FmtVisitor}, }; pub(crate) fn rewrite(mut visitor: FmtVisitor, array: Vec, array_span: Span) -> String { @@ -26,7 +26,7 @@ pub(crate) fn rewrite(mut visitor: FmtVisitor, array: Vec, array_spa let end = item_span.start(); let leading = visitor.slice(start..end).trim_matches(pattern); - let item = visitor.format_sub_expr(item); + let item = super::sub_expr(&visitor, visitor.shape(), item); let next_start = items.peek().map_or(end_position, |expr| expr.span.start()); let trailing = visitor.slice(item_span.end()..next_start); let offset = trailing @@ -80,6 +80,6 @@ pub(crate) fn rewrite(mut visitor: FmtVisitor, array: Vec, array_spa items_str.trim().into(), nested_indent, visitor.shape(), - true, + NewlineMode::IfContainsNewLineAndWidth, ) } diff --git a/tooling/nargo_fmt/src/rewrite/expr.rs b/tooling/nargo_fmt/src/rewrite/expr.rs new file mode 100644 index 00000000000..32d104f559b --- /dev/null +++ b/tooling/nargo_fmt/src/rewrite/expr.rs @@ -0,0 +1,155 @@ +use noirc_frontend::{token::Token, ArrayLiteral, Expression, ExpressionKind, Literal, UnaryOp}; + +use crate::visitor::{ + expr::{format_brackets, format_parens, NewlineMode}, + ExpressionType, FmtVisitor, Indent, Shape, +}; + +pub(crate) fn rewrite_sub_expr( + visitor: &FmtVisitor, + shape: Shape, + expression: Expression, +) -> String { + rewrite(visitor, expression, ExpressionType::SubExpression, shape) +} + +pub(crate) fn rewrite( + visitor: &FmtVisitor, + Expression { kind, span }: Expression, + expr_type: ExpressionType, + shape: Shape, +) -> String { + match kind { + ExpressionKind::Block(block) => { + let mut visitor = visitor.fork(); + visitor.visit_block(block, span); + visitor.finish() + } + ExpressionKind::Prefix(prefix) => { + let op = match prefix.operator { + UnaryOp::Minus => "-", + UnaryOp::Not => "!", + UnaryOp::MutableReference => "&mut ", + UnaryOp::Dereference { implicitly_added } => { + if implicitly_added { + "" + } else { + "*" + } + } + }; + + format!("{op}{}", rewrite_sub_expr(visitor, shape, prefix.rhs)) + } + ExpressionKind::Cast(cast) => { + format!("{} as {}", rewrite_sub_expr(visitor, shape, cast.lhs), cast.r#type) + } + kind @ ExpressionKind::Infix(_) => { + super::infix(visitor.fork(), Expression { kind, span }, shape) + } + ExpressionKind::Call(call_expr) => { + let args_span = + visitor.span_before(call_expr.func.span.end()..span.end(), Token::LeftParen); + + let callee = rewrite_sub_expr(visitor, shape, *call_expr.func); + let args = format_parens( + visitor.config.fn_call_width.into(), + visitor.fork(), + shape, + false, + call_expr.arguments, + args_span, + true, + NewlineMode::IfContainsNewLineAndWidth, + ); + + format!("{callee}{args}") + } + ExpressionKind::MethodCall(method_call_expr) => { + let args_span = visitor.span_before( + method_call_expr.method_name.span().end()..span.end(), + Token::LeftParen, + ); + + let object = rewrite_sub_expr(visitor, shape, method_call_expr.object); + let method = 
method_call_expr.method_name.to_string(); + let args = format_parens( + visitor.config.fn_call_width.into(), + visitor.fork(), + shape, + false, + method_call_expr.arguments, + args_span, + true, + NewlineMode::IfContainsNewLineAndWidth, + ); + + format!("{object}.{method}{args}") + } + ExpressionKind::MemberAccess(member_access_expr) => { + let lhs_str = rewrite_sub_expr(visitor, shape, member_access_expr.lhs); + format!("{}.{}", lhs_str, member_access_expr.rhs) + } + ExpressionKind::Index(index_expr) => { + let index_span = visitor + .span_before(index_expr.collection.span.end()..span.end(), Token::LeftBracket); + + let collection = rewrite_sub_expr(visitor, shape, index_expr.collection); + let index = format_brackets(visitor.fork(), false, vec![index_expr.index], index_span); + + format!("{collection}{index}") + } + ExpressionKind::Tuple(exprs) => format_parens( + None, + visitor.fork(), + shape, + exprs.len() == 1, + exprs, + span, + true, + NewlineMode::Normal, + ), + ExpressionKind::Literal(literal) => match literal { + Literal::Integer(_, _) + | Literal::Bool(_) + | Literal::Str(_) + | Literal::RawStr(..) + | Literal::FmtStr(_) => visitor.slice(span).to_string(), + Literal::Array(ArrayLiteral::Repeated { repeated_element, length }) => { + let repeated = rewrite_sub_expr(visitor, shape, *repeated_element); + let length = rewrite_sub_expr(visitor, shape, *length); + + format!("[{repeated}; {length}]") + } + Literal::Array(ArrayLiteral::Standard(exprs)) => { + super::array(visitor.fork(), exprs, span) + } + Literal::Unit => "()".to_string(), + }, + ExpressionKind::Parenthesized(sub_expr) => { + super::parenthesized(visitor, shape, span, *sub_expr) + } + ExpressionKind::Constructor(constructor) => { + let type_name = visitor.slice(span.start()..constructor.type_name.span().end()); + let fields_span = visitor + .span_before(constructor.type_name.span().end()..span.end(), Token::LeftBrace); + + visitor.format_struct_lit(type_name, fields_span, *constructor) + } + ExpressionKind::If(if_expr) => { + let allow_single_line = expr_type == ExpressionType::SubExpression; + + if allow_single_line { + let mut visitor = visitor.fork(); + visitor.indent = Indent::default(); + if let Some(line) = visitor.format_if_single_line(*if_expr.clone()) { + return line; + } + } + + visitor.format_if(*if_expr) + } + ExpressionKind::Lambda(_) | ExpressionKind::Variable(_) => visitor.slice(span).to_string(), + ExpressionKind::Error => unreachable!(), + } +} diff --git a/tooling/nargo_fmt/src/rewrite/infix.rs b/tooling/nargo_fmt/src/rewrite/infix.rs index 0fbfa07a841..15f5fe23aae 100644 --- a/tooling/nargo_fmt/src/rewrite/infix.rs +++ b/tooling/nargo_fmt/src/rewrite/infix.rs @@ -3,8 +3,9 @@ use std::iter::zip; use noirc_frontend::{Expression, ExpressionKind}; use crate::{ + rewrite, utils::{first_line_width, is_single_line}, - visitor::{ExpressionType, FmtVisitor, Shape}, + visitor::{FmtVisitor, Shape}, }; pub(crate) fn rewrite(visitor: FmtVisitor, expr: Expression, shape: Shape) -> String { @@ -16,9 +17,9 @@ pub(crate) fn rewrite(visitor: FmtVisitor, expr: Expression, shape: Shape) -> St format!( "{} {} {}", - visitor.format_sub_expr(infix.lhs), + rewrite::sub_expr(&visitor, shape, infix.lhs), infix.operator.contents.as_string(), - visitor.format_sub_expr(infix.rhs) + rewrite::sub_expr(&visitor, shape, infix.rhs) ) } } @@ -87,10 +88,10 @@ pub(crate) fn flatten( } _ => { let rewrite = if result.is_empty() { - visitor.format_expr(node.clone(), ExpressionType::SubExpression) + rewrite::sub_expr(&visitor, 
visitor.shape(), node.clone()) } else { visitor.indent.block_indent(visitor.config); - visitor.format_expr(node.clone(), ExpressionType::SubExpression) + rewrite::sub_expr(&visitor, visitor.shape(), node.clone()) }; result.push(rewrite); diff --git a/tooling/nargo_fmt/src/rewrite/parenthesized.rs b/tooling/nargo_fmt/src/rewrite/parenthesized.rs new file mode 100644 index 00000000000..3926b52cb73 --- /dev/null +++ b/tooling/nargo_fmt/src/rewrite/parenthesized.rs @@ -0,0 +1,67 @@ +use noirc_frontend::{hir::resolution::errors::Span, Expression, ExpressionKind}; + +use crate::visitor::{FmtVisitor, Shape}; + +pub(crate) fn rewrite( + visitor: &FmtVisitor<'_>, + shape: Shape, + mut span: Span, + mut sub_expr: Expression, +) -> String { + let remove_nested_parens = visitor.config.remove_nested_parens; + + let mut leading; + let mut trailing; + + loop { + let leading_span = span.start() + 1..sub_expr.span.start(); + let trailing_span = sub_expr.span.end()..span.end() - 1; + + leading = visitor.format_comment(leading_span.into()); + trailing = visitor.format_comment(trailing_span.into()); + + if let ExpressionKind::Parenthesized(ref sub_sub_expr) = sub_expr.kind { + if remove_nested_parens && leading.is_empty() && trailing.is_empty() { + span = sub_expr.span; + sub_expr = *sub_sub_expr.clone(); + continue; + } + } + + break; + } + + if !leading.contains("//") && !trailing.contains("//") { + let sub_expr = super::sub_expr(visitor, shape, sub_expr); + format!("({leading}{sub_expr}{trailing})") + } else { + let mut visitor = visitor.fork(); + + let indent = visitor.indent.to_string_with_newline(); + visitor.indent.block_indent(visitor.config); + let nested_indent = visitor.indent.to_string_with_newline(); + + let sub_expr = super::sub_expr(&visitor, shape, sub_expr); + + let mut result = String::new(); + result.push('('); + + if !leading.is_empty() { + result.push_str(&nested_indent); + result.push_str(&leading); + } + + result.push_str(&nested_indent); + result.push_str(&sub_expr); + + if !trailing.is_empty() { + result.push_str(&nested_indent); + result.push_str(&trailing); + } + + result.push_str(&indent); + result.push(')'); + + result + } +} diff --git a/tooling/nargo_fmt/src/rewrite/typ.rs b/tooling/nargo_fmt/src/rewrite/typ.rs new file mode 100644 index 00000000000..4c6411e92b8 --- /dev/null +++ b/tooling/nargo_fmt/src/rewrite/typ.rs @@ -0,0 +1,70 @@ +use noirc_frontend::{UnresolvedType, UnresolvedTypeData}; + +use crate::{ + utils::span_is_empty, + visitor::{FmtVisitor, Shape}, +}; + +pub(crate) fn rewrite(visitor: &FmtVisitor, _shape: Shape, typ: UnresolvedType) -> String { + match typ.typ { + UnresolvedTypeData::Array(length, element) => { + let typ = rewrite(visitor, _shape, *element); + if let Some(length) = length { + let length = visitor.slice(length.span()); + format!("[{typ}; {length}]") + } else { + format!("[{typ}]") + } + } + UnresolvedTypeData::Parenthesized(typ) => { + let typ = rewrite(visitor, _shape, *typ); + format!("({typ})") + } + UnresolvedTypeData::MutableReference(typ) => { + let typ = rewrite(visitor, _shape, *typ); + format!("&mut {typ}") + } + UnresolvedTypeData::Tuple(mut types) => { + if types.len() == 1 { + let typ = types.pop().unwrap(); + let typ = rewrite(visitor, _shape, typ); + + format!("({typ},)") + } else { + let types: Vec<_> = + types.into_iter().map(|typ| rewrite(visitor, _shape, typ)).collect(); + let types = types.join(", "); + format!("({types})") + } + } + UnresolvedTypeData::Function(args, return_type, env) => { + let env = if 
span_is_empty(env.span.unwrap()) { + "".into() + } else { + let ty = rewrite(visitor, _shape, *env); + format!("[{ty}]") + }; + + let args = args + .into_iter() + .map(|arg| rewrite(visitor, _shape, arg)) + .collect::<Vec<_>>() + .join(", "); + + let return_type = rewrite(visitor, _shape, *return_type); + + format!("fn{env}({args}) -> {return_type}") + } + UnresolvedTypeData::Unspecified => todo!(), + UnresolvedTypeData::FieldElement + | UnresolvedTypeData::Integer(_, _) + | UnresolvedTypeData::Bool + | UnresolvedTypeData::Named(_, _) + | UnresolvedTypeData::Unit + | UnresolvedTypeData::Expression(_) + | UnresolvedTypeData::String(_) + | UnresolvedTypeData::FormatString(_, _) + | UnresolvedTypeData::TraitAsType(_, _) => visitor.slice(typ.span.unwrap()).into(), + UnresolvedTypeData::Error => unreachable!(), + } +} diff --git a/tooling/nargo_fmt/src/utils.rs b/tooling/nargo_fmt/src/utils.rs index 13938079e68..1160f01972f 100644 --- a/tooling/nargo_fmt/src/utils.rs +++ b/tooling/nargo_fmt/src/utils.rs @@ -1,4 +1,5 @@ -use crate::visitor::FmtVisitor; +use crate::rewrite; +use crate::visitor::{FmtVisitor, Shape}; use noirc_frontend::hir::resolution::errors::Span; use noirc_frontend::lexer::Lexer; use noirc_frontend::token::Token; @@ -40,15 +41,22 @@ impl Expr { pub(crate) struct Exprs<'me, T> { pub(crate) visitor: &'me FmtVisitor<'me>, + shape: Shape, pub(crate) elements: std::iter::Peekable<std::vec::IntoIter<T>>, pub(crate) last_position: u32, pub(crate) end_position: u32, } impl<'me, T: Item> Exprs<'me, T> { - pub(crate) fn new(visitor: &'me FmtVisitor<'me>, span: Span, elements: Vec<T>) -> Self { + pub(crate) fn new( + visitor: &'me FmtVisitor<'me>, + shape: Shape, + span: Span, + elements: Vec<T>, + ) -> Self { Self { visitor, + shape, last_position: span.start() + 1, /*(*/ end_position: span.end() - 1, /*)*/ elements: elements.into_iter().peekable(), @@ -70,7 +78,7 @@ impl<T: Item> Iterator for Exprs<'_, T> { let next_start = self.elements.peek().map_or(self.end_position, |expr| expr.start()); let (leading, different_line) = self.leading(start, end); - let expr = element.format(self.visitor); + let expr = element.format(self.visitor, self.shape); let trailing = self.trailing(element_span.end(), next_start, is_last); Expr { leading, value: expr, trailing, different_line }.into() @@ -112,7 +120,7 @@ impl<'me, T> Exprs<'me, T> { pub(crate) trait FindToken { fn find_token(&self, token: Token) -> Option<Span>; - fn find_token_with(&self, f: impl Fn(&Token) -> bool) -> Option<u32>; + fn find_token_with(&self, f: impl Fn(&Token) -> bool) -> Option<Span>; } impl FindToken for str { @@ -120,11 +128,11 @@ impl FindToken for str { Lexer::new(self).flatten().find_map(|it| (it.token() == &token).then(|| it.to_span())) } - fn find_token_with(&self, f: impl Fn(&Token) -> bool) -> Option<u32> { + fn find_token_with(&self, f: impl Fn(&Token) -> bool) -> Option<Span> { Lexer::new(self) .skip_comments(false) .flatten() - .find_map(|spanned| f(spanned.token()).then(|| spanned.to_span().end())) + .find_map(|spanned| f(spanned.token()).then(|| spanned.to_span())) } } @@ -134,7 +142,7 @@ pub(crate) fn find_comment_end(slice: &str, is_last: bool) -> usize { .find_token_with(|token| { matches!(token, Token::LineComment(_, _) | Token::BlockComment(_, _)) }) - .map(|index| index as usize) + .map(|index| index.end() as usize) .unwrap_or(slice.len()) } @@ -196,7 +204,7 @@ pub(crate) fn count_newlines(slice: &str) -> usize { pub(crate) trait Item { fn span(&self) -> Span; - fn format(self, visitor: &FmtVisitor) -> String; + fn format(self, visitor: &FmtVisitor, shape: Shape) -> String; fn 
start(&self) -> u32 { self.span().start() @@ -212,8 +220,8 @@ impl Item for Expression { self.span } - fn format(self, visitor: &FmtVisitor) -> String { - visitor.format_sub_expr(self) + fn format(self, visitor: &FmtVisitor, shape: Shape) -> String { + rewrite::sub_expr(visitor, shape, self) } } @@ -223,11 +231,11 @@ impl Item for (Ident, Expression) { (name.span().start()..value.span.end()).into() } - fn format(self, visitor: &FmtVisitor) -> String { + fn format(self, visitor: &FmtVisitor, shape: Shape) -> String { let (name, expr) = self; let name = name.0.contents; - let expr = visitor.format_sub_expr(expr); + let expr = rewrite::sub_expr(visitor, shape, expr); if name == expr { name @@ -242,13 +250,14 @@ impl Item for Param { self.span } - fn format(self, visitor: &FmtVisitor) -> String { + fn format(self, visitor: &FmtVisitor, shape: Shape) -> String { let visibility = match self.visibility { Visibility::Public => "pub ", Visibility::Private => "", + Visibility::DataBus => "call_data", }; let pattern = visitor.slice(self.pattern.span()); - let ty = visitor.slice(self.typ.span.unwrap()); + let ty = rewrite::typ(visitor, shape, self.typ); format!("{pattern}: {visibility}{ty}") } @@ -259,7 +268,7 @@ impl Item for Ident { self.span() } - fn format(self, visitor: &FmtVisitor) -> String { + fn format(self, visitor: &FmtVisitor, _shape: Shape) -> String { visitor.slice(self.span()).into() } } @@ -268,6 +277,10 @@ pub(crate) fn first_line_width(exprs: &str) -> usize { exprs.lines().next().map_or(0, |line: &str| line.chars().count()) } +pub(crate) fn last_line_width(s: &str) -> usize { + s.rsplit('\n').next().unwrap_or("").chars().count() +} + pub(crate) fn is_single_line(s: &str) -> bool { !s.chars().any(|c| c == '\n') } @@ -275,3 +288,15 @@ pub(crate) fn last_line_contains_single_line_comment(s: &str) -> bool { s.lines().last().map_or(false, |line| line.contains("//")) } + +pub(crate) fn last_line_used_width(s: &str, offset: usize) -> usize { + if s.contains('\n') { + last_line_width(s) + } else { + offset + s.chars().count() + } +} + +pub(crate) fn span_is_empty(span: Span) -> bool { + span.start() == span.end() +} diff --git a/tooling/nargo_fmt/src/visitor.rs b/tooling/nargo_fmt/src/visitor.rs index cf3b3a41e8a..85989db79d8 100644 --- a/tooling/nargo_fmt/src/visitor.rs +++ b/tooling/nargo_fmt/src/visitor.rs @@ -30,12 +30,16 @@ impl<'me> FmtVisitor<'me> { } } + pub(crate) fn budget(&self, used_width: usize) -> usize { + self.config.max_width.saturating_sub(used_width) + } + pub(crate) fn slice(&self, span: impl Into<Span>) -> &'me str { let span = span.into(); &self.source[span.start() as usize..span.end() as usize] } - fn span_after(&self, span: impl Into<Span>, token: Token) -> Span { + pub(crate) fn span_after(&self, span: impl Into<Span>, token: Token) -> Span { let span = span.into(); let slice = self.slice(span); @@ -44,7 +48,7 @@ impl<'me> FmtVisitor<'me> { (span.start() + offset..span.end()).into() } - fn span_before(&self, span: impl Into<Span>, token: Token) -> Span { + pub(crate) fn span_before(&self, span: impl Into<Span>, token: Token) -> Span { let span = span.into(); let slice = self.slice(span); @@ -253,10 +257,12 @@ impl Indent { self.block_indent } + #[track_caller] pub(crate) fn block_indent(&mut self, config: &Config) { self.block_indent += config.tab_spaces; } + #[track_caller] pub(crate) fn block_unindent(&mut self, config: &Config) { self.block_indent -= config.tab_spaces; } diff --git a/tooling/nargo_fmt/src/visitor/expr.rs 
b/tooling/nargo_fmt/src/visitor/expr.rs index 8492fd5c05d..586d9583e32 100644 --- a/tooling/nargo_fmt/src/visitor/expr.rs +++ b/tooling/nargo_fmt/src/visitor/expr.rs @@ -1,10 +1,9 @@ use noirc_frontend::{ - hir::resolution::errors::Span, lexer::Lexer, token::Token, ArrayLiteral, BlockExpression, - ConstructorExpression, Expression, ExpressionKind, IfExpression, Literal, Statement, - StatementKind, UnaryOp, + hir::resolution::errors::Span, lexer::Lexer, token::Token, BlockExpression, + ConstructorExpression, Expression, ExpressionKind, IfExpression, Statement, StatementKind, }; -use super::{ExpressionType, FmtVisitor, Indent, Shape}; +use super::{ExpressionType, FmtVisitor, Shape}; use crate::{ rewrite, utils::{self, first_line_width, Expr, FindToken, Item}, @@ -14,199 +13,14 @@ use crate::{ impl FmtVisitor<'_> { pub(crate) fn visit_expr(&mut self, expr: Expression, expr_type: ExpressionType) { let span = expr.span; - let rewrite = self.format_expr(expr, expr_type); + let rewrite = rewrite::expr(self, expr, expr_type, self.shape()); self.push_rewrite(rewrite, span); self.last_position = span.end(); } - pub(crate) fn format_sub_expr(&self, expression: Expression) -> String { - self.format_expr(expression, ExpressionType::SubExpression) - } - - pub(crate) fn format_expr( - &self, - Expression { kind, mut span }: Expression, - expr_type: ExpressionType, - ) -> String { - match kind { - ExpressionKind::Block(block) => { - let mut visitor = self.fork(); - visitor.visit_block(block, span); - visitor.buffer - } - ExpressionKind::Prefix(prefix) => { - let op = match prefix.operator { - UnaryOp::Minus => "-", - UnaryOp::Not => "!", - UnaryOp::MutableReference => "&mut ", - UnaryOp::Dereference { implicitly_added } => { - if implicitly_added { - "" - } else { - "*" - } - } - }; - - format!("{op}{}", self.format_sub_expr(prefix.rhs)) - } - ExpressionKind::Cast(cast) => { - format!("{} as {}", self.format_sub_expr(cast.lhs), cast.r#type) - } - kind @ ExpressionKind::Infix(_) => { - let shape = self.shape(); - rewrite::infix(self.fork(), Expression { kind, span }, shape) - } - ExpressionKind::Call(call_expr) => { - let args_span = - self.span_before(call_expr.func.span.end()..span.end(), Token::LeftParen); - - let callee = self.format_sub_expr(*call_expr.func); - let args = format_parens( - self.config.fn_call_width.into(), - self.fork(), - false, - call_expr.arguments, - args_span, - ); - - format!("{callee}{args}") - } - ExpressionKind::MethodCall(method_call_expr) => { - let args_span = self.span_before( - method_call_expr.method_name.span().end()..span.end(), - Token::LeftParen, - ); - - let object = self.format_sub_expr(method_call_expr.object); - let method = method_call_expr.method_name.to_string(); - let args = format_parens( - self.config.fn_call_width.into(), - self.fork(), - false, - method_call_expr.arguments, - args_span, - ); - - format!("{object}.{method}{args}") - } - ExpressionKind::MemberAccess(member_access_expr) => { - let lhs_str = self.format_sub_expr(member_access_expr.lhs); - format!("{}.{}", lhs_str, member_access_expr.rhs) - } - ExpressionKind::Index(index_expr) => { - let index_span = self - .span_before(index_expr.collection.span.end()..span.end(), Token::LeftBracket); - - let collection = self.format_sub_expr(index_expr.collection); - let index = format_brackets(self.fork(), false, vec![index_expr.index], index_span); - - format!("{collection}{index}") - } - ExpressionKind::Tuple(exprs) => { - format_parens(None, self.fork(), exprs.len() == 1, exprs, span) - } - 
ExpressionKind::Literal(literal) => match literal { - Literal::Integer(_) | Literal::Bool(_) | Literal::Str(_) | Literal::FmtStr(_) => { - self.slice(span).to_string() - } - Literal::Array(ArrayLiteral::Repeated { repeated_element, length }) => { - let repeated = self.format_sub_expr(*repeated_element); - let length = self.format_sub_expr(*length); - - format!("[{repeated}; {length}]") - } - Literal::Array(ArrayLiteral::Standard(exprs)) => { - rewrite::array(self.fork(), exprs, span) - } - Literal::Unit => "()".to_string(), - }, - ExpressionKind::Parenthesized(mut sub_expr) => { - let remove_nested_parens = self.config.remove_nested_parens; - - let mut leading; - let mut trailing; - - loop { - let leading_span = span.start() + 1..sub_expr.span.start(); - let trailing_span = sub_expr.span.end()..span.end() - 1; - - leading = self.format_comment(leading_span.into()); - trailing = self.format_comment(trailing_span.into()); - - if let ExpressionKind::Parenthesized(ref sub_sub_expr) = sub_expr.kind { - if remove_nested_parens && leading.is_empty() && trailing.is_empty() { - span = sub_expr.span; - sub_expr = sub_sub_expr.clone(); - continue; - } - } - - break; - } - - if !leading.contains("//") && !trailing.contains("//") { - let sub_expr = self.format_sub_expr(*sub_expr); - format!("({leading}{sub_expr}{trailing})") - } else { - let mut visitor = self.fork(); - - let indent = visitor.indent.to_string_with_newline(); - visitor.indent.block_indent(self.config); - let nested_indent = visitor.indent.to_string_with_newline(); - - let sub_expr = visitor.format_sub_expr(*sub_expr); - - let mut result = String::new(); - result.push('('); - - if !leading.is_empty() { - result.push_str(&nested_indent); - result.push_str(&leading); - } - - result.push_str(&nested_indent); - result.push_str(&sub_expr); - - if !trailing.is_empty() { - result.push_str(&nested_indent); - result.push_str(&trailing); - } - - result.push_str(&indent); - result.push(')'); - - result - } - } - ExpressionKind::Constructor(constructor) => { - let type_name = self.slice(span.start()..constructor.type_name.span().end()); - let fields_span = self - .span_before(constructor.type_name.span().end()..span.end(), Token::LeftBrace); - - self.format_struct_lit(type_name, fields_span, *constructor) - } - ExpressionKind::If(if_expr) => { - let allow_single_line = expr_type == ExpressionType::SubExpression; - - if allow_single_line { - let mut visitor = self.fork(); - visitor.indent = Indent::default(); - if let Some(line) = visitor.format_if_single_line(*if_expr.clone()) { - return line; - } - } - - self.format_if(*if_expr) - } - ExpressionKind::Lambda(_) | ExpressionKind::Variable(_) => self.slice(span).to_string(), - ExpressionKind::Error => unreachable!(), - } - } - - fn format_if(&self, if_expr: IfExpression) -> String { - let condition_str = self.format_sub_expr(if_expr.condition); - let consequence_str = self.format_sub_expr(if_expr.consequence); + pub(crate) fn format_if(&self, if_expr: IfExpression) -> String { + let condition_str = rewrite::sub_expr(self, self.shape(), if_expr.condition); + let consequence_str = rewrite::sub_expr(self, self.shape(), if_expr.consequence); let mut result = format!("if {condition_str} {consequence_str}"); @@ -216,7 +30,7 @@ impl FmtVisitor<'_> { { self.format_if(*if_expr) } else { - self.format_expr(alternative, ExpressionType::Statement) + rewrite::expr(self, alternative, ExpressionType::Statement, self.shape()) }; result.push_str(" else "); @@ -226,9 +40,10 @@ impl FmtVisitor<'_> { result } - fn 
format_if_single_line(&self, if_expr: IfExpression) -> Option { - let condition_str = self.format_sub_expr(if_expr.condition); - let consequence_str = self.format_sub_expr(extract_simple_expr(if_expr.consequence)?); + pub(crate) fn format_if_single_line(&self, if_expr: IfExpression) -> Option { + let condition_str = rewrite::sub_expr(self, self.shape(), if_expr.condition); + let consequence_str = + rewrite::sub_expr(self, self.shape(), extract_simple_expr(if_expr.consequence)?); let if_str = if let Some(alternative) = if_expr.alternative { let alternative_str = if let Some(ExpressionKind::If(_)) = @@ -236,7 +51,12 @@ impl FmtVisitor<'_> { { return None; } else { - self.format_expr(extract_simple_expr(alternative)?, ExpressionType::Statement) + rewrite::expr( + self, + extract_simple_expr(alternative)?, + ExpressionType::Statement, + self.shape(), + ) }; format!("if {} {{ {} }} else {{ {} }}", condition_str, consequence_str, alternative_str) @@ -247,7 +67,7 @@ impl FmtVisitor<'_> { (if_str.len() <= self.config.single_line_if_else_max_width).then_some(if_str) } - fn format_struct_lit( + pub(crate) fn format_struct_lit( &self, type_name: &str, fields_span: Span, @@ -261,13 +81,15 @@ impl FmtVisitor<'_> { let nested_indent = visitor.shape(); let exprs: Vec<_> = - utils::Exprs::new(&visitor, fields_span, constructor.fields).collect(); + utils::Exprs::new(&visitor, nested_indent, fields_span, constructor.fields) + .collect(); let exprs = format_exprs( visitor.config, Tactic::HorizontalVertical, false, exprs, nested_indent, + true, ); visitor.indent.block_unindent(visitor.config); @@ -365,29 +187,32 @@ impl FmtVisitor<'_> { } } +// TODO: fixme #[allow(clippy::too_many_arguments)] pub(crate) fn format_seq( + shape: Shape, prefix: &str, suffix: &str, - mut visitor: FmtVisitor, + visitor: FmtVisitor, trailing_comma: bool, exprs: Vec, span: Span, tactic: Tactic, - soft_newline: bool, + mode: NewlineMode, + reduce: bool, ) -> String { - visitor.indent.block_indent(visitor.config); + let mut nested_indent = shape; + let shape = shape; - let nested_indent = visitor.shape(); - let exprs: Vec<_> = utils::Exprs::new(&visitor, span, exprs).collect(); - let exprs = format_exprs(visitor.config, tactic, trailing_comma, exprs, nested_indent); + nested_indent.indent.block_indent(visitor.config); - visitor.indent.block_unindent(visitor.config); + let exprs: Vec<_> = utils::Exprs::new(&visitor, nested_indent, span, exprs).collect(); + let exprs = format_exprs(visitor.config, tactic, trailing_comma, exprs, nested_indent, reduce); - wrap_exprs(prefix, suffix, exprs, nested_indent, visitor.shape(), soft_newline) + wrap_exprs(prefix, suffix, exprs, nested_indent, shape, mode) } -fn format_brackets( +pub(crate) fn format_brackets( visitor: FmtVisitor, trailing_comma: bool, exprs: Vec, @@ -395,6 +220,7 @@ fn format_brackets( ) -> String { let array_width = visitor.config.array_width; format_seq( + visitor.shape(), "[", "]", visitor, @@ -402,19 +228,25 @@ fn format_brackets( exprs, span, Tactic::LimitedHorizontalVertical(array_width), + NewlineMode::Normal, false, ) } -fn format_parens( +// TODO: fixme +#[allow(clippy::too_many_arguments)] +pub(crate) fn format_parens( max_width: Option, visitor: FmtVisitor, + shape: Shape, trailing_comma: bool, exprs: Vec, span: Span, + reduce: bool, + mode: NewlineMode, ) -> String { let tactic = max_width.map(Tactic::LimitedHorizontalVertical).unwrap_or(Tactic::Horizontal); - format_seq("(", ")", visitor, trailing_comma, exprs, span, tactic, false) + format_seq(shape, "(", ")", 
visitor, trailing_comma, exprs, span, tactic, mode, reduce) } fn format_exprs( @@ -423,11 +255,12 @@ fn format_exprs( trailing_comma: bool, exprs: Vec, shape: Shape, + reduce: bool, ) -> String { let mut result = String::new(); let indent_str = shape.indent.to_string(); - let tactic = tactic.definitive(&exprs, config.short_array_element_width_threshold); + let tactic = tactic.definitive(&exprs, config.short_array_element_width_threshold, reduce); let mut exprs = exprs.into_iter().enumerate().peekable(); let mut line_len = 0; let mut prev_expr_trailing_comment = false; @@ -500,18 +333,34 @@ fn format_exprs( result } +#[derive(PartialEq, Eq)] +pub(crate) enum NewlineMode { + IfContainsNewLine, + IfContainsNewLineAndWidth, + Normal, +} + pub(crate) fn wrap_exprs( prefix: &str, suffix: &str, exprs: String, nested_shape: Shape, shape: Shape, - soft_newline: bool, + newline_mode: NewlineMode, ) -> String { - let first_line_width = first_line_width(&exprs); - - let force_one_line = - if soft_newline { !exprs.contains('\n') } else { first_line_width <= shape.width }; + let mut force_one_line = if newline_mode == NewlineMode::IfContainsNewLine { + true + } else { + first_line_width(&exprs) <= shape.width + }; + + if matches!( + newline_mode, + NewlineMode::IfContainsNewLine | NewlineMode::IfContainsNewLineAndWidth + ) && force_one_line + { + force_one_line = !exprs.contains('\n'); + } if force_one_line { let allow_trailing_newline = exprs @@ -548,7 +397,8 @@ impl Tactic { fn definitive( self, exprs: &[Expr], - short_array_element_width_threshold: usize, + short_width_threshold: usize, + reduce: bool, ) -> DefinitiveTactic { let tactic = || { let has_single_line_comment = exprs.iter().any(|item| { @@ -582,7 +432,12 @@ impl Tactic { } }; - tactic().reduce(exprs, short_array_element_width_threshold) + let definitive_tactic = tactic(); + if reduce { + definitive_tactic.reduce(exprs, short_width_threshold) + } else { + definitive_tactic + } } } diff --git a/tooling/nargo_fmt/src/visitor/item.rs b/tooling/nargo_fmt/src/visitor/item.rs index 7774618afea..eb2086168ba 100644 --- a/tooling/nargo_fmt/src/visitor/item.rs +++ b/tooling/nargo_fmt/src/visitor/item.rs @@ -1,17 +1,23 @@ use noirc_frontend::{ + hir::resolution::errors::Span, parser::{Item, ItemKind}, - token::Token, + token::{Keyword, Token}, Distinctness, NoirFunction, ParsedModule, Visibility, }; -use crate::{utils::last_line_contains_single_line_comment, visitor::expr::format_seq}; +use crate::{ + rewrite, + utils::{last_line_contains_single_line_comment, last_line_used_width, FindToken}, + visitor::expr::{format_seq, NewlineMode}, +}; -use super::expr::Tactic::LimitedHorizontalVertical; +use super::{ + expr::Tactic::{HorizontalVertical, LimitedHorizontalVertical}, + Shape, +}; impl super::FmtVisitor<'_> { fn format_fn_before_block(&self, func: NoirFunction, start: u32) -> (String, bool) { - let tactic = LimitedHorizontalVertical(self.config.max_width); - let name_span = func.name_ident().span(); let func_span = func.span(); @@ -30,6 +36,7 @@ impl super::FmtVisitor<'_> { let params_span = params_open..params_end; let return_type_span = func.return_type().span; + let return_type = self.format_return_type(return_type_span, &func, func_span, params_end); let parameters = func.def.parameters; if !func.def.generics.is_empty() { @@ -39,7 +46,18 @@ impl super::FmtVisitor<'_> { let generics = func.def.generics; let span = (start..end).into(); - let generics = format_seq("<", ">", self.fork(), false, generics, span, tactic, false); + let generics = 
format_seq( + self.shape(), + "<", + ">", + self.fork(), + false, + generics, + span, + HorizontalVertical, + NewlineMode::IfContainsNewLine, + false, + ); result.push_str(&generics); } @@ -47,10 +65,52 @@ impl super::FmtVisitor<'_> { let parameters = if parameters.is_empty() { self.slice(params_span).into() } else { - format_seq("(", ")", self.fork(), false, parameters, params_span.into(), tactic, true) + let fn_start = result + .find_token_with(|token| { + matches!(token, Token::Keyword(Keyword::Fn | Keyword::Unconstrained)) + }) + .unwrap() + .start(); + + let slice = self.slice(fn_start..result.len() as u32); + let indent = self.indent; + let used_width = last_line_used_width(slice, indent.width()); + let overhead = if return_type.is_empty() { 2 } else { 3 }; // 2 = `()`, 3 = `() ` + let one_line_budget = self.budget(used_width + return_type.len() + overhead); + let shape = Shape { width: one_line_budget, indent }; + + let tactic = LimitedHorizontalVertical(one_line_budget); + + format_seq( + shape, + "(", + ")", + self.fork(), + false, + parameters, + params_span.into(), + tactic, + NewlineMode::IfContainsNewLine, + false, + ) }; result.push_str(¶meters); + result.push_str(&return_type); + + let maybe_comment = self.slice(params_end..func_span.start()); + + (result.trim_end().to_string(), last_line_contains_single_line_comment(maybe_comment)) + } + + fn format_return_type( + &self, + return_type_span: Option, + func: &NoirFunction, + func_span: Span, + params_end: u32, + ) -> String { + let mut result = String::new(); if let Some(span) = return_type_span { result.push_str(" -> "); @@ -63,7 +123,8 @@ impl super::FmtVisitor<'_> { result.push_str("pub "); } - result.push_str(self.slice(span)); + let typ = rewrite::typ(self, self.shape(), func.return_type()); + result.push_str(&typ); let slice = self.slice(span.end()..func_span.start()); if !slice.trim().is_empty() { @@ -73,9 +134,7 @@ impl super::FmtVisitor<'_> { result.push_str(self.slice(params_end..func_span.start())); } - let maybe_comment = self.slice(params_end..func_span.start()); - - (result.trim_end().to_string(), last_line_contains_single_line_comment(maybe_comment)) + result } pub(crate) fn visit_file(&mut self, module: ParsedModule) { diff --git a/tooling/nargo_fmt/src/visitor/stmt.rs b/tooling/nargo_fmt/src/visitor/stmt.rs index b6dd67323fa..800a8656ef3 100644 --- a/tooling/nargo_fmt/src/visitor/stmt.rs +++ b/tooling/nargo_fmt/src/visitor/stmt.rs @@ -4,7 +4,9 @@ use noirc_frontend::{ ConstrainKind, ConstrainStatement, ExpressionKind, ForRange, Statement, StatementKind, }; -use super::ExpressionType; +use crate::{rewrite, visitor::expr::wrap_exprs}; + +use super::{expr::NewlineMode, ExpressionType}; impl super::FmtVisitor<'_> { pub(crate) fn visit_stmts(&mut self, stmts: Vec) { @@ -25,36 +27,57 @@ impl super::FmtVisitor<'_> { StatementKind::Let(let_stmt) => { let let_str = self.slice(span.start()..let_stmt.expression.span.start()).trim_end(); - let expr_str = - self.format_expr(let_stmt.expression, ExpressionType::SubExpression); + + let expr_str = rewrite::sub_expr(self, self.shape(), let_stmt.expression); self.push_rewrite(format!("{let_str} {expr_str};"), span); } StatementKind::Constrain(ConstrainStatement(expr, message, kind)) => { + let mut nested_shape = self.shape(); + let shape = nested_shape; + + nested_shape.indent.block_indent(self.config); + let message = message.map_or(String::new(), |message| format!(", \"{message}\"")); - let constrain = match kind { + + let (callee, args) = match kind { ConstrainKind::Assert 
=> { - let assertion = self.format_sub_expr(expr); + let assertion = rewrite::sub_expr(self, nested_shape, expr); + let args = format!("{assertion}{message}"); - format!("assert({assertion}{message});") + ("assert", args) } ConstrainKind::AssertEq => { if let ExpressionKind::Infix(infix) = expr.kind { - let lhs = self.format_sub_expr(infix.lhs); - let rhs = self.format_sub_expr(infix.rhs); + let lhs = rewrite::sub_expr(self, nested_shape, infix.lhs); + let rhs = rewrite::sub_expr(self, nested_shape, infix.rhs); + + let args = format!("{lhs}, {rhs}{message}"); - format!("assert_eq({lhs}, {rhs}{message});") + ("assert_eq", args) } else { unreachable!() } } ConstrainKind::Constrain => { - let expr = self.format_sub_expr(expr); - format!("constrain {expr};") + let expr = rewrite::sub_expr(self, self.shape(), expr); + let constrain = format!("constrain {expr};"); + self.push_rewrite(constrain, span); + return; } }; + let args = wrap_exprs( + "(", + ")", + args, + nested_shape, + shape, + NewlineMode::IfContainsNewLineAndWidth, + ); + let constrain = format!("{callee}{args};"); + self.push_rewrite(constrain, span); } StatementKind::For(for_stmt) => { @@ -62,12 +85,12 @@ impl super::FmtVisitor<'_> { let range = match for_stmt.range { ForRange::Range(start, end) => format!( "{}..{}", - self.format_sub_expr(start), - self.format_sub_expr(end) + rewrite::sub_expr(self, self.shape(), start), + rewrite::sub_expr(self, self.shape(), end) ), - ForRange::Array(array) => self.format_sub_expr(array), + ForRange::Array(array) => rewrite::sub_expr(self, self.shape(), array), }; - let block = self.format_sub_expr(for_stmt.block); + let block = rewrite::sub_expr(self, self.shape(), for_stmt.block); let result = format!("for {identifier} in {range} {block}"); self.push_rewrite(result, span); diff --git a/tooling/nargo_fmt/tests/expected/call.nr b/tooling/nargo_fmt/tests/expected/call.nr index 3c9b36e8403..de78d7c4edb 100644 --- a/tooling/nargo_fmt/tests/expected/call.nr +++ b/tooling/nargo_fmt/tests/expected/call.nr @@ -1,25 +1,33 @@ fn foo() { my_function(10, some_value, another_func(20, 30)); - outer_function(some_function(), // Original inner function call + outer_function( + some_function(), // Original inner function call another_function() // Original inner function call ); - outer_function(some_function(), // Original inner function call + outer_function( + some_function(), // Original inner function call another_function() // Original inner function call ); - my_function(// Comment + my_function( + // Comment some_value, /* Multiline Comment */ - another_func(20, 30)); + another_func(20, 30) + ); - my_function(some_function(10, "arg1", another_function()), - another_func(20, some_function(), 30)); + my_function( + some_function(10, "arg1", another_function()), + another_func(20, some_function(), 30) + ); - outer_function(some_function(), - another_function(some_function(), some_value)); + outer_function( + some_function(), + another_function(some_function(), some_value) + ); assert_eq(x, y); @@ -31,6 +39,12 @@ fn foo() { assert(x == y); - assert(p4_affine.eq(Gaffine::new(6890855772600357754907169075114257697580319025794532037257385534741338397365, - 4338620300185947561074059802482547481416142213883829469920100239455078257889))); + assert( + p4_affine.eq( + Gaffine::new( + 6890855772600357754907169075114257697580319025794532037257385534741338397365, + 4338620300185947561074059802482547481416142213883829469920100239455078257889 + ) + ) + ); } diff --git a/tooling/nargo_fmt/tests/expected/contract.nr 
b/tooling/nargo_fmt/tests/expected/contract.nr index 0d9bd2f100d..0313da832a8 100644 --- a/tooling/nargo_fmt/tests/expected/contract.nr +++ b/tooling/nargo_fmt/tests/expected/contract.nr @@ -3,6 +3,8 @@ // Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. // Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum. contract Benchmarking { + use dep::protocol_types::abis::function_selector::FunctionSelector; + use dep::value_note::{ utils::{increment, decrement}, value_note::{VALUE_NOTE_LEN, ValueNote, ValueNoteMethods}, @@ -11,7 +13,6 @@ contract Benchmarking { use dep::aztec::{ context::{Context}, note::{utils as note_utils, note_getter_options::NoteGetterOptions, note_header::NoteHeader}, - selector::compute_selector, log::emit_unencrypted_log, state_vars::{map::Map, public_state::PublicState, set::Set}, types::type_serialization::field_serialization::{FieldSerializationMethods, FIELD_SERIALIZED_LEN}, @@ -57,9 +58,11 @@ contract Benchmarking { fn increment_balance(owner: Field, value: Field) { let current = storage.balances.at(owner).read(); storage.balances.at(owner).write(current + value); - let _callStackItem1 = context.call_public_function(context.this_address(), - compute_selector("broadcast(Field)"), - [owner]); + let _callStackItem1 = context.call_public_function( + context.this_address(), + FunctionSelector::from_signature("broadcast(Field)"), + [owner] + ); } // Est ultricies integer quis auctor elit sed. In nibh mauris cursus mattis molestie a iaculis. @@ -68,7 +71,12 @@ contract Benchmarking { emit_unencrypted_log(&mut context, storage.balances.at(owner).read()); } - unconstrained fn compute_note_hash_and_nullifier(contract_address: Field, nonce: Field, storage_slot: Field, preimage: [Field; VALUE_NOTE_LEN]) -> [Field; 4] { + unconstrained fn compute_note_hash_and_nullifier( + contract_address: AztecAddress, + nonce: Field, + storage_slot: Field, + preimage: [Field; VALUE_NOTE_LEN] + ) -> [Field; 4] { let note_header = NoteHeader::new(contract_address, nonce, storage_slot); note_utils::compute_note_hash_and_nullifier(ValueNoteMethods, note_header, preimage) } diff --git a/tooling/nargo_fmt/tests/expected/fn.nr b/tooling/nargo_fmt/tests/expected/fn.nr index 5bca2dd8bb1..0088dba6a8f 100644 --- a/tooling/nargo_fmt/tests/expected/fn.nr +++ b/tooling/nargo_fmt/tests/expected/fn.nr @@ -28,6 +28,36 @@ fn main( initial_call_stack_pointer: u64 ) -> pub ExecutionResult {} -fn apply_binary_field_op(lhs: RegisterIndex, rhs: RegisterIndex, result: RegisterIndex, op: u8, registers: &mut Registers) -> bool {} +fn apply_binary_field_op( + lhs: RegisterIndex, + rhs: RegisterIndex, + result: RegisterIndex, + op: u8, + registers: &mut Registers +) -> bool {} -fn main() -> distinct pub [Field;2] {} +fn main() -> distinct pub [Field; 2] {} + +fn ret_normal_lambda1() -> ((fn() -> Field)) {} + +fn ret_normal_lambda1() -> fn() -> Field {} + +fn ret_closure1() -> fn[(Field,)]() -> Field {} + +fn ret_closure2() -> fn[(Field, Field)]() -> Field {} + +fn ret_closure3() -> fn[(u32, u64)]() -> u64 {} + +fn make_counter() -> fn[(&mut Field,)]() -> Field {} + +fn get_some(generator: fn[Env]() -> Field) -> [Field; 5] {} + +fn main( + message: [u8; 10], + message_field: Field, + pub_key_x: Field, + pub_key_y: Field, + signature: [u8; 64] +) {} + +pub fn from_baz(x: [Field; crate::foo::MAGIC_NUMBER]) {} diff --git a/tooling/nargo_fmt/tests/expected/infix.nr b/tooling/nargo_fmt/tests/expected/infix.nr index 
cbc73045fe3..228dfdf68c4 100644 --- a/tooling/nargo_fmt/tests/expected/infix.nr +++ b/tooling/nargo_fmt/tests/expected/infix.nr @@ -9,10 +9,12 @@ fn foo() { } fn big() { - assert(bjj_affine.contains(bjj_affine.gen) + assert( + bjj_affine.contains(bjj_affine.gen) & bjj_affine.contains(p1_affine) & bjj_affine.contains(p2_affine) & bjj_affine.contains(p3_affine) & bjj_affine.contains(p4_affine) - & bjj_affine.contains(p5_affine)); + & bjj_affine.contains(p5_affine) + ); } diff --git a/tooling/nargo_fmt/tests/expected/let.nr b/tooling/nargo_fmt/tests/expected/let.nr index b60ae644790..c57801155a0 100644 --- a/tooling/nargo_fmt/tests/expected/let.nr +++ b/tooling/nargo_fmt/tests/expected/let.nr @@ -1,7 +1,9 @@ //@error_on_lost_comment=false fn let_() { - let fn_call = my_function(some_function(10, "arg1", another_function()), - another_func(20, some_function(), 30)); + let fn_call = my_function( + some_function(10, "arg1", another_function()), + another_func(20, some_function(), 30) + ); let array = [[[1, 2, 3], [4, 5, 6]], [[7, 8, 9], [10, 11, 12]], [[13, 14, 15], [16, 17, 18]]]; let padded_sha256_hash: [u8; 259] = [ @@ -15,7 +17,11 @@ fn let_() { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]; - let a = BigUint56 { limbs: [1, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0] }; + let a = BigUint56 { + limbs: [ + 1, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 + ] + }; let person = Person { first_name: "John", diff --git a/tooling/nargo_fmt/tests/expected/print.nr b/tooling/nargo_fmt/tests/expected/print.nr index e169f565455..3bce0941da2 100644 --- a/tooling/nargo_fmt/tests/expected/print.nr +++ b/tooling/nargo_fmt/tests/expected/print.nr @@ -1,5 +1,6 @@ use dep::std; fn main() { + std::print("Hello world"); std::println("Hello world"); } diff --git a/tooling/nargo_fmt/tests/expected/print2.nr b/tooling/nargo_fmt/tests/expected/print2.nr index 80284444af8..3bce0941da2 100644 --- a/tooling/nargo_fmt/tests/expected/print2.nr +++ b/tooling/nargo_fmt/tests/expected/print2.nr @@ -1,5 +1,6 @@ use dep::std; -fn main( ) { +fn main() { + std::print("Hello world"); std::println("Hello world"); } diff --git a/tooling/nargo_fmt/tests/input/contract.nr b/tooling/nargo_fmt/tests/input/contract.nr index 6bc5c552110..58ae0e909a1 100644 --- a/tooling/nargo_fmt/tests/input/contract.nr +++ b/tooling/nargo_fmt/tests/input/contract.nr @@ -3,6 +3,8 @@ // Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. // Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum. 
contract Benchmarking { + use dep::protocol_types::abis::function_selector::FunctionSelector; + use dep::value_note::{ utils::{increment, decrement}, value_note::{VALUE_NOTE_LEN, ValueNote, ValueNoteMethods}, @@ -11,7 +13,6 @@ contract Benchmarking { use dep::aztec::{ context::{Context}, note::{utils as note_utils, note_getter_options::NoteGetterOptions, note_header::NoteHeader}, - selector::compute_selector, log::emit_unencrypted_log, state_vars::{map::Map, public_state::PublicState, set::Set}, types::type_serialization::field_serialization::{FieldSerializationMethods, FIELD_SERIALIZED_LEN}, @@ -57,7 +58,7 @@ contract Benchmarking { fn increment_balance(owner: Field, value: Field) { let current = storage.balances.at(owner).read(); storage.balances.at(owner).write(current + value); - let _callStackItem1 = context.call_public_function(context.this_address(), compute_selector("broadcast(Field)"), [owner]); + let _callStackItem1 = context.call_public_function(context.this_address(), FunctionSelector::from_signature("broadcast(Field)"), [owner]); } // Est ultricies integer quis auctor elit sed. In nibh mauris cursus mattis molestie a iaculis. @@ -66,7 +67,7 @@ contract Benchmarking { emit_unencrypted_log(&mut context, storage.balances.at(owner).read()); } - unconstrained fn compute_note_hash_and_nullifier(contract_address: Field, nonce: Field, storage_slot: Field, preimage: [Field; VALUE_NOTE_LEN]) -> [Field; 4] { + unconstrained fn compute_note_hash_and_nullifier(contract_address: AztecAddress, nonce: Field, storage_slot: Field, preimage: [Field; VALUE_NOTE_LEN]) -> [Field; 4] { let note_header = NoteHeader::new(contract_address, nonce, storage_slot); note_utils::compute_note_hash_and_nullifier(ValueNoteMethods, note_header, preimage) } diff --git a/tooling/nargo_fmt/tests/input/fn.nr b/tooling/nargo_fmt/tests/input/fn.nr index f503db99853..26ff5933802 100644 --- a/tooling/nargo_fmt/tests/input/fn.nr +++ b/tooling/nargo_fmt/tests/input/fn.nr @@ -24,3 +24,23 @@ fn main(tape: [Field; TAPE_LEN], initial_registers: [Field; REGISTER_COUNT], ini fn apply_binary_field_op(lhs: RegisterIndex, rhs: RegisterIndex, result: RegisterIndex, op: u8, registers: &mut Registers) -> bool {} fn main() -> distinct pub [Field;2] {} + +fn ret_normal_lambda1() -> ((fn() -> Field)) {} + +fn ret_normal_lambda1() -> fn() -> Field {} + +fn ret_closure1() -> fn[(Field,)]() -> Field {} + +fn ret_closure2() -> fn[(Field,Field)]() -> Field {} + +fn ret_closure3() -> fn[(u32,u64)]() -> u64 {} + +fn make_counter() -> fn[(&mut Field,)]() -> Field {} + +fn get_some(generator: fn[Env]() -> Field) -> [Field;5] {} + +fn main( + message: [u8; 10], message_field: Field, pub_key_x: Field, pub_key_y: Field, signature: [u8; 64] +) {} + +pub fn from_baz(x: [Field; crate::foo::MAGIC_NUMBER]) {} diff --git a/tooling/nargo_fmt/tests/input/print.nr b/tooling/nargo_fmt/tests/input/print.nr index 8afa562dada..3bce0941da2 100644 --- a/tooling/nargo_fmt/tests/input/print.nr +++ b/tooling/nargo_fmt/tests/input/print.nr @@ -1,3 +1,6 @@ use dep::std; -fn main() { std::println("Hello world"); } +fn main() { + std::print("Hello world"); + std::println("Hello world"); +} diff --git a/tooling/nargo_fmt/tests/input/print2.nr b/tooling/nargo_fmt/tests/input/print2.nr index 07ef9dd0386..3bce0941da2 100644 --- a/tooling/nargo_fmt/tests/input/print2.nr +++ b/tooling/nargo_fmt/tests/input/print2.nr @@ -1,5 +1,6 @@ use dep::std; -fn main( ) { -std::println("Hello world"); +fn main() { + std::print("Hello world"); + std::println("Hello world"); } diff --git 
a/tooling/nargo_toml/src/errors.rs b/tooling/nargo_toml/src/errors.rs index fdbdc317bf9..440895056c3 100644 --- a/tooling/nargo_toml/src/errors.rs +++ b/tooling/nargo_toml/src/errors.rs @@ -69,11 +69,15 @@ pub enum ManifestError { #[error(transparent)] SemverError(SemverError), + + #[error("Cyclic package dependency found when processing {cycle}")] + CyclicDependency { cycle: String }, } +#[allow(clippy::enum_variant_names)] #[derive(Error, Debug, PartialEq, Eq, Clone)] pub enum SemverError { - #[error("Incompatible compiler version in package {package_name}. Required compiler version is {required_compiler_version} but the compiler version is {compiler_version_found}.\n Update the compiler_version field in Nargo.toml to >={compiler_version_found} or compile this project with version {compiler_version_found}")] + #[error("Incompatible compiler version in package {package_name}. Required compiler version is {required_compiler_version} but the compiler version is {compiler_version_found}.\n Update the compiler_version field in Nargo.toml to >={required_compiler_version} or compile this project with version {required_compiler_version}")] IncompatibleVersion { package_name: CrateName, required_compiler_version: String, @@ -81,4 +85,6 @@ pub enum SemverError { }, #[error("Could not parse the required compiler version for package {package_name} in Nargo.toml. Error: {error}")] CouldNotParseRequiredVersion { package_name: String, error: String }, + #[error("Could not parse the package version for package {package_name} in Nargo.toml. Error: {error}")] + CouldNotParsePackageVersion { package_name: String, error: String }, } diff --git a/tooling/nargo_toml/src/lib.rs b/tooling/nargo_toml/src/lib.rs index 223ed2da081..cecc3f7e26a 100644 --- a/tooling/nargo_toml/src/lib.rs +++ b/tooling/nargo_toml/src/lib.rs @@ -8,6 +8,7 @@ use std::{ path::{Component, Path, PathBuf}, }; +use errors::SemverError; use fm::{NormalizePath, FILE_EXTENSION}; use nargo::{ package::{Dependency, Package, PackageType}, @@ -23,6 +24,27 @@ mod semver; pub use errors::ManifestError; use git::clone_git_repo; +/// Searches for a `Nargo.toml` file in the current directory and all parent directories. +/// For example, if the current directory is `/workspace/package/src`, then this function +/// will search for a `Nargo.toml` file in +/// * `/workspace/package/src`, +/// * `/workspace/package`, +/// * `/workspace`. +/// +/// Returns the [PathBuf] of the `Nargo.toml` file if found, otherwise returns None. +/// +/// It will return innermost `Nargo.toml` file, which is the one closest to the current directory. +/// For example, if the current directory is `/workspace/package/src`, then this function +/// will return the `Nargo.toml` file in `/workspace/package/Nargo.toml` +pub fn find_file_manifest(current_path: &Path) -> Option { + for path in current_path.ancestors() { + if let Ok(toml_path) = get_package_manifest(path) { + return Some(toml_path); + } + } + None +} + /// Returns the [PathBuf] of the directory containing the `Nargo.toml` by searching from `current_path` to the root of its [Path]. /// /// Returns a [ManifestError] if no parent directories of `current_path` contain a manifest file. 
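For illustration only, a minimal standalone sketch (not part of this patch) of the ancestor-walk that the new `find_file_manifest` doc comment describes: starting from the current directory, each parent is checked in turn and the innermost `Nargo.toml` wins. It assumes nothing beyond the standard `std::path` API; the real function delegates to `get_package_manifest` rather than probing the filesystem directly.

    use std::path::{Path, PathBuf};

    // Sketch: walk from `start` up through its ancestors and return the first
    // directory that contains a `Nargo.toml`, mirroring the documented search
    // order (/workspace/package/src, /workspace/package, /workspace, ...).
    fn find_nargo_toml(start: &Path) -> Option<PathBuf> {
        start
            .ancestors()
            .map(|dir| dir.join("Nargo.toml"))
            .find(|candidate| candidate.is_file())
    }

    fn main() {
        // Hypothetical path used purely as an example input.
        if let Some(manifest) = find_nargo_toml(Path::new("/workspace/package/src")) {
            println!("found manifest at {}", manifest.display());
        }
    }

Because `ancestors()` yields the starting path first, the innermost manifest is always returned, which matches the behaviour the doc comment promises.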
@@ -98,8 +120,12 @@ struct PackageConfig { } impl PackageConfig { - fn resolve_to_package(&self, root_dir: &Path) -> Result { - let name = if let Some(name) = &self.package.name { + fn resolve_to_package( + &self, + root_dir: &Path, + processed: &mut Vec, + ) -> Result { + let name: CrateName = if let Some(name) = &self.package.name { name.parse().map_err(|_| ManifestError::InvalidPackageName { toml: root_dir.join("Nargo.toml"), name: name.into(), @@ -114,7 +140,7 @@ impl PackageConfig { toml: root_dir.join("Nargo.toml"), name: name.into(), })?; - let resolved_dep = dep_config.resolve_to_dependency(root_dir)?; + let resolved_dep = dep_config.resolve_to_dependency(root_dir, processed)?; dependencies.insert(name, resolved_dep); } @@ -163,7 +189,18 @@ impl PackageConfig { } }; + // If there is a package version, ensure that it is semver compatible + if let Some(version) = &self.package.version { + semver::parse_semver_compatible_version(version).map_err(|err| { + ManifestError::SemverError(SemverError::CouldNotParsePackageVersion { + package_name: name.to_string(), + error: err.to_string(), + }) + })?; + } + Ok(Package { + version: self.package.version.clone(), compiler_required_version: self.package.compiler_version.clone(), root_dir: root_dir.to_path_buf(), entry_path, @@ -225,6 +262,7 @@ struct WorkspaceConfig { #[derive(Default, Debug, Deserialize, Clone)] struct PackageMetadata { name: Option, + version: Option, #[serde(alias = "type")] package_type: Option, entry: Option, @@ -236,7 +274,6 @@ struct PackageMetadata { // We also state that ACIR and the compiler will upgrade in lockstep. // so you will not need to supply an ACIR and compiler version compiler_version: Option, - backend: Option, license: Option, } @@ -250,7 +287,11 @@ enum DependencyConfig { } impl DependencyConfig { - fn resolve_to_dependency(&self, pkg_root: &Path) -> Result { + fn resolve_to_dependency( + &self, + pkg_root: &Path, + processed: &mut Vec, + ) -> Result { let dep = match self { Self::Github { git, tag, directory } => { let dir_path = clone_git_repo(git, tag).map_err(ManifestError::GitError)?; @@ -267,13 +308,13 @@ impl DependencyConfig { dir_path }; let toml_path = project_path.join("Nargo.toml"); - let package = resolve_package_from_toml(&toml_path)?; + let package = resolve_package_from_toml(&toml_path, processed)?; Dependency::Remote { package } } Self::Path { path } => { let dir_path = pkg_root.join(path); let toml_path = dir_path.join("Nargo.toml"); - let package = resolve_package_from_toml(&toml_path)?; + let package = resolve_package_from_toml(&toml_path, processed)?; Dependency::Local { package } } }; @@ -292,9 +333,10 @@ fn toml_to_workspace( nargo_toml: NargoToml, package_selection: PackageSelection, ) -> Result { + let mut resolved = Vec::new(); let workspace = match nargo_toml.config { Config::Package { package_config } => { - let member = package_config.resolve_to_package(&nargo_toml.root_dir)?; + let member = package_config.resolve_to_package(&nargo_toml.root_dir, &mut resolved)?; match &package_selection { PackageSelection::Selected(selected_name) if selected_name != &member.name => { return Err(ManifestError::MissingSelectedPackage(member.name)) @@ -312,7 +354,7 @@ fn toml_to_workspace( for (index, member_path) in workspace_config.members.into_iter().enumerate() { let package_root_dir = nargo_toml.root_dir.join(&member_path); let package_toml_path = package_root_dir.join("Nargo.toml"); - let member = resolve_package_from_toml(&package_toml_path)?; + let member = 
resolve_package_from_toml(&package_toml_path, &mut resolved)?; match &package_selection { PackageSelection::Selected(selected_name) => { @@ -369,17 +411,43 @@ fn read_toml(toml_path: &Path) -> Result { } /// Resolves a Nargo.toml file into a `Package` struct as defined by our `nargo` core. -fn resolve_package_from_toml(toml_path: &Path) -> Result { +fn resolve_package_from_toml( + toml_path: &Path, + processed: &mut Vec, +) -> Result { + // Checks for cyclic dependencies + let str_path = toml_path.to_str().expect("ICE - path is empty"); + if processed.contains(&str_path.to_string()) { + let mut cycle = false; + let mut message = String::new(); + for toml in processed { + cycle = cycle || toml == str_path; + if cycle { + message += &format!("{} referencing ", toml); + } + } + message += str_path; + return Err(ManifestError::CyclicDependency { cycle: message }); + } + // Adds the package to the set of resolved packages + if let Some(str) = toml_path.to_str() { + processed.push(str.to_string()); + } + let nargo_toml = read_toml(toml_path)?; - match nargo_toml.config { + let result = match nargo_toml.config { Config::Package { package_config } => { - package_config.resolve_to_package(&nargo_toml.root_dir) + package_config.resolve_to_package(&nargo_toml.root_dir, processed) } Config::Workspace { .. } => { Err(ManifestError::UnexpectedWorkspace(toml_path.to_path_buf())) } - } + }; + let pos = + processed.iter().position(|toml| toml == str_path).expect("added package must be here"); + processed.remove(pos); + result } #[derive(Debug, PartialEq, Eq)] @@ -398,7 +466,7 @@ pub fn resolve_workspace_from_toml( let nargo_toml = read_toml(toml_path)?; let workspace = toml_to_workspace(nargo_toml, package_selection)?; if let Some(current_compiler_version) = current_compiler_version { - semver::semver_check_workspace(workspace.clone(), current_compiler_version)?; + semver::semver_check_workspace(&workspace, current_compiler_version)?; } Ok(workspace) } diff --git a/tooling/nargo_toml/src/semver.rs b/tooling/nargo_toml/src/semver.rs index de722f06bd8..7c6e2a18b31 100644 --- a/tooling/nargo_toml/src/semver.rs +++ b/tooling/nargo_toml/src/semver.rs @@ -3,14 +3,19 @@ use nargo::{ package::{Dependency, Package}, workspace::Workspace, }; -use semver::{Version, VersionReq}; +use semver::{Error, Version, VersionReq}; + +// Parse a semver compatible version string +pub(crate) fn parse_semver_compatible_version(version: &str) -> Result { + Version::parse(version) +} // Check that all of the packages in the workspace are compatible with the current compiler version pub(crate) fn semver_check_workspace( - workspace: Workspace, + workspace: &Workspace, current_compiler_version: String, ) -> Result<(), ManifestError> { - let version = Version::parse(¤t_compiler_version) + let version = parse_semver_compatible_version(¤t_compiler_version) .expect("The compiler version is not a valid semver version"); for package in &workspace.members { semver_check_package(package, &version).map_err(ManifestError::SemverError)?; @@ -83,6 +88,7 @@ mod tests { entry_path: PathBuf::new(), name: CrateName::from_str("test").unwrap(), dependencies: BTreeMap::new(), + version: Some("1.0".to_string()), }; if let Err(err) = semver_check_package(&package, &compiler_version) { panic!("semver check should have passed. 
compiler version is 0.1.0 and required version from the package is 0.1.0\n error: {err:?}") @@ -113,6 +119,7 @@ mod tests { entry_path: PathBuf::new(), name: CrateName::from_str("test").unwrap(), dependencies: BTreeMap::new(), + version: Some("1.0".to_string()), }; let valid_dependency = Package { @@ -122,6 +129,7 @@ mod tests { entry_path: PathBuf::new(), name: CrateName::from_str("good_dependency").unwrap(), dependencies: BTreeMap::new(), + version: Some("1.0".to_string()), }; let invalid_dependency = Package { compiler_required_version: Some("0.2.0".to_string()), @@ -130,6 +138,7 @@ mod tests { entry_path: PathBuf::new(), name: CrateName::from_str("bad_dependency").unwrap(), dependencies: BTreeMap::new(), + version: Some("1.0".to_string()), }; package.dependencies.insert( @@ -169,6 +178,7 @@ mod tests { entry_path: PathBuf::new(), name: CrateName::from_str("test").unwrap(), dependencies: BTreeMap::new(), + version: Some("1.0".to_string()), }; if let Err(err) = semver_check_package(&package, &compiler_version) { @@ -187,6 +197,7 @@ mod tests { entry_path: PathBuf::new(), name: CrateName::from_str("test").unwrap(), dependencies: BTreeMap::new(), + version: Some("1.0".to_string()), }; if let Err(err) = semver_check_package(&package, &compiler_version) { diff --git a/tooling/noir_codegen/.gitignore b/tooling/noir_codegen/.gitignore index 721d05448d6..29b0e40ffa8 100644 --- a/tooling/noir_codegen/.gitignore +++ b/tooling/noir_codegen/.gitignore @@ -1,4 +1,5 @@ crs lib -!test/*/target +test/codegen +test/test_lib/export diff --git a/tooling/noir_codegen/package.json b/tooling/noir_codegen/package.json index 17772461dff..7d76b1a9138 100644 --- a/tooling/noir_codegen/package.json +++ b/tooling/noir_codegen/package.json @@ -1,16 +1,24 @@ { "name": "@noir-lang/noir_codegen", - "collaborators": [ + "contributors": [ "The Noir Team " ], - "version": "0.19.2", + "version": "0.22.0", "packageManager": "yarn@3.5.1", "license": "(MIT OR Apache-2.0)", "type": "module", + "homepage": "https://noir-lang.org/", + "repository": { + "url": "https://github.com/noir-lang/noir.git", + "directory": "compiler/wasm", + "type": "git" + }, + "bugs": { + "url": "https://github.com/noir-lang/noir/issues" + }, "dependencies": { "@noir-lang/types": "workspace:*", "glob": "^10.3.10", - "lodash": "^4.17.21", "ts-command-line-args": "^2.5.1" }, "files": [ @@ -26,8 +34,10 @@ "scripts": { "dev": "tsc-multi --watch", "build": "tsc", - "test": "ts-node --esm src/main.ts ./test/assert_lt/target/** --out-dir ./test/codegen && yarn test:node && rm -rf ./test/codegen", + "test": "yarn test:codegen && yarn test:node && yarn test:clean", + "test:codegen": "nargo export --program-dir=./test/test_lib && tsx src/main.ts ./test/test_lib/export/** --out-dir ./test/codegen", "test:node": "mocha --timeout 25000 --exit --config ./.mocharc.json", + "test:clean": "rm -rf ./test/codegen ./test/test_lib/export", "prettier": "prettier 'src/**/*.ts'", "prettier:fix": "prettier --write 'src/**/*.ts' 'test/**/*.ts'", "lint": "NODE_NO_WARNINGS=1 eslint . 
--ext .ts --ignore-path ./.eslintignore --max-warnings 0", @@ -38,7 +48,6 @@ "devDependencies": { "@noir-lang/noir_js": "workspace:*", "@types/chai": "^4", - "@types/lodash": "^4", "@types/mocha": "^10.0.1", "@types/node": "^20.6.2", "@types/prettier": "^3", @@ -48,6 +57,7 @@ "mocha": "^10.2.0", "prettier": "3.0.3", "ts-node": "^10.9.1", + "tsx": "^4.6.2", "typescript": "^5.2.2" } } diff --git a/tooling/noir_codegen/src/index.ts b/tooling/noir_codegen/src/index.ts index 1f8d2d183d4..fbbab07bcfe 100644 --- a/tooling/noir_codegen/src/index.ts +++ b/tooling/noir_codegen/src/index.ts @@ -1,24 +1,62 @@ +import { AbiType } from '@noir-lang/noirc_abi'; import { CompiledCircuit } from '@noir-lang/types'; +import { PrimitiveTypesUsed, generateTsInterface, codegenStructDefinitions } from './noir_types.js'; -const codegenImports = `import { InputMap, InputValue } from "@noir-lang/noirc_abi" -import { Noir } from "@noir-lang/noir_js"`; +// TODO: reenable this. See `abiTypeToTs` for reasoning. +// export type FixedLengthArray = L extends 0 ? never[]: T[] & { length: L }; + +const codegenPrelude = `/* Autogenerated file, do not edit! */ + +/* eslint-disable */ + +import { Noir, InputMap, CompiledCircuit, ForeignCallHandler } from "@noir-lang/noir_js" + +export { ForeignCallHandler } from "@noir-lang/noir_js" +`; const codegenFunction = ( name: string, compiled_program: CompiledCircuit, -) => `export async function ${name}(args: InputMap): Promise { - const program = new Noir(${JSON.stringify(compiled_program)}); - const { returnValue } = await program.execute(args); - return returnValue; -}`; + function_signature: { inputs: [string, string][]; returnValue: string | null }, +) => { + const args = function_signature.inputs.map(([name]) => `${name}`).join(', '); + const args_with_types = function_signature.inputs.map(([name, type]) => `${name}: ${type}`).join(', '); + + return `export const ${name}_circuit: CompiledCircuit = ${JSON.stringify(compiled_program)}; + +export async function ${name}(${args_with_types}, foreignCallHandler?: ForeignCallHandler): Promise<${ + function_signature.returnValue + }> { + const program = new Noir(${name}_circuit); + const args: InputMap = { ${args} }; + const { returnValue } = await program.execute(args, foreignCallHandler); + return returnValue as ${function_signature.returnValue}; +} +`; +}; export const codegen = (programs: [string, CompiledCircuit][]): string => { - const results = [codegenImports]; + let results = [codegenPrelude]; + const primitiveTypeMap = new Map(); + const structTypeMap = new Map(); + + const functions: string[] = []; for (const [name, program] of programs) { - results.push(codegenFunction(name, stripUnwantedFields(program))); + const function_sig = generateTsInterface(program.abi, structTypeMap, primitiveTypeMap); + functions.push(codegenFunction(name, stripUnwantedFields(program), function_sig)); + } + + const structTypeDefinitions: string = codegenStructDefinitions(structTypeMap, primitiveTypeMap); + + // Add the primitive Noir types that do not have a 1-1 mapping to TypeScript. 
+ const primitiveTypeAliases: string[] = []; + for (const value of primitiveTypeMap.values()) { + primitiveTypeAliases.push(`export type ${value.aliasName} = ${value.tsType};`); } - return results.join('\n\n'); + results = results.concat(...primitiveTypeAliases, '', structTypeDefinitions, ...functions); + + return results.join('\n'); }; // eslint-disable-next-line @typescript-eslint/no-explicit-any diff --git a/tooling/noir_codegen/src/noir_types.ts b/tooling/noir_codegen/src/noir_types.ts new file mode 100644 index 00000000000..0c0e2b7c60f --- /dev/null +++ b/tooling/noir_codegen/src/noir_types.ts @@ -0,0 +1,187 @@ +import { AbiType, Abi } from '@noir-lang/noirc_abi'; + +/** + * Keep track off all of the Noir primitive types that were used. + * Most of these will not have a 1-1 definition in TypeScript, + * so we will need to generate type aliases for them. + * + * We want to generate type aliases + * for specific types that are used in the ABI. + * + * For example: + * - If `Field` is used we want to alias that + * with `number`. + * - If `u32` is used we want to alias that with `number` too. + */ +export type PrimitiveTypesUsed = { + /** + * The name of the type alias that we will generate. + */ + aliasName: string; + /** + * The TypeScript type that we will alias to. + */ + tsType: string; +}; + +/** + * Typescript does not allow us to check for equality of non-primitive types + * easily, so we create a addIfUnique function that will only add an item + * to the map if it is not already there by using JSON.stringify. + * @param item - The item to add to the map. + */ +function addIfUnique(primitiveTypeMap: Map, item: PrimitiveTypesUsed) { + const key = JSON.stringify(item); + if (!primitiveTypeMap.has(key)) { + primitiveTypeMap.set(key, item); + } +} + +/** + * Converts an ABI type to a TypeScript type. + * @param type - The ABI type to convert. + * @returns The typescript code to define the type. + */ +function abiTypeToTs(type: AbiType, primitiveTypeMap: Map): string { + switch (type.kind) { + case 'field': + addIfUnique(primitiveTypeMap, { aliasName: 'Field', tsType: 'string' }); + return 'Field'; + case 'integer': { + const typeName = type.sign === 'signed' ? `i${type.width}` : `u${type.width}`; + // Javascript cannot safely represent the full range of Noir's integer types as numbers. + // `Number.MAX_SAFE_INTEGER == 2**53 - 1` so we disallow passing numbers to types which may exceed this. + // 52 has been chosen as the cutoff rather than 53 for safety. + const tsType = type.width <= 52 ? `string | number` : `string`; + + addIfUnique(primitiveTypeMap, { aliasName: typeName, tsType }); + return typeName; + } + case 'boolean': + return `boolean`; + case 'array': + // We can't force the usage of fixed length arrays as this currently throws errors in TS. + // The array would need to be `as const` to support this whereas that's unlikely to happen in user code. + // return `FixedLengthArray<${abiTypeToTs(type.type, primitiveTypeMap)}, ${type.length}>`; + return `${abiTypeToTs(type.type, primitiveTypeMap)}[]`; + case 'string': + // We could enforce that literals are the correct length but not generally. + // This would run into similar problems to above. 
+ return `string`; + case 'struct': + return getLastComponentOfPath(type.path); + case 'tuple': { + const field_types = type.fields.map((field) => abiTypeToTs(field, primitiveTypeMap)); + return `[${field_types.join(', ')}]`; + } + default: + throw new Error(`Unknown ABI type ${JSON.stringify(type)}`); + } +} + +/** + * Returns the last component of a path, e.g. "foo::bar::baz" -\> "baz" + * Note: that if we have a path such as "Baz", we will return "Baz". + * + * Since these paths corresponds to structs, we can assume that we + * cannot have "foo::bar::". + * + * We also make the assumption that since these paths are coming from + * Noir, then we will not have two paths that look like this: + * - foo::bar::Baz + * - cat::dog::Baz + * ie the last component of the path (struct name) is enough to uniquely identify + * the whole path. + * + * TODO: We should double check this assumption when we use type aliases, + * I expect that `foo::bar::Baz as Dog` would effectively give `foo::bar::Dog` + * @param str - The path to get the last component of. + * @returns The last component of the path. + */ +function getLastComponentOfPath(str: string): string { + const parts = str.split('::'); + const lastPart = parts[parts.length - 1]; + return lastPart; +} + +/** + * Generates TypeScript interfaces for the structs used in the ABI. + * @param type - The ABI type to generate the interface for. + * @param output - The set of structs that we have already generated bindings for. + * @returns The TypeScript code to define the struct. + */ +function generateStructInterfaces( + type: AbiType, + structsEncountered: Map, + primitiveTypeMap: Map, +) { + // Edge case to handle the array of structs case. + if ( + type.kind === 'array' && + type.type.kind === 'struct' && + !structsEncountered.has(getLastComponentOfPath(type.type.path)) + ) { + generateStructInterfaces(type.type, structsEncountered, primitiveTypeMap); + } + if (type.kind !== 'struct') return; + + const structName = getLastComponentOfPath(type.path); + if (!structsEncountered.has(structName)) { + for (const field of type.fields) { + generateStructInterfaces(field.type, structsEncountered, primitiveTypeMap); + } + structsEncountered.set(structName, type.fields); + } +} + +/** + * Generates a TypeScript interface for the ABI. + * @param abiObj - The ABI to generate the interface for. + * @returns The TypeScript code to define the interface. 
+ */ +export function generateTsInterface( + abiObj: Abi, + structsEncountered: Map, + primitiveTypeMap: Map, +): { inputs: [string, string][]; returnValue: string | null } { + // Define structs for composite types + for (const param of abiObj.parameters) { + generateStructInterfaces(param.type, structsEncountered, primitiveTypeMap); + } + + // Generating Return type, if it exists + if (abiObj.return_type != null) { + generateStructInterfaces(abiObj.return_type.abi_type, structsEncountered, primitiveTypeMap); + } + + return getTsFunctionSignature(abiObj, primitiveTypeMap); +} + +export function codegenStructDefinitions( + structsEncountered: Map, + primitiveTypeMap: Map, +): string { + let codeGeneratedStruct = ''; + + for (const [structName, structFields] of structsEncountered) { + codeGeneratedStruct += `export type ${structName} = {\n`; + for (const field of structFields) { + codeGeneratedStruct += ` ${field.name}: ${abiTypeToTs(field.type, primitiveTypeMap)};\n`; + } + codeGeneratedStruct += `};\n\n`; + } + + return codeGeneratedStruct; +} + +function getTsFunctionSignature( + abi: Abi, + primitiveTypeMap: Map, +): { inputs: [string, string][]; returnValue: string | null } { + const inputs: [string, string][] = abi.parameters.map((param) => [ + param.name, + abiTypeToTs(param.type, primitiveTypeMap), + ]); + const returnValue = abi.return_type ? abiTypeToTs(abi.return_type.abi_type, primitiveTypeMap) : null; + return { inputs, returnValue }; +} diff --git a/tooling/noir_codegen/src/utils/glob.ts b/tooling/noir_codegen/src/utils/glob.ts index 15deaf72e44..a1e4c3b1ea1 100644 --- a/tooling/noir_codegen/src/utils/glob.ts +++ b/tooling/noir_codegen/src/utils/glob.ts @@ -1,9 +1,6 @@ import { sync as globSync } from 'glob'; -import _ from 'lodash'; -const { flatten, uniq } = _; export function glob(cwd: string, patternsOrFiles: string[]): string[] { const matches = patternsOrFiles.map((p) => globSync(p, { ignore: 'node_modules/**', absolute: true, cwd })); - - return uniq(flatten(matches)); + return [...new Set(matches.flat())]; } diff --git a/tooling/noir_codegen/test/assert_lt/Nargo.toml b/tooling/noir_codegen/test/assert_lt/Nargo.toml deleted file mode 100644 index f32ec18cae7..00000000000 --- a/tooling/noir_codegen/test/assert_lt/Nargo.toml +++ /dev/null @@ -1,5 +0,0 @@ -[package] -name = "assert_lt" -type = "bin" -authors = [""] -[dependencies] diff --git a/tooling/noir_codegen/test/assert_lt/src/main.nr b/tooling/noir_codegen/test/assert_lt/src/main.nr deleted file mode 100644 index 0698cbce4a8..00000000000 --- a/tooling/noir_codegen/test/assert_lt/src/main.nr +++ /dev/null @@ -1,4 +0,0 @@ -fn main(x: u64, y: pub u64) -> pub u64 { - assert(x < y); - x + y -} diff --git a/tooling/noir_codegen/test/assert_lt/target/assert_lt.json b/tooling/noir_codegen/test/assert_lt/target/assert_lt.json deleted file mode 100644 index 3b2b1b2c5a1..00000000000 --- a/tooling/noir_codegen/test/assert_lt/target/assert_lt.json +++ /dev/null @@ -1 +0,0 @@ 
-{"hash":13834844072603749544,"backend":"acvm-backend-barretenberg","abi":{"parameters":[{"name":"x","type":{"kind":"integer","sign":"unsigned","width":64},"visibility":"private"},{"name":"y","type":{"kind":"integer","sign":"unsigned","width":64},"visibility":"public"}],"param_witnesses":{"x":[1],"y":[2]},"return_type":{"kind":"integer","sign":"unsigned","width":64},"return_witnesses":[12]},"bytecode":"H4sIAAAAAAAA/+1WUW6DMAx1QksZoGr72jUcAiX8VbvJ0Oj9j7ChJpKbtXw0NpvUWkImUXixn53w3gDgHc6mfh7t/ZGMtR9TU96HeYuHtp36ZjLWfGIzjK7DthsPzjjTue6rcdZOrnX9MA49Dqa1kzl1gz3h2bL7sTDCMhmJbylmTDOT8WEhjXfjH/DcB8u8zwVygWifmL/9lTnWzSWKsxHA3QJf00vlveWvERJIUU4x0eb86aEJppljVox9oO+Py8QTV1Jnw6a85t7vSL8pwvN89j7gd88o8q79Gr2wRt3AeSFz4XvRSyokl5MAtSfgGO2ZCewdsDibLRVrDzIXTMxfqiLIGXPeMdY1gb/Fg8+tznJY50eSGmfB2DNrqciCD+tCRc4X5FNFJmIWnkhu3BL+t4qc8y75aySqIkvGOP9CRWKaGQ0ydUrsgUUVWXlfw4OpyAouVWQN66pITDPDqSJfQaZxuVVkxZhzzVgLTv5uHbDwXhN+vwGywklHPBQAAA=="} \ No newline at end of file diff --git a/tooling/noir_codegen/test/index.test.ts b/tooling/noir_codegen/test/index.test.ts index 702ba1f9cbb..03fb680a537 100644 --- a/tooling/noir_codegen/test/index.test.ts +++ b/tooling/noir_codegen/test/index.test.ts @@ -1,11 +1,106 @@ import { expect } from 'chai'; -import { assert_lt } from './codegen/index.js'; +// eslint-disable-next-line @typescript-eslint/ban-ts-comment +// @ts-ignore File is codegenned at test time. +import { exported_function_foo, MyStruct, u64, ForeignCallHandler } from './codegen/index.js'; it('codegens a callable function', async () => { - const result = await assert_lt({ - x: '2', - y: '3', - }); + const my_struct = { foo: true, bar: ['12345', '12345', '12345'], baz: '0x00' }; - expect(result).to.be.eq('0x05'); + const [sum, constant, struct]: [u64, u64, MyStruct] = await exported_function_foo( + '2', + '3', + [0, 0, 0, 0, 0], + { + foo: my_struct, + bar: [my_struct, my_struct, my_struct], + baz: '64', + }, + '12345', + ); + + expect(sum).to.be.eq('0x05'); + expect(constant).to.be.eq('0x03'); + expect(struct).to.be.deep.eq(my_struct); +}); + +it('allows passing a custom foreign call handler', async () => { + let observedName = ''; + let observedInputs: string[][] = []; + const foreignCallHandler: ForeignCallHandler = async (name: string, inputs: string[][]) => { + // Throwing inside the oracle callback causes a timeout so we log the observed values + // and defer the check against expected values until after the execution is complete. + observedName = name; + observedInputs = inputs; + + return []; + }; + + const my_struct = { foo: true, bar: ['12345', '12345', '12345'], baz: '0x00' }; + + const [sum, constant, struct]: [u64, u64, MyStruct] = await exported_function_foo( + '2', + '3', + [0, 0, 0, 0, 0], + { + foo: my_struct, + bar: [my_struct, my_struct, my_struct], + baz: '64', + }, + '12345', + foreignCallHandler, + ); + + expect(observedName).to.be.eq('print'); + expect(observedInputs).to.be.deep.eq([ + // add newline? 
+ ['0x0000000000000000000000000000000000000000000000000000000000000000'], + // x + ['0x0000000000000000000000000000000000000000000000000000000000000002'], + // Type metadata + [ + '0x000000000000000000000000000000000000000000000000000000000000007b', + '0x0000000000000000000000000000000000000000000000000000000000000022', + '0x000000000000000000000000000000000000000000000000000000000000006b', + '0x0000000000000000000000000000000000000000000000000000000000000069', + '0x000000000000000000000000000000000000000000000000000000000000006e', + '0x0000000000000000000000000000000000000000000000000000000000000064', + '0x0000000000000000000000000000000000000000000000000000000000000022', + '0x000000000000000000000000000000000000000000000000000000000000003a', + '0x0000000000000000000000000000000000000000000000000000000000000022', + '0x0000000000000000000000000000000000000000000000000000000000000075', + '0x000000000000000000000000000000000000000000000000000000000000006e', + '0x0000000000000000000000000000000000000000000000000000000000000073', + '0x0000000000000000000000000000000000000000000000000000000000000069', + '0x0000000000000000000000000000000000000000000000000000000000000067', + '0x000000000000000000000000000000000000000000000000000000000000006e', + '0x0000000000000000000000000000000000000000000000000000000000000065', + '0x0000000000000000000000000000000000000000000000000000000000000064', + '0x0000000000000000000000000000000000000000000000000000000000000069', + '0x000000000000000000000000000000000000000000000000000000000000006e', + '0x0000000000000000000000000000000000000000000000000000000000000074', + '0x0000000000000000000000000000000000000000000000000000000000000065', + '0x0000000000000000000000000000000000000000000000000000000000000067', + '0x0000000000000000000000000000000000000000000000000000000000000065', + '0x0000000000000000000000000000000000000000000000000000000000000072', + '0x0000000000000000000000000000000000000000000000000000000000000022', + '0x000000000000000000000000000000000000000000000000000000000000002c', + '0x0000000000000000000000000000000000000000000000000000000000000022', + '0x0000000000000000000000000000000000000000000000000000000000000077', + '0x0000000000000000000000000000000000000000000000000000000000000069', + '0x0000000000000000000000000000000000000000000000000000000000000064', + '0x0000000000000000000000000000000000000000000000000000000000000074', + '0x0000000000000000000000000000000000000000000000000000000000000068', + '0x0000000000000000000000000000000000000000000000000000000000000022', + '0x000000000000000000000000000000000000000000000000000000000000003a', + '0x0000000000000000000000000000000000000000000000000000000000000036', + '0x0000000000000000000000000000000000000000000000000000000000000034', + '0x000000000000000000000000000000000000000000000000000000000000007d', + ], + // format string? 
+ ['0x0000000000000000000000000000000000000000000000000000000000000000'], + ]); + + expect(sum).to.be.eq('0x05'); + expect(constant).to.be.eq('0x03'); + expect(struct).to.be.deep.eq(my_struct); }); diff --git a/tooling/noir_codegen/test/test_lib/Nargo.toml b/tooling/noir_codegen/test/test_lib/Nargo.toml new file mode 100644 index 00000000000..74b6167b614 --- /dev/null +++ b/tooling/noir_codegen/test/test_lib/Nargo.toml @@ -0,0 +1,5 @@ +[package] +name = "test_lib" +type = "lib" +authors = [""] +[dependencies] diff --git a/tooling/noir_codegen/test/test_lib/src/lib.nr b/tooling/noir_codegen/test/test_lib/src/lib.nr new file mode 100644 index 00000000000..23607c6f65f --- /dev/null +++ b/tooling/noir_codegen/test/test_lib/src/lib.nr @@ -0,0 +1,27 @@ +struct MyStruct { + foo: bool, + bar: [str<5>; 3], + baz: Field +} + +struct NestedStruct { + foo: MyStruct, + bar: [MyStruct; 3], + baz: u64 +} + +#[export] +fn exported_function_foo(x: u64, y: u64, array: [u8; 5], my_struct: NestedStruct, string: str<5>) -> (u64, u64, MyStruct) { + assert(array.len() == 5); + assert(my_struct.foo.foo); + assert(string == "12345"); + + print(x); + assert(x < y); + (x + y, 3, my_struct.foo) +} + +#[export] +fn exported_function_bar(my_struct: NestedStruct) -> (u64) { + my_struct.baz +} diff --git a/tooling/noir_js/.mocharc.json b/tooling/noir_js/.mocharc.json index c2e70b73d0f..be7e952e7ca 100644 --- a/tooling/noir_js/.mocharc.json +++ b/tooling/noir_js/.mocharc.json @@ -1,6 +1,5 @@ { - "require": "ts-node/register", - "loader": "ts-node/esm", + "require": "tsx", "extensions": ["ts", "cjs"], "spec": [ "test/node/**/*.test.ts*" diff --git a/tooling/noir_js/package.json b/tooling/noir_js/package.json index 51bc7550200..ed2fd225810 100644 --- a/tooling/noir_js/package.json +++ b/tooling/noir_js/package.json @@ -1,12 +1,21 @@ { "name": "@noir-lang/noir_js", - "collaborators": [ + "contributors": [ "The Noir Team " ], - "version": "0.19.2", + "version": "0.22.0", "packageManager": "yarn@3.5.1", "license": "(MIT OR Apache-2.0)", "type": "module", + "homepage": "https://noir-lang.org/", + "repository": { + "url": "https://github.com/noir-lang/noir.git", + "directory": "tooling/noir_js", + "type": "git" + }, + "bugs": { + "url": "https://github.com/noir-lang/noir/issues" + }, "dependencies": { "@noir-lang/acvm_js": "workspace:*", "@noir-lang/noirc_abi": "workspace:*", @@ -50,6 +59,7 @@ "prettier": "3.0.3", "ts-node": "^10.9.1", "tsc-multi": "^1.1.0", + "tsx": "^4.6.2", "typescript": "^5.2.2" } } diff --git a/tooling/noir_js/src/index.ts b/tooling/noir_js/src/index.ts index 5e700a5b583..bacb391a464 100644 --- a/tooling/noir_js/src/index.ts +++ b/tooling/noir_js/src/index.ts @@ -12,6 +12,7 @@ export { and, } from '@noir-lang/acvm_js'; +export { InputMap } from '@noir-lang/noirc_abi'; export { WitnessMap, ForeignCallHandler, ForeignCallInput, ForeignCallOutput } from '@noir-lang/acvm_js'; export { Noir } from './program.js'; diff --git a/tooling/noir_js/src/program.ts b/tooling/noir_js/src/program.ts index 711413bbc84..809943727eb 100644 --- a/tooling/noir_js/src/program.ts +++ b/tooling/noir_js/src/program.ts @@ -67,12 +67,12 @@ export class Noir { * * @example * ```typescript - * async generateFinalproof(input) + * async generateFinalProof(input) * ``` * */ - async generateFinalProof(inputs: InputMap): Promise { - const { witness } = await this.execute(inputs); + async generateFinalProof(inputs: InputMap, foreignCallHandler?: ForeignCallHandler): Promise { + const { witness } = await this.execute(inputs, 
foreignCallHandler); return this.getBackend().generateFinalProof(witness); } diff --git a/tooling/noir_js/src/witness_generation.ts b/tooling/noir_js/src/witness_generation.ts index e3ddb1a2a21..a329c79c919 100644 --- a/tooling/noir_js/src/witness_generation.ts +++ b/tooling/noir_js/src/witness_generation.ts @@ -3,7 +3,14 @@ import { base64Decode } from './base64_decode.js'; import { executeCircuit, WitnessMap, ForeignCallHandler, ForeignCallInput } from '@noir-lang/acvm_js'; import { CompiledCircuit } from '@noir-lang/types'; -const defaultForeignCallHandler: ForeignCallHandler = (name: string, args: ForeignCallInput[]) => { +const defaultForeignCallHandler: ForeignCallHandler = async (name: string, args: ForeignCallInput[]) => { + if (name == 'print') { + // By default we do not print anything for `print` foreign calls due to a need for formatting, + // however we provide an empty response in order to not halt execution. + // + // If a user needs to print values then they should provide a custom foreign call handler. + return []; + } throw Error(`Unexpected oracle during execution: ${name}(${args.join(', ')})`); }; diff --git a/tooling/noir_js/test/noir_compiled_examples/assert_lt/src/main.nr b/tooling/noir_js/test/noir_compiled_examples/assert_lt/src/main.nr index 0698cbce4a8..693e7285736 100644 --- a/tooling/noir_js/test/noir_compiled_examples/assert_lt/src/main.nr +++ b/tooling/noir_js/test/noir_compiled_examples/assert_lt/src/main.nr @@ -1,4 +1,9 @@ +use dep::std; + fn main(x: u64, y: pub u64) -> pub u64 { + // We include a println statement to show that noirJS will ignore this and continue execution + std::println("foo"); + assert(x < y); x + y } diff --git a/tooling/noir_js/test/noir_compiled_examples/assert_lt/target/assert_lt.json b/tooling/noir_js/test/noir_compiled_examples/assert_lt/target/assert_lt.json index c01b2c5d3f5..5b511cdc140 100644 --- a/tooling/noir_js/test/noir_compiled_examples/assert_lt/target/assert_lt.json +++ b/tooling/noir_js/test/noir_compiled_examples/assert_lt/target/assert_lt.json @@ -1 +1 @@ -{"hash":13834844072603749544,"backend":"acvm-backend-barretenberg","abi":{"parameters":[{"name":"x","type":{"kind":"integer","sign":"unsigned","width":64},"visibility":"private"},{"name":"y","type":{"kind":"integer","sign":"unsigned","width":64},"visibility":"public"}],"param_witnesses":{"x":[{ "start": 1, "end": 2 }],"y":[{ "start": 2, "end": 3 }]},"return_type":{"kind":"integer","sign":"unsigned","width":64},"return_witnesses":[12]},"bytecode":"H4sIAAAAAAAA/+1WUW6DMAx1QksZoGr72jUcAiX8VbvJ0Oj9j7ChJpKbtXw0NpvUWkImUXixn53w3gDgHc6mfh7t/ZGMtR9TU96HeYuHtp36ZjLWfGIzjK7DthsPzjjTue6rcdZOrnX9MA49Dqa1kzl1gz3h2bL7sTDCMhmJbylmTDOT8WEhjXfjH/DcB8u8zwVygWifmL/9lTnWzSWKsxHA3QJf00vlveWvERJIUU4x0eb86aEJppljVox9oO+Py8QTV1Jnw6a85t7vSL8pwvN89j7gd88o8q79Gr2wRt3AeSFz4XvRSyokl5MAtSfgGO2ZCewdsDibLRVrDzIXTMxfqiLIGXPeMdY1gb/Fg8+tznJY50eSGmfB2DNrqciCD+tCRc4X5FNFJmIWnkhu3BL+t4qc8y75aySqIkvGOP9CRWKaGQ0ydUrsgUUVWXlfw4OpyAouVWQN66pITDPDqSJfQaZxuVVkxZhzzVgLTv5uHbDwXhN+vwGywklHPBQAAA=="} \ No newline at end of file 
+{"noir_version":"0.20.0+010fdb69616f47fc0a9f252a65a903316d3cbe80","hash":17538653710107541030,"backend":"acvm-backend-barretenberg","abi":{"parameters":[{"name":"x","type":{"kind":"integer","sign":"unsigned","width":64},"visibility":"private"},{"name":"y","type":{"kind":"integer","sign":"unsigned","width":64},"visibility":"public"}],"param_witnesses":{"x":[{"start":1,"end":2}],"y":[{"start":2,"end":3}]},"return_type":{"abi_type":{"kind":"integer","sign":"unsigned","width":64},"visibility":"public"},"return_witnesses":[5]},"bytecode":"H4sIAAAAAAAA/9Wa627aMBiGHcKhBBLOR+0Hl+Ak5PQP9U6gBK3SNqoqWm9kFzzMYu2DujhbPjvFEkrsJu/7vJ+TFDAtQkiH/GnG6VXLtxvQr+V9Mx/jx5pE3Db5lpZqYahI96Ba91e+bee1g60J6objS90mehbqN8nfuUbSpLAeNVAjXg++bZxeD/m+k9esjlyz6+t3A/p1MFfYvkyzharpPrXzmsFmXPU3YL8F8jUV5HvA1aRMs42qGe2YhgVqwuvH2Tvg721QLwu5Xgbw5Lq8bynz9Tye8Vb+joCjozF/R5lveJ7/riR/V8DR1Zi/q8w3TJiGLclvCzhsjfltZb5hyjQcSX5HwOFozO8o8w3XTKMnyd8TcPQ05od8RVmtilnxff0t0+hL8vcFHH2N+SFfUVarYlZ838hnGgNJ/oGAY6Ax/0CZb3R+rgwl+YcCjqHG/ENlvtH5fdVIkn8k4BhpzA/5irJ274jVrpgV3zeMmMZYkn8s4BhrzA/5irJaFbPi+3pPTGMiyT8RcEw05od8RVmtilnxfcPzXE0l+acCjqnG/FNlvmHANGaS/DMBx0xjfshXlNW+I9bRHbEOKmbF9w1jpjGX5J8LOOYa80O+oqzWHbH2KmbF9/XPnwUXkvwLAcdCY/6FMt9ozzSWkvxLAcdSY/4l8MVet3gAmV9en39kHKAOYPg+X2xlzQRjXOALOIeDtsj7hR60qpnk/eolAWNYPgbQ8k/fTK7TyEtd391SL9nFAV0HuzB2YzeIg70X+34ar+Mo2SURTdy1n7qHIPEPuVjt/7nc6wFBdDRtWFe46tgAE18D44/geLgCb4A5eQTniI4xPtBpgzF+vkMUXljQHEvTJJc/T+C6ZS8oE4+R8kmtk8vlWELwfxKg6qYqq9VErOet+v0jJ73idE3EzHXEeS1Rv5sPuM9839yaZ1quXdwntFxzMe+TBsF/7nDNj/6B8P0GuXz4848R/B3INsvS7y/ZKjuutvv96u05+7o6/kxfD9+Ob78Bhjydn08mAAA="} \ No newline at end of file diff --git a/tooling/noir_js_backend_barretenberg/.mocharc.json b/tooling/noir_js_backend_barretenberg/.mocharc.json new file mode 100644 index 00000000000..e1023f56327 --- /dev/null +++ b/tooling/noir_js_backend_barretenberg/.mocharc.json @@ -0,0 +1,11 @@ +{ + "require": "ts-node/register", + "loader": "ts-node/esm", + "extensions": [ + "ts", + "cjs" + ], + "spec": [ + "test/**/*.test.ts*" + ] +} diff --git a/tooling/noir_js_backend_barretenberg/package.json b/tooling/noir_js_backend_barretenberg/package.json index 13c64dc1d65..1688db1ab2d 100644 --- a/tooling/noir_js_backend_barretenberg/package.json +++ b/tooling/noir_js_backend_barretenberg/package.json @@ -1,12 +1,21 @@ { "name": "@noir-lang/backend_barretenberg", - "collaborators": [ + "contributors": [ "The Noir Team " ], - "version": "0.19.2", + "version": "0.22.0", "packageManager": "yarn@3.5.1", "license": "(MIT OR Apache-2.0)", "type": "module", + "homepage": "https://noir-lang.org/", + "repository": { + "url": "https://github.com/noir-lang/noir.git", + "directory": "tooling/noir_js_backend_barretenberg", + "type": "git" + }, + "bugs": { + "url": "https://github.com/noir-lang/noir/issues" + }, "source": "src/index.ts", "main": "lib/cjs/index.js", "module": "lib/esm/index.js", @@ -25,6 +34,7 @@ "generate:package": "bash ./fixup.sh", "build": "yarn clean && tsc && tsc -p ./tsconfig.cjs.json && yarn generate:package", "clean": "rm -rf ./lib", + "test": "mocha --timeout 25000 --exit --config ./.mocharc.json", "prettier": "prettier 'src/**/*.ts'", "prettier:fix": "prettier --write 'src/**/*.ts' 'test/**/*.ts'", "nightly:version": "jq --arg new_version \"-$(git rev-parse --short HEAD)$1\" '.version = .version + $new_version' package.json > package-tmp.json && mv package-tmp.json package.json", @@ -32,16 +42,19 @@ "lint": "NODE_NO_WARNINGS=1 eslint . 
--ext .ts --ignore-path ./.eslintignore --max-warnings 0" }, "dependencies": { - "@aztec/bb.js": "0.12.0", + "@aztec/bb.js": "0.17.0", "@noir-lang/types": "workspace:*", "fflate": "^0.8.0" }, "devDependencies": { "@types/node": "^20.6.2", "@types/prettier": "^3", + "chai": "^4.3.8", "eslint": "^8.50.0", "eslint-plugin-prettier": "^5.0.0", + "mocha": "^10.2.0", "prettier": "3.0.3", + "ts-node": "^10.9.1", "typescript": "5.1.5" } } diff --git a/tooling/noir_js_backend_barretenberg/src/index.ts b/tooling/noir_js_backend_barretenberg/src/index.ts index 820cda93c83..6e619fd59cf 100644 --- a/tooling/noir_js_backend_barretenberg/src/index.ts +++ b/tooling/noir_js_backend_barretenberg/src/index.ts @@ -3,7 +3,9 @@ import { decompressSync as gunzip } from 'fflate'; import { acirToUint8Array } from './serialize.js'; import { Backend, CompiledCircuit, ProofData } from '@noir-lang/types'; import { BackendOptions } from './types.js'; +import { deflattenPublicInputs, flattenPublicInputsAsArray } from './public_inputs.js'; +export { publicInputsToWitnessMap } from './public_inputs.js'; // This is the number of bytes in a UltraPlonk proof // minus the public inputs. const numBytesInProofWithoutPublicInputs: number = 2144; @@ -18,7 +20,7 @@ export class BarretenbergBackend implements Backend { private acirUncompressedBytecode: Uint8Array; constructor( - acirCircuit: CompiledCircuit, + private acirCircuit: CompiledCircuit, private options: BackendOptions = { threads: 1 }, ) { const acirBytecodeBase64 = acirCircuit.bytecode; @@ -31,7 +33,7 @@ export class BarretenbergBackend implements Backend { // eslint-disable-next-line @typescript-eslint/ban-ts-comment //@ts-ignore const { Barretenberg, RawBuffer, Crs } = await import('@aztec/bb.js'); - const api = await Barretenberg.new(this.options.threads); + const api = await Barretenberg.new({ threads: this.options.threads }); const [_exact, _total, subgroupSize] = await api.acirGetCircuitSizes(this.acirUncompressedBytecode); const crs = await Crs.new(subgroupSize + 1); @@ -91,16 +93,8 @@ export class BarretenbergBackend implements Backend { const splitIndex = proofWithPublicInputs.length - numBytesInProofWithoutPublicInputs; const publicInputsConcatenated = proofWithPublicInputs.slice(0, splitIndex); - - const publicInputSize = 32; - const publicInputs: Uint8Array[] = []; - - for (let i = 0; i < publicInputsConcatenated.length; i += publicInputSize) { - const publicInput = publicInputsConcatenated.slice(i, i + publicInputSize); - publicInputs.push(publicInput); - } - const proof = proofWithPublicInputs.slice(splitIndex); + const publicInputs = deflattenPublicInputs(publicInputsConcatenated); return { proof, publicInputs }; } @@ -132,7 +126,9 @@ export class BarretenbergBackend implements Backend { }> { await this.instantiate(); const proof = reconstructProofWithPublicInputs(proofData); - const proofAsFields = await this.api.acirSerializeProofIntoFields(this.acirComposer, proof, numOfPublicInputs); + const proofAsFields = ( + await this.api.acirSerializeProofIntoFields(this.acirComposer, proof, numOfPublicInputs) + ).slice(numOfPublicInputs); // TODO: perhaps we should put this in the init function. Need to benchmark // TODO how long it takes. 
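// A minimal, self-contained sketch of the public-input (de)flattening that the hunks
// below delegate to the helpers added in src/public_inputs.ts: `ProofData.publicInputs`
// becomes a string[] of 0x-prefixed field elements instead of Uint8Array[]. It assumes
// 32-byte fields (matching the `publicInputSize = 32` removed above); the names here are
// illustrative only, not the library implementation.

const FIELD_SIZE = 32;

// Split a concatenated public-inputs buffer into 0x-prefixed hex field strings.
function deflatten(buffer: Uint8Array): string[] {
  const fields: string[] = [];
  for (let i = 0; i < buffer.length; i += FIELD_SIZE) {
    const chunk = buffer.slice(i, i + FIELD_SIZE);
    const hex = Array.from(chunk, (byte) => byte.toString(16).padStart(2, '0')).join('');
    fields.push('0x' + hex);
  }
  return fields;
}

// Re-concatenate hex field strings into a single byte buffer (the inverse operation).
function flatten(fields: string[]): Uint8Array {
  const result = new Uint8Array(fields.length * FIELD_SIZE);
  fields.forEach((field, index) => {
    const hex = BigInt(field).toString(16).padStart(FIELD_SIZE * 2, '0');
    for (let i = 0; i < FIELD_SIZE; i++) {
      result[index * FIELD_SIZE + i] = parseInt(hex.slice(2 * i, 2 * i + 2), 16);
    }
  });
  return result;
}

// The two directions are inverses: field strings survive a flatten/deflatten round trip.
const sample = ['0x' + '00'.repeat(31) + '02', '0x' + '00'.repeat(31) + '0b'];
console.assert(JSON.stringify(deflatten(flatten(sample))) === JSON.stringify(sample));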
@@ -185,7 +181,7 @@ export class BarretenbergBackend implements Backend { function reconstructProofWithPublicInputs(proofData: ProofData): Uint8Array { // Flatten publicInputs - const publicInputsConcatenated = flattenUint8Arrays(proofData.publicInputs); + const publicInputsConcatenated = flattenPublicInputsAsArray(proofData.publicInputs); // Concatenate publicInputs and proof const proofWithPublicInputs = Uint8Array.from([...publicInputsConcatenated, ...proofData.proof]); @@ -193,18 +189,5 @@ function reconstructProofWithPublicInputs(proofData: ProofData): Uint8Array { return proofWithPublicInputs; } -function flattenUint8Arrays(arrays: Uint8Array[]): Uint8Array { - const totalLength = arrays.reduce((acc, val) => acc + val.length, 0); - const result = new Uint8Array(totalLength); - - let offset = 0; - for (const arr of arrays) { - result.set(arr, offset); - offset += arr.length; - } - - return result; -} - // typedoc exports export { Backend, BackendOptions, CompiledCircuit, ProofData }; diff --git a/tooling/noir_js_backend_barretenberg/src/public_inputs.ts b/tooling/noir_js_backend_barretenberg/src/public_inputs.ts new file mode 100644 index 00000000000..75ee0de6800 --- /dev/null +++ b/tooling/noir_js_backend_barretenberg/src/public_inputs.ts @@ -0,0 +1,92 @@ +import { Abi, WitnessMap } from '@noir-lang/types'; + +export function flattenPublicInputsAsArray(publicInputs: string[]): Uint8Array { + const flattenedPublicInputs = publicInputs.map(hexToUint8Array); + return flattenUint8Arrays(flattenedPublicInputs); +} + +export function deflattenPublicInputs(flattenedPublicInputs: Uint8Array): string[] { + const publicInputSize = 32; + const chunkedFlattenedPublicInputs: Uint8Array[] = []; + + for (let i = 0; i < flattenedPublicInputs.length; i += publicInputSize) { + const publicInput = flattenedPublicInputs.slice(i, i + publicInputSize); + chunkedFlattenedPublicInputs.push(publicInput); + } + + return chunkedFlattenedPublicInputs.map(uint8ArrayToHex); +} + +export function witnessMapToPublicInputs(publicInputs: WitnessMap): string[] { + const publicInputIndices = [...publicInputs.keys()].sort((a, b) => a - b); + const flattenedPublicInputs = publicInputIndices.map((index) => publicInputs.get(index) as string); + return flattenedPublicInputs; +} + +export function publicInputsToWitnessMap(publicInputs: string[], abi: Abi): WitnessMap { + const return_value_witnesses = abi.return_witnesses; + const public_parameters = abi.parameters.filter((param) => param.visibility === 'public'); + const public_parameter_witnesses: number[] = public_parameters.flatMap((param) => + abi.param_witnesses[param.name].flatMap((witness_range) => + Array.from({ length: witness_range.end - witness_range.start }, (_, i) => witness_range.start + i), + ), + ); + + // We now have an array of witness indices which have been deduplicated and sorted in ascending order. + // The elements of this array should correspond to the elements of `flattenedPublicInputs` so that we can build up a `WitnessMap`. 
+ const public_input_witnesses = [...new Set(public_parameter_witnesses.concat(return_value_witnesses))].sort( + (a, b) => a - b, + ); + + const witnessMap: WitnessMap = new Map(); + public_input_witnesses.forEach((witness_index, index) => { + const witness_value = publicInputs[index]; + witnessMap.set(witness_index, witness_value); + }); + + return witnessMap; +} + +function flattenUint8Arrays(arrays: Uint8Array[]): Uint8Array { + const totalLength = arrays.reduce((acc, val) => acc + val.length, 0); + const result = new Uint8Array(totalLength); + + let offset = 0; + for (const arr of arrays) { + result.set(arr, offset); + offset += arr.length; + } + + return result; +} + +function uint8ArrayToHex(buffer: Uint8Array): string { + const hex: string[] = []; + + buffer.forEach(function (i) { + let h = i.toString(16); + if (h.length % 2) { + h = '0' + h; + } + hex.push(h); + }); + + return '0x' + hex.join(''); +} + +function hexToUint8Array(hex: string): Uint8Array { + const sanitised_hex = BigInt(hex).toString(16).padStart(64, '0'); + + const len = sanitised_hex.length / 2; + const u8 = new Uint8Array(len); + + let i = 0; + let j = 0; + while (i < len) { + u8[i] = parseInt(sanitised_hex.slice(j, j + 2), 16); + i += 1; + j += 2; + } + + return u8; +} diff --git a/tooling/noir_js_backend_barretenberg/test/public_input_deflattening.test.ts b/tooling/noir_js_backend_barretenberg/test/public_input_deflattening.test.ts new file mode 100644 index 00000000000..079a1ad268b --- /dev/null +++ b/tooling/noir_js_backend_barretenberg/test/public_input_deflattening.test.ts @@ -0,0 +1,96 @@ +import { Abi } from '@noir-lang/types'; +import { expect } from 'chai'; +import { witnessMapToPublicInputs, publicInputsToWitnessMap } from '../src/public_inputs.js'; + +const abi: Abi = { + parameters: [ + { + name: 'array_with_returned_element', + type: { + kind: 'array', + type: { + kind: 'field', + }, + length: 10, + }, + visibility: 'private', + }, + { + name: 'pub_field', + type: { + kind: 'field', + }, + visibility: 'public', + }, + ], + param_witnesses: { + array_with_returned_element: [ + { + start: 1, + end: 11, + }, + ], + pub_field: [ + { + start: 11, + end: 12, + }, + ], + }, + return_type: { + abi_type: { + kind: 'tuple', + fields: [ + { + kind: 'field', + }, + { + kind: 'field', + }, + { + kind: 'field', + }, + ], + }, + visibility: 'public', + }, + return_witnesses: [2, 13, 13], +}; + +it('flattens a witness map in order of its witness indices', async () => { + // Note that these are not in ascending order. This means that if we read from `witness_map` in insertion order + // then the witness values will be sorted incorrectly. + const public_input_indices = [2, 13, 11]; + + const witness_map = new Map( + public_input_indices.map((witness_index) => [ + witness_index, + '0x' + BigInt(witness_index).toString(16).padStart(64, '0'), + ]), + ); + + const flattened_public_inputs = witnessMapToPublicInputs(witness_map); + expect(flattened_public_inputs).to.be.deep.eq([ + '0x0000000000000000000000000000000000000000000000000000000000000002', + '0x000000000000000000000000000000000000000000000000000000000000000b', + '0x000000000000000000000000000000000000000000000000000000000000000d', + ]); +}); + +it('recovers the original witness map when deflattening a public input array', async () => { + // Note that these are not in ascending order. This means that if we read from `witness_map` in insertion order + // then the witness values will be sorted incorrectly. 
+ const public_input_indices = [2, 13, 11]; + + const witness_map = new Map( + public_input_indices.map((witness_index) => [ + witness_index, + '0x' + BigInt(witness_index).toString(16).padStart(64, '0'), + ]), + ); + + const flattened_public_inputs = witnessMapToPublicInputs(witness_map); + const deflattened_public_inputs = publicInputsToWitnessMap(flattened_public_inputs, abi); + + expect(deflattened_public_inputs).to.be.deep.eq(witness_map); +}); diff --git a/tooling/noir_js_types/package.json b/tooling/noir_js_types/package.json index 3d74d335162..0276b8d087c 100644 --- a/tooling/noir_js_types/package.json +++ b/tooling/noir_js_types/package.json @@ -1,11 +1,20 @@ { "name": "@noir-lang/types", - "collaborators": [ + "contributors": [ "The Noir Team " ], "packageManager": "yarn@3.5.1", - "version": "0.19.2", + "version": "0.22.0", "license": "(MIT OR Apache-2.0)", + "homepage": "https://noir-lang.org/", + "repository": { + "url": "https://github.com/noir-lang/noir.git", + "directory": "tooling/noir_js_types", + "type": "git" + }, + "bugs": { + "url": "https://github.com/noir-lang/noir/issues" + }, "files": [ "lib", "package.json" @@ -19,7 +28,8 @@ "build": "yarn run build:cjs && yarn run build:esm", "nightly:version": "jq --arg new_version \"-$(git rev-parse --short HEAD)$1\" '.version = .version + $new_version' package.json > package-tmp.json && mv package-tmp.json package.json", "publish": "echo 📡 publishing `$npm_package_name` && yarn npm publish", - "lint": "NODE_NO_WARNINGS=1 eslint . --ext .ts --ignore-path ./.eslintignore --max-warnings 0" + "lint": "NODE_NO_WARNINGS=1 eslint . --ext .ts --ignore-path ./.eslintignore --max-warnings 0", + "clean": "rm -rf ./lib" }, "exports": { ".": { diff --git a/tooling/noir_js_types/src/types.ts b/tooling/noir_js_types/src/types.ts index 5ed6b1721e9..ee4921bd606 100644 --- a/tooling/noir_js_types/src/types.ts +++ b/tooling/noir_js_types/src/types.ts @@ -1,5 +1,7 @@ import { Abi } from '@noir-lang/noirc_abi'; +export { Abi, WitnessMap } from '@noir-lang/noirc_abi'; + export interface Backend { /** * @description Generates a final proof (not meant to be verified in another circuit) */ @@ -43,7 +45,7 @@ export interface Backend { * */ export type ProofData = { /** @description Public inputs of a proof */ - publicInputs: Uint8Array[]; + publicInputs: string[]; /** @description An byte array representing the proof */ proof: Uint8Array; }; diff --git a/tooling/noirc_abi/src/input_parser/json.rs b/tooling/noirc_abi/src/input_parser/json.rs index e2b1a83ee6b..7618cd6c15a 100644 --- a/tooling/noirc_abi/src/input_parser/json.rs +++ b/tooling/noirc_abi/src/input_parser/json.rs @@ -28,8 +28,11 @@ pub(crate) fn parse_json( if let (Some(return_type), Some(json_return_value)) = (&abi.return_type, data.get(MAIN_RETURN_NAME)) { - let return_value = - InputValue::try_from_json(json_return_value.clone(), return_type, MAIN_RETURN_NAME)?; + let return_value = InputValue::try_from_json( + json_return_value.clone(), + &return_type.abi_type, + MAIN_RETURN_NAME, + )?; parsed_inputs.insert(MAIN_RETURN_NAME.to_owned(), return_value); } @@ -48,7 +51,7 @@ pub(crate) fn serialize_to_json( if let (Some(return_type), Some(return_value)) = (&abi.return_type, input_map.get(MAIN_RETURN_NAME)) { - let return_value = JsonTypes::try_from_input_value(return_value, return_type)?; + let return_value = JsonTypes::try_from_input_value(return_value, &return_type.abi_type)?; json_map.insert(MAIN_RETURN_NAME.to_owned(), return_value); } diff --git 
a/tooling/noirc_abi/src/input_parser/mod.rs b/tooling/noirc_abi/src/input_parser/mod.rs index 7bbcb42296c..f66e069d487 100644 --- a/tooling/noirc_abi/src/input_parser/mod.rs +++ b/tooling/noirc_abi/src/input_parser/mod.rs @@ -130,7 +130,8 @@ mod serialization_tests { use strum::IntoEnumIterator; use crate::{ - input_parser::InputValue, Abi, AbiParameter, AbiType, AbiVisibility, Sign, MAIN_RETURN_NAME, + input_parser::InputValue, Abi, AbiParameter, AbiReturnType, AbiType, AbiVisibility, Sign, + MAIN_RETURN_NAME, }; use super::Format; @@ -159,7 +160,10 @@ mod serialization_tests { visibility: AbiVisibility::Private, }, ], - return_type: Some(AbiType::String { length: 5 }), + return_type: Some(AbiReturnType { + abi_type: AbiType::String { length: 5 }, + visibility: AbiVisibility::Public, + }), // These two fields are unused when serializing/deserializing to file. param_witnesses: BTreeMap::new(), return_witnesses: Vec::new(), @@ -211,7 +215,7 @@ fn parse_str_to_field(value: &str) -> Result { }) } -fn parse_str_to_signed(value: &str, witdh: u32) -> Result { +fn parse_str_to_signed(value: &str, width: u32) -> Result { let big_num = if let Some(hex) = value.strip_prefix("0x") { BigInt::from_str_radix(hex, 16) } else { @@ -221,7 +225,7 @@ fn parse_str_to_signed(value: &str, witdh: u32) -> Result for AbiVisibility { @@ -85,6 +86,7 @@ impl From for AbiVisibility { match value { Visibility::Public => AbiVisibility::Public, Visibility::Private => AbiVisibility::Private, + Visibility::DataBus => AbiVisibility::DataBus, } } } @@ -94,6 +96,7 @@ impl From<&Visibility> for AbiVisibility { match value { Visibility::Public => AbiVisibility::Public, Visibility::Private => AbiVisibility::Private, + Visibility::DataBus => AbiVisibility::DataBus, } } } @@ -154,11 +157,7 @@ impl AbiType { .expect("Cannot have variable sized strings as a parameter to main"); Self::String { length: size } } - Type::FmtString(_, _) => unreachable!("format strings cannot be used in the abi"), - Type::Error => unreachable!(), - Type::Unit => unreachable!(), - Type::Constant(_) => unreachable!(), - Type::TraitAsType(_) => unreachable!(), + Type::Struct(def, ref args) => { let struct_type = def.borrow(); let fields = struct_type.get_fields(args); @@ -172,12 +171,17 @@ impl AbiType { let fields = vecmap(fields, |typ| Self::from_type(context, typ)); Self::Tuple { fields } } - Type::TypeVariable(_, _) => unreachable!(), - Type::NamedGeneric(..) => unreachable!(), - Type::Forall(..) => unreachable!(), - Type::Function(_, _, _) => unreachable!(), + Type::Error + | Type::Unit + | Type::Constant(_) + | Type::TraitAsType(..) + | Type::TypeVariable(_, _) + | Type::NamedGeneric(..) + | Type::Forall(..) + | Type::NotConstant + | Type::Function(_, _, _) => unreachable!("Type cannot be used in the abi"), + Type::FmtString(_, _) => unreachable!("format strings cannot be used in the abi"), Type::MutableReference(_) => unreachable!("&mut cannot be used in the abi"), - Type::NotConstant => unreachable!(), } } @@ -212,6 +216,11 @@ impl AbiParameter { } } +#[derive(Clone, Debug, Serialize, Deserialize)] +pub struct AbiReturnType { + pub abi_type: AbiType, + pub visibility: AbiVisibility, +} #[derive(Clone, Debug, Serialize, Deserialize)] pub struct Abi { /// An ordered list of the arguments to the program's `main` function, specifying their types and visibility. @@ -219,7 +228,7 @@ pub struct Abi { /// A map from the ABI's parameters to the indices they are written to in the [`WitnessMap`]. 
/// This defines how to convert between the [`InputMap`] and [`WitnessMap`]. pub param_witnesses: BTreeMap>>, - pub return_type: Option, + pub return_type: Option, pub return_witnesses: Vec, } @@ -327,7 +336,7 @@ impl Abi { // When encoding public inputs to be passed to the verifier, the user can must provide a return value // to be inserted into the witness map. This is not needed when generating a witness when proving the circuit. match (&self.return_type, return_value) { - (Some(return_type), Some(return_value)) => { + (Some(AbiReturnType { abi_type: return_type, .. }), Some(return_value)) => { if !return_value.matches_abi(return_type) { return Err(AbiError::ReturnTypeMismatch { return_type: return_type.clone(), @@ -426,7 +435,7 @@ impl Abi { .copied() }) { - Some(decode_value(&mut return_witness_values.into_iter(), return_type)?) + Some(decode_value(&mut return_witness_values.into_iter(), &return_type.abi_type)?) } else { // Unlike for the circuit inputs, we tolerate not being able to find the witness values for the return value. // This is because the user may be decoding a partial witness map for which is hasn't been calculated yet. @@ -546,7 +555,10 @@ mod test { use acvm::{acir::native_types::Witness, FieldElement}; - use crate::{input_parser::InputValue, Abi, AbiParameter, AbiType, AbiVisibility, InputMap}; + use crate::{ + input_parser::InputValue, Abi, AbiParameter, AbiReturnType, AbiType, AbiVisibility, + InputMap, + }; #[test] fn witness_encoding_roundtrip() { @@ -568,7 +580,10 @@ mod test { ("thing1".to_string(), vec![(Witness(1)..Witness(3))]), ("thing2".to_string(), vec![(Witness(3)..Witness(4))]), ]), - return_type: Some(AbiType::Field), + return_type: Some(AbiReturnType { + abi_type: AbiType::Field, + visibility: AbiVisibility::Public, + }), return_witnesses: vec![Witness(3)], }; diff --git a/tooling/noirc_abi_wasm/build.sh b/tooling/noirc_abi_wasm/build.sh index 37f2fd0a5a9..24af149bcea 100755 --- a/tooling/noirc_abi_wasm/build.sh +++ b/tooling/noirc_abi_wasm/build.sh @@ -34,7 +34,7 @@ export CARGO_TARGET_DIR=$self_path/target rm -rf $self_path/outputs >/dev/null 2>&1 rm -rf $self_path/result >/dev/null 2>&1 -if [ -v out ]; then +if [ -n "$out" ]; then echo "Will install package to $out (defined outside installPhase.sh script)" else export out="$self_path/outputs/out" diff --git a/tooling/noirc_abi_wasm/package.json b/tooling/noirc_abi_wasm/package.json index d307a186a7d..d023e1e4391 100644 --- a/tooling/noirc_abi_wasm/package.json +++ b/tooling/noirc_abi_wasm/package.json @@ -1,10 +1,19 @@ { "name": "@noir-lang/noirc_abi", - "collaborators": [ + "contributors": [ "The Noir Team " ], - "version": "0.19.2", + "version": "0.22.0", "license": "(MIT OR Apache-2.0)", + "homepage": "https://noir-lang.org/", + "repository": { + "url": "https://github.com/noir-lang/noir.git", + "directory": "tooling/noirc_abi_wasm", + "type": "git" + }, + "bugs": { + "url": "https://github.com/noir-lang/noir/issues" + }, "files": [ "nodejs", "web", @@ -17,10 +26,6 @@ "types": "./web/noirc_abi_wasm.d.ts", "module": "./web/noirc_abi_wasm.js", "sideEffects": false, - "repository": { - "type": "git", - "url": "https://github.com/noir-lang/noir.git" - }, "scripts": { "build": "bash ./build.sh", "test": "env TS_NODE_COMPILER_OPTIONS='{\"module\": \"commonjs\" }' mocha", diff --git a/tooling/noirc_abi_wasm/src/lib.rs b/tooling/noirc_abi_wasm/src/lib.rs index 734ca1ff40e..5557cc917bf 100644 --- a/tooling/noirc_abi_wasm/src/lib.rs +++ b/tooling/noirc_abi_wasm/src/lib.rs @@ -27,7 +27,7 @@ use 
js_witness_map::JsWitnessMap; #[wasm_bindgen(typescript_custom_section)] const INPUT_MAP: &'static str = r#" export type Field = string | number | boolean; -export type InputValue = Field | Field[] | InputMap; +export type InputValue = Field | InputMap | (Field | InputMap)[]; export type InputMap = { [key: string]: InputValue }; "#; @@ -47,7 +47,7 @@ extern "C" { #[wasm_bindgen(typescript_custom_section)] const ABI: &'static str = r#" -export type Visibility = "public" | "private"; +export type Visibility = "public" | "private" | "databus"; export type Sign = "unsigned" | "signed"; export type AbiType = { kind: "field" } | @@ -56,7 +56,7 @@ export type AbiType = { kind: "integer", sign: Sign, width: number } | { kind: "array", length: number, type: AbiType } | { kind: "tuple", fields: AbiType[] } | - { kind: "struct", path: string, fields: [string, AbiType][] }; + { kind: "struct", path: string, fields: { name: string, type: AbiType }[] }; export type AbiParameter = { name: string, @@ -67,7 +67,7 @@ export type AbiParameter = { export type Abi = { parameters: AbiParameter[], param_witnesses: Record, - return_type: AbiType | null, + return_type: {abi_type: AbiType, visibility: Visibility} | null, return_witnesses: number[], } "#; @@ -96,7 +96,7 @@ pub fn abi_encode( .expect("could not decode return value"); InputValue::try_from_json( toml_return_value, - abi.return_type.as_ref().unwrap(), + &abi.return_type.as_ref().unwrap().abi_type, MAIN_RETURN_NAME, ) }) @@ -134,7 +134,7 @@ pub fn abi_decode(abi: JsAbi, witness_map: JsWitnessMap) -> Result { + await initNoirAbi(); +}); + +it('correctly handles struct inputs', async () => { + const { abi, inputs } = await import('../shared/structs'); + + const initial_witness: WitnessMap = abiEncode(abi, inputs); + const decoded_inputs: DecodedInputs = abiDecode(abi, initial_witness); + + const struct_arg: MyStruct = inputs.struct_arg as MyStruct; + const struct_array_arg: MyStruct[] = inputs.struct_array_arg as MyStruct[]; + const nested_struct_arg: MyNestedStruct = inputs.nested_struct_arg as MyNestedStruct; + + expect(BigInt(decoded_inputs.inputs.struct_arg.foo)).to.be.equal(BigInt(struct_arg.foo)); + expect(BigInt(decoded_inputs.inputs.struct_array_arg[0].foo)).to.be.equal(BigInt(struct_array_arg[0].foo)); + expect(BigInt(decoded_inputs.inputs.struct_array_arg[1].foo)).to.be.equal(BigInt(struct_array_arg[1].foo)); + expect(BigInt(decoded_inputs.inputs.struct_array_arg[2].foo)).to.be.equal(BigInt(struct_array_arg[2].foo)); + expect(BigInt(decoded_inputs.inputs.nested_struct_arg.foo.foo)).to.be.equal(BigInt(nested_struct_arg.foo.foo)); + expect(decoded_inputs.return_value).to.be.null; +}); diff --git a/tooling/noirc_abi_wasm/test/node/structs.test.ts b/tooling/noirc_abi_wasm/test/node/structs.test.ts new file mode 100644 index 00000000000..a7d104b46d3 --- /dev/null +++ b/tooling/noirc_abi_wasm/test/node/structs.test.ts @@ -0,0 +1,22 @@ +import { expect } from 'chai'; +import { abiEncode, abiDecode, WitnessMap } from '@noir-lang/noirc_abi'; +import { MyNestedStruct, MyStruct } from '../shared/structs'; +import { DecodedInputs } from '../types'; + +it('correctly handles struct inputs', async () => { + const { abi, inputs } = await import('../shared/structs'); + + const initial_witness: WitnessMap = abiEncode(abi, inputs); + const decoded_inputs: DecodedInputs = abiDecode(abi, initial_witness); + + const struct_arg: MyStruct = inputs.struct_arg as MyStruct; + const struct_array_arg: MyStruct[] = inputs.struct_array_arg as MyStruct[]; + const 
nested_struct_arg: MyNestedStruct = inputs.nested_struct_arg as MyNestedStruct; + + expect(BigInt(decoded_inputs.inputs.struct_arg.foo)).to.be.equal(BigInt(struct_arg.foo)); + expect(BigInt(decoded_inputs.inputs.struct_array_arg[0].foo)).to.be.equal(BigInt(struct_array_arg[0].foo)); + expect(BigInt(decoded_inputs.inputs.struct_array_arg[1].foo)).to.be.equal(BigInt(struct_array_arg[1].foo)); + expect(BigInt(decoded_inputs.inputs.struct_array_arg[2].foo)).to.be.equal(BigInt(struct_array_arg[2].foo)); + expect(BigInt(decoded_inputs.inputs.nested_struct_arg.foo.foo)).to.be.equal(BigInt(nested_struct_arg.foo.foo)); + expect(decoded_inputs.return_value).to.be.null; +}); diff --git a/tooling/noirc_abi_wasm/test/shared/structs.ts b/tooling/noirc_abi_wasm/test/shared/structs.ts new file mode 100644 index 00000000000..6614f8f278e --- /dev/null +++ b/tooling/noirc_abi_wasm/test/shared/structs.ts @@ -0,0 +1,79 @@ +import { Abi, Field, InputMap } from '@noir-lang/noirc_abi'; + +export type MyStruct = { + foo: Field; +}; + +export type MyNestedStruct = { + foo: MyStruct; +}; + +export const abi: Abi = { + parameters: [ + { + name: 'struct_arg', + type: { kind: 'struct', path: 'MyStruct', fields: [{ name: 'foo', type: { kind: 'field' } }] }, + visibility: 'private', + }, + { + name: 'struct_array_arg', + type: { + kind: 'array', + type: { + kind: 'struct', + path: 'MyStruct', + fields: [{ name: 'foo', type: { kind: 'field' } }], + }, + length: 3, + }, + visibility: 'private', + }, + { + name: 'nested_struct_arg', + type: { + kind: 'struct', + path: 'MyNestedStruct', + fields: [ + { + name: 'foo', + type: { + kind: 'struct', + path: 'MyStruct', + fields: [{ name: 'foo', type: { kind: 'field' } }], + }, + }, + ], + }, + visibility: 'private', + }, + ], + param_witnesses: { + struct_arg: [{ start: 1, end: 2 }], + struct_array_arg: [{ start: 2, end: 5 }], + nested_struct_arg: [{ start: 5, end: 6 }], + }, + return_type: null, + return_witnesses: [], +}; + +export const inputs: InputMap = { + struct_arg: { + foo: '1', + }, + struct_array_arg: [ + { + foo: '2', + }, + { + foo: '3', + }, + { + foo: '4', + }, + ], + nested_struct_arg: { + foo: { + foo: '5', + }, + }, +}; diff --git a/yarn.lock b/yarn.lock index 87713e1f915..f4e0ce7df36 100644 --- a/yarn.lock +++ b/yarn.lock @@ -22,10 +22,10 @@ __metadata: languageName: node linkType: hard -"@adraffy/ens-normalize@npm:1.9.2": - version: 1.9.2 - resolution: "@adraffy/ens-normalize@npm:1.9.2" - checksum: a9fdeb9e080774c19e4b7f9f60aa5b37cf23fe0e8fe80284bf8221f7396e9f78642bfd39a09a94a4dc3fb8e70f2ac81545204bdcaf222d93f4c4c2ae1f0dca0b +"@adraffy/ens-normalize@npm:1.10.0": + version: 1.10.0 + resolution: "@adraffy/ens-normalize@npm:1.10.0" + checksum: af0540f963a2632da2bbc37e36ea6593dcfc607b937857133791781e246d47f870d5e3d21fa70d5cfe94e772c284588c81ea3f5b7f4ea8fbb824369444e4dbcb languageName: node linkType: hard @@ -221,9 +221,23 @@ __metadata: languageName: node linkType: hard -"@aztec/bb.js@npm:0.12.0": - version: 0.12.0 - resolution: "@aztec/bb.js@npm:0.12.0" +"@aztec/bb.js@npm:0.16.0": + version: 0.16.0 + resolution: "@aztec/bb.js@npm:0.16.0" + dependencies: + comlink: ^4.4.1 + commander: ^10.0.1 + debug: ^4.3.4 + tslib: ^2.4.0 + bin: + bb.js: dest/node/main.js + checksum: 5f68b4ad16284a3a871e0ad21fea05aed670383bc639c9d07ab3bf9b7a9d15cc8a4e5cda404a9290775ad5023924739543a8aac37d602892dd1fb5087521970b + languageName: node + linkType: hard + +"@aztec/bb.js@npm:0.17.0": + version: 0.17.0 + resolution: "@aztec/bb.js@npm:0.17.0" dependencies: comlink: ^4.4.1 commander: 
^10.0.1 @@ -231,24 +245,24 @@ __metadata: tslib: ^2.4.0 bin: bb.js: dest/node/main.js - checksum: d9d57b893b9b1c61949cb9bd911d4b7e1ece34965ccb9e122b39cd4e2edac9f06858abbe05c23f66141c9a74ff4861a354bfc5d62e5dcf772cfe8d1c783e2562 + checksum: 459838076e4db7e6ca17f95acbd5c83028ea3b5c21e016a1561cec065a95d20e8d65906ad63e44e071556bc331070ceda6d48c058f5accdb76b3c6daab8ef1a5 languageName: node linkType: hard -"@babel/code-frame@npm:^7.0.0, @babel/code-frame@npm:^7.10.4, @babel/code-frame@npm:^7.12.11, @babel/code-frame@npm:^7.16.0, @babel/code-frame@npm:^7.22.13, @babel/code-frame@npm:^7.8.3": - version: 7.22.13 - resolution: "@babel/code-frame@npm:7.22.13" +"@babel/code-frame@npm:^7.0.0, @babel/code-frame@npm:^7.10.4, @babel/code-frame@npm:^7.12.11, @babel/code-frame@npm:^7.16.0, @babel/code-frame@npm:^7.22.13, @babel/code-frame@npm:^7.23.5, @babel/code-frame@npm:^7.8.3": + version: 7.23.5 + resolution: "@babel/code-frame@npm:7.23.5" dependencies: - "@babel/highlight": ^7.22.13 + "@babel/highlight": ^7.23.4 chalk: ^2.4.2 - checksum: 22e342c8077c8b77eeb11f554ecca2ba14153f707b85294fcf6070b6f6150aae88a7b7436dd88d8c9289970585f3fe5b9b941c5aa3aa26a6d5a8ef3f292da058 + checksum: d90981fdf56a2824a9b14d19a4c0e8db93633fd488c772624b4e83e0ceac6039a27cd298a247c3214faa952bf803ba23696172ae7e7235f3b97f43ba278c569a languageName: node linkType: hard -"@babel/compat-data@npm:^7.22.6, @babel/compat-data@npm:^7.22.9, @babel/compat-data@npm:^7.23.2": - version: 7.23.2 - resolution: "@babel/compat-data@npm:7.23.2" - checksum: d8dc27437d40907b271161d4c88ffe72ccecb034c730deb1960a417b59a14d7c5ebca8cd80dd458a01cd396a7a329eb48cddcc3791b5a84da33d7f278f7bec6a +"@babel/compat-data@npm:^7.22.6, @babel/compat-data@npm:^7.22.9, @babel/compat-data@npm:^7.23.3, @babel/compat-data@npm:^7.23.5": + version: 7.23.5 + resolution: "@babel/compat-data@npm:7.23.5" + checksum: 06ce244cda5763295a0ea924728c09bae57d35713b675175227278896946f922a63edf803c322f855a3878323d48d0255a2a3023409d2a123483c8a69ebb4744 languageName: node linkType: hard @@ -276,38 +290,38 @@ __metadata: languageName: node linkType: hard -"@babel/core@npm:^7.18.6, @babel/core@npm:^7.19.6": - version: 7.23.2 - resolution: "@babel/core@npm:7.23.2" +"@babel/core@npm:^7.18.6, @babel/core@npm:^7.19.6, @babel/core@npm:^7.23.3": + version: 7.23.5 + resolution: "@babel/core@npm:7.23.5" dependencies: "@ampproject/remapping": ^2.2.0 - "@babel/code-frame": ^7.22.13 - "@babel/generator": ^7.23.0 + "@babel/code-frame": ^7.23.5 + "@babel/generator": ^7.23.5 "@babel/helper-compilation-targets": ^7.22.15 - "@babel/helper-module-transforms": ^7.23.0 - "@babel/helpers": ^7.23.2 - "@babel/parser": ^7.23.0 + "@babel/helper-module-transforms": ^7.23.3 + "@babel/helpers": ^7.23.5 + "@babel/parser": ^7.23.5 "@babel/template": ^7.22.15 - "@babel/traverse": ^7.23.2 - "@babel/types": ^7.23.0 + "@babel/traverse": ^7.23.5 + "@babel/types": ^7.23.5 convert-source-map: ^2.0.0 debug: ^4.1.0 gensync: ^1.0.0-beta.2 json5: ^2.2.3 semver: ^6.3.1 - checksum: 003897718ded16f3b75632d63cd49486bf67ff206cc7ebd1a10d49e2456f8d45740910d5ec7e42e3faf0deec7a2e96b1a02e766d19a67a8309053f0d4e57c0fe + checksum: 5e5dfb1e61f298676f1fca18c646dbf6fb164ca1056b0169b8d42b7f5c35e026d81823582ccb2358e93a61b035e22b3ad37e2abaae4bf43f1ffb93b6ce19466e languageName: node linkType: hard -"@babel/generator@npm:^7.12.5, @babel/generator@npm:^7.18.7, @babel/generator@npm:^7.23.0": - version: 7.23.0 - resolution: "@babel/generator@npm:7.23.0" +"@babel/generator@npm:^7.12.5, @babel/generator@npm:^7.18.7, @babel/generator@npm:^7.23.3, 
@babel/generator@npm:^7.23.5": + version: 7.23.5 + resolution: "@babel/generator@npm:7.23.5" dependencies: - "@babel/types": ^7.23.0 + "@babel/types": ^7.23.5 "@jridgewell/gen-mapping": ^0.3.2 "@jridgewell/trace-mapping": ^0.3.17 jsesc: ^2.5.1 - checksum: 8efe24adad34300f1f8ea2add420b28171a646edc70f2a1b3e1683842f23b8b7ffa7e35ef0119294e1901f45bfea5b3dc70abe1f10a1917ccdfb41bed69be5f1 + checksum: 845ddda7cf38a3edf4be221cc8a439dee9ea6031355146a1a74047aa8007bc030305b27d8c68ec9e311722c910610bde38c0e13a9ce55225251e7cb7e7f3edc8 languageName: node linkType: hard @@ -320,7 +334,7 @@ __metadata: languageName: node linkType: hard -"@babel/helper-builder-binary-assignment-operator-visitor@npm:^7.22.5": +"@babel/helper-builder-binary-assignment-operator-visitor@npm:^7.22.15": version: 7.22.15 resolution: "@babel/helper-builder-binary-assignment-operator-visitor@npm:7.22.15" dependencies: @@ -329,7 +343,7 @@ __metadata: languageName: node linkType: hard -"@babel/helper-compilation-targets@npm:^7.22.15, @babel/helper-compilation-targets@npm:^7.22.5, @babel/helper-compilation-targets@npm:^7.22.6": +"@babel/helper-compilation-targets@npm:^7.22.15, @babel/helper-compilation-targets@npm:^7.22.6": version: 7.22.15 resolution: "@babel/helper-compilation-targets@npm:7.22.15" dependencies: @@ -342,26 +356,26 @@ __metadata: languageName: node linkType: hard -"@babel/helper-create-class-features-plugin@npm:^7.22.11, @babel/helper-create-class-features-plugin@npm:^7.22.15, @babel/helper-create-class-features-plugin@npm:^7.22.5": - version: 7.22.15 - resolution: "@babel/helper-create-class-features-plugin@npm:7.22.15" +"@babel/helper-create-class-features-plugin@npm:^7.22.15, @babel/helper-create-class-features-plugin@npm:^7.23.5": + version: 7.23.5 + resolution: "@babel/helper-create-class-features-plugin@npm:7.23.5" dependencies: "@babel/helper-annotate-as-pure": ^7.22.5 - "@babel/helper-environment-visitor": ^7.22.5 - "@babel/helper-function-name": ^7.22.5 - "@babel/helper-member-expression-to-functions": ^7.22.15 + "@babel/helper-environment-visitor": ^7.22.20 + "@babel/helper-function-name": ^7.23.0 + "@babel/helper-member-expression-to-functions": ^7.23.0 "@babel/helper-optimise-call-expression": ^7.22.5 - "@babel/helper-replace-supers": ^7.22.9 + "@babel/helper-replace-supers": ^7.22.20 "@babel/helper-skip-transparent-expression-wrappers": ^7.22.5 "@babel/helper-split-export-declaration": ^7.22.6 semver: ^6.3.1 peerDependencies: "@babel/core": ^7.0.0 - checksum: 52c500d8d164abb3a360b1b7c4b8fff77bc4a5920d3a2b41ae6e1d30617b0dc0b972c1f5db35b1752007e04a748908b4a99bc872b73549ae837e87dcdde005a3 + checksum: fe7c6c0baca1838bba76ac1330df47b661d932354115ea9e2ea65b179f80b717987d3c3da7e1525fd648e5f2d86c620efc959cabda4d7562b125a27c3ac780d0 languageName: node linkType: hard -"@babel/helper-create-regexp-features-plugin@npm:^7.18.6, @babel/helper-create-regexp-features-plugin@npm:^7.22.5": +"@babel/helper-create-regexp-features-plugin@npm:^7.18.6, @babel/helper-create-regexp-features-plugin@npm:^7.22.15, @babel/helper-create-regexp-features-plugin@npm:^7.22.5": version: 7.22.15 resolution: "@babel/helper-create-regexp-features-plugin@npm:7.22.15" dependencies: @@ -389,7 +403,7 @@ __metadata: languageName: node linkType: hard -"@babel/helper-environment-visitor@npm:^7.22.20, @babel/helper-environment-visitor@npm:^7.22.5": +"@babel/helper-environment-visitor@npm:^7.22.20": version: 7.22.20 resolution: "@babel/helper-environment-visitor@npm:7.22.20" checksum: 
d80ee98ff66f41e233f36ca1921774c37e88a803b2f7dca3db7c057a5fea0473804db9fb6729e5dbfd07f4bed722d60f7852035c2c739382e84c335661590b69 @@ -415,7 +429,7 @@ __metadata: languageName: node linkType: hard -"@babel/helper-member-expression-to-functions@npm:^7.22.15": +"@babel/helper-member-expression-to-functions@npm:^7.22.15, @babel/helper-member-expression-to-functions@npm:^7.23.0": version: 7.23.0 resolution: "@babel/helper-member-expression-to-functions@npm:7.23.0" dependencies: @@ -424,7 +438,7 @@ __metadata: languageName: node linkType: hard -"@babel/helper-module-imports@npm:^7.22.15, @babel/helper-module-imports@npm:^7.22.5": +"@babel/helper-module-imports@npm:^7.22.15": version: 7.22.15 resolution: "@babel/helper-module-imports@npm:7.22.15" dependencies: @@ -433,9 +447,9 @@ __metadata: languageName: node linkType: hard -"@babel/helper-module-transforms@npm:^7.12.1, @babel/helper-module-transforms@npm:^7.22.5, @babel/helper-module-transforms@npm:^7.23.0": - version: 7.23.0 - resolution: "@babel/helper-module-transforms@npm:7.23.0" +"@babel/helper-module-transforms@npm:^7.12.1, @babel/helper-module-transforms@npm:^7.23.3": + version: 7.23.3 + resolution: "@babel/helper-module-transforms@npm:7.23.3" dependencies: "@babel/helper-environment-visitor": ^7.22.20 "@babel/helper-module-imports": ^7.22.15 @@ -444,7 +458,7 @@ __metadata: "@babel/helper-validator-identifier": ^7.22.20 peerDependencies: "@babel/core": ^7.0.0 - checksum: 6e2afffb058cf3f8ce92f5116f710dda4341c81cfcd872f9a0197ea594f7ce0ab3cb940b0590af2fe99e60d2e5448bfba6bca8156ed70a2ed4be2adc8586c891 + checksum: 5d0895cfba0e16ae16f3aa92fee108517023ad89a855289c4eb1d46f7aef4519adf8e6f971e1d55ac20c5461610e17213f1144097a8f932e768a9132e2278d71 languageName: node linkType: hard @@ -471,7 +485,7 @@ __metadata: languageName: node linkType: hard -"@babel/helper-remap-async-to-generator@npm:^7.22.20, @babel/helper-remap-async-to-generator@npm:^7.22.5": +"@babel/helper-remap-async-to-generator@npm:^7.22.20": version: 7.22.20 resolution: "@babel/helper-remap-async-to-generator@npm:7.22.20" dependencies: @@ -484,7 +498,7 @@ __metadata: languageName: node linkType: hard -"@babel/helper-replace-supers@npm:^7.22.5, @babel/helper-replace-supers@npm:^7.22.9": +"@babel/helper-replace-supers@npm:^7.22.20": version: 7.22.20 resolution: "@babel/helper-replace-supers@npm:7.22.20" dependencies: @@ -524,10 +538,10 @@ __metadata: languageName: node linkType: hard -"@babel/helper-string-parser@npm:^7.22.5": - version: 7.22.5 - resolution: "@babel/helper-string-parser@npm:7.22.5" - checksum: 836851ca5ec813077bbb303acc992d75a360267aa3b5de7134d220411c852a6f17de7c0d0b8c8dcc0f567f67874c00f4528672b2a4f1bc978a3ada64c8c78467 +"@babel/helper-string-parser@npm:^7.23.4": + version: 7.23.4 + resolution: "@babel/helper-string-parser@npm:7.23.4" + checksum: c0641144cf1a7e7dc93f3d5f16d5327465b6cf5d036b48be61ecba41e1eece161b48f46b7f960951b67f8c3533ce506b16dece576baef4d8b3b49f8c65410f90 languageName: node linkType: hard @@ -538,10 +552,10 @@ __metadata: languageName: node linkType: hard -"@babel/helper-validator-option@npm:^7.22.15": - version: 7.22.15 - resolution: "@babel/helper-validator-option@npm:7.22.15" - checksum: 68da52b1e10002a543161494c4bc0f4d0398c8fdf361d5f7f4272e95c45d5b32d974896d44f6a0ea7378c9204988879d73613ca683e13bd1304e46d25ff67a8d +"@babel/helper-validator-option@npm:^7.22.15, @babel/helper-validator-option@npm:^7.23.5": + version: 7.23.5 + resolution: "@babel/helper-validator-option@npm:7.23.5" + checksum: 
537cde2330a8aede223552510e8a13e9c1c8798afee3757995a7d4acae564124fe2bf7e7c3d90d62d3657434a74340a274b3b3b1c6f17e9a2be1f48af29cb09e languageName: node linkType: hard @@ -556,58 +570,70 @@ __metadata: languageName: node linkType: hard -"@babel/helpers@npm:^7.12.5, @babel/helpers@npm:^7.23.2": - version: 7.23.2 - resolution: "@babel/helpers@npm:7.23.2" +"@babel/helpers@npm:^7.12.5, @babel/helpers@npm:^7.23.5": + version: 7.23.5 + resolution: "@babel/helpers@npm:7.23.5" dependencies: "@babel/template": ^7.22.15 - "@babel/traverse": ^7.23.2 - "@babel/types": ^7.23.0 - checksum: aaf4828df75ec460eaa70e5c9f66e6dadc28dae3728ddb7f6c13187dbf38030e142194b83d81aa8a31bbc35a5529a5d7d3f3cf59d5d0b595f5dd7f9d8f1ced8e + "@babel/traverse": ^7.23.5 + "@babel/types": ^7.23.5 + checksum: c16dc8a3bb3d0e02c7ee1222d9d0865ed4b92de44fb8db43ff5afd37a0fc9ea5e2906efa31542c95b30c1a3a9540d66314663c9a23b5bb9b5ec76e8ebc896064 languageName: node linkType: hard -"@babel/highlight@npm:^7.22.13": - version: 7.22.20 - resolution: "@babel/highlight@npm:7.22.20" +"@babel/highlight@npm:^7.23.4": + version: 7.23.4 + resolution: "@babel/highlight@npm:7.23.4" dependencies: "@babel/helper-validator-identifier": ^7.22.20 chalk: ^2.4.2 js-tokens: ^4.0.0 - checksum: 84bd034dca309a5e680083cd827a766780ca63cef37308404f17653d32366ea76262bd2364b2d38776232f2d01b649f26721417d507e8b4b6da3e4e739f6d134 + checksum: 643acecdc235f87d925979a979b539a5d7d1f31ae7db8d89047269082694122d11aa85351304c9c978ceeb6d250591ccadb06c366f358ccee08bb9c122476b89 languageName: node linkType: hard -"@babel/parser@npm:^7.12.7, @babel/parser@npm:^7.18.8, @babel/parser@npm:^7.22.15, @babel/parser@npm:^7.23.0": - version: 7.23.0 - resolution: "@babel/parser@npm:7.23.0" +"@babel/parser@npm:^7.12.7, @babel/parser@npm:^7.18.8, @babel/parser@npm:^7.22.15, @babel/parser@npm:^7.22.7, @babel/parser@npm:^7.23.5": + version: 7.23.5 + resolution: "@babel/parser@npm:7.23.5" bin: parser: ./bin/babel-parser.js - checksum: 453fdf8b9e2c2b7d7b02139e0ce003d1af21947bbc03eb350fb248ee335c9b85e4ab41697ddbdd97079698de825a265e45a0846bb2ed47a2c7c1df833f42a354 + checksum: ea763629310f71580c4a3ea9d3705195b7ba994ada2cc98f9a584ebfdacf54e92b2735d351672824c2c2b03c7f19206899f4d95650d85ce514a822b19a8734c7 languageName: node linkType: hard -"@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression@npm:^7.22.15": - version: 7.22.15 - resolution: "@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression@npm:7.22.15" +"@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression@npm:^7.23.3": + version: 7.23.3 + resolution: "@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression@npm:7.23.3" dependencies: "@babel/helper-plugin-utils": ^7.22.5 peerDependencies: "@babel/core": ^7.0.0 - checksum: 8910ca21a7ec7c06f7b247d4b86c97c5aa15ef321518f44f6f490c5912fdf82c605aaa02b90892e375d82ccbedeadfdeadd922c1b836c9dd4c596871bf654753 + checksum: ddbaf2c396b7780f15e80ee01d6dd790db076985f3dfeb6527d1a8d4cacf370e49250396a3aa005b2c40233cac214a106232f83703d5e8491848bde273938232 languageName: node linkType: hard -"@babel/plugin-bugfix-v8-spread-parameters-in-optional-chaining@npm:^7.22.15": - version: 7.22.15 - resolution: "@babel/plugin-bugfix-v8-spread-parameters-in-optional-chaining@npm:7.22.15" +"@babel/plugin-bugfix-v8-spread-parameters-in-optional-chaining@npm:^7.23.3": + version: 7.23.3 + resolution: "@babel/plugin-bugfix-v8-spread-parameters-in-optional-chaining@npm:7.23.3" dependencies: "@babel/helper-plugin-utils": ^7.22.5 
"@babel/helper-skip-transparent-expression-wrappers": ^7.22.5 - "@babel/plugin-transform-optional-chaining": ^7.22.15 + "@babel/plugin-transform-optional-chaining": ^7.23.3 peerDependencies: "@babel/core": ^7.13.0 - checksum: fbefedc0da014c37f1a50a8094ce7dbbf2181ae93243f23d6ecba2499b5b20196c2124d6a4dfe3e9e0125798e80593103e456352a4beb4e5c6f7c75efb80fdac + checksum: 434b9d710ae856fa1a456678cc304fbc93915af86d581ee316e077af746a709a741ea39d7e1d4f5b98861b629cc7e87f002d3138f5e836775632466d4c74aef2 + languageName: node + linkType: hard + +"@babel/plugin-bugfix-v8-static-class-fields-redefine-readonly@npm:^7.23.3": + version: 7.23.3 + resolution: "@babel/plugin-bugfix-v8-static-class-fields-redefine-readonly@npm:7.23.3" + dependencies: + "@babel/helper-environment-visitor": ^7.22.20 + "@babel/helper-plugin-utils": ^7.22.5 + peerDependencies: + "@babel/core": ^7.0.0 + checksum: 4690123f0ef7c11d6bf1a9579e4f463ce363563b75ec3f6ca66cf68687e39d8d747a82c833847653962f79da367eca895d9095c60d8ebb224a1d4277003acc11 languageName: node linkType: hard @@ -688,25 +714,25 @@ __metadata: languageName: node linkType: hard -"@babel/plugin-syntax-import-assertions@npm:^7.22.5": - version: 7.22.5 - resolution: "@babel/plugin-syntax-import-assertions@npm:7.22.5" +"@babel/plugin-syntax-import-assertions@npm:^7.23.3": + version: 7.23.3 + resolution: "@babel/plugin-syntax-import-assertions@npm:7.23.3" dependencies: "@babel/helper-plugin-utils": ^7.22.5 peerDependencies: "@babel/core": ^7.0.0-0 - checksum: 2b8b5572db04a7bef1e6cd20debf447e4eef7cb012616f5eceb8fa3e23ce469b8f76ee74fd6d1e158ba17a8f58b0aec579d092fb67c5a30e83ccfbc5754916c1 + checksum: 883e6b35b2da205138caab832d54505271a3fee3fc1e8dc0894502434fc2b5d517cbe93bbfbfef8068a0fb6ec48ebc9eef3f605200a489065ba43d8cddc1c9a7 languageName: node linkType: hard -"@babel/plugin-syntax-import-attributes@npm:^7.22.5": - version: 7.22.5 - resolution: "@babel/plugin-syntax-import-attributes@npm:7.22.5" +"@babel/plugin-syntax-import-attributes@npm:^7.23.3": + version: 7.23.3 + resolution: "@babel/plugin-syntax-import-attributes@npm:7.23.3" dependencies: "@babel/helper-plugin-utils": ^7.22.5 peerDependencies: "@babel/core": ^7.0.0-0 - checksum: 197b3c5ea2a9649347f033342cb222ab47f4645633695205c0250c6bf2af29e643753b8bb24a2db39948bef08e7c540babfd365591eb57fc110cb30b425ffc47 + checksum: 9aed7661ffb920ca75df9f494757466ca92744e43072e0848d87fa4aa61a3f2ee5a22198ac1959856c036434b5614a8f46f1fb70298835dbe28220cdd1d4c11e languageName: node linkType: hard @@ -743,14 +769,14 @@ __metadata: languageName: node linkType: hard -"@babel/plugin-syntax-jsx@npm:^7.22.5": - version: 7.22.5 - resolution: "@babel/plugin-syntax-jsx@npm:7.22.5" +"@babel/plugin-syntax-jsx@npm:^7.23.3": + version: 7.23.3 + resolution: "@babel/plugin-syntax-jsx@npm:7.23.3" dependencies: "@babel/helper-plugin-utils": ^7.22.5 peerDependencies: "@babel/core": ^7.0.0-0 - checksum: 8829d30c2617ab31393d99cec2978e41f014f4ac6f01a1cecf4c4dd8320c3ec12fdc3ce121126b2d8d32f6887e99ca1a0bad53dedb1e6ad165640b92b24980ce + checksum: 89037694314a74e7f0e7a9c8d3793af5bf6b23d80950c29b360db1c66859d67f60711ea437e70ad6b5b4b29affe17eababda841b6c01107c2b638e0493bafb4e languageName: node linkType: hard @@ -842,14 +868,14 @@ __metadata: languageName: node linkType: hard -"@babel/plugin-syntax-typescript@npm:^7.22.5": - version: 7.22.5 - resolution: "@babel/plugin-syntax-typescript@npm:7.22.5" +"@babel/plugin-syntax-typescript@npm:^7.23.3": + version: 7.23.3 + resolution: "@babel/plugin-syntax-typescript@npm:7.23.3" dependencies: "@babel/helper-plugin-utils": 
^7.22.5 peerDependencies: "@babel/core": ^7.0.0-0 - checksum: 8ab7718fbb026d64da93681a57797d60326097fd7cb930380c8bffd9eb101689e90142c760a14b51e8e69c88a73ba3da956cb4520a3b0c65743aee5c71ef360a + checksum: abfad3a19290d258b028e285a1f34c9b8a0cbe46ef79eafed4ed7ffce11b5d0720b5e536c82f91cbd8442cde35a3dd8e861fa70366d87ff06fdc0d4756e30876 languageName: node linkType: hard @@ -865,20 +891,20 @@ __metadata: languageName: node linkType: hard -"@babel/plugin-transform-arrow-functions@npm:^7.22.5": - version: 7.22.5 - resolution: "@babel/plugin-transform-arrow-functions@npm:7.22.5" +"@babel/plugin-transform-arrow-functions@npm:^7.23.3": + version: 7.23.3 + resolution: "@babel/plugin-transform-arrow-functions@npm:7.23.3" dependencies: "@babel/helper-plugin-utils": ^7.22.5 peerDependencies: "@babel/core": ^7.0.0-0 - checksum: 35abb6c57062802c7ce8bd96b2ef2883e3124370c688bbd67609f7d2453802fb73944df8808f893b6c67de978eb2bcf87bbfe325e46d6f39b5fcb09ece11d01a + checksum: 1e99118176e5366c2636064d09477016ab5272b2a92e78b8edb571d20bc3eaa881789a905b20042942c3c2d04efc530726cf703f937226db5ebc495f5d067e66 languageName: node linkType: hard -"@babel/plugin-transform-async-generator-functions@npm:^7.23.2": - version: 7.23.2 - resolution: "@babel/plugin-transform-async-generator-functions@npm:7.23.2" +"@babel/plugin-transform-async-generator-functions@npm:^7.23.4": + version: 7.23.4 + resolution: "@babel/plugin-transform-async-generator-functions@npm:7.23.4" dependencies: "@babel/helper-environment-visitor": ^7.22.20 "@babel/helper-plugin-utils": ^7.22.5 @@ -886,289 +912,289 @@ __metadata: "@babel/plugin-syntax-async-generators": ^7.8.4 peerDependencies: "@babel/core": ^7.0.0-0 - checksum: e1abae0edcda7304d7c17702ac25a127578791b89c4f767d60589249fa3e50ec33f8c9ff39d3d8d41f00b29947654eaddd4fd586e04c4d598122db745fab2868 + checksum: e2fc132c9033711d55209f4781e1fc73f0f4da5e0ca80a2da73dec805166b73c92a6e83571a8994cd2c893a28302e24107e90856202b24781bab734f800102bb languageName: node linkType: hard -"@babel/plugin-transform-async-to-generator@npm:^7.22.5": - version: 7.22.5 - resolution: "@babel/plugin-transform-async-to-generator@npm:7.22.5" +"@babel/plugin-transform-async-to-generator@npm:^7.23.3": + version: 7.23.3 + resolution: "@babel/plugin-transform-async-to-generator@npm:7.23.3" dependencies: - "@babel/helper-module-imports": ^7.22.5 + "@babel/helper-module-imports": ^7.22.15 "@babel/helper-plugin-utils": ^7.22.5 - "@babel/helper-remap-async-to-generator": ^7.22.5 + "@babel/helper-remap-async-to-generator": ^7.22.20 peerDependencies: "@babel/core": ^7.0.0-0 - checksum: b95f23f99dcb379a9f0a1c2a3bbea3f8dc0e1b16dc1ac8b484fe378370169290a7a63d520959a9ba1232837cf74a80e23f6facbe14fd42a3cda6d3c2d7168e62 + checksum: 2e9d9795d4b3b3d8090332104e37061c677f29a1ce65bcbda4099a32d243e5d9520270a44bbabf0fb1fb40d463bd937685b1a1042e646979086c546d55319c3c languageName: node linkType: hard -"@babel/plugin-transform-block-scoped-functions@npm:^7.22.5": - version: 7.22.5 - resolution: "@babel/plugin-transform-block-scoped-functions@npm:7.22.5" +"@babel/plugin-transform-block-scoped-functions@npm:^7.23.3": + version: 7.23.3 + resolution: "@babel/plugin-transform-block-scoped-functions@npm:7.23.3" dependencies: "@babel/helper-plugin-utils": ^7.22.5 peerDependencies: "@babel/core": ^7.0.0-0 - checksum: 416b1341858e8ca4e524dee66044735956ced5f478b2c3b9bc11ec2285b0c25d7dbb96d79887169eb938084c95d0a89338c8b2fe70d473bd9dc92e5d9db1732c + checksum: 
e63b16d94ee5f4d917e669da3db5ea53d1e7e79141a2ec873c1e644678cdafe98daa556d0d359963c827863d6b3665d23d4938a94a4c5053a1619c4ebd01d020 languageName: node linkType: hard -"@babel/plugin-transform-block-scoping@npm:^7.23.0": - version: 7.23.0 - resolution: "@babel/plugin-transform-block-scoping@npm:7.23.0" +"@babel/plugin-transform-block-scoping@npm:^7.23.4": + version: 7.23.4 + resolution: "@babel/plugin-transform-block-scoping@npm:7.23.4" dependencies: "@babel/helper-plugin-utils": ^7.22.5 peerDependencies: "@babel/core": ^7.0.0-0 - checksum: 0cfe925cc3b5a3ad407e2253fab3ceeaa117a4b291c9cb245578880872999bca91bd83ffa0128ae9ca356330702e1ef1dcb26804f28d2cef678239caf629f73e + checksum: fc4b2100dd9f2c47d694b4b35ae8153214ccb4e24ef545c259a9db17211b18b6a430f22799b56db8f6844deaeaa201af45a03331d0c80cc28b0c4e3c814570e4 languageName: node linkType: hard -"@babel/plugin-transform-class-properties@npm:^7.22.5": - version: 7.22.5 - resolution: "@babel/plugin-transform-class-properties@npm:7.22.5" +"@babel/plugin-transform-class-properties@npm:^7.23.3": + version: 7.23.3 + resolution: "@babel/plugin-transform-class-properties@npm:7.23.3" dependencies: - "@babel/helper-create-class-features-plugin": ^7.22.5 + "@babel/helper-create-class-features-plugin": ^7.22.15 "@babel/helper-plugin-utils": ^7.22.5 peerDependencies: "@babel/core": ^7.0.0-0 - checksum: b830152dfc2ff2f647f0abe76e6251babdfbef54d18c4b2c73a6bf76b1a00050a5d998dac80dc901a48514e95604324943a9dd39317073fe0928b559e0e0c579 + checksum: 9c6f8366f667897541d360246de176dd29efc7a13d80a5b48361882f7173d9173be4646c3b7d9b003ccc0e01e25df122330308f33db921fa553aa17ad544b3fc languageName: node linkType: hard -"@babel/plugin-transform-class-static-block@npm:^7.22.11": - version: 7.22.11 - resolution: "@babel/plugin-transform-class-static-block@npm:7.22.11" +"@babel/plugin-transform-class-static-block@npm:^7.23.4": + version: 7.23.4 + resolution: "@babel/plugin-transform-class-static-block@npm:7.23.4" dependencies: - "@babel/helper-create-class-features-plugin": ^7.22.11 + "@babel/helper-create-class-features-plugin": ^7.22.15 "@babel/helper-plugin-utils": ^7.22.5 "@babel/plugin-syntax-class-static-block": ^7.14.5 peerDependencies: "@babel/core": ^7.12.0 - checksum: 69f040506fad66f1c6918d288d0e0edbc5c8a07c8b4462c1184ad2f9f08995d68b057126c213871c0853ae0c72afc60ec87492049dfacb20902e32346a448bcb + checksum: c8bfaba19a674fc2eb54edad71e958647360474e3163e8226f1acd63e4e2dbec32a171a0af596c1dc5359aee402cc120fea7abd1fb0e0354b6527f0fc9e8aa1e languageName: node linkType: hard -"@babel/plugin-transform-classes@npm:^7.22.15": - version: 7.22.15 - resolution: "@babel/plugin-transform-classes@npm:7.22.15" +"@babel/plugin-transform-classes@npm:^7.23.5": + version: 7.23.5 + resolution: "@babel/plugin-transform-classes@npm:7.23.5" dependencies: "@babel/helper-annotate-as-pure": ^7.22.5 "@babel/helper-compilation-targets": ^7.22.15 - "@babel/helper-environment-visitor": ^7.22.5 - "@babel/helper-function-name": ^7.22.5 + "@babel/helper-environment-visitor": ^7.22.20 + "@babel/helper-function-name": ^7.23.0 "@babel/helper-optimise-call-expression": ^7.22.5 "@babel/helper-plugin-utils": ^7.22.5 - "@babel/helper-replace-supers": ^7.22.9 + "@babel/helper-replace-supers": ^7.22.20 "@babel/helper-split-export-declaration": ^7.22.6 globals: ^11.1.0 peerDependencies: "@babel/core": ^7.0.0-0 - checksum: d3f4d0c107dd8a3557ea3575cc777fab27efa92958b41e4a9822f7499725c1f554beae58855de16ddec0a7b694e45f59a26cea8fbde4275563f72f09c6e039a0 + checksum: 
6d0dd3b0828e84a139a51b368f33f315edee5688ef72c68ba25e0175c68ea7357f9c8810b3f61713e368a3063cdcec94f3a2db952e453b0b14ef428a34aa8169 languageName: node linkType: hard -"@babel/plugin-transform-computed-properties@npm:^7.22.5": - version: 7.22.5 - resolution: "@babel/plugin-transform-computed-properties@npm:7.22.5" +"@babel/plugin-transform-computed-properties@npm:^7.23.3": + version: 7.23.3 + resolution: "@babel/plugin-transform-computed-properties@npm:7.23.3" dependencies: "@babel/helper-plugin-utils": ^7.22.5 - "@babel/template": ^7.22.5 + "@babel/template": ^7.22.15 peerDependencies: "@babel/core": ^7.0.0-0 - checksum: c2a77a0f94ec71efbc569109ec14ea2aa925b333289272ced8b33c6108bdbb02caf01830ffc7e49486b62dec51911924d13f3a76f1149f40daace1898009e131 + checksum: 80452661dc25a0956f89fe98cb562e8637a9556fb6c00d312c57653ce7df8798f58d138603c7e1aad96614ee9ccd10c47e50ab9ded6b6eded5adeb230d2a982e languageName: node linkType: hard -"@babel/plugin-transform-destructuring@npm:^7.23.0": - version: 7.23.0 - resolution: "@babel/plugin-transform-destructuring@npm:7.23.0" +"@babel/plugin-transform-destructuring@npm:^7.23.3": + version: 7.23.3 + resolution: "@babel/plugin-transform-destructuring@npm:7.23.3" dependencies: "@babel/helper-plugin-utils": ^7.22.5 peerDependencies: "@babel/core": ^7.0.0-0 - checksum: cd6dd454ccc2766be551e4f8a04b1acc2aa539fa19e5c7501c56cc2f8cc921dd41a7ffb78455b4c4b2f954fcab8ca4561ba7c9c7bd5af9f19465243603d18cc3 + checksum: 9e015099877272501162419bfe781689aec5c462cd2aec752ee22288f209eec65969ff11b8fdadca2eaddea71d705d3bba5b9c60752fcc1be67874fcec687105 languageName: node linkType: hard -"@babel/plugin-transform-dotall-regex@npm:^7.22.5": - version: 7.22.5 - resolution: "@babel/plugin-transform-dotall-regex@npm:7.22.5" +"@babel/plugin-transform-dotall-regex@npm:^7.23.3": + version: 7.23.3 + resolution: "@babel/plugin-transform-dotall-regex@npm:7.23.3" dependencies: - "@babel/helper-create-regexp-features-plugin": ^7.22.5 + "@babel/helper-create-regexp-features-plugin": ^7.22.15 "@babel/helper-plugin-utils": ^7.22.5 peerDependencies: "@babel/core": ^7.0.0-0 - checksum: 409b658d11e3082c8f69e9cdef2d96e4d6d11256f005772425fb230cc48fd05945edbfbcb709dab293a1a2f01f9c8a5bb7b4131e632b23264039d9f95864b453 + checksum: a2dbbf7f1ea16a97948c37df925cb364337668c41a3948b8d91453f140507bd8a3429030c7ce66d09c299987b27746c19a2dd18b6f17dcb474854b14fd9159a3 languageName: node linkType: hard -"@babel/plugin-transform-duplicate-keys@npm:^7.22.5": - version: 7.22.5 - resolution: "@babel/plugin-transform-duplicate-keys@npm:7.22.5" +"@babel/plugin-transform-duplicate-keys@npm:^7.23.3": + version: 7.23.3 + resolution: "@babel/plugin-transform-duplicate-keys@npm:7.23.3" dependencies: "@babel/helper-plugin-utils": ^7.22.5 peerDependencies: "@babel/core": ^7.0.0-0 - checksum: bb1280fbabaab6fab2ede585df34900712698210a3bd413f4df5bae6d8c24be36b496c92722ae676a7a67d060a4624f4d6c23b923485f906bfba8773c69f55b4 + checksum: c2a21c34dc0839590cd945192cbc46fde541a27e140c48fe1808315934664cdbf18db64889e23c4eeb6bad9d3e049482efdca91d29de5734ffc887c4fbabaa16 languageName: node linkType: hard -"@babel/plugin-transform-dynamic-import@npm:^7.22.11": - version: 7.22.11 - resolution: "@babel/plugin-transform-dynamic-import@npm:7.22.11" +"@babel/plugin-transform-dynamic-import@npm:^7.23.4": + version: 7.23.4 + resolution: "@babel/plugin-transform-dynamic-import@npm:7.23.4" dependencies: "@babel/helper-plugin-utils": ^7.22.5 "@babel/plugin-syntax-dynamic-import": ^7.8.3 peerDependencies: "@babel/core": ^7.0.0-0 - checksum: 
78fc9c532210bf9e8f231747f542318568ac360ee6c27e80853962c984283c73da3f8f8aebe83c2096090a435b356b092ed85de617a156cbe0729d847632be45 + checksum: 57a722604c430d9f3dacff22001a5f31250e34785d4969527a2ae9160fa86858d0892c5b9ff7a06a04076f8c76c9e6862e0541aadca9c057849961343aab0845 languageName: node linkType: hard -"@babel/plugin-transform-exponentiation-operator@npm:^7.22.5": - version: 7.22.5 - resolution: "@babel/plugin-transform-exponentiation-operator@npm:7.22.5" +"@babel/plugin-transform-exponentiation-operator@npm:^7.23.3": + version: 7.23.3 + resolution: "@babel/plugin-transform-exponentiation-operator@npm:7.23.3" dependencies: - "@babel/helper-builder-binary-assignment-operator-visitor": ^7.22.5 + "@babel/helper-builder-binary-assignment-operator-visitor": ^7.22.15 "@babel/helper-plugin-utils": ^7.22.5 peerDependencies: "@babel/core": ^7.0.0-0 - checksum: f2d660c1b1d51ad5fec1cd5ad426a52187204068c4158f8c4aa977b31535c61b66898d532603eef21c15756827be8277f724c869b888d560f26d7fe848bb5eae + checksum: 00d05ab14ad0f299160fcf9d8f55a1cc1b740e012ab0b5ce30207d2365f091665115557af7d989cd6260d075a252d9e4283de5f2b247dfbbe0e42ae586e6bf66 languageName: node linkType: hard -"@babel/plugin-transform-export-namespace-from@npm:^7.22.11": - version: 7.22.11 - resolution: "@babel/plugin-transform-export-namespace-from@npm:7.22.11" +"@babel/plugin-transform-export-namespace-from@npm:^7.23.4": + version: 7.23.4 + resolution: "@babel/plugin-transform-export-namespace-from@npm:7.23.4" dependencies: "@babel/helper-plugin-utils": ^7.22.5 "@babel/plugin-syntax-export-namespace-from": ^7.8.3 peerDependencies: "@babel/core": ^7.0.0-0 - checksum: 73af5883a321ed56a4bfd43c8a7de0164faebe619287706896fc6ee2f7a4e69042adaa1338c0b8b4bdb9f7e5fdceb016fb1d40694cb43ca3b8827429e8aac4bf + checksum: 9f770a81bfd03b48d6ba155d452946fd56d6ffe5b7d871e9ec2a0b15e0f424273b632f3ed61838b90015b25bbda988896b7a46c7d964fbf8f6feb5820b309f93 languageName: node linkType: hard -"@babel/plugin-transform-for-of@npm:^7.22.15": - version: 7.22.15 - resolution: "@babel/plugin-transform-for-of@npm:7.22.15" +"@babel/plugin-transform-for-of@npm:^7.23.3": + version: 7.23.3 + resolution: "@babel/plugin-transform-for-of@npm:7.23.3" dependencies: "@babel/helper-plugin-utils": ^7.22.5 peerDependencies: "@babel/core": ^7.0.0-0 - checksum: f395ae7bce31e14961460f56cf751b5d6e37dd27d7df5b1f4e49fec1c11b6f9cf71991c7ffbe6549878591e87df0d66af798cf26edfa4bfa6b4c3dba1fb2f73a + checksum: a6288122a5091d96c744b9eb23dc1b2d4cce25f109ac1e26a0ea03c4ea60330e6f3cc58530b33ba7369fa07163b71001399a145238b7e92bff6270ef3b9c32a0 languageName: node linkType: hard -"@babel/plugin-transform-function-name@npm:^7.22.5": - version: 7.22.5 - resolution: "@babel/plugin-transform-function-name@npm:7.22.5" +"@babel/plugin-transform-function-name@npm:^7.23.3": + version: 7.23.3 + resolution: "@babel/plugin-transform-function-name@npm:7.23.3" dependencies: - "@babel/helper-compilation-targets": ^7.22.5 - "@babel/helper-function-name": ^7.22.5 + "@babel/helper-compilation-targets": ^7.22.15 + "@babel/helper-function-name": ^7.23.0 "@babel/helper-plugin-utils": ^7.22.5 peerDependencies: "@babel/core": ^7.0.0-0 - checksum: cff3b876357999cb8ae30e439c3ec6b0491a53b0aa6f722920a4675a6dd5b53af97a833051df4b34791fe5b3dd326ccf769d5c8e45b322aa50ee11a660b17845 + checksum: 355c6dbe07c919575ad42b2f7e020f320866d72f8b79181a16f8e0cd424a2c761d979f03f47d583d9471b55dcd68a8a9d829b58e1eebcd572145b934b48975a6 languageName: node linkType: hard -"@babel/plugin-transform-json-strings@npm:^7.22.11": - version: 7.22.11 - resolution: 
"@babel/plugin-transform-json-strings@npm:7.22.11" +"@babel/plugin-transform-json-strings@npm:^7.23.4": + version: 7.23.4 + resolution: "@babel/plugin-transform-json-strings@npm:7.23.4" dependencies: "@babel/helper-plugin-utils": ^7.22.5 "@babel/plugin-syntax-json-strings": ^7.8.3 peerDependencies: "@babel/core": ^7.0.0-0 - checksum: 50665e5979e66358c50e90a26db53c55917f78175127ac2fa05c7888d156d418ffb930ec0a109353db0a7c5f57c756ce01bfc9825d24cbfd2b3ec453f2ed8cba + checksum: f9019820233cf8955d8ba346df709a0683c120fe86a24ed1c9f003f2db51197b979efc88f010d558a12e1491210fc195a43cd1c7fee5e23b92da38f793a875de languageName: node linkType: hard -"@babel/plugin-transform-literals@npm:^7.22.5": - version: 7.22.5 - resolution: "@babel/plugin-transform-literals@npm:7.22.5" +"@babel/plugin-transform-literals@npm:^7.23.3": + version: 7.23.3 + resolution: "@babel/plugin-transform-literals@npm:7.23.3" dependencies: "@babel/helper-plugin-utils": ^7.22.5 peerDependencies: "@babel/core": ^7.0.0-0 - checksum: ec37cc2ffb32667af935ab32fe28f00920ec8a1eb999aa6dc6602f2bebd8ba205a558aeedcdccdebf334381d5c57106c61f52332045730393e73410892a9735b + checksum: 519a544cd58586b9001c4c9b18da25a62f17d23c48600ff7a685d75ca9eb18d2c5e8f5476f067f0a8f1fea2a31107eff950b9864833061e6076dcc4bdc3e71ed languageName: node linkType: hard -"@babel/plugin-transform-logical-assignment-operators@npm:^7.22.11": - version: 7.22.11 - resolution: "@babel/plugin-transform-logical-assignment-operators@npm:7.22.11" +"@babel/plugin-transform-logical-assignment-operators@npm:^7.23.4": + version: 7.23.4 + resolution: "@babel/plugin-transform-logical-assignment-operators@npm:7.23.4" dependencies: "@babel/helper-plugin-utils": ^7.22.5 "@babel/plugin-syntax-logical-assignment-operators": ^7.10.4 peerDependencies: "@babel/core": ^7.0.0-0 - checksum: c664e9798e85afa7f92f07b867682dee7392046181d82f5d21bae6f2ca26dfe9c8375cdc52b7483c3fc09a983c1989f60eff9fbc4f373b0c0a74090553d05739 + checksum: 2ae1dc9b4ff3bf61a990ff3accdecb2afe3a0ca649b3e74c010078d1cdf29ea490f50ac0a905306a2bcf9ac177889a39ac79bdcc3a0fdf220b3b75fac18d39b5 languageName: node linkType: hard -"@babel/plugin-transform-member-expression-literals@npm:^7.22.5": - version: 7.22.5 - resolution: "@babel/plugin-transform-member-expression-literals@npm:7.22.5" +"@babel/plugin-transform-member-expression-literals@npm:^7.23.3": + version: 7.23.3 + resolution: "@babel/plugin-transform-member-expression-literals@npm:7.23.3" dependencies: "@babel/helper-plugin-utils": ^7.22.5 peerDependencies: "@babel/core": ^7.0.0-0 - checksum: ec4b0e07915ddd4fda0142fd104ee61015c208608a84cfa13643a95d18760b1dc1ceb6c6e0548898b8c49e5959a994e46367260176dbabc4467f729b21868504 + checksum: 95cec13c36d447c5aa6b8e4c778b897eeba66dcb675edef01e0d2afcec9e8cb9726baf4f81b4bbae7a782595aed72e6a0d44ffb773272c3ca180fada99bf92db languageName: node linkType: hard -"@babel/plugin-transform-modules-amd@npm:^7.23.0": - version: 7.23.0 - resolution: "@babel/plugin-transform-modules-amd@npm:7.23.0" +"@babel/plugin-transform-modules-amd@npm:^7.23.3": + version: 7.23.3 + resolution: "@babel/plugin-transform-modules-amd@npm:7.23.3" dependencies: - "@babel/helper-module-transforms": ^7.23.0 + "@babel/helper-module-transforms": ^7.23.3 "@babel/helper-plugin-utils": ^7.22.5 peerDependencies: "@babel/core": ^7.0.0-0 - checksum: 5d92875170a37b8282d4bcd805f55829b8fab0f9c8d08b53d32a7a0bfdc62b868e489b52d329ae768ecafc0c993eed0ad7a387baa673ac33211390a9f833ab5d + checksum: 
d163737b6a3d67ea579c9aa3b83d4df4b5c34d9dcdf25f415f027c0aa8cded7bac2750d2de5464081f67a042ad9e1c03930c2fab42acd79f9e57c00cf969ddff languageName: node linkType: hard -"@babel/plugin-transform-modules-commonjs@npm:^7.23.0": - version: 7.23.0 - resolution: "@babel/plugin-transform-modules-commonjs@npm:7.23.0" +"@babel/plugin-transform-modules-commonjs@npm:^7.23.3": + version: 7.23.3 + resolution: "@babel/plugin-transform-modules-commonjs@npm:7.23.3" dependencies: - "@babel/helper-module-transforms": ^7.23.0 + "@babel/helper-module-transforms": ^7.23.3 "@babel/helper-plugin-utils": ^7.22.5 "@babel/helper-simple-access": ^7.22.5 peerDependencies: "@babel/core": ^7.0.0-0 - checksum: 7fb25997194053e167c4207c319ff05362392da841bd9f42ddb3caf9c8798a5d203bd926d23ddf5830fdf05eddc82c2810f40d1287e3a4f80b07eff13d1024b5 + checksum: 720a231ceade4ae4d2632478db4e7fecf21987d444942b72d523487ac8d715ca97de6c8f415c71e939595e1a4776403e7dc24ed68fe9125ad4acf57753c9bff7 languageName: node linkType: hard -"@babel/plugin-transform-modules-systemjs@npm:^7.23.0": - version: 7.23.0 - resolution: "@babel/plugin-transform-modules-systemjs@npm:7.23.0" +"@babel/plugin-transform-modules-systemjs@npm:^7.23.3": + version: 7.23.3 + resolution: "@babel/plugin-transform-modules-systemjs@npm:7.23.3" dependencies: "@babel/helper-hoist-variables": ^7.22.5 - "@babel/helper-module-transforms": ^7.23.0 + "@babel/helper-module-transforms": ^7.23.3 "@babel/helper-plugin-utils": ^7.22.5 "@babel/helper-validator-identifier": ^7.22.20 peerDependencies: "@babel/core": ^7.0.0-0 - checksum: 2d481458b22605046badea2317d5cc5c94ac3031c2293e34c96f02063f5b02af0979c4da6a8fbc67cc249541575dc9c6d710db6b919ede70b7337a22d9fd57a7 + checksum: 0d2fdd993c785aecac9e0850cd5ed7f7d448f0fbb42992a950cc0590167144df25d82af5aac9a5c99ef913d2286782afa44e577af30c10901c5ee8984910fa1f languageName: node linkType: hard -"@babel/plugin-transform-modules-umd@npm:^7.22.5": - version: 7.22.5 - resolution: "@babel/plugin-transform-modules-umd@npm:7.22.5" +"@babel/plugin-transform-modules-umd@npm:^7.23.3": + version: 7.23.3 + resolution: "@babel/plugin-transform-modules-umd@npm:7.23.3" dependencies: - "@babel/helper-module-transforms": ^7.22.5 + "@babel/helper-module-transforms": ^7.23.3 "@babel/helper-plugin-utils": ^7.22.5 peerDependencies: "@babel/core": ^7.0.0-0 - checksum: 46622834c54c551b231963b867adbc80854881b3e516ff29984a8da989bd81665bd70e8cba6710345248e97166689310f544aee1a5773e262845a8f1b3e5b8b4 + checksum: 586a7a2241e8b4e753a37af9466a9ffa8a67b4ba9aa756ad7500712c05d8fa9a8c1ed4f7bd25fae2a8265e6cf8fe781ec85a8ee885dd34cf50d8955ee65f12dc languageName: node linkType: hard @@ -1184,160 +1210,160 @@ __metadata: languageName: node linkType: hard -"@babel/plugin-transform-new-target@npm:^7.22.5": - version: 7.22.5 - resolution: "@babel/plugin-transform-new-target@npm:7.22.5" +"@babel/plugin-transform-new-target@npm:^7.23.3": + version: 7.23.3 + resolution: "@babel/plugin-transform-new-target@npm:7.23.3" dependencies: "@babel/helper-plugin-utils": ^7.22.5 peerDependencies: "@babel/core": ^7.0.0-0 - checksum: 6b72112773487a881a1d6ffa680afde08bad699252020e86122180ee7a88854d5da3f15d9bca3331cf2e025df045604494a8208a2e63b486266b07c14e2ffbf3 + checksum: e5053389316fce73ad5201b7777437164f333e24787fbcda4ae489cd2580dbbbdfb5694a7237bad91fabb46b591d771975d69beb1c740b82cb4761625379f00b languageName: node linkType: hard -"@babel/plugin-transform-nullish-coalescing-operator@npm:^7.22.11": - version: 7.22.11 - resolution: "@babel/plugin-transform-nullish-coalescing-operator@npm:7.22.11" 
+"@babel/plugin-transform-nullish-coalescing-operator@npm:^7.23.4": + version: 7.23.4 + resolution: "@babel/plugin-transform-nullish-coalescing-operator@npm:7.23.4" dependencies: "@babel/helper-plugin-utils": ^7.22.5 "@babel/plugin-syntax-nullish-coalescing-operator": ^7.8.3 peerDependencies: "@babel/core": ^7.0.0-0 - checksum: 167babecc8b8fe70796a7b7d34af667ebbf43da166c21689502e5e8cc93180b7a85979c77c9f64b7cce431b36718bd0a6df9e5e0ffea4ae22afb22cfef886372 + checksum: a27d73ea134d3d9560a6b2e26ab60012fba15f1db95865aa0153c18f5ec82cfef6a7b3d8df74e3c2fca81534fa5efeb6cacaf7b08bdb7d123e3dafdd079886a3 languageName: node linkType: hard -"@babel/plugin-transform-numeric-separator@npm:^7.22.11": - version: 7.22.11 - resolution: "@babel/plugin-transform-numeric-separator@npm:7.22.11" +"@babel/plugin-transform-numeric-separator@npm:^7.23.4": + version: 7.23.4 + resolution: "@babel/plugin-transform-numeric-separator@npm:7.23.4" dependencies: "@babel/helper-plugin-utils": ^7.22.5 "@babel/plugin-syntax-numeric-separator": ^7.10.4 peerDependencies: "@babel/core": ^7.0.0-0 - checksum: af064d06a4a041767ec396a5f258103f64785df290e038bba9f0ef454e6c914f2ac45d862bbdad8fac2c7ad47fa4e95356f29053c60c100a0160b02a995fe2a3 + checksum: 6ba0e5db3c620a3ec81f9e94507c821f483c15f196868df13fa454cbac719a5449baf73840f5b6eb7d77311b24a2cf8e45db53700d41727f693d46f7caf3eec3 languageName: node linkType: hard -"@babel/plugin-transform-object-rest-spread@npm:^7.22.15": - version: 7.22.15 - resolution: "@babel/plugin-transform-object-rest-spread@npm:7.22.15" +"@babel/plugin-transform-object-rest-spread@npm:^7.23.4": + version: 7.23.4 + resolution: "@babel/plugin-transform-object-rest-spread@npm:7.23.4" dependencies: - "@babel/compat-data": ^7.22.9 + "@babel/compat-data": ^7.23.3 "@babel/helper-compilation-targets": ^7.22.15 "@babel/helper-plugin-utils": ^7.22.5 "@babel/plugin-syntax-object-rest-spread": ^7.8.3 - "@babel/plugin-transform-parameters": ^7.22.15 + "@babel/plugin-transform-parameters": ^7.23.3 peerDependencies: "@babel/core": ^7.0.0-0 - checksum: 62197a6f12289c1c1bd57f3bed9f0f765ca32390bfe91e0b5561dd94dd9770f4480c4162dec98da094bc0ba99d2c2ebba68de47c019454041b0b7a68ba2ec66d + checksum: 73fec495e327ca3959c1c03d07a621be09df00036c69fff0455af9a008291677ee9d368eec48adacdc6feac703269a649747568b4af4c4e9f134aa71cc5b378d languageName: node linkType: hard -"@babel/plugin-transform-object-super@npm:^7.22.5": - version: 7.22.5 - resolution: "@babel/plugin-transform-object-super@npm:7.22.5" +"@babel/plugin-transform-object-super@npm:^7.23.3": + version: 7.23.3 + resolution: "@babel/plugin-transform-object-super@npm:7.23.3" dependencies: "@babel/helper-plugin-utils": ^7.22.5 - "@babel/helper-replace-supers": ^7.22.5 + "@babel/helper-replace-supers": ^7.22.20 peerDependencies: "@babel/core": ^7.0.0-0 - checksum: b71887877d74cb64dbccb5c0324fa67e31171e6a5311991f626650e44a4083e5436a1eaa89da78c0474fb095d4ec322d63ee778b202d33aa2e4194e1ed8e62d7 + checksum: e495497186f621fa79026e183b4f1fbb172fd9df812cbd2d7f02c05b08adbe58012b1a6eb6dd58d11a30343f6ec80d0f4074f9b501d70aa1c94df76d59164c53 languageName: node linkType: hard -"@babel/plugin-transform-optional-catch-binding@npm:^7.22.11": - version: 7.22.11 - resolution: "@babel/plugin-transform-optional-catch-binding@npm:7.22.11" +"@babel/plugin-transform-optional-catch-binding@npm:^7.23.4": + version: 7.23.4 + resolution: "@babel/plugin-transform-optional-catch-binding@npm:7.23.4" dependencies: "@babel/helper-plugin-utils": ^7.22.5 "@babel/plugin-syntax-optional-catch-binding": ^7.8.3 peerDependencies: 
"@babel/core": ^7.0.0-0 - checksum: f17abd90e1de67c84d63afea29c8021c74abb2794d3a6eeafb0bbe7372d3db32aefca386e392116ec63884537a4a2815d090d26264d259bacc08f6e3ed05294c + checksum: d50b5ee142cdb088d8b5de1ccf7cea85b18b85d85b52f86618f6e45226372f01ad4cdb29abd4fd35ea99a71fefb37009e0107db7a787dcc21d4d402f97470faf languageName: node linkType: hard -"@babel/plugin-transform-optional-chaining@npm:^7.22.15, @babel/plugin-transform-optional-chaining@npm:^7.23.0": - version: 7.23.0 - resolution: "@babel/plugin-transform-optional-chaining@npm:7.23.0" +"@babel/plugin-transform-optional-chaining@npm:^7.23.3, @babel/plugin-transform-optional-chaining@npm:^7.23.4": + version: 7.23.4 + resolution: "@babel/plugin-transform-optional-chaining@npm:7.23.4" dependencies: "@babel/helper-plugin-utils": ^7.22.5 "@babel/helper-skip-transparent-expression-wrappers": ^7.22.5 "@babel/plugin-syntax-optional-chaining": ^7.8.3 peerDependencies: "@babel/core": ^7.0.0-0 - checksum: f702634f2b97e5260dbec0d4bde05ccb6f4d96d7bfa946481aeacfa205ca846cb6e096a38312f9d51fdbdac1f258f211138c5f7075952e46a5bf8574de6a1329 + checksum: e7a4c08038288057b7a08d68c4d55396ada9278095509ca51ed8dfb72a7f13f26bdd7c5185de21079fe0a9d60d22c227cb32e300d266c1bda40f70eee9f4bc1e languageName: node linkType: hard -"@babel/plugin-transform-parameters@npm:^7.12.1, @babel/plugin-transform-parameters@npm:^7.22.15": - version: 7.22.15 - resolution: "@babel/plugin-transform-parameters@npm:7.22.15" +"@babel/plugin-transform-parameters@npm:^7.12.1, @babel/plugin-transform-parameters@npm:^7.23.3": + version: 7.23.3 + resolution: "@babel/plugin-transform-parameters@npm:7.23.3" dependencies: "@babel/helper-plugin-utils": ^7.22.5 peerDependencies: "@babel/core": ^7.0.0-0 - checksum: 541188bb7d1876cad87687b5c7daf90f63d8208ae83df24acb1e2b05020ad1c78786b2723ca4054a83fcb74fb6509f30c4cacc5b538ee684224261ad5fb047c1 + checksum: a735b3e85316d17ec102e3d3d1b6993b429bdb3b494651c9d754e3b7d270462ee1f1a126ccd5e3d871af5e683727e9ef98c9d34d4a42204fffaabff91052ed16 languageName: node linkType: hard -"@babel/plugin-transform-private-methods@npm:^7.22.5": - version: 7.22.5 - resolution: "@babel/plugin-transform-private-methods@npm:7.22.5" +"@babel/plugin-transform-private-methods@npm:^7.23.3": + version: 7.23.3 + resolution: "@babel/plugin-transform-private-methods@npm:7.23.3" dependencies: - "@babel/helper-create-class-features-plugin": ^7.22.5 + "@babel/helper-create-class-features-plugin": ^7.22.15 "@babel/helper-plugin-utils": ^7.22.5 peerDependencies: "@babel/core": ^7.0.0-0 - checksum: 321479b4fcb6d3b3ef622ab22fd24001e43d46e680e8e41324c033d5810c84646e470f81b44cbcbef5c22e99030784f7cac92f1829974da7a47a60a7139082c3 + checksum: cedc1285c49b5a6d9a3d0e5e413b756ac40b3ac2f8f68bdfc3ae268bc8d27b00abd8bb0861c72756ff5dd8bf1eb77211b7feb5baf4fdae2ebbaabe49b9adc1d0 languageName: node linkType: hard -"@babel/plugin-transform-private-property-in-object@npm:^7.22.11": - version: 7.22.11 - resolution: "@babel/plugin-transform-private-property-in-object@npm:7.22.11" +"@babel/plugin-transform-private-property-in-object@npm:^7.23.4": + version: 7.23.4 + resolution: "@babel/plugin-transform-private-property-in-object@npm:7.23.4" dependencies: "@babel/helper-annotate-as-pure": ^7.22.5 - "@babel/helper-create-class-features-plugin": ^7.22.11 + "@babel/helper-create-class-features-plugin": ^7.22.15 "@babel/helper-plugin-utils": ^7.22.5 "@babel/plugin-syntax-private-property-in-object": ^7.14.5 peerDependencies: "@babel/core": ^7.0.0-0 - checksum: 
4d029d84901e53c46dead7a46e2990a7bc62470f4e4ca58a0d063394f86652fd58fe4eea1eb941da3669cd536b559b9d058b342b59300026346b7a2a51badac8 + checksum: fb7adfe94ea97542f250a70de32bddbc3e0b802381c92be947fec83ebffda57e68533c4d0697152719a3496fdd3ebf3798d451c024cd4ac848fc15ac26b70aa7 languageName: node linkType: hard -"@babel/plugin-transform-property-literals@npm:^7.22.5": - version: 7.22.5 - resolution: "@babel/plugin-transform-property-literals@npm:7.22.5" +"@babel/plugin-transform-property-literals@npm:^7.23.3": + version: 7.23.3 + resolution: "@babel/plugin-transform-property-literals@npm:7.23.3" dependencies: "@babel/helper-plugin-utils": ^7.22.5 peerDependencies: "@babel/core": ^7.0.0-0 - checksum: 796176a3176106f77fcb8cd04eb34a8475ce82d6d03a88db089531b8f0453a2fb8b0c6ec9a52c27948bc0ea478becec449893741fc546dfc3930ab927e3f9f2e + checksum: 16b048c8e87f25095f6d53634ab7912992f78e6997a6ff549edc3cf519db4fca01c7b4e0798530d7f6a05228ceee479251245cdd850a5531c6e6f404104d6cc9 languageName: node linkType: hard "@babel/plugin-transform-react-constant-elements@npm:^7.18.12": - version: 7.22.5 - resolution: "@babel/plugin-transform-react-constant-elements@npm:7.22.5" + version: 7.23.3 + resolution: "@babel/plugin-transform-react-constant-elements@npm:7.23.3" dependencies: "@babel/helper-plugin-utils": ^7.22.5 peerDependencies: "@babel/core": ^7.0.0-0 - checksum: 596db90e37174dd703f4859fef3c86156a7c8564d8351168ac6fdca79c912ef8b8746ae04516ac3909d2cc750702d58d451badacb3c54ea998938ad05d99f9d2 + checksum: f386fe59657910a00c5d276918765c6a74e52c9a223d79463a4eecd652b4da4a6c0a16710fcf5e17b838c336e0c46b552b79e47c1d6eeebc74a813788e0611f7 languageName: node linkType: hard -"@babel/plugin-transform-react-display-name@npm:^7.22.5": - version: 7.22.5 - resolution: "@babel/plugin-transform-react-display-name@npm:7.22.5" +"@babel/plugin-transform-react-display-name@npm:^7.23.3": + version: 7.23.3 + resolution: "@babel/plugin-transform-react-display-name@npm:7.23.3" dependencies: "@babel/helper-plugin-utils": ^7.22.5 peerDependencies: "@babel/core": ^7.0.0-0 - checksum: a12bfd1e4e93055efca3ace3c34722571bda59d9740dca364d225d9c6e3ca874f134694d21715c42cc63d79efd46db9665bd4a022998767f9245f1e29d5d204d + checksum: 7f86964e8434d3ddbd3c81d2690c9b66dbf1cd8bd9512e2e24500e9fa8cf378bc52c0853270b3b82143aba5965aec04721df7abdb768f952b44f5c6e0b198779 languageName: node linkType: hard @@ -1353,58 +1379,58 @@ __metadata: linkType: hard "@babel/plugin-transform-react-jsx@npm:^7.22.15, @babel/plugin-transform-react-jsx@npm:^7.22.5": - version: 7.22.15 - resolution: "@babel/plugin-transform-react-jsx@npm:7.22.15" + version: 7.23.4 + resolution: "@babel/plugin-transform-react-jsx@npm:7.23.4" dependencies: "@babel/helper-annotate-as-pure": ^7.22.5 "@babel/helper-module-imports": ^7.22.15 "@babel/helper-plugin-utils": ^7.22.5 - "@babel/plugin-syntax-jsx": ^7.22.5 - "@babel/types": ^7.22.15 + "@babel/plugin-syntax-jsx": ^7.23.3 + "@babel/types": ^7.23.4 peerDependencies: "@babel/core": ^7.0.0-0 - checksum: 3899054e89550c3a0ef041af7c47ee266e2e934f498ee80fefeda778a6aa177b48aa8b4d2a8bf5848de977fec564571699ab952d9fa089c4c19b45ddb121df09 + checksum: d8b8c52e8e22e833bf77c8d1a53b0a57d1fd52ba9596a319d572de79446a8ed9d95521035bc1175c1589d1a6a34600d2e678fa81d81bac8fac121137097f1f0a languageName: node linkType: hard -"@babel/plugin-transform-react-pure-annotations@npm:^7.22.5": - version: 7.22.5 - resolution: "@babel/plugin-transform-react-pure-annotations@npm:7.22.5" +"@babel/plugin-transform-react-pure-annotations@npm:^7.23.3": + version: 7.23.3 + resolution: 
"@babel/plugin-transform-react-pure-annotations@npm:7.23.3" dependencies: "@babel/helper-annotate-as-pure": ^7.22.5 "@babel/helper-plugin-utils": ^7.22.5 peerDependencies: "@babel/core": ^7.0.0-0 - checksum: 092021c4f404e267002099ec20b3f12dd730cb90b0d83c5feed3dc00dbe43b9c42c795a18e7c6c7d7bddea20c7dd56221b146aec81b37f2e7eb5137331c61120 + checksum: 9ea3698b1d422561d93c0187ac1ed8f2367e4250b10e259785ead5aa643c265830fd0f4cf5087a5bedbc4007444c06da2f2006686613220acf0949895f453666 languageName: node linkType: hard -"@babel/plugin-transform-regenerator@npm:^7.22.10": - version: 7.22.10 - resolution: "@babel/plugin-transform-regenerator@npm:7.22.10" +"@babel/plugin-transform-regenerator@npm:^7.23.3": + version: 7.23.3 + resolution: "@babel/plugin-transform-regenerator@npm:7.23.3" dependencies: "@babel/helper-plugin-utils": ^7.22.5 regenerator-transform: ^0.15.2 peerDependencies: "@babel/core": ^7.0.0-0 - checksum: e13678d62d6fa96f11cb8b863f00e8693491e7adc88bfca3f2820f80cbac8336e7dec3a596eee6a1c4663b7ececc3564f2cd7fb44ed6d4ce84ac2bb7f39ecc6e + checksum: 7fdacc7b40008883871b519c9e5cdea493f75495118ccc56ac104b874983569a24edd024f0f5894ba1875c54ee2b442f295d6241c3280e61c725d0dd3317c8e6 languageName: node linkType: hard -"@babel/plugin-transform-reserved-words@npm:^7.22.5": - version: 7.22.5 - resolution: "@babel/plugin-transform-reserved-words@npm:7.22.5" +"@babel/plugin-transform-reserved-words@npm:^7.23.3": + version: 7.23.3 + resolution: "@babel/plugin-transform-reserved-words@npm:7.23.3" dependencies: "@babel/helper-plugin-utils": ^7.22.5 peerDependencies: "@babel/core": ^7.0.0-0 - checksum: 3ffd7dbc425fe8132bfec118b9817572799cab1473113a635d25ab606c1f5a2341a636c04cf6b22df3813320365ed5a965b5eeb3192320a10e4cc2c137bd8bfc + checksum: 298c4440ddc136784ff920127cea137168e068404e635dc946ddb5d7b2a27b66f1dd4c4acb01f7184478ff7d5c3e7177a127279479926519042948fb7fa0fa48 languageName: node linkType: hard -"@babel/plugin-transform-runtime@npm:^7.18.6": - version: 7.23.2 - resolution: "@babel/plugin-transform-runtime@npm:7.23.2" +"@babel/plugin-transform-runtime@npm:^7.18.6, @babel/plugin-transform-runtime@npm:^7.22.9": + version: 7.23.4 + resolution: "@babel/plugin-transform-runtime@npm:7.23.4" dependencies: "@babel/helper-module-imports": ^7.22.15 "@babel/helper-plugin-utils": ^7.22.5 @@ -1414,145 +1440,146 @@ __metadata: semver: ^6.3.1 peerDependencies: "@babel/core": ^7.0.0-0 - checksum: 09f4273bfe9600c67e72e26f853f11c24ee4c1cbb3935c4a28a94d388e7c0d8733479d868c333cb34e9c236f1765788c6daef7852331f5c70a3b5543fd0247a1 + checksum: a1693d27cd5ce17d0917280942a62bbf4ee27f6f0fe7beb33789bdc699cda21e5253997663248b32e8e36c01ccd202f96246413b9328b70a05d4cf64faa3191e languageName: node linkType: hard -"@babel/plugin-transform-shorthand-properties@npm:^7.22.5": - version: 7.22.5 - resolution: "@babel/plugin-transform-shorthand-properties@npm:7.22.5" +"@babel/plugin-transform-shorthand-properties@npm:^7.23.3": + version: 7.23.3 + resolution: "@babel/plugin-transform-shorthand-properties@npm:7.23.3" dependencies: "@babel/helper-plugin-utils": ^7.22.5 peerDependencies: "@babel/core": ^7.0.0-0 - checksum: a5ac902c56ea8effa99f681340ee61bac21094588f7aef0bc01dff98246651702e677552fa6d10e548c4ac22a3ffad047dd2f8c8f0540b68316c2c203e56818b + checksum: 5d677a03676f9fff969b0246c423d64d77502e90a832665dc872a5a5e05e5708161ce1effd56bb3c0f2c20a1112fca874be57c8a759d8b08152755519281f326 languageName: node linkType: hard -"@babel/plugin-transform-spread@npm:^7.22.5": - version: 7.22.5 - resolution: "@babel/plugin-transform-spread@npm:7.22.5" 
+"@babel/plugin-transform-spread@npm:^7.23.3": + version: 7.23.3 + resolution: "@babel/plugin-transform-spread@npm:7.23.3" dependencies: "@babel/helper-plugin-utils": ^7.22.5 "@babel/helper-skip-transparent-expression-wrappers": ^7.22.5 peerDependencies: "@babel/core": ^7.0.0-0 - checksum: 5587f0deb60b3dfc9b274e269031cc45ec75facccf1933ea2ea71ced9fd3ce98ed91bb36d6cd26817c14474b90ed998c5078415f0eab531caf301496ce24c95c + checksum: 8fd5cac201e77a0b4825745f4e07a25f923842f282f006b3a79223c00f61075c8868d12eafec86b2642cd0b32077cdd32314e27bcb75ee5e6a68c0144140dcf2 languageName: node linkType: hard -"@babel/plugin-transform-sticky-regex@npm:^7.22.5": - version: 7.22.5 - resolution: "@babel/plugin-transform-sticky-regex@npm:7.22.5" +"@babel/plugin-transform-sticky-regex@npm:^7.23.3": + version: 7.23.3 + resolution: "@babel/plugin-transform-sticky-regex@npm:7.23.3" dependencies: "@babel/helper-plugin-utils": ^7.22.5 peerDependencies: "@babel/core": ^7.0.0-0 - checksum: 63b2c575e3e7f96c32d52ed45ee098fb7d354b35c2223b8c8e76840b32cc529ee0c0ceb5742fd082e56e91e3d82842a367ce177e82b05039af3d602c9627a729 + checksum: 53e55eb2575b7abfdb4af7e503a2bf7ef5faf8bf6b92d2cd2de0700bdd19e934e5517b23e6dfed94ba50ae516b62f3f916773ef7d9bc81f01503f585051e2949 languageName: node linkType: hard -"@babel/plugin-transform-template-literals@npm:^7.22.5": - version: 7.22.5 - resolution: "@babel/plugin-transform-template-literals@npm:7.22.5" +"@babel/plugin-transform-template-literals@npm:^7.23.3": + version: 7.23.3 + resolution: "@babel/plugin-transform-template-literals@npm:7.23.3" dependencies: "@babel/helper-plugin-utils": ^7.22.5 peerDependencies: "@babel/core": ^7.0.0-0 - checksum: 27e9bb030654cb425381c69754be4abe6a7c75b45cd7f962cd8d604b841b2f0fb7b024f2efc1c25cc53f5b16d79d5e8cfc47cacbdaa983895b3aeefa3e7e24ff + checksum: b16c5cb0b8796be0118e9c144d15bdc0d20a7f3f59009c6303a6e9a8b74c146eceb3f05186f5b97afcba7cfa87e34c1585a22186e3d5b22f2fd3d27d959d92b2 languageName: node linkType: hard -"@babel/plugin-transform-typeof-symbol@npm:^7.22.5": - version: 7.22.5 - resolution: "@babel/plugin-transform-typeof-symbol@npm:7.22.5" +"@babel/plugin-transform-typeof-symbol@npm:^7.23.3": + version: 7.23.3 + resolution: "@babel/plugin-transform-typeof-symbol@npm:7.23.3" dependencies: "@babel/helper-plugin-utils": ^7.22.5 peerDependencies: "@babel/core": ^7.0.0-0 - checksum: 82a53a63ffc3010b689ca9a54e5f53b2718b9f4b4a9818f36f9b7dba234f38a01876680553d2716a645a61920b5e6e4aaf8d4a0064add379b27ca0b403049512 + checksum: 0af7184379d43afac7614fc89b1bdecce4e174d52f4efaeee8ec1a4f2c764356c6dba3525c0685231f1cbf435b6dd4ee9e738d7417f3b10ce8bbe869c32f4384 languageName: node linkType: hard -"@babel/plugin-transform-typescript@npm:^7.22.15": - version: 7.22.15 - resolution: "@babel/plugin-transform-typescript@npm:7.22.15" +"@babel/plugin-transform-typescript@npm:^7.23.3": + version: 7.23.5 + resolution: "@babel/plugin-transform-typescript@npm:7.23.5" dependencies: "@babel/helper-annotate-as-pure": ^7.22.5 - "@babel/helper-create-class-features-plugin": ^7.22.15 + "@babel/helper-create-class-features-plugin": ^7.23.5 "@babel/helper-plugin-utils": ^7.22.5 - "@babel/plugin-syntax-typescript": ^7.22.5 + "@babel/plugin-syntax-typescript": ^7.23.3 peerDependencies: "@babel/core": ^7.0.0-0 - checksum: c5d96cdbf0e1512707aa1c1e3ac6b370a25fd9c545d26008ce44eb13a47bd7fd67a1eb799c98b5ccc82e33a345fda55c0055e1fe3ed97646ed405dd13020b226 + checksum: d77b5cc22cf48fe461de07e4f058dc9c0d8c4e3ca49de0e3a336a94ab39bfa4f4732598e36c479bec0dd1bf4aff9154bc2dcbfbe3145a751e4771ccae5afaaf8 
languageName: node linkType: hard -"@babel/plugin-transform-unicode-escapes@npm:^7.22.10": - version: 7.22.10 - resolution: "@babel/plugin-transform-unicode-escapes@npm:7.22.10" +"@babel/plugin-transform-unicode-escapes@npm:^7.23.3": + version: 7.23.3 + resolution: "@babel/plugin-transform-unicode-escapes@npm:7.23.3" dependencies: "@babel/helper-plugin-utils": ^7.22.5 peerDependencies: "@babel/core": ^7.0.0-0 - checksum: 807f40ed1324c8cb107c45358f1903384ca3f0ef1d01c5a3c5c9b271c8d8eec66936a3dcc8d75ddfceea9421420368c2e77ae3adef0a50557e778dfe296bf382 + checksum: 561c429183a54b9e4751519a3dfba6014431e9cdc1484fad03bdaf96582dfc72c76a4f8661df2aeeae7c34efd0fa4d02d3b83a2f63763ecf71ecc925f9cc1f60 languageName: node linkType: hard -"@babel/plugin-transform-unicode-property-regex@npm:^7.22.5": - version: 7.22.5 - resolution: "@babel/plugin-transform-unicode-property-regex@npm:7.22.5" +"@babel/plugin-transform-unicode-property-regex@npm:^7.23.3": + version: 7.23.3 + resolution: "@babel/plugin-transform-unicode-property-regex@npm:7.23.3" dependencies: - "@babel/helper-create-regexp-features-plugin": ^7.22.5 + "@babel/helper-create-regexp-features-plugin": ^7.22.15 "@babel/helper-plugin-utils": ^7.22.5 peerDependencies: "@babel/core": ^7.0.0-0 - checksum: 2495e5f663cb388e3d888b4ba3df419ac436a5012144ac170b622ddfc221f9ea9bdba839fa2bc0185cb776b578030666406452ec7791cbf0e7a3d4c88ae9574c + checksum: 2298461a194758086d17c23c26c7de37aa533af910f9ebf31ebd0893d4aa317468043d23f73edc782ec21151d3c46cf0ff8098a83b725c49a59de28a1d4d6225 languageName: node linkType: hard -"@babel/plugin-transform-unicode-regex@npm:^7.22.5": - version: 7.22.5 - resolution: "@babel/plugin-transform-unicode-regex@npm:7.22.5" +"@babel/plugin-transform-unicode-regex@npm:^7.23.3": + version: 7.23.3 + resolution: "@babel/plugin-transform-unicode-regex@npm:7.23.3" dependencies: - "@babel/helper-create-regexp-features-plugin": ^7.22.5 + "@babel/helper-create-regexp-features-plugin": ^7.22.15 "@babel/helper-plugin-utils": ^7.22.5 peerDependencies: "@babel/core": ^7.0.0-0 - checksum: 6b5d1404c8c623b0ec9bd436c00d885a17d6a34f3f2597996343ddb9d94f6379705b21582dfd4cec2c47fd34068872e74ab6b9580116c0566b3f9447e2a7fa06 + checksum: c5f835d17483ba899787f92e313dfa5b0055e3deab332f1d254078a2bba27ede47574b6599fcf34d3763f0c048ae0779dc21d2d8db09295edb4057478dc80a9a languageName: node linkType: hard -"@babel/plugin-transform-unicode-sets-regex@npm:^7.22.5": - version: 7.22.5 - resolution: "@babel/plugin-transform-unicode-sets-regex@npm:7.22.5" +"@babel/plugin-transform-unicode-sets-regex@npm:^7.23.3": + version: 7.23.3 + resolution: "@babel/plugin-transform-unicode-sets-regex@npm:7.23.3" dependencies: - "@babel/helper-create-regexp-features-plugin": ^7.22.5 + "@babel/helper-create-regexp-features-plugin": ^7.22.15 "@babel/helper-plugin-utils": ^7.22.5 peerDependencies: "@babel/core": ^7.0.0 - checksum: c042070f980b139547f8b0179efbc049ac5930abec7fc26ed7a41d89a048d8ab17d362200e204b6f71c3c20d6991a0e74415e1a412a49adc8131c2a40c04822e + checksum: 79d0b4c951955ca68235c87b91ab2b393c96285f8aeaa34d6db416d2ddac90000c9bd6e8c4d82b60a2b484da69930507245035f28ba63c6cae341cf3ba68fdef languageName: node linkType: hard -"@babel/preset-env@npm:^7.18.6, @babel/preset-env@npm:^7.19.4": - version: 7.23.2 - resolution: "@babel/preset-env@npm:7.23.2" +"@babel/preset-env@npm:^7.18.6, @babel/preset-env@npm:^7.19.4, @babel/preset-env@npm:^7.22.9": + version: 7.23.5 + resolution: "@babel/preset-env@npm:7.23.5" dependencies: - "@babel/compat-data": ^7.23.2 + "@babel/compat-data": ^7.23.5 
"@babel/helper-compilation-targets": ^7.22.15 "@babel/helper-plugin-utils": ^7.22.5 - "@babel/helper-validator-option": ^7.22.15 - "@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression": ^7.22.15 - "@babel/plugin-bugfix-v8-spread-parameters-in-optional-chaining": ^7.22.15 + "@babel/helper-validator-option": ^7.23.5 + "@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression": ^7.23.3 + "@babel/plugin-bugfix-v8-spread-parameters-in-optional-chaining": ^7.23.3 + "@babel/plugin-bugfix-v8-static-class-fields-redefine-readonly": ^7.23.3 "@babel/plugin-proposal-private-property-in-object": 7.21.0-placeholder-for-preset-env.2 "@babel/plugin-syntax-async-generators": ^7.8.4 "@babel/plugin-syntax-class-properties": ^7.12.13 "@babel/plugin-syntax-class-static-block": ^7.14.5 "@babel/plugin-syntax-dynamic-import": ^7.8.3 "@babel/plugin-syntax-export-namespace-from": ^7.8.3 - "@babel/plugin-syntax-import-assertions": ^7.22.5 - "@babel/plugin-syntax-import-attributes": ^7.22.5 + "@babel/plugin-syntax-import-assertions": ^7.23.3 + "@babel/plugin-syntax-import-attributes": ^7.23.3 "@babel/plugin-syntax-import-meta": ^7.10.4 "@babel/plugin-syntax-json-strings": ^7.8.3 "@babel/plugin-syntax-logical-assignment-operators": ^7.10.4 @@ -1564,56 +1591,55 @@ __metadata: "@babel/plugin-syntax-private-property-in-object": ^7.14.5 "@babel/plugin-syntax-top-level-await": ^7.14.5 "@babel/plugin-syntax-unicode-sets-regex": ^7.18.6 - "@babel/plugin-transform-arrow-functions": ^7.22.5 - "@babel/plugin-transform-async-generator-functions": ^7.23.2 - "@babel/plugin-transform-async-to-generator": ^7.22.5 - "@babel/plugin-transform-block-scoped-functions": ^7.22.5 - "@babel/plugin-transform-block-scoping": ^7.23.0 - "@babel/plugin-transform-class-properties": ^7.22.5 - "@babel/plugin-transform-class-static-block": ^7.22.11 - "@babel/plugin-transform-classes": ^7.22.15 - "@babel/plugin-transform-computed-properties": ^7.22.5 - "@babel/plugin-transform-destructuring": ^7.23.0 - "@babel/plugin-transform-dotall-regex": ^7.22.5 - "@babel/plugin-transform-duplicate-keys": ^7.22.5 - "@babel/plugin-transform-dynamic-import": ^7.22.11 - "@babel/plugin-transform-exponentiation-operator": ^7.22.5 - "@babel/plugin-transform-export-namespace-from": ^7.22.11 - "@babel/plugin-transform-for-of": ^7.22.15 - "@babel/plugin-transform-function-name": ^7.22.5 - "@babel/plugin-transform-json-strings": ^7.22.11 - "@babel/plugin-transform-literals": ^7.22.5 - "@babel/plugin-transform-logical-assignment-operators": ^7.22.11 - "@babel/plugin-transform-member-expression-literals": ^7.22.5 - "@babel/plugin-transform-modules-amd": ^7.23.0 - "@babel/plugin-transform-modules-commonjs": ^7.23.0 - "@babel/plugin-transform-modules-systemjs": ^7.23.0 - "@babel/plugin-transform-modules-umd": ^7.22.5 + "@babel/plugin-transform-arrow-functions": ^7.23.3 + "@babel/plugin-transform-async-generator-functions": ^7.23.4 + "@babel/plugin-transform-async-to-generator": ^7.23.3 + "@babel/plugin-transform-block-scoped-functions": ^7.23.3 + "@babel/plugin-transform-block-scoping": ^7.23.4 + "@babel/plugin-transform-class-properties": ^7.23.3 + "@babel/plugin-transform-class-static-block": ^7.23.4 + "@babel/plugin-transform-classes": ^7.23.5 + "@babel/plugin-transform-computed-properties": ^7.23.3 + "@babel/plugin-transform-destructuring": ^7.23.3 + "@babel/plugin-transform-dotall-regex": ^7.23.3 + "@babel/plugin-transform-duplicate-keys": ^7.23.3 + "@babel/plugin-transform-dynamic-import": ^7.23.4 + 
"@babel/plugin-transform-exponentiation-operator": ^7.23.3 + "@babel/plugin-transform-export-namespace-from": ^7.23.4 + "@babel/plugin-transform-for-of": ^7.23.3 + "@babel/plugin-transform-function-name": ^7.23.3 + "@babel/plugin-transform-json-strings": ^7.23.4 + "@babel/plugin-transform-literals": ^7.23.3 + "@babel/plugin-transform-logical-assignment-operators": ^7.23.4 + "@babel/plugin-transform-member-expression-literals": ^7.23.3 + "@babel/plugin-transform-modules-amd": ^7.23.3 + "@babel/plugin-transform-modules-commonjs": ^7.23.3 + "@babel/plugin-transform-modules-systemjs": ^7.23.3 + "@babel/plugin-transform-modules-umd": ^7.23.3 "@babel/plugin-transform-named-capturing-groups-regex": ^7.22.5 - "@babel/plugin-transform-new-target": ^7.22.5 - "@babel/plugin-transform-nullish-coalescing-operator": ^7.22.11 - "@babel/plugin-transform-numeric-separator": ^7.22.11 - "@babel/plugin-transform-object-rest-spread": ^7.22.15 - "@babel/plugin-transform-object-super": ^7.22.5 - "@babel/plugin-transform-optional-catch-binding": ^7.22.11 - "@babel/plugin-transform-optional-chaining": ^7.23.0 - "@babel/plugin-transform-parameters": ^7.22.15 - "@babel/plugin-transform-private-methods": ^7.22.5 - "@babel/plugin-transform-private-property-in-object": ^7.22.11 - "@babel/plugin-transform-property-literals": ^7.22.5 - "@babel/plugin-transform-regenerator": ^7.22.10 - "@babel/plugin-transform-reserved-words": ^7.22.5 - "@babel/plugin-transform-shorthand-properties": ^7.22.5 - "@babel/plugin-transform-spread": ^7.22.5 - "@babel/plugin-transform-sticky-regex": ^7.22.5 - "@babel/plugin-transform-template-literals": ^7.22.5 - "@babel/plugin-transform-typeof-symbol": ^7.22.5 - "@babel/plugin-transform-unicode-escapes": ^7.22.10 - "@babel/plugin-transform-unicode-property-regex": ^7.22.5 - "@babel/plugin-transform-unicode-regex": ^7.22.5 - "@babel/plugin-transform-unicode-sets-regex": ^7.22.5 + "@babel/plugin-transform-new-target": ^7.23.3 + "@babel/plugin-transform-nullish-coalescing-operator": ^7.23.4 + "@babel/plugin-transform-numeric-separator": ^7.23.4 + "@babel/plugin-transform-object-rest-spread": ^7.23.4 + "@babel/plugin-transform-object-super": ^7.23.3 + "@babel/plugin-transform-optional-catch-binding": ^7.23.4 + "@babel/plugin-transform-optional-chaining": ^7.23.4 + "@babel/plugin-transform-parameters": ^7.23.3 + "@babel/plugin-transform-private-methods": ^7.23.3 + "@babel/plugin-transform-private-property-in-object": ^7.23.4 + "@babel/plugin-transform-property-literals": ^7.23.3 + "@babel/plugin-transform-regenerator": ^7.23.3 + "@babel/plugin-transform-reserved-words": ^7.23.3 + "@babel/plugin-transform-shorthand-properties": ^7.23.3 + "@babel/plugin-transform-spread": ^7.23.3 + "@babel/plugin-transform-sticky-regex": ^7.23.3 + "@babel/plugin-transform-template-literals": ^7.23.3 + "@babel/plugin-transform-typeof-symbol": ^7.23.3 + "@babel/plugin-transform-unicode-escapes": ^7.23.3 + "@babel/plugin-transform-unicode-property-regex": ^7.23.3 + "@babel/plugin-transform-unicode-regex": ^7.23.3 + "@babel/plugin-transform-unicode-sets-regex": ^7.23.3 "@babel/preset-modules": 0.1.6-no-external-plugins - "@babel/types": ^7.23.0 babel-plugin-polyfill-corejs2: ^0.4.6 babel-plugin-polyfill-corejs3: ^0.8.5 babel-plugin-polyfill-regenerator: ^0.5.3 @@ -1621,7 +1647,7 @@ __metadata: semver: ^6.3.1 peerDependencies: "@babel/core": ^7.0.0-0 - checksum: 49327ef584b529b56aedd6577937b80c0d89603c68b23795495a13af04b5aa008db9ad04cd280423600cdc0d3cce13ae9d0d9a977db5c8193697b20ced8a10b2 + checksum: 
adddd58d14fc1b2e5f8cf90995f522879362a0543e316afe9e5783f1bd715bb1e92300cd49d7ce3a95c64a96d60788d0089651e2cf4cac937f5469aac1087bb1 languageName: node linkType: hard @@ -1638,34 +1664,34 @@ __metadata: languageName: node linkType: hard -"@babel/preset-react@npm:^7.18.6": - version: 7.22.15 - resolution: "@babel/preset-react@npm:7.22.15" +"@babel/preset-react@npm:^7.18.6, @babel/preset-react@npm:^7.22.5": + version: 7.23.3 + resolution: "@babel/preset-react@npm:7.23.3" dependencies: "@babel/helper-plugin-utils": ^7.22.5 "@babel/helper-validator-option": ^7.22.15 - "@babel/plugin-transform-react-display-name": ^7.22.5 + "@babel/plugin-transform-react-display-name": ^7.23.3 "@babel/plugin-transform-react-jsx": ^7.22.15 "@babel/plugin-transform-react-jsx-development": ^7.22.5 - "@babel/plugin-transform-react-pure-annotations": ^7.22.5 + "@babel/plugin-transform-react-pure-annotations": ^7.23.3 peerDependencies: "@babel/core": ^7.0.0-0 - checksum: c3ef99dfa2e9f57d2e08603e883aa20f47630a826c8e413888a93ae6e0084b5016871e463829be125329d40a1ba0a89f7c43d77b6dab52083c225cb43e63d10e + checksum: 2d90961e7e627a74b44551e88ad36a440579e283e8dc27972bf2f50682152bbc77228673a3ea22c0e0d005b70cbc487eccd64897c5e5e0384e5ce18f300b21eb languageName: node linkType: hard -"@babel/preset-typescript@npm:^7.18.6": - version: 7.23.2 - resolution: "@babel/preset-typescript@npm:7.23.2" +"@babel/preset-typescript@npm:^7.18.6, @babel/preset-typescript@npm:^7.22.5": + version: 7.23.3 + resolution: "@babel/preset-typescript@npm:7.23.3" dependencies: "@babel/helper-plugin-utils": ^7.22.5 "@babel/helper-validator-option": ^7.22.15 - "@babel/plugin-syntax-jsx": ^7.22.5 - "@babel/plugin-transform-modules-commonjs": ^7.23.0 - "@babel/plugin-transform-typescript": ^7.22.15 + "@babel/plugin-syntax-jsx": ^7.23.3 + "@babel/plugin-transform-modules-commonjs": ^7.23.3 + "@babel/plugin-transform-typescript": ^7.23.3 peerDependencies: "@babel/core": ^7.0.0-0 - checksum: c4b065c90e7f085dd7a0e57032983ac230c7ffd1d616e4c2b66581e765d5befc9271495f33250bf1cf9b4d436239c8ca3b19ada9f6c419c70bdab2cf6c868f9f + checksum: 105a2d39bbc464da0f7e1ad7f535c77c5f62d6b410219355b20e552e7d29933567a5c55339b5d0aec1a5c7a0a7dfdf1b54aae601a4fe15a157d54dcbfcb3e854 languageName: node linkType: hard @@ -1676,26 +1702,26 @@ __metadata: languageName: node linkType: hard -"@babel/runtime-corejs3@npm:^7.18.6": - version: 7.23.2 - resolution: "@babel/runtime-corejs3@npm:7.23.2" +"@babel/runtime-corejs3@npm:^7.18.6, @babel/runtime-corejs3@npm:^7.22.6": + version: 7.23.5 + resolution: "@babel/runtime-corejs3@npm:7.23.5" dependencies: core-js-pure: ^3.30.2 regenerator-runtime: ^0.14.0 - checksum: 922f25c47996a8af604cea82441e41be8b11910e96c662511e54120078f4c64258c045a28a311467a8f14a0c17f46a1f057f7c0501e567869a4343a6ce017962 + checksum: 2798c77047d70f1483090aaa67ff5ea76c19687f15b5d7bc5e3772ea5c4ca4dc2d5fa3328ec94013981a627f50f5cb7c488972be415b832c1be76a20e13aa1e2 languageName: node linkType: hard -"@babel/runtime@npm:^7.1.2, @babel/runtime@npm:^7.10.3, @babel/runtime@npm:^7.12.13, @babel/runtime@npm:^7.12.5, @babel/runtime@npm:^7.18.6, @babel/runtime@npm:^7.20.13, @babel/runtime@npm:^7.8.4": - version: 7.23.2 - resolution: "@babel/runtime@npm:7.23.2" +"@babel/runtime@npm:^7.1.2, @babel/runtime@npm:^7.10.3, @babel/runtime@npm:^7.12.13, @babel/runtime@npm:^7.12.5, @babel/runtime@npm:^7.18.6, @babel/runtime@npm:^7.22.6, @babel/runtime@npm:^7.8.4": + version: 7.23.5 + resolution: "@babel/runtime@npm:7.23.5" dependencies: regenerator-runtime: ^0.14.0 - checksum: 
6c4df4839ec75ca10175f636d6362f91df8a3137f86b38f6cd3a4c90668a0fe8e9281d320958f4fbd43b394988958585a17c3aab2a4ea6bf7316b22916a371fb + checksum: 164d9802424f06908e62d29b8fd3a87db55accf82f46f964ac481dcead11ff7df8391e3696e5fa91a8ca10ea8845bf650acd730fa88cf13f8026cd8d5eec6936 languageName: node linkType: hard -"@babel/template@npm:^7.12.7, @babel/template@npm:^7.22.15, @babel/template@npm:^7.22.5": +"@babel/template@npm:^7.12.7, @babel/template@npm:^7.22.15": version: 7.22.15 resolution: "@babel/template@npm:7.22.15" dependencies: @@ -1706,32 +1732,32 @@ __metadata: languageName: node linkType: hard -"@babel/traverse@npm:^7.12.9, @babel/traverse@npm:^7.18.8, @babel/traverse@npm:^7.23.2": - version: 7.23.2 - resolution: "@babel/traverse@npm:7.23.2" +"@babel/traverse@npm:^7.12.9, @babel/traverse@npm:^7.18.8, @babel/traverse@npm:^7.22.8, @babel/traverse@npm:^7.23.5": + version: 7.23.5 + resolution: "@babel/traverse@npm:7.23.5" dependencies: - "@babel/code-frame": ^7.22.13 - "@babel/generator": ^7.23.0 + "@babel/code-frame": ^7.23.5 + "@babel/generator": ^7.23.5 "@babel/helper-environment-visitor": ^7.22.20 "@babel/helper-function-name": ^7.23.0 "@babel/helper-hoist-variables": ^7.22.5 "@babel/helper-split-export-declaration": ^7.22.6 - "@babel/parser": ^7.23.0 - "@babel/types": ^7.23.0 + "@babel/parser": ^7.23.5 + "@babel/types": ^7.23.5 debug: ^4.1.0 globals: ^11.1.0 - checksum: 26a1eea0dde41ab99dde8b9773a013a0dc50324e5110a049f5d634e721ff08afffd54940b3974a20308d7952085ac769689369e9127dea655f868c0f6e1ab35d + checksum: 0558b05360850c3ad6384e85bd55092126a8d5f93e29a8e227dd58fa1f9e1a4c25fd337c07c7ae509f0983e7a2b1e761ffdcfaa77a1e1bedbc867058e1de5a7d languageName: node linkType: hard -"@babel/types@npm:^7.12.7, @babel/types@npm:^7.20.0, @babel/types@npm:^7.22.15, @babel/types@npm:^7.22.19, @babel/types@npm:^7.22.5, @babel/types@npm:^7.23.0, @babel/types@npm:^7.4.4, @babel/types@npm:^7.8.3": - version: 7.23.0 - resolution: "@babel/types@npm:7.23.0" +"@babel/types@npm:^7.12.7, @babel/types@npm:^7.20.0, @babel/types@npm:^7.22.15, @babel/types@npm:^7.22.19, @babel/types@npm:^7.22.5, @babel/types@npm:^7.23.0, @babel/types@npm:^7.23.4, @babel/types@npm:^7.23.5, @babel/types@npm:^7.4.4, @babel/types@npm:^7.8.3": + version: 7.23.5 + resolution: "@babel/types@npm:7.23.5" dependencies: - "@babel/helper-string-parser": ^7.22.5 + "@babel/helper-string-parser": ^7.23.4 "@babel/helper-validator-identifier": ^7.22.20 to-fast-properties: ^2.0.0 - checksum: 215fe04bd7feef79eeb4d33374b39909ce9cad1611c4135a4f7fdf41fe3280594105af6d7094354751514625ea92d0875aba355f53e86a92600f290e77b0e604 + checksum: 3d21774480a459ef13b41c2e32700d927af649e04b70c5d164814d8e04ab584af66a93330602c2925e1a6925c2b829cc153418a613a4e7d79d011be1f29ad4b2 languageName: node linkType: hard @@ -1788,10569 +1814,14224 @@ __metadata: languageName: node linkType: hard -"@cspotcode/source-map-support@npm:^0.8.0": - version: 0.8.1 - resolution: "@cspotcode/source-map-support@npm:0.8.1" +"@cspell/cspell-bundled-dicts@npm:8.3.2": + version: 8.3.2 + resolution: "@cspell/cspell-bundled-dicts@npm:8.3.2" + dependencies: + "@cspell/dict-ada": ^4.0.2 + "@cspell/dict-aws": ^4.0.1 + "@cspell/dict-bash": ^4.1.3 + "@cspell/dict-companies": ^3.0.29 + "@cspell/dict-cpp": ^5.0.10 + "@cspell/dict-cryptocurrencies": ^5.0.0 + "@cspell/dict-csharp": ^4.0.2 + "@cspell/dict-css": ^4.0.12 + "@cspell/dict-dart": ^2.0.3 + "@cspell/dict-django": ^4.1.0 + "@cspell/dict-docker": ^1.1.7 + "@cspell/dict-dotnet": ^5.0.0 + "@cspell/dict-elixir": ^4.0.3 + "@cspell/dict-en-common-misspellings": 
^2.0.0 + "@cspell/dict-en-gb": 1.1.33 + "@cspell/dict-en_us": ^4.3.13 + "@cspell/dict-filetypes": ^3.0.3 + "@cspell/dict-fonts": ^4.0.0 + "@cspell/dict-fsharp": ^1.0.1 + "@cspell/dict-fullstack": ^3.1.5 + "@cspell/dict-gaming-terms": ^1.0.4 + "@cspell/dict-git": ^3.0.0 + "@cspell/dict-golang": ^6.0.5 + "@cspell/dict-haskell": ^4.0.1 + "@cspell/dict-html": ^4.0.5 + "@cspell/dict-html-symbol-entities": ^4.0.0 + "@cspell/dict-java": ^5.0.6 + "@cspell/dict-k8s": ^1.0.2 + "@cspell/dict-latex": ^4.0.0 + "@cspell/dict-lorem-ipsum": ^4.0.0 + "@cspell/dict-lua": ^4.0.3 + "@cspell/dict-makefile": ^1.0.0 + "@cspell/dict-node": ^4.0.3 + "@cspell/dict-npm": ^5.0.14 + "@cspell/dict-php": ^4.0.5 + "@cspell/dict-powershell": ^5.0.3 + "@cspell/dict-public-licenses": ^2.0.5 + "@cspell/dict-python": ^4.1.11 + "@cspell/dict-r": ^2.0.1 + "@cspell/dict-ruby": ^5.0.2 + "@cspell/dict-rust": ^4.0.1 + "@cspell/dict-scala": ^5.0.0 + "@cspell/dict-software-terms": ^3.3.15 + "@cspell/dict-sql": ^2.1.3 + "@cspell/dict-svelte": ^1.0.2 + "@cspell/dict-swift": ^2.0.1 + "@cspell/dict-typescript": ^3.1.2 + "@cspell/dict-vue": ^3.0.0 + checksum: 2b50d8a3d056a6e261e4d525fe8f70b8d4e9860c305f6ba21ea7869807b2b8100b30d6f4ff4da91ea0ac6fcea7191158c7091e839b3b41ee3052f3e3b401b347 + languageName: node + linkType: hard + +"@cspell/cspell-json-reporter@npm:8.3.2": + version: 8.3.2 + resolution: "@cspell/cspell-json-reporter@npm:8.3.2" dependencies: - "@jridgewell/trace-mapping": 0.3.9 - checksum: 5718f267085ed8edb3e7ef210137241775e607ee18b77d95aa5bd7514f47f5019aa2d82d96b3bf342ef7aa890a346fa1044532ff7cc3009e7d24fce3ce6200fa + "@cspell/cspell-types": 8.3.2 + checksum: 25926f8cfef378dbce59f140d9f383db4598222a3de5f2a0a6840e225d694afce2c069498668304f4ec39e58f5570887ef0d31f45824ca16b0fc31f20180352e languageName: node linkType: hard -"@discoveryjs/json-ext@npm:0.5.7": - version: 0.5.7 - resolution: "@discoveryjs/json-ext@npm:0.5.7" - checksum: 2176d301cc258ea5c2324402997cf8134ebb212469c0d397591636cea8d3c02f2b3cf9fd58dcb748c7a0dade77ebdc1b10284fa63e608c033a1db52fddc69918 +"@cspell/cspell-pipe@npm:8.3.2": + version: 8.3.2 + resolution: "@cspell/cspell-pipe@npm:8.3.2" + checksum: 14b01d2cdc6af931a0b5ac9fcd5e5c358fcdd7710ed82d35a9dcf6c7d3e917bc5ac953571ec9fffa4ed22d8f872e2a74a10258cccac10b7cd49878c8c2e15a3c languageName: node linkType: hard -"@docsearch/css@npm:3.5.2": - version: 3.5.2 - resolution: "@docsearch/css@npm:3.5.2" - checksum: d1d60dd230dd48f896755f21bd20b59583ba844212d7d336953ae48d389baaf868bdf83320fb734a4ed679c3f95b15d620cf3764cd538f6941cae239f8c9d35d +"@cspell/cspell-resolver@npm:8.3.2": + version: 8.3.2 + resolution: "@cspell/cspell-resolver@npm:8.3.2" + dependencies: + global-directory: ^4.0.1 + checksum: fe723a1b8407b4168f6262cd30a97ad04eb5eb40f4cac3a11c6bf674ed67cc08525b0282b95edbcaf1d014dec3880f6f01b3f49e4319ade5b8e169f0a910f8d1 languageName: node linkType: hard -"@docsearch/react@npm:^3.1.1": - version: 3.5.2 - resolution: "@docsearch/react@npm:3.5.2" - dependencies: - "@algolia/autocomplete-core": 1.9.3 - "@algolia/autocomplete-preset-algolia": 1.9.3 - "@docsearch/css": 3.5.2 - algoliasearch: ^4.19.1 - peerDependencies: - "@types/react": ">= 16.8.0 < 19.0.0" - react: ">= 16.8.0 < 19.0.0" - react-dom: ">= 16.8.0 < 19.0.0" - search-insights: ">= 1 < 3" - peerDependenciesMeta: - "@types/react": - optional: true - react: - optional: true - react-dom: - optional: true - search-insights: - optional: true - checksum: 
4b4584c2c73fc18cbd599047538896450974e134c2c74f19eb202db0ce8e6c3c49c6f65ed6ade61c796d476d3cbb55d6be58df62bc9568a0c72d88e42fca1d16 +"@cspell/cspell-service-bus@npm:8.3.2": + version: 8.3.2 + resolution: "@cspell/cspell-service-bus@npm:8.3.2" + checksum: 9bec7ddafcd8acab743248eb547fa5a84cf5363b0d0da354fdb2a0a6268a516b3600c7034ea71b6fca1c2517818f939876d961a4a3fc73a175eb62f5cbb6b7ae languageName: node linkType: hard -"@docusaurus/core@npm:2.4.3, @docusaurus/core@npm:^2.4.0": - version: 2.4.3 - resolution: "@docusaurus/core@npm:2.4.3" - dependencies: - "@babel/core": ^7.18.6 - "@babel/generator": ^7.18.7 - "@babel/plugin-syntax-dynamic-import": ^7.8.3 - "@babel/plugin-transform-runtime": ^7.18.6 - "@babel/preset-env": ^7.18.6 - "@babel/preset-react": ^7.18.6 - "@babel/preset-typescript": ^7.18.6 - "@babel/runtime": ^7.18.6 - "@babel/runtime-corejs3": ^7.18.6 - "@babel/traverse": ^7.18.8 - "@docusaurus/cssnano-preset": 2.4.3 - "@docusaurus/logger": 2.4.3 - "@docusaurus/mdx-loader": 2.4.3 - "@docusaurus/react-loadable": 5.5.2 - "@docusaurus/utils": 2.4.3 - "@docusaurus/utils-common": 2.4.3 - "@docusaurus/utils-validation": 2.4.3 - "@slorber/static-site-generator-webpack-plugin": ^4.0.7 - "@svgr/webpack": ^6.2.1 - autoprefixer: ^10.4.7 - babel-loader: ^8.2.5 - babel-plugin-dynamic-import-node: ^2.3.3 - boxen: ^6.2.1 - chalk: ^4.1.2 - chokidar: ^3.5.3 - clean-css: ^5.3.0 - cli-table3: ^0.6.2 - combine-promises: ^1.1.0 - commander: ^5.1.0 - copy-webpack-plugin: ^11.0.0 - core-js: ^3.23.3 - css-loader: ^6.7.1 - css-minimizer-webpack-plugin: ^4.0.0 - cssnano: ^5.1.12 - del: ^6.1.1 - detect-port: ^1.3.0 - escape-html: ^1.0.3 - eta: ^2.0.0 - file-loader: ^6.2.0 - fs-extra: ^10.1.0 - html-minifier-terser: ^6.1.0 - html-tags: ^3.2.0 - html-webpack-plugin: ^5.5.0 - import-fresh: ^3.3.0 - leven: ^3.1.0 - lodash: ^4.17.21 - mini-css-extract-plugin: ^2.6.1 - postcss: ^8.4.14 - postcss-loader: ^7.0.0 - prompts: ^2.4.2 - react-dev-utils: ^12.0.1 - react-helmet-async: ^1.3.0 - react-loadable: "npm:@docusaurus/react-loadable@5.5.2" - react-loadable-ssr-addon-v5-slorber: ^1.0.1 - react-router: ^5.3.3 - react-router-config: ^5.1.1 - react-router-dom: ^5.3.3 - rtl-detect: ^1.0.4 - semver: ^7.3.7 - serve-handler: ^6.1.3 - shelljs: ^0.8.5 - terser-webpack-plugin: ^5.3.3 - tslib: ^2.4.0 - update-notifier: ^5.1.0 - url-loader: ^4.1.1 - wait-on: ^6.0.1 - webpack: ^5.73.0 - webpack-bundle-analyzer: ^4.5.0 - webpack-dev-server: ^4.9.3 - webpack-merge: ^5.8.0 - webpackbar: ^5.0.2 - peerDependencies: - react: ^16.8.4 || ^17.0.0 - react-dom: ^16.8.4 || ^17.0.0 - bin: - docusaurus: bin/docusaurus.mjs - checksum: cce7173ee131364857c16f70f94155ba0e1b044cde54045fb0cf62ad138f8d8ef093f5aba7c7617a9aa0545b3ee3930aec2e09f645daec015696968338963013 +"@cspell/cspell-types@npm:8.3.2": + version: 8.3.2 + resolution: "@cspell/cspell-types@npm:8.3.2" + checksum: 5f0b038e6717a3fa9c4109845c94e2a3d561a8b1a5dac073535929dee81e4d6d18af96759135a4dab6b06529a86ab1bd4e6b4a70121561bf22dfe4508140a5c3 languageName: node linkType: hard -"@docusaurus/cssnano-preset@npm:2.4.3": - version: 2.4.3 - resolution: "@docusaurus/cssnano-preset@npm:2.4.3" - dependencies: - cssnano-preset-advanced: ^5.3.8 - postcss: ^8.4.14 - postcss-sort-media-queries: ^4.2.1 - tslib: ^2.4.0 - checksum: f4a4c60b075c23541da90e00ae26af2e7eaadf20d783b37b9110a5e34599e4e91947425e33bad58ba71abee81c85cca99f5d7d76575f53fbaf73617b55e39c62 +"@cspell/dict-ada@npm:^4.0.2": + version: 4.0.2 + resolution: "@cspell/dict-ada@npm:4.0.2" + checksum: 
847729d40022db4df698aa9511c6b9073954f71268b64ad4fa354e6ac3eb5b03486bcb566ecadd4bccbebb4f188752eff2b2bdd9021b58dbf2cd61cd6a426752 languageName: node linkType: hard -"@docusaurus/logger@npm:2.4.3": - version: 2.4.3 - resolution: "@docusaurus/logger@npm:2.4.3" - dependencies: - chalk: ^4.1.2 - tslib: ^2.4.0 - checksum: f026a8233aa317f16ce5b25c6785a431f319c52fc07a1b9e26f4b3df2197974e75830a16b6140314f8f4ef02dc19242106ec2ae1599740b26d516cc34c56102f +"@cspell/dict-aws@npm:^4.0.1": + version: 4.0.1 + resolution: "@cspell/dict-aws@npm:4.0.1" + checksum: 8445468151205fdfc51993ebc329e3a210a1628069971f973fb87c4d32cbb33676b32a370021efe0863ef63414632c8edf025ba7d296e932a93f3b05ba6193fc languageName: node linkType: hard -"@docusaurus/mdx-loader@npm:2.4.3": - version: 2.4.3 - resolution: "@docusaurus/mdx-loader@npm:2.4.3" - dependencies: - "@babel/parser": ^7.18.8 - "@babel/traverse": ^7.18.8 - "@docusaurus/logger": 2.4.3 - "@docusaurus/utils": 2.4.3 - "@mdx-js/mdx": ^1.6.22 - escape-html: ^1.0.3 - file-loader: ^6.2.0 - fs-extra: ^10.1.0 - image-size: ^1.0.1 - mdast-util-to-string: ^2.0.0 - remark-emoji: ^2.2.0 - stringify-object: ^3.3.0 - tslib: ^2.4.0 - unified: ^9.2.2 - unist-util-visit: ^2.0.3 - url-loader: ^4.1.1 - webpack: ^5.73.0 - peerDependencies: - react: ^16.8.4 || ^17.0.0 - react-dom: ^16.8.4 || ^17.0.0 - checksum: 5a774f7ea5f484e888b2bd1bf8b182279e3788afec779eb8920cf468b92ab8d83a1ae8be51925074241a4d1a38d989cfb366d2baf0f67ed6f063342395a7ca8e +"@cspell/dict-bash@npm:^4.1.3": + version: 4.1.3 + resolution: "@cspell/dict-bash@npm:4.1.3" + checksum: 4ba66c76c144d4c7ea1dd0fb92dfb0d7fd1e43a106a73fc7e9010b4a5c276aa4ef791c7161f56bf911356e3667ba043ee63271c1ffc485d9f8712553770e3ea9 languageName: node linkType: hard -"@docusaurus/module-type-aliases@npm:2.4.3, @docusaurus/module-type-aliases@npm:^2.4.0": - version: 2.4.3 - resolution: "@docusaurus/module-type-aliases@npm:2.4.3" - dependencies: - "@docusaurus/react-loadable": 5.5.2 - "@docusaurus/types": 2.4.3 - "@types/history": ^4.7.11 - "@types/react": "*" - "@types/react-router-config": "*" - "@types/react-router-dom": "*" - react-helmet-async: "*" - react-loadable: "npm:@docusaurus/react-loadable@5.5.2" - peerDependencies: - react: "*" - react-dom: "*" - checksum: 22ce1a6a20acc35cdd2ec57e55f29e65dbe0fb3a46aaa8c033ec78bf04cd3087f0523c816c744ed311095512dd686c83e0a8619cc1a2a937c27cd54527739c38 +"@cspell/dict-companies@npm:^3.0.29": + version: 3.0.29 + resolution: "@cspell/dict-companies@npm:3.0.29" + checksum: ba2ee3fc7942ff932316038dc2a59b0edcf48321d9f31b75e1858637bd51be9b00bdafc2f3b70fa9d7e78cfbd016e08ef782cdf50bfc76b51e1bf22b1340e286 languageName: node linkType: hard -"@docusaurus/plugin-content-blog@npm:2.4.3": - version: 2.4.3 - resolution: "@docusaurus/plugin-content-blog@npm:2.4.3" - dependencies: - "@docusaurus/core": 2.4.3 - "@docusaurus/logger": 2.4.3 - "@docusaurus/mdx-loader": 2.4.3 - "@docusaurus/types": 2.4.3 - "@docusaurus/utils": 2.4.3 - "@docusaurus/utils-common": 2.4.3 - "@docusaurus/utils-validation": 2.4.3 - cheerio: ^1.0.0-rc.12 - feed: ^4.2.2 - fs-extra: ^10.1.0 - lodash: ^4.17.21 - reading-time: ^1.5.0 - tslib: ^2.4.0 - unist-util-visit: ^2.0.3 - utility-types: ^3.10.0 - webpack: ^5.73.0 - peerDependencies: - react: ^16.8.4 || ^17.0.0 - react-dom: ^16.8.4 || ^17.0.0 - checksum: 9fd41331c609b9488eea363e617e3763a814c75f83eb1b858cef402a0f5b96f67a342e25ff8c333489e550eb4d379eae09a88b986a97c25170fe203662e2f1ae +"@cspell/dict-cpp@npm:^5.0.10": + version: 5.1.1 + resolution: "@cspell/dict-cpp@npm:5.1.1" + checksum: 
2991434d97882c884ebef48d700ed80f1d0136b5dda9f97f49763874ba061103336fa97c07581ca73f23176030702778a93b43af49e73899f027c2b3263cdb6e languageName: node linkType: hard -"@docusaurus/plugin-content-docs@npm:2.4.3, @docusaurus/plugin-content-docs@npm:^2.0.0-rc.1": - version: 2.4.3 - resolution: "@docusaurus/plugin-content-docs@npm:2.4.3" - dependencies: - "@docusaurus/core": 2.4.3 - "@docusaurus/logger": 2.4.3 - "@docusaurus/mdx-loader": 2.4.3 - "@docusaurus/module-type-aliases": 2.4.3 - "@docusaurus/types": 2.4.3 - "@docusaurus/utils": 2.4.3 - "@docusaurus/utils-validation": 2.4.3 - "@types/react-router-config": ^5.0.6 - combine-promises: ^1.1.0 - fs-extra: ^10.1.0 - import-fresh: ^3.3.0 - js-yaml: ^4.1.0 - lodash: ^4.17.21 - tslib: ^2.4.0 - utility-types: ^3.10.0 - webpack: ^5.73.0 - peerDependencies: - react: ^16.8.4 || ^17.0.0 - react-dom: ^16.8.4 || ^17.0.0 - checksum: bc01201f64721131eb84f264e51c7497b8034d2a3d99d762169f5dc456c3d8882acfa01fdbaa8fdc6e2e220479b36e0c9e8e17397bf887884589535bdeaeb4bb +"@cspell/dict-cryptocurrencies@npm:^5.0.0": + version: 5.0.0 + resolution: "@cspell/dict-cryptocurrencies@npm:5.0.0" + checksum: 116e7f117b59ea4c9fa7ae1c3b47fc963e050448d43e059fb93731a256881ee262420edd5b9701ffe88af3d5e95b0337fc99b4dde1b0283ee0aaed45b23e281e languageName: node linkType: hard -"@docusaurus/plugin-content-pages@npm:2.4.3": - version: 2.4.3 - resolution: "@docusaurus/plugin-content-pages@npm:2.4.3" - dependencies: - "@docusaurus/core": 2.4.3 - "@docusaurus/mdx-loader": 2.4.3 - "@docusaurus/types": 2.4.3 - "@docusaurus/utils": 2.4.3 - "@docusaurus/utils-validation": 2.4.3 - fs-extra: ^10.1.0 - tslib: ^2.4.0 - webpack: ^5.73.0 - peerDependencies: - react: ^16.8.4 || ^17.0.0 - react-dom: ^16.8.4 || ^17.0.0 - checksum: 00439c2e1a1f345cd549739db13a3610b6d9f7ffa6cf7507ad6ac1f3c8d24041947acc2a446be7edf1a613cf354a50d1133aa28ddf64a0eff6ed8a31bf1a542f +"@cspell/dict-csharp@npm:^4.0.2": + version: 4.0.2 + resolution: "@cspell/dict-csharp@npm:4.0.2" + checksum: d2ecb2aada51c5f0d6d557fd4f0c6eddb5b299e0955e066c49cd2afe96a1c6fe0afde699fdb885dd3183603a1efbd1d793b6a490b8d039256445b4b154b7375b languageName: node linkType: hard -"@docusaurus/plugin-debug@npm:2.4.3": - version: 2.4.3 - resolution: "@docusaurus/plugin-debug@npm:2.4.3" - dependencies: - "@docusaurus/core": 2.4.3 - "@docusaurus/types": 2.4.3 - "@docusaurus/utils": 2.4.3 - fs-extra: ^10.1.0 - react-json-view: ^1.21.3 - tslib: ^2.4.0 - peerDependencies: - react: ^16.8.4 || ^17.0.0 - react-dom: ^16.8.4 || ^17.0.0 - checksum: 88955828b72e463e04501cc6bedf802208e377ae0f4d72735625bcbb47918afc4f2588355c6914064cfdbe4945d3da6473ce76319aa1f66dd975b3b43c4c39b0 +"@cspell/dict-css@npm:^4.0.12": + version: 4.0.12 + resolution: "@cspell/dict-css@npm:4.0.12" + checksum: 208c9434b8f5c8a33a96bb087572c10d5c946cd0847b9439271d0c4d2dcde5ee2588aca73bfea0c868d0124731b3ca890fab4762724d16435f161d4d5e7f3b9b languageName: node linkType: hard -"@docusaurus/plugin-google-analytics@npm:2.4.3": - version: 2.4.3 - resolution: "@docusaurus/plugin-google-analytics@npm:2.4.3" - dependencies: - "@docusaurus/core": 2.4.3 - "@docusaurus/types": 2.4.3 - "@docusaurus/utils-validation": 2.4.3 - tslib: ^2.4.0 - peerDependencies: - react: ^16.8.4 || ^17.0.0 - react-dom: ^16.8.4 || ^17.0.0 - checksum: 6e30de6b5c479493614a5552a295f07ffb9c83f3740a68c7d4dbac378b8288da7430f26cdc246d763855c6084ad86a6f87286e6c8b40f4817794bb1a04e109ea +"@cspell/dict-dart@npm:^2.0.3": + version: 2.0.3 + resolution: "@cspell/dict-dart@npm:2.0.3" + checksum: 
66bfcfa029baacd0b14b3ff5b6ab7597cf9459f77185d88b25123b42a4babb66df6786806843f1b6506c335326100599a2e1db6e6104e66bd021ede9ccb3cec4 languageName: node linkType: hard -"@docusaurus/plugin-google-gtag@npm:2.4.3, @docusaurus/plugin-google-gtag@npm:^2.4.0": - version: 2.4.3 - resolution: "@docusaurus/plugin-google-gtag@npm:2.4.3" - dependencies: - "@docusaurus/core": 2.4.3 - "@docusaurus/types": 2.4.3 - "@docusaurus/utils-validation": 2.4.3 - tslib: ^2.4.0 - peerDependencies: - react: ^16.8.4 || ^17.0.0 - react-dom: ^16.8.4 || ^17.0.0 - checksum: 4aaac4d262b3bb7fc3f16620c5329b90db92bf28361ced54f2945fc0e4669483e2f36b076332e0ee9d11b6233cd2c81ca35c953119bad42171e62571c1692d6a +"@cspell/dict-data-science@npm:^1.0.11": + version: 1.0.11 + resolution: "@cspell/dict-data-science@npm:1.0.11" + checksum: 513f8f416f584f46576d45be23a4aa354e46d244f10a3d466222ffc13afe475e676639e4a24ab3a1ba157239f9ce23f7eef59c9f4c7a877a044db3a6344b18c6 languageName: node linkType: hard -"@docusaurus/plugin-google-tag-manager@npm:2.4.3": - version: 2.4.3 - resolution: "@docusaurus/plugin-google-tag-manager@npm:2.4.3" - dependencies: - "@docusaurus/core": 2.4.3 - "@docusaurus/types": 2.4.3 - "@docusaurus/utils-validation": 2.4.3 - tslib: ^2.4.0 - peerDependencies: - react: ^16.8.4 || ^17.0.0 - react-dom: ^16.8.4 || ^17.0.0 - checksum: c3af89b4d41fab463d853cbfbe8f43d384f702dd09fd914fffcca01fdf94c282d1b98d762c9142fe21f6471f5dd643679e8d11344c95fdf6657aff0618c3c7a5 +"@cspell/dict-django@npm:^4.1.0": + version: 4.1.0 + resolution: "@cspell/dict-django@npm:4.1.0" + checksum: b8a66135525e235bd6f2ea02de84ac7eae78e1068418f36b0c2260f9516b72492ef73f3fdc5fe8db2a6933747ff45a3eb743423a7dbf5b74548b1b1f30792679 languageName: node linkType: hard -"@docusaurus/plugin-sitemap@npm:2.4.3": - version: 2.4.3 - resolution: "@docusaurus/plugin-sitemap@npm:2.4.3" - dependencies: - "@docusaurus/core": 2.4.3 - "@docusaurus/logger": 2.4.3 - "@docusaurus/types": 2.4.3 - "@docusaurus/utils": 2.4.3 - "@docusaurus/utils-common": 2.4.3 - "@docusaurus/utils-validation": 2.4.3 - fs-extra: ^10.1.0 - sitemap: ^7.1.1 - tslib: ^2.4.0 - peerDependencies: - react: ^16.8.4 || ^17.0.0 - react-dom: ^16.8.4 || ^17.0.0 - checksum: cf96b9f0e32cefa58e37a4bc2f0a112ea657f06faf47b780ec2ba39d5e2daca6486a73f3b376c56ad3bb42f3f0c3f70a783f1ce1964b74e2ba273e6f439e439b +"@cspell/dict-docker@npm:^1.1.7": + version: 1.1.7 + resolution: "@cspell/dict-docker@npm:1.1.7" + checksum: 307f8b5132edca7cd291ba0ab6ed88f8787df984d6a42401b12a0da1ecb935d50af3a108ede885ce5bede96c445acdc88bb9ea8396de151c565a90a3bf66853e languageName: node linkType: hard -"@docusaurus/preset-classic@npm:^2.4.0": - version: 2.4.3 - resolution: "@docusaurus/preset-classic@npm:2.4.3" - dependencies: - "@docusaurus/core": 2.4.3 - "@docusaurus/plugin-content-blog": 2.4.3 - "@docusaurus/plugin-content-docs": 2.4.3 - "@docusaurus/plugin-content-pages": 2.4.3 - "@docusaurus/plugin-debug": 2.4.3 - "@docusaurus/plugin-google-analytics": 2.4.3 - "@docusaurus/plugin-google-gtag": 2.4.3 - "@docusaurus/plugin-google-tag-manager": 2.4.3 - "@docusaurus/plugin-sitemap": 2.4.3 - "@docusaurus/theme-classic": 2.4.3 - "@docusaurus/theme-common": 2.4.3 - "@docusaurus/theme-search-algolia": 2.4.3 - "@docusaurus/types": 2.4.3 - peerDependencies: - react: ^16.8.4 || ^17.0.0 - react-dom: ^16.8.4 || ^17.0.0 - checksum: a321badc44696adf4ab2d4a5d6c93f595e8c17988aec9609d325928a1d60f5e0205b23fe849b28ddaed24f7935829e86c402f6b761d6e65db4224270b9dd443c +"@cspell/dict-dotnet@npm:^5.0.0": + version: 5.0.0 + resolution: "@cspell/dict-dotnet@npm:5.0.0" + 
checksum: 3e55abd3cc0ecb0924caa245b83595c8e90b42a8fb438f3294d06ad32d30f3235dc8943a2865f06eaec5285a8d6a7df1db71fb228753d56a678a0f0cff87c24c languageName: node linkType: hard -"@docusaurus/react-loadable@npm:5.5.2, react-loadable@npm:@docusaurus/react-loadable@5.5.2": - version: 5.5.2 - resolution: "@docusaurus/react-loadable@npm:5.5.2" - dependencies: - "@types/react": "*" - prop-types: ^15.6.2 - peerDependencies: - react: "*" - checksum: 930fb9e2936412a12461f210acdc154a433283921ca43ac3fc3b84cb6c12eb738b3a3719373022bf68004efeb1a928dbe36c467d7a1f86454ed6241576d936e7 +"@cspell/dict-elixir@npm:^4.0.3": + version: 4.0.3 + resolution: "@cspell/dict-elixir@npm:4.0.3" + checksum: f084449b2de5a2fa08076ac699c6073beaa4bb43796a662d681ea8fe5cba31f9efe718f3f98ef432ba75d4ea574316de34ab8422f79f4f2022cfddee7a7b8653 languageName: node linkType: hard -"@docusaurus/theme-classic@npm:2.4.3": - version: 2.4.3 - resolution: "@docusaurus/theme-classic@npm:2.4.3" - dependencies: - "@docusaurus/core": 2.4.3 - "@docusaurus/mdx-loader": 2.4.3 - "@docusaurus/module-type-aliases": 2.4.3 - "@docusaurus/plugin-content-blog": 2.4.3 - "@docusaurus/plugin-content-docs": 2.4.3 - "@docusaurus/plugin-content-pages": 2.4.3 - "@docusaurus/theme-common": 2.4.3 - "@docusaurus/theme-translations": 2.4.3 - "@docusaurus/types": 2.4.3 - "@docusaurus/utils": 2.4.3 - "@docusaurus/utils-common": 2.4.3 - "@docusaurus/utils-validation": 2.4.3 - "@mdx-js/react": ^1.6.22 - clsx: ^1.2.1 - copy-text-to-clipboard: ^3.0.1 - infima: 0.2.0-alpha.43 - lodash: ^4.17.21 - nprogress: ^0.2.0 - postcss: ^8.4.14 - prism-react-renderer: ^1.3.5 - prismjs: ^1.28.0 - react-router-dom: ^5.3.3 - rtlcss: ^3.5.0 - tslib: ^2.4.0 - utility-types: ^3.10.0 - peerDependencies: - react: ^16.8.4 || ^17.0.0 - react-dom: ^16.8.4 || ^17.0.0 - checksum: 215b7fa416f40ce68773265a168af47fa770583ebe33ec7b34c7e082dfe7c79252b589a6b26532cb0ab7dd089611a9cd0e20c94df097be320a227b98e3b3fbb8 +"@cspell/dict-en-common-misspellings@npm:^2.0.0": + version: 2.0.0 + resolution: "@cspell/dict-en-common-misspellings@npm:2.0.0" + checksum: 977aac18d737d88e4cdca0771b664078a2d8cde1a0313696882581e67cb1acbf1f6a3923c1ee3f05878cfe3ad6f063a2e451f33e7df61cd1e0eb5de425bb4f2d languageName: node linkType: hard -"@docusaurus/theme-common@npm:2.4.3": - version: 2.4.3 - resolution: "@docusaurus/theme-common@npm:2.4.3" - dependencies: - "@docusaurus/mdx-loader": 2.4.3 - "@docusaurus/module-type-aliases": 2.4.3 - "@docusaurus/plugin-content-blog": 2.4.3 - "@docusaurus/plugin-content-docs": 2.4.3 - "@docusaurus/plugin-content-pages": 2.4.3 - "@docusaurus/utils": 2.4.3 - "@docusaurus/utils-common": 2.4.3 - "@types/history": ^4.7.11 - "@types/react": "*" - "@types/react-router-config": "*" - clsx: ^1.2.1 - parse-numeric-range: ^1.3.0 - prism-react-renderer: ^1.3.5 - tslib: ^2.4.0 - use-sync-external-store: ^1.2.0 - utility-types: ^3.10.0 - peerDependencies: - react: ^16.8.4 || ^17.0.0 - react-dom: ^16.8.4 || ^17.0.0 - checksum: 76817f548705542124d708c804e724674ec9bf996a5cb2a5c9a2919416367567cca4a3fa6055589990c339f6e1fb9d3944e25ed30b79fabe191db00d6ef986ca +"@cspell/dict-en-gb@npm:1.1.33": + version: 1.1.33 + resolution: "@cspell/dict-en-gb@npm:1.1.33" + checksum: 09a9e7a3ee4cad75c87cc7adf6b5981b3ec52d4e3707e8de2e1a2a55cd5c8539057a7742d9c7035e23eb0aeff80a95b9599696c7192c9b3b9d8f14440fe01938 languageName: node linkType: hard -"@docusaurus/theme-search-algolia@npm:2.4.3": - version: 2.4.3 - resolution: "@docusaurus/theme-search-algolia@npm:2.4.3" - dependencies: - "@docsearch/react": ^3.1.1 - "@docusaurus/core": 
2.4.3 - "@docusaurus/logger": 2.4.3 - "@docusaurus/plugin-content-docs": 2.4.3 - "@docusaurus/theme-common": 2.4.3 - "@docusaurus/theme-translations": 2.4.3 - "@docusaurus/utils": 2.4.3 - "@docusaurus/utils-validation": 2.4.3 - algoliasearch: ^4.13.1 - algoliasearch-helper: ^3.10.0 - clsx: ^1.2.1 - eta: ^2.0.0 - fs-extra: ^10.1.0 - lodash: ^4.17.21 - tslib: ^2.4.0 - utility-types: ^3.10.0 - peerDependencies: - react: ^16.8.4 || ^17.0.0 - react-dom: ^16.8.4 || ^17.0.0 - checksum: 665d244c25bff21dd45c983c9b85f9827d2dd58945b802d645370b5e7092820532faf488c0bc0ce88e8fc0088c7f56eb9abb96589cf3857372c1b61bba6cbed7 +"@cspell/dict-en_us@npm:^4.3.13": + version: 4.3.13 + resolution: "@cspell/dict-en_us@npm:4.3.13" + checksum: 4fc0d8458f3760a436956870742207214c73ae97da294117cae4fa8cf174b40dd6d237f8cb5d1bd4d8f82e3bfbd98cf885a9d79da1823f354d92a2724090a4f2 languageName: node linkType: hard -"@docusaurus/theme-translations@npm:2.4.3, @docusaurus/theme-translations@npm:^2.0.0-rc.1": - version: 2.4.3 - resolution: "@docusaurus/theme-translations@npm:2.4.3" - dependencies: - fs-extra: ^10.1.0 - tslib: ^2.4.0 - checksum: 8424583a130b0d32b6adf578dc5daeefaad199019c8a6a23fbd67577209be64923cde59d423ea9d41d6e7cfc2318e7fa6a17a665e8ae1c871ce0880525f9b8fd +"@cspell/dict-filetypes@npm:^3.0.3": + version: 3.0.3 + resolution: "@cspell/dict-filetypes@npm:3.0.3" + checksum: 22c38a0b2e98d6223b364ddb5948d14bf6427c8286d4ddb111d5da9bdd4c47ddc0c9199a575c314142da9aefcaa5777a4ea33ac07f239cb4b9b303e4bd888aa1 languageName: node linkType: hard -"@docusaurus/types@npm:2.4.3, @docusaurus/types@npm:^2.4.1": - version: 2.4.3 - resolution: "@docusaurus/types@npm:2.4.3" - dependencies: - "@types/history": ^4.7.11 - "@types/react": "*" - commander: ^5.1.0 - joi: ^17.6.0 - react-helmet-async: ^1.3.0 - utility-types: ^3.10.0 - webpack: ^5.73.0 - webpack-merge: ^5.8.0 - peerDependencies: - react: ^16.8.4 || ^17.0.0 - react-dom: ^16.8.4 || ^17.0.0 - checksum: c123c45630e885b588f808baa06a97f8408a3381906f65cb92ae75732aedfca6ab2cada94f969c08e043b885b95298616440326259b789010e0986cbcd7a960b +"@cspell/dict-fonts@npm:^4.0.0": + version: 4.0.0 + resolution: "@cspell/dict-fonts@npm:4.0.0" + checksum: 7e33e4b39fb071165d81920dd0ccc07e4b737a7b09522acf3781b26136526e445e03e456caaecff261d76b711196b84cff7c21293853bf00ebe93f2e64c42520 languageName: node linkType: hard -"@docusaurus/utils-common@npm:2.4.3, @docusaurus/utils-common@npm:^2.0.0-rc.1": - version: 2.4.3 - resolution: "@docusaurus/utils-common@npm:2.4.3" - dependencies: - tslib: ^2.4.0 - peerDependencies: - "@docusaurus/types": "*" - peerDependenciesMeta: - "@docusaurus/types": - optional: true - checksum: 1ae315d8d8ce7a0163a698ffdca55b734d21f336512138c128bc0fa2a8d224edbaad0c8dbd7a3de2e8ef734dc2656c505d09066dee4fc84819d153593abb8984 +"@cspell/dict-fsharp@npm:^1.0.1": + version: 1.0.1 + resolution: "@cspell/dict-fsharp@npm:1.0.1" + checksum: ce0df20704bf95d1fe434d2889cc764279cbce2b057fc5247be1ccaf7a8cc57372de3da2cdab6643b3df5221119716929b2e2aaad3f60533dcf0bd3c7d892fab languageName: node linkType: hard -"@docusaurus/utils-validation@npm:2.4.3, @docusaurus/utils-validation@npm:^2.0.0-rc.1": - version: 2.4.3 - resolution: "@docusaurus/utils-validation@npm:2.4.3" - dependencies: - "@docusaurus/logger": 2.4.3 - "@docusaurus/utils": 2.4.3 - joi: ^17.6.0 - js-yaml: ^4.1.0 - tslib: ^2.4.0 - checksum: d3472b3f7a0a029c2cef1f00bc9db403d5f7e74e2091eccbc45d06f5776a84fd73bd1a18cf3a8a3cc0348ce49f753a1300deac670c2a82c56070cc40ca9df06e +"@cspell/dict-fullstack@npm:^3.1.5": + version: 3.1.5 + resolution: 
"@cspell/dict-fullstack@npm:3.1.5" + checksum: 01c98a3408d4bf4832f1f110252399e663ce869bb097d681558828bb0e22725c7fe7b43077aa57afc2c3158515eaa744074826c020825af5856a0950219785a6 languageName: node linkType: hard -"@docusaurus/utils@npm:2.4.3, @docusaurus/utils@npm:^2.0.0-rc.1": - version: 2.4.3 - resolution: "@docusaurus/utils@npm:2.4.3" - dependencies: - "@docusaurus/logger": 2.4.3 - "@svgr/webpack": ^6.2.1 - escape-string-regexp: ^4.0.0 - file-loader: ^6.2.0 - fs-extra: ^10.1.0 - github-slugger: ^1.4.0 - globby: ^11.1.0 - gray-matter: ^4.0.3 - js-yaml: ^4.1.0 - lodash: ^4.17.21 - micromatch: ^4.0.5 - resolve-pathname: ^3.0.0 - shelljs: ^0.8.5 - tslib: ^2.4.0 - url-loader: ^4.1.1 - webpack: ^5.73.0 - peerDependencies: - "@docusaurus/types": "*" - peerDependenciesMeta: - "@docusaurus/types": - optional: true - checksum: dd1aa7688d1a4b2775e13a91d528608ceab33c57a921404d9a989867c31c8ef17fe3892e4f5680dfb4a783da7b9973e2077e907ff4ac172927433e606e8fa9b9 +"@cspell/dict-gaming-terms@npm:^1.0.4": + version: 1.0.4 + resolution: "@cspell/dict-gaming-terms@npm:1.0.4" + checksum: 3e57f5567747a8598b3e4de4f63a3b8090cccf7688f3e91f0a9e005e916645db1224ea600afd5b497b7e8c6a1f9291dfd4cb932278dfd423657107203a2ace0b languageName: node linkType: hard -"@easyops-cn/autocomplete.js@npm:^0.38.1": - version: 0.38.1 - resolution: "@easyops-cn/autocomplete.js@npm:0.38.1" - dependencies: - cssesc: ^3.0.0 - immediate: ^3.2.3 - checksum: d88b61f12c383856b8d5cedf176a6d07a21e013dc2c78be029af81e2e026ece2bb988c6ea7f9951a2759c2e6f806ea1d1c9627bf36d9cbe376e897a94ce5da09 +"@cspell/dict-git@npm:^3.0.0": + version: 3.0.0 + resolution: "@cspell/dict-git@npm:3.0.0" + checksum: 97b6da58c93108bae0867515f790d84728f0bce580cc8ad6f0f5f63b2c81eaf6d084d543d99b693ff4d7fbea2413ff068c3e4811fc107820d243da2c06d381fa languageName: node linkType: hard -"@easyops-cn/docusaurus-search-local@npm:^0.35.0": - version: 0.35.0 - resolution: "@easyops-cn/docusaurus-search-local@npm:0.35.0" - dependencies: - "@docusaurus/plugin-content-docs": ^2.0.0-rc.1 - "@docusaurus/theme-translations": ^2.0.0-rc.1 - "@docusaurus/utils": ^2.0.0-rc.1 - "@docusaurus/utils-common": ^2.0.0-rc.1 - "@docusaurus/utils-validation": ^2.0.0-rc.1 - "@easyops-cn/autocomplete.js": ^0.38.1 - "@node-rs/jieba": ^1.6.0 - cheerio: ^1.0.0-rc.3 - clsx: ^1.1.1 - debug: ^4.2.0 - fs-extra: ^10.0.0 - klaw-sync: ^6.0.0 - lunr: ^2.3.9 - lunr-languages: ^1.4.0 - mark.js: ^8.11.1 - tslib: ^2.4.0 - peerDependencies: - "@docusaurus/theme-common": ^2.0.0-rc.1 - react: ^16.14.0 || ^17.0.0 || ^18.0.0 - react-dom: ^16.14.0 || ^17.0.0 || ^18.0.0 - checksum: 726b7b5d52f3fd01286e5a97bb2f5d27ae0ace2be7a7742c92b0bf11d56f2f44a16f6b7af556f5676ffa2a3b94c244d17ea9804894553f94ba66b98249e1e10f +"@cspell/dict-golang@npm:^6.0.5": + version: 6.0.5 + resolution: "@cspell/dict-golang@npm:6.0.5" + checksum: 20bf2c6a23d26f23e39629f3a48c31c2993d126b03ca31892e4e03ed48d2f5d5d929675987df54b6dad95828f6baa080111167e81a2dc3836c1f5b0b6db04a56 languageName: node linkType: hard -"@esbuild/android-arm64@npm:0.17.19": - version: 0.17.19 - resolution: "@esbuild/android-arm64@npm:0.17.19" - conditions: os=android & cpu=arm64 +"@cspell/dict-haskell@npm:^4.0.1": + version: 4.0.1 + resolution: "@cspell/dict-haskell@npm:4.0.1" + checksum: cfb51e415b60c5eb266a5782d0a4b19a37f1389b9b018d1bbb2ff4358bd739af1f76f68f26a138d4b4bd0ab67146d6eb9032fc3d3c212695237c134e05339c79 languageName: node linkType: hard -"@esbuild/android-arm@npm:0.17.19": - version: 0.17.19 - resolution: "@esbuild/android-arm@npm:0.17.19" - conditions: os=android & cpu=arm 
+"@cspell/dict-html-symbol-entities@npm:^4.0.0": + version: 4.0.0 + resolution: "@cspell/dict-html-symbol-entities@npm:4.0.0" + checksum: 79f05f9080f39dbde703980eb587ed6624b8fc2f5cedc297327bc1b9b7e6022a7c382e6013149b1afe00609b96003ab5c8d18d378979f76f336ab626317183f4 languageName: node linkType: hard -"@esbuild/android-x64@npm:0.17.19": - version: 0.17.19 - resolution: "@esbuild/android-x64@npm:0.17.19" - conditions: os=android & cpu=x64 +"@cspell/dict-html@npm:^4.0.5": + version: 4.0.5 + resolution: "@cspell/dict-html@npm:4.0.5" + checksum: 2273e77cad6f373c4b0a43c5fb707725ff7c845e6de3545c8b05cbb2d82e1205f004a817498f561ced3d8d8b1d15848a82f2a667c0b64a1ee46cbde67d8ac136 languageName: node linkType: hard -"@esbuild/darwin-arm64@npm:0.17.19": - version: 0.17.19 - resolution: "@esbuild/darwin-arm64@npm:0.17.19" - conditions: os=darwin & cpu=arm64 +"@cspell/dict-java@npm:^5.0.6": + version: 5.0.6 + resolution: "@cspell/dict-java@npm:5.0.6" + checksum: 7d5df7831e8a4c0295ebbcfdf9a3185b197f1aa60ec85f7edf4aaec4cc1c53f37a291f3f95543642951d21ff487285f5cfb6e853b7f96f514a35052d59252909 languageName: node linkType: hard -"@esbuild/darwin-x64@npm:0.17.19": - version: 0.17.19 - resolution: "@esbuild/darwin-x64@npm:0.17.19" - conditions: os=darwin & cpu=x64 +"@cspell/dict-k8s@npm:^1.0.2": + version: 1.0.2 + resolution: "@cspell/dict-k8s@npm:1.0.2" + checksum: 4afd7806033b2bf71b17f4cf3fbc33449492bfb2a33a8301cc97b2e55583c07a4a07c288f50f445261c1de4b95494e495b8b982ca428d285393f7eb917bb5a61 languageName: node linkType: hard -"@esbuild/freebsd-arm64@npm:0.17.19": - version: 0.17.19 - resolution: "@esbuild/freebsd-arm64@npm:0.17.19" - conditions: os=freebsd & cpu=arm64 +"@cspell/dict-latex@npm:^4.0.0": + version: 4.0.0 + resolution: "@cspell/dict-latex@npm:4.0.0" + checksum: 33a3f158d8c0151cbb4e6bd79ba1189d167b3916e1ce37d7b5754d18dffefe061320fa54c3cb482bd5c7cf37392d0112530b07a3eca63dffbe1069de317dc652 languageName: node linkType: hard -"@esbuild/freebsd-x64@npm:0.17.19": - version: 0.17.19 - resolution: "@esbuild/freebsd-x64@npm:0.17.19" - conditions: os=freebsd & cpu=x64 +"@cspell/dict-lorem-ipsum@npm:^4.0.0": + version: 4.0.0 + resolution: "@cspell/dict-lorem-ipsum@npm:4.0.0" + checksum: d3575fb7b9684480192d2cd647484312c555f3d1215d6b35371b70de3ecde4273010e5916cc2d130ff1e1223a1a49f75825651671a76d3dabdec98acf67a3902 languageName: node linkType: hard -"@esbuild/linux-arm64@npm:0.17.19": - version: 0.17.19 - resolution: "@esbuild/linux-arm64@npm:0.17.19" - conditions: os=linux & cpu=arm64 +"@cspell/dict-lua@npm:^4.0.3": + version: 4.0.3 + resolution: "@cspell/dict-lua@npm:4.0.3" + checksum: eee20135a4f0620302c5feeb50485f59af93c24b64eb2081a9e2096a106ce33ae565d6d92607582b44c9f17c8a091d82fbcb443ebae9b77f8512b0d66a703c3b languageName: node linkType: hard -"@esbuild/linux-arm@npm:0.17.19": - version: 0.17.19 - resolution: "@esbuild/linux-arm@npm:0.17.19" - conditions: os=linux & cpu=arm +"@cspell/dict-makefile@npm:^1.0.0": + version: 1.0.0 + resolution: "@cspell/dict-makefile@npm:1.0.0" + checksum: f0cac4caf31e27accd5df5e0c2f53097cccbbd085126c4b4ecc08be2a32bd7f89fe6b052e9eae4ec99843175cafa94561868271fb53c5389f27cc078565b5123 languageName: node linkType: hard -"@esbuild/linux-ia32@npm:0.17.19": - version: 0.17.19 - resolution: "@esbuild/linux-ia32@npm:0.17.19" - conditions: os=linux & cpu=ia32 +"@cspell/dict-node@npm:^4.0.3": + version: 4.0.3 + resolution: "@cspell/dict-node@npm:4.0.3" + checksum: 
178e7f3ab45f30722cae7354803dd98ea6577c025a11eda9362fa795a06dd8e934f833bfc7d46816617974822ace11217505a1bd0ea2955aaee92cf94cc6b127 languageName: node linkType: hard -"@esbuild/linux-loong64@npm:0.17.19": - version: 0.17.19 - resolution: "@esbuild/linux-loong64@npm:0.17.19" - conditions: os=linux & cpu=loong64 +"@cspell/dict-npm@npm:^5.0.14": + version: 5.0.14 + resolution: "@cspell/dict-npm@npm:5.0.14" + checksum: 791a23e8b6a726d77a9098f25b582e6a511be699f878526a036f9f0ec08ccf0c2082ec53a5ee056b59d3a22fe5f21e8e3d11e5c839a10e17cb919c8c71f74537 languageName: node linkType: hard -"@esbuild/linux-mips64el@npm:0.17.19": - version: 0.17.19 - resolution: "@esbuild/linux-mips64el@npm:0.17.19" - conditions: os=linux & cpu=mips64el +"@cspell/dict-php@npm:^4.0.5": + version: 4.0.5 + resolution: "@cspell/dict-php@npm:4.0.5" + checksum: 017375f8a08f93872672b9d14c885b3114ea1b82cb8a4375029c7e71dcb303b82bf684fc84df3cbd8d74b9d1e74c80234280605ef8ca26a7972d99bbf15b14f9 languageName: node linkType: hard -"@esbuild/linux-ppc64@npm:0.17.19": - version: 0.17.19 - resolution: "@esbuild/linux-ppc64@npm:0.17.19" - conditions: os=linux & cpu=ppc64 +"@cspell/dict-powershell@npm:^5.0.3": + version: 5.0.3 + resolution: "@cspell/dict-powershell@npm:5.0.3" + checksum: 18eac3be8545b3df110bf867bd6285b11d7e67da037e00c9bc1376c5e322092bc1d925375a09df8b7420a6a35847aa20558610ffb491763eb82949f3af764e1d languageName: node linkType: hard -"@esbuild/linux-riscv64@npm:0.17.19": - version: 0.17.19 - resolution: "@esbuild/linux-riscv64@npm:0.17.19" - conditions: os=linux & cpu=riscv64 +"@cspell/dict-public-licenses@npm:^2.0.5": + version: 2.0.5 + resolution: "@cspell/dict-public-licenses@npm:2.0.5" + checksum: 07e647c24ed1a5f0e88828264581e9f8fde179f776a50c4389ac0bc5c09a2bb94280d299c94d1884a8da01fbf112c5640178789b457f06b1a97414b18cda99e0 languageName: node linkType: hard -"@esbuild/linux-s390x@npm:0.17.19": - version: 0.17.19 - resolution: "@esbuild/linux-s390x@npm:0.17.19" - conditions: os=linux & cpu=s390x +"@cspell/dict-python@npm:^4.1.11": + version: 4.1.11 + resolution: "@cspell/dict-python@npm:4.1.11" + dependencies: + "@cspell/dict-data-science": ^1.0.11 + checksum: cdfc493d3a260887dc31b115ab880849895522c4a0c0d4fe379a2e42ebf41f0724f02f778629e3e972fc6fa93e6d7bc7892e4b26b3b6fbf72399cf5d3b67585b languageName: node linkType: hard -"@esbuild/linux-x64@npm:0.17.19": - version: 0.17.19 - resolution: "@esbuild/linux-x64@npm:0.17.19" - conditions: os=linux & cpu=x64 +"@cspell/dict-r@npm:^2.0.1": + version: 2.0.1 + resolution: "@cspell/dict-r@npm:2.0.1" + checksum: fe85939ad4c8ada34284a673918be711cca60b6d6f1c48ee98602c27905228dfbaea3462a350094633032c1d6b6bba9548df7019e0b21673cf1cf887c57ca228 languageName: node linkType: hard -"@esbuild/netbsd-x64@npm:0.17.19": - version: 0.17.19 - resolution: "@esbuild/netbsd-x64@npm:0.17.19" - conditions: os=netbsd & cpu=x64 +"@cspell/dict-ruby@npm:^5.0.2": + version: 5.0.2 + resolution: "@cspell/dict-ruby@npm:5.0.2" + checksum: c2006bcc808448b1eef146eb4b6b74388113c50334206191a9fe5817fb13669482ecd114f7bbd397562ad2e19a9683266ff396f48c6ce282f6445c2cfa8e82c7 languageName: node linkType: hard -"@esbuild/openbsd-x64@npm:0.17.19": - version: 0.17.19 - resolution: "@esbuild/openbsd-x64@npm:0.17.19" - conditions: os=openbsd & cpu=x64 +"@cspell/dict-rust@npm:^4.0.1": + version: 4.0.1 + resolution: "@cspell/dict-rust@npm:4.0.1" + checksum: 146d3af5d0b1b84ec62059353416cd5d4b53995ed0a0edb47b96ed89f1b8b82881e76c1bac46311318f41d1619eab87d81e0cdc94855f50b79cfa0719333cbb1 languageName: node linkType: hard 
-"@esbuild/sunos-x64@npm:0.17.19": - version: 0.17.19 - resolution: "@esbuild/sunos-x64@npm:0.17.19" - conditions: os=sunos & cpu=x64 +"@cspell/dict-scala@npm:^5.0.0": + version: 5.0.0 + resolution: "@cspell/dict-scala@npm:5.0.0" + checksum: 759dd8746e68e45299b65eeaf1dfd32d1e345fd80fd9a623af502266598c384198853001f70a700c454d8490fb9a5e1358ca1e0d7c0d43154a4f07e2d5531c72 languageName: node linkType: hard -"@esbuild/win32-arm64@npm:0.17.19": - version: 0.17.19 - resolution: "@esbuild/win32-arm64@npm:0.17.19" - conditions: os=win32 & cpu=arm64 +"@cspell/dict-software-terms@npm:^3.3.15": + version: 3.3.16 + resolution: "@cspell/dict-software-terms@npm:3.3.16" + checksum: 85ae16ecb3d4594da37d199113fdf3b77038cfe8d3b9dbaea248c597e484e45a9fcf42d4c234d4548911caace037f01d5f8442a787348dddea5777c8b023d441 languageName: node linkType: hard -"@esbuild/win32-ia32@npm:0.17.19": - version: 0.17.19 - resolution: "@esbuild/win32-ia32@npm:0.17.19" - conditions: os=win32 & cpu=ia32 +"@cspell/dict-sql@npm:^2.1.3": + version: 2.1.3 + resolution: "@cspell/dict-sql@npm:2.1.3" + checksum: a435812cc697d4c453f11efa49962992150702518e49808381ea34548b8a8ed81432a10cca36682007912b013c28e9ce3c6c183341c6cde58c8af0eef25cddc3 languageName: node linkType: hard -"@esbuild/win32-x64@npm:0.17.19": - version: 0.17.19 - resolution: "@esbuild/win32-x64@npm:0.17.19" - conditions: os=win32 & cpu=x64 +"@cspell/dict-svelte@npm:^1.0.2": + version: 1.0.2 + resolution: "@cspell/dict-svelte@npm:1.0.2" + checksum: 5b42989bc6743a26ca5172cc23ebc1449d930695b10c908376048ce1835bf57fef7a0004f02ec5e43219f24a97f154e125041df470441199a045ed0be9e654fc languageName: node linkType: hard -"@eslint-community/eslint-utils@npm:^4.2.0, @eslint-community/eslint-utils@npm:^4.4.0": - version: 4.4.0 - resolution: "@eslint-community/eslint-utils@npm:4.4.0" - dependencies: - eslint-visitor-keys: ^3.3.0 - peerDependencies: - eslint: ^6.0.0 || ^7.0.0 || >=8.0.0 - checksum: cdfe3ae42b4f572cbfb46d20edafe6f36fc5fb52bf2d90875c58aefe226892b9677fef60820e2832caf864a326fe4fc225714c46e8389ccca04d5f9288aabd22 +"@cspell/dict-swift@npm:^2.0.1": + version: 2.0.1 + resolution: "@cspell/dict-swift@npm:2.0.1" + checksum: 0bbb106266205c5f5e12886a73ebf0db2078bab1bdd2e1f304fe28445cd72d847a4c5072bf4fe8f9e8cdb4bc69d52fffec0806aea19ea9b64b7a87c67ee01175 languageName: node linkType: hard -"@eslint-community/regexpp@npm:^4.5.1": - version: 4.9.1 - resolution: "@eslint-community/regexpp@npm:4.9.1" - checksum: 06fb839e9c756f6375cc545c2f2e05a0a64576bd6370e8e3c07983fd29a3d6e164ef4aa48a361f7d27e6713ab79c83053ff6a2ccb78748bc955e344279c4a3b6 +"@cspell/dict-typescript@npm:^3.1.2": + version: 3.1.2 + resolution: "@cspell/dict-typescript@npm:3.1.2" + checksum: 3cd0fa39856002975cf05b5584de42f58700a3420bd08b7b073af87dbcc1f66cab45e36a2156dd4e6c926cf67baaa192f02ccf61b9a9e5e94e69e035af54cec1 languageName: node linkType: hard -"@eslint-community/regexpp@npm:^4.6.1": - version: 4.8.1 - resolution: "@eslint-community/regexpp@npm:4.8.1" - checksum: 82d62c845ef42b810f268cfdc84d803a2da01735fb52e902fd34bdc09f92464a094fd8e4802839874b000b2f73f67c972859e813ba705233515d3e954f234bf2 +"@cspell/dict-vue@npm:^3.0.0": + version: 3.0.0 + resolution: "@cspell/dict-vue@npm:3.0.0" + checksum: 4db58b1d6f9be1a523a35678877f2cca2bb04548b136ec5ec4e7186500978dbc32cc8747ced80ade3cad3acc3c80eb23afe980679165810f8f8f26802e952e2f languageName: node linkType: hard -"@eslint/eslintrc@npm:^2.1.2": - version: 2.1.2 - resolution: "@eslint/eslintrc@npm:2.1.2" +"@cspell/dynamic-import@npm:8.3.2": + version: 8.3.2 + resolution: 
"@cspell/dynamic-import@npm:8.3.2" dependencies: - ajv: ^6.12.4 - debug: ^4.3.2 - espree: ^9.6.0 - globals: ^13.19.0 - ignore: ^5.2.0 - import-fresh: ^3.2.1 - js-yaml: ^4.1.0 - minimatch: ^3.1.2 - strip-json-comments: ^3.1.1 - checksum: bc742a1e3b361f06fedb4afb6bf32cbd27171292ef7924f61c62f2aed73048367bcc7ac68f98c06d4245cd3fabc43270f844e3c1699936d4734b3ac5398814a7 + import-meta-resolve: ^4.0.0 + checksum: 176a5684922e9d3b3b277cdacbac14da509f691bd62da5135fab7c75db57f41c52ef0386c6ba3958e48dbc1b7b6f2751067c63bc1167ee09fae427c86dac71f7 languageName: node linkType: hard -"@eslint/js@npm:8.50.0": - version: 8.50.0 - resolution: "@eslint/js@npm:8.50.0" - checksum: 302478f2acaaa7228729ec6a04f56641590185e1d8cd1c836a6db8a6b8009f80a57349341be9fbb9aa1721a7a569d1be3ffc598a33300d22816f11832095386c +"@cspell/strong-weak-map@npm:8.3.2": + version: 8.3.2 + resolution: "@cspell/strong-weak-map@npm:8.3.2" + checksum: 696389c5ecb985bdded3530a088712ad03354051efc6506fae0328353a476604008a03364f3736de9041f51128b46ff6ffdda628c03c2c4800dc256a1b75e6ba languageName: node linkType: hard -"@esm-bundle/chai@npm:^4.3.4-fix.0": - version: 4.3.4 - resolution: "@esm-bundle/chai@npm:4.3.4" +"@cspotcode/source-map-support@npm:^0.8.0": + version: 0.8.1 + resolution: "@cspotcode/source-map-support@npm:0.8.1" dependencies: - "@types/chai": ^4.2.12 - checksum: 6d1237e9b8309b31ca55d12abe03642ab58550fdac24d0acbfeae6ab14182f72cedf646c6e858fd7ef592b4034ddd23ce5882ff22b8ab9b7952327e9f3f8c3f5 + "@jridgewell/trace-mapping": 0.3.9 + checksum: 5718f267085ed8edb3e7ef210137241775e607ee18b77d95aa5bd7514f47f5019aa2d82d96b3bf342ef7aa890a346fa1044532ff7cc3009e7d24fce3ce6200fa languageName: node linkType: hard -"@ethersproject/abi@npm:5.7.0, @ethersproject/abi@npm:^5.1.2, @ethersproject/abi@npm:^5.7.0": - version: 5.7.0 - resolution: "@ethersproject/abi@npm:5.7.0" - dependencies: - "@ethersproject/address": ^5.7.0 - "@ethersproject/bignumber": ^5.7.0 - "@ethersproject/bytes": ^5.7.0 - "@ethersproject/constants": ^5.7.0 - "@ethersproject/hash": ^5.7.0 - "@ethersproject/keccak256": ^5.7.0 - "@ethersproject/logger": ^5.7.0 - "@ethersproject/properties": ^5.7.0 - "@ethersproject/strings": ^5.7.0 - checksum: bc6962bb6cb854e4d2a4d65b2c49c716477675b131b1363312234bdbb7e19badb7d9ce66f4ca2a70ae2ea84f7123dbc4e300a1bfe5d58864a7eafabc1466627e +"@discoveryjs/json-ext@npm:0.5.7, @discoveryjs/json-ext@npm:^0.5.0": + version: 0.5.7 + resolution: "@discoveryjs/json-ext@npm:0.5.7" + checksum: 2176d301cc258ea5c2324402997cf8134ebb212469c0d397591636cea8d3c02f2b3cf9fd58dcb748c7a0dade77ebdc1b10284fa63e608c033a1db52fddc69918 languageName: node linkType: hard -"@ethersproject/abstract-provider@npm:5.7.0, @ethersproject/abstract-provider@npm:^5.7.0": - version: 5.7.0 - resolution: "@ethersproject/abstract-provider@npm:5.7.0" - dependencies: - "@ethersproject/bignumber": ^5.7.0 - "@ethersproject/bytes": ^5.7.0 - "@ethersproject/logger": ^5.7.0 - "@ethersproject/networks": ^5.7.0 - "@ethersproject/properties": ^5.7.0 - "@ethersproject/transactions": ^5.7.0 - "@ethersproject/web": ^5.7.0 - checksum: 74cf4696245cf03bb7cc5b6cbf7b4b89dd9a79a1c4688126d214153a938126d4972d42c93182198653ce1de35f2a2cad68be40337d4774b3698a39b28f0228a8 +"@docsearch/css@npm:3.5.2": + version: 3.5.2 + resolution: "@docsearch/css@npm:3.5.2" + checksum: d1d60dd230dd48f896755f21bd20b59583ba844212d7d336953ae48d389baaf868bdf83320fb734a4ed679c3f95b15d620cf3764cd538f6941cae239f8c9d35d languageName: node linkType: hard -"@ethersproject/abstract-signer@npm:5.7.0, @ethersproject/abstract-signer@npm:^5.7.0": - 
version: 5.7.0 - resolution: "@ethersproject/abstract-signer@npm:5.7.0" +"@docsearch/react@npm:^3.5.2": + version: 3.5.2 + resolution: "@docsearch/react@npm:3.5.2" dependencies: - "@ethersproject/abstract-provider": ^5.7.0 - "@ethersproject/bignumber": ^5.7.0 - "@ethersproject/bytes": ^5.7.0 - "@ethersproject/logger": ^5.7.0 - "@ethersproject/properties": ^5.7.0 - checksum: a823dac9cfb761e009851050ebebd5b229d1b1cc4a75b125c2da130ff37e8218208f7f9d1386f77407705b889b23d4a230ad67185f8872f083143e0073cbfbe3 - languageName: node - linkType: hard - -"@ethersproject/address@npm:5.7.0, @ethersproject/address@npm:^5.7.0": - version: 5.7.0 - resolution: "@ethersproject/address@npm:5.7.0" + "@algolia/autocomplete-core": 1.9.3 + "@algolia/autocomplete-preset-algolia": 1.9.3 + "@docsearch/css": 3.5.2 + algoliasearch: ^4.19.1 + peerDependencies: + "@types/react": ">= 16.8.0 < 19.0.0" + react: ">= 16.8.0 < 19.0.0" + react-dom: ">= 16.8.0 < 19.0.0" + search-insights: ">= 1 < 3" + peerDependenciesMeta: + "@types/react": + optional: true + react: + optional: true + react-dom: + optional: true + search-insights: + optional: true + checksum: 4b4584c2c73fc18cbd599047538896450974e134c2c74f19eb202db0ce8e6c3c49c6f65ed6ade61c796d476d3cbb55d6be58df62bc9568a0c72d88e42fca1d16 + languageName: node + linkType: hard + +"@docusaurus/core@npm:2.4.3": + version: 2.4.3 + resolution: "@docusaurus/core@npm:2.4.3" + dependencies: + "@babel/core": ^7.18.6 + "@babel/generator": ^7.18.7 + "@babel/plugin-syntax-dynamic-import": ^7.8.3 + "@babel/plugin-transform-runtime": ^7.18.6 + "@babel/preset-env": ^7.18.6 + "@babel/preset-react": ^7.18.6 + "@babel/preset-typescript": ^7.18.6 + "@babel/runtime": ^7.18.6 + "@babel/runtime-corejs3": ^7.18.6 + "@babel/traverse": ^7.18.8 + "@docusaurus/cssnano-preset": 2.4.3 + "@docusaurus/logger": 2.4.3 + "@docusaurus/mdx-loader": 2.4.3 + "@docusaurus/react-loadable": 5.5.2 + "@docusaurus/utils": 2.4.3 + "@docusaurus/utils-common": 2.4.3 + "@docusaurus/utils-validation": 2.4.3 + "@slorber/static-site-generator-webpack-plugin": ^4.0.7 + "@svgr/webpack": ^6.2.1 + autoprefixer: ^10.4.7 + babel-loader: ^8.2.5 + babel-plugin-dynamic-import-node: ^2.3.3 + boxen: ^6.2.1 + chalk: ^4.1.2 + chokidar: ^3.5.3 + clean-css: ^5.3.0 + cli-table3: ^0.6.2 + combine-promises: ^1.1.0 + commander: ^5.1.0 + copy-webpack-plugin: ^11.0.0 + core-js: ^3.23.3 + css-loader: ^6.7.1 + css-minimizer-webpack-plugin: ^4.0.0 + cssnano: ^5.1.12 + del: ^6.1.1 + detect-port: ^1.3.0 + escape-html: ^1.0.3 + eta: ^2.0.0 + file-loader: ^6.2.0 + fs-extra: ^10.1.0 + html-minifier-terser: ^6.1.0 + html-tags: ^3.2.0 + html-webpack-plugin: ^5.5.0 + import-fresh: ^3.3.0 + leven: ^3.1.0 + lodash: ^4.17.21 + mini-css-extract-plugin: ^2.6.1 + postcss: ^8.4.14 + postcss-loader: ^7.0.0 + prompts: ^2.4.2 + react-dev-utils: ^12.0.1 + react-helmet-async: ^1.3.0 + react-loadable: "npm:@docusaurus/react-loadable@5.5.2" + react-loadable-ssr-addon-v5-slorber: ^1.0.1 + react-router: ^5.3.3 + react-router-config: ^5.1.1 + react-router-dom: ^5.3.3 + rtl-detect: ^1.0.4 + semver: ^7.3.7 + serve-handler: ^6.1.3 + shelljs: ^0.8.5 + terser-webpack-plugin: ^5.3.3 + tslib: ^2.4.0 + update-notifier: ^5.1.0 + url-loader: ^4.1.1 + wait-on: ^6.0.1 + webpack: ^5.73.0 + webpack-bundle-analyzer: ^4.5.0 + webpack-dev-server: ^4.9.3 + webpack-merge: ^5.8.0 + webpackbar: ^5.0.2 + peerDependencies: + react: ^16.8.4 || ^17.0.0 + react-dom: ^16.8.4 || ^17.0.0 + bin: + docusaurus: bin/docusaurus.mjs + checksum: 
cce7173ee131364857c16f70f94155ba0e1b044cde54045fb0cf62ad138f8d8ef093f5aba7c7617a9aa0545b3ee3930aec2e09f645daec015696968338963013 + languageName: node + linkType: hard + +"@docusaurus/core@npm:3.0.1, @docusaurus/core@npm:^3.0.1": + version: 3.0.1 + resolution: "@docusaurus/core@npm:3.0.1" + dependencies: + "@babel/core": ^7.23.3 + "@babel/generator": ^7.23.3 + "@babel/plugin-syntax-dynamic-import": ^7.8.3 + "@babel/plugin-transform-runtime": ^7.22.9 + "@babel/preset-env": ^7.22.9 + "@babel/preset-react": ^7.22.5 + "@babel/preset-typescript": ^7.22.5 + "@babel/runtime": ^7.22.6 + "@babel/runtime-corejs3": ^7.22.6 + "@babel/traverse": ^7.22.8 + "@docusaurus/cssnano-preset": 3.0.1 + "@docusaurus/logger": 3.0.1 + "@docusaurus/mdx-loader": 3.0.1 + "@docusaurus/react-loadable": 5.5.2 + "@docusaurus/utils": 3.0.1 + "@docusaurus/utils-common": 3.0.1 + "@docusaurus/utils-validation": 3.0.1 + "@slorber/static-site-generator-webpack-plugin": ^4.0.7 + "@svgr/webpack": ^6.5.1 + autoprefixer: ^10.4.14 + babel-loader: ^9.1.3 + babel-plugin-dynamic-import-node: ^2.3.3 + boxen: ^6.2.1 + chalk: ^4.1.2 + chokidar: ^3.5.3 + clean-css: ^5.3.2 + cli-table3: ^0.6.3 + combine-promises: ^1.1.0 + commander: ^5.1.0 + copy-webpack-plugin: ^11.0.0 + core-js: ^3.31.1 + css-loader: ^6.8.1 + css-minimizer-webpack-plugin: ^4.2.2 + cssnano: ^5.1.15 + del: ^6.1.1 + detect-port: ^1.5.1 + escape-html: ^1.0.3 + eta: ^2.2.0 + file-loader: ^6.2.0 + fs-extra: ^11.1.1 + html-minifier-terser: ^7.2.0 + html-tags: ^3.3.1 + html-webpack-plugin: ^5.5.3 + leven: ^3.1.0 + lodash: ^4.17.21 + mini-css-extract-plugin: ^2.7.6 + postcss: ^8.4.26 + postcss-loader: ^7.3.3 + prompts: ^2.4.2 + react-dev-utils: ^12.0.1 + react-helmet-async: ^1.3.0 + react-loadable: "npm:@docusaurus/react-loadable@5.5.2" + react-loadable-ssr-addon-v5-slorber: ^1.0.1 + react-router: ^5.3.4 + react-router-config: ^5.1.1 + react-router-dom: ^5.3.4 + rtl-detect: ^1.0.4 + semver: ^7.5.4 + serve-handler: ^6.1.5 + shelljs: ^0.8.5 + terser-webpack-plugin: ^5.3.9 + tslib: ^2.6.0 + update-notifier: ^6.0.2 + url-loader: ^4.1.1 + webpack: ^5.88.1 + webpack-bundle-analyzer: ^4.9.0 + webpack-dev-server: ^4.15.1 + webpack-merge: ^5.9.0 + webpackbar: ^5.0.2 + peerDependencies: + react: ^18.0.0 + react-dom: ^18.0.0 + bin: + docusaurus: bin/docusaurus.mjs + checksum: 56767f7e629edce4d23c19403abf4039daeea25db20c695fb7c3a1ce04a90f182f14ea1f70286afb221b8c1593823ebb0d28cbc2ca5d9d38d707a0338d544f64 + languageName: node + linkType: hard + +"@docusaurus/cssnano-preset@npm:2.4.3": + version: 2.4.3 + resolution: "@docusaurus/cssnano-preset@npm:2.4.3" + dependencies: + cssnano-preset-advanced: ^5.3.8 + postcss: ^8.4.14 + postcss-sort-media-queries: ^4.2.1 + tslib: ^2.4.0 + checksum: f4a4c60b075c23541da90e00ae26af2e7eaadf20d783b37b9110a5e34599e4e91947425e33bad58ba71abee81c85cca99f5d7d76575f53fbaf73617b55e39c62 + languageName: node + linkType: hard + +"@docusaurus/cssnano-preset@npm:3.0.1": + version: 3.0.1 + resolution: "@docusaurus/cssnano-preset@npm:3.0.1" + dependencies: + cssnano-preset-advanced: ^5.3.10 + postcss: ^8.4.26 + postcss-sort-media-queries: ^4.4.1 + tslib: ^2.6.0 + checksum: 3a04606d362c84398a5af9a98de4995958e2705086af83388479feaa157cbe3164281006e64036f9337e96b0cec62bd1362fc0f910075e6eeec930f0a519801d + languageName: node + linkType: hard + +"@docusaurus/logger@npm:2.4.3": + version: 2.4.3 + resolution: "@docusaurus/logger@npm:2.4.3" + dependencies: + chalk: ^4.1.2 + tslib: ^2.4.0 + checksum: 
f026a8233aa317f16ce5b25c6785a431f319c52fc07a1b9e26f4b3df2197974e75830a16b6140314f8f4ef02dc19242106ec2ae1599740b26d516cc34c56102f + languageName: node + linkType: hard + +"@docusaurus/logger@npm:3.0.1": + version: 3.0.1 + resolution: "@docusaurus/logger@npm:3.0.1" + dependencies: + chalk: ^4.1.2 + tslib: ^2.6.0 + checksum: 4d4ffcd08f9c76c105d2d2b95974f5c33941e5346c5de1b19ee3f55a4f5011bb7db3875349e25da02750cea5fb357ba00be271ea24368c75b8e29189d04e9f7c + languageName: node + linkType: hard + +"@docusaurus/mdx-loader@npm:2.4.3": + version: 2.4.3 + resolution: "@docusaurus/mdx-loader@npm:2.4.3" + dependencies: + "@babel/parser": ^7.18.8 + "@babel/traverse": ^7.18.8 + "@docusaurus/logger": 2.4.3 + "@docusaurus/utils": 2.4.3 + "@mdx-js/mdx": ^1.6.22 + escape-html: ^1.0.3 + file-loader: ^6.2.0 + fs-extra: ^10.1.0 + image-size: ^1.0.1 + mdast-util-to-string: ^2.0.0 + remark-emoji: ^2.2.0 + stringify-object: ^3.3.0 + tslib: ^2.4.0 + unified: ^9.2.2 + unist-util-visit: ^2.0.3 + url-loader: ^4.1.1 + webpack: ^5.73.0 + peerDependencies: + react: ^16.8.4 || ^17.0.0 + react-dom: ^16.8.4 || ^17.0.0 + checksum: 5a774f7ea5f484e888b2bd1bf8b182279e3788afec779eb8920cf468b92ab8d83a1ae8be51925074241a4d1a38d989cfb366d2baf0f67ed6f063342395a7ca8e + languageName: node + linkType: hard + +"@docusaurus/mdx-loader@npm:3.0.1": + version: 3.0.1 + resolution: "@docusaurus/mdx-loader@npm:3.0.1" + dependencies: + "@babel/parser": ^7.22.7 + "@babel/traverse": ^7.22.8 + "@docusaurus/logger": 3.0.1 + "@docusaurus/utils": 3.0.1 + "@docusaurus/utils-validation": 3.0.1 + "@mdx-js/mdx": ^3.0.0 + "@slorber/remark-comment": ^1.0.0 + escape-html: ^1.0.3 + estree-util-value-to-estree: ^3.0.1 + file-loader: ^6.2.0 + fs-extra: ^11.1.1 + image-size: ^1.0.2 + mdast-util-mdx: ^3.0.0 + mdast-util-to-string: ^4.0.0 + rehype-raw: ^7.0.0 + remark-directive: ^3.0.0 + remark-emoji: ^4.0.0 + remark-frontmatter: ^5.0.0 + remark-gfm: ^4.0.0 + stringify-object: ^3.3.0 + tslib: ^2.6.0 + unified: ^11.0.3 + unist-util-visit: ^5.0.0 + url-loader: ^4.1.1 + vfile: ^6.0.1 + webpack: ^5.88.1 + peerDependencies: + react: ^18.0.0 + react-dom: ^18.0.0 + checksum: 8ba9774cd2cc7216f645d54a6f6f6cba34e39e371f0de09e56f60a27dde95a8e42ab92cf0a6f384dce01960c68a1e720868c56b6aa8929d23bafe9f523941151 + languageName: node + linkType: hard + +"@docusaurus/module-type-aliases@npm:2.4.3": + version: 2.4.3 + resolution: "@docusaurus/module-type-aliases@npm:2.4.3" + dependencies: + "@docusaurus/react-loadable": 5.5.2 + "@docusaurus/types": 2.4.3 + "@types/history": ^4.7.11 + "@types/react": "*" + "@types/react-router-config": "*" + "@types/react-router-dom": "*" + react-helmet-async: "*" + react-loadable: "npm:@docusaurus/react-loadable@5.5.2" + peerDependencies: + react: "*" + react-dom: "*" + checksum: 22ce1a6a20acc35cdd2ec57e55f29e65dbe0fb3a46aaa8c033ec78bf04cd3087f0523c816c744ed311095512dd686c83e0a8619cc1a2a937c27cd54527739c38 + languageName: node + linkType: hard + +"@docusaurus/module-type-aliases@npm:3.0.1, @docusaurus/module-type-aliases@npm:^3.0.1": + version: 3.0.1 + resolution: "@docusaurus/module-type-aliases@npm:3.0.1" + dependencies: + "@docusaurus/react-loadable": 5.5.2 + "@docusaurus/types": 3.0.1 + "@types/history": ^4.7.11 + "@types/react": "*" + "@types/react-router-config": "*" + "@types/react-router-dom": "*" + react-helmet-async: "*" + react-loadable: "npm:@docusaurus/react-loadable@5.5.2" + peerDependencies: + react: "*" + react-dom: "*" + checksum: 
08895f8b100df772bb9c9a8fcf9cd3ee83f0deafeb76fb9b14efd5cdd3313abb4a02032868bd458cb3a5f345942fd9f4c44833ce5042279b2241d462a1bf4cc2 + languageName: node + linkType: hard + +"@docusaurus/plugin-content-blog@npm:3.0.1": + version: 3.0.1 + resolution: "@docusaurus/plugin-content-blog@npm:3.0.1" + dependencies: + "@docusaurus/core": 3.0.1 + "@docusaurus/logger": 3.0.1 + "@docusaurus/mdx-loader": 3.0.1 + "@docusaurus/types": 3.0.1 + "@docusaurus/utils": 3.0.1 + "@docusaurus/utils-common": 3.0.1 + "@docusaurus/utils-validation": 3.0.1 + cheerio: ^1.0.0-rc.12 + feed: ^4.2.2 + fs-extra: ^11.1.1 + lodash: ^4.17.21 + reading-time: ^1.5.0 + srcset: ^4.0.0 + tslib: ^2.6.0 + unist-util-visit: ^5.0.0 + utility-types: ^3.10.0 + webpack: ^5.88.1 + peerDependencies: + react: ^18.0.0 + react-dom: ^18.0.0 + checksum: 20985fac48d2f77d560483d06d8fc21ea8c3a009be8d040da76bd4363279ad7fe8f98353bc6a50504403be3315508344faa62123ac3691912d27710fe3c6ec90 + languageName: node + linkType: hard + +"@docusaurus/plugin-content-docs@npm:3.0.1": + version: 3.0.1 + resolution: "@docusaurus/plugin-content-docs@npm:3.0.1" + dependencies: + "@docusaurus/core": 3.0.1 + "@docusaurus/logger": 3.0.1 + "@docusaurus/mdx-loader": 3.0.1 + "@docusaurus/module-type-aliases": 3.0.1 + "@docusaurus/types": 3.0.1 + "@docusaurus/utils": 3.0.1 + "@docusaurus/utils-validation": 3.0.1 + "@types/react-router-config": ^5.0.7 + combine-promises: ^1.1.0 + fs-extra: ^11.1.1 + js-yaml: ^4.1.0 + lodash: ^4.17.21 + tslib: ^2.6.0 + utility-types: ^3.10.0 + webpack: ^5.88.1 + peerDependencies: + react: ^18.0.0 + react-dom: ^18.0.0 + checksum: ee3a12a49df2db112798e8d080365c9cc2afc4959f28772abe03eb9c806b919a9837669354b04a1ff99bf473cab1aa3b8b6ad740947a440a6b9cae09823ef6b2 + languageName: node + linkType: hard + +"@docusaurus/plugin-content-docs@npm:^2.0.0-rc.1": + version: 2.4.3 + resolution: "@docusaurus/plugin-content-docs@npm:2.4.3" + dependencies: + "@docusaurus/core": 2.4.3 + "@docusaurus/logger": 2.4.3 + "@docusaurus/mdx-loader": 2.4.3 + "@docusaurus/module-type-aliases": 2.4.3 + "@docusaurus/types": 2.4.3 + "@docusaurus/utils": 2.4.3 + "@docusaurus/utils-validation": 2.4.3 + "@types/react-router-config": ^5.0.6 + combine-promises: ^1.1.0 + fs-extra: ^10.1.0 + import-fresh: ^3.3.0 + js-yaml: ^4.1.0 + lodash: ^4.17.21 + tslib: ^2.4.0 + utility-types: ^3.10.0 + webpack: ^5.73.0 + peerDependencies: + react: ^16.8.4 || ^17.0.0 + react-dom: ^16.8.4 || ^17.0.0 + checksum: bc01201f64721131eb84f264e51c7497b8034d2a3d99d762169f5dc456c3d8882acfa01fdbaa8fdc6e2e220479b36e0c9e8e17397bf887884589535bdeaeb4bb + languageName: node + linkType: hard + +"@docusaurus/plugin-content-pages@npm:3.0.1": + version: 3.0.1 + resolution: "@docusaurus/plugin-content-pages@npm:3.0.1" + dependencies: + "@docusaurus/core": 3.0.1 + "@docusaurus/mdx-loader": 3.0.1 + "@docusaurus/types": 3.0.1 + "@docusaurus/utils": 3.0.1 + "@docusaurus/utils-validation": 3.0.1 + fs-extra: ^11.1.1 + tslib: ^2.6.0 + webpack: ^5.88.1 + peerDependencies: + react: ^18.0.0 + react-dom: ^18.0.0 + checksum: 0a3bd568e4b9df11b5926c5be10f2ced08b241f1a6b8a08f556c57ce707ebb788b19937ec1d884474c4e275dc71affb91dd55a2965ad02a03545e3eae4976141 + languageName: node + linkType: hard + +"@docusaurus/plugin-debug@npm:3.0.1": + version: 3.0.1 + resolution: "@docusaurus/plugin-debug@npm:3.0.1" + dependencies: + "@docusaurus/core": 3.0.1 + "@docusaurus/types": 3.0.1 + "@docusaurus/utils": 3.0.1 + fs-extra: ^11.1.1 + react-json-view-lite: ^1.2.0 + tslib: ^2.6.0 + peerDependencies: + react: ^18.0.0 + react-dom: ^18.0.0 + checksum: 
419f2bb61aceca70ffbba03e5e885303cea72055a41328d09d78fa2e40e7d5feb0ee4d66f056d54ac01f8d2361e890a072da6570da16f290c84746ced1582823 + languageName: node + linkType: hard + +"@docusaurus/plugin-google-analytics@npm:3.0.1": + version: 3.0.1 + resolution: "@docusaurus/plugin-google-analytics@npm:3.0.1" + dependencies: + "@docusaurus/core": 3.0.1 + "@docusaurus/types": 3.0.1 + "@docusaurus/utils-validation": 3.0.1 + tslib: ^2.6.0 + peerDependencies: + react: ^18.0.0 + react-dom: ^18.0.0 + checksum: 850930ed0860411142fe058562040f0b3a776be755670790273f48bfa37c7ee904d9107ec23d2ce210904610b72769ce0996a558c89414ac3687bd38bb50edf4 + languageName: node + linkType: hard + +"@docusaurus/plugin-google-gtag@npm:3.0.1": + version: 3.0.1 + resolution: "@docusaurus/plugin-google-gtag@npm:3.0.1" + dependencies: + "@docusaurus/core": 3.0.1 + "@docusaurus/types": 3.0.1 + "@docusaurus/utils-validation": 3.0.1 + "@types/gtag.js": ^0.0.12 + tslib: ^2.6.0 + peerDependencies: + react: ^18.0.0 + react-dom: ^18.0.0 + checksum: 579a19a6dad3940801a170efc7e5c763c7f90b68d5ecdb2707b61311af321122e84cd0bb5ceb45669e76df712ea1747d6d30fa5a0574b69a7f337dd66b346a04 + languageName: node + linkType: hard + +"@docusaurus/plugin-google-tag-manager@npm:3.0.1": + version: 3.0.1 + resolution: "@docusaurus/plugin-google-tag-manager@npm:3.0.1" + dependencies: + "@docusaurus/core": 3.0.1 + "@docusaurus/types": 3.0.1 + "@docusaurus/utils-validation": 3.0.1 + tslib: ^2.6.0 + peerDependencies: + react: ^18.0.0 + react-dom: ^18.0.0 + checksum: 1e3faf9496f75d43a81a5ff2921e783c87ef13d852cf678b54275fa0f79d70efdc127df6ae9c90ddce58b81384f39ec62de75d7e64e34ae96ea938cf234268c0 + languageName: node + linkType: hard + +"@docusaurus/plugin-sitemap@npm:3.0.1": + version: 3.0.1 + resolution: "@docusaurus/plugin-sitemap@npm:3.0.1" + dependencies: + "@docusaurus/core": 3.0.1 + "@docusaurus/logger": 3.0.1 + "@docusaurus/types": 3.0.1 + "@docusaurus/utils": 3.0.1 + "@docusaurus/utils-common": 3.0.1 + "@docusaurus/utils-validation": 3.0.1 + fs-extra: ^11.1.1 + sitemap: ^7.1.1 + tslib: ^2.6.0 + peerDependencies: + react: ^18.0.0 + react-dom: ^18.0.0 + checksum: 464359fa44143f3e686d02cd70f86741cdd4a74f29f212b83767617fc1dacbfddfa4321c16e0c253849ff41a75078fabbfdf8637d7a141fb1a0354360db2b2bb + languageName: node + linkType: hard + +"@docusaurus/preset-classic@npm:^3.0.1": + version: 3.0.1 + resolution: "@docusaurus/preset-classic@npm:3.0.1" + dependencies: + "@docusaurus/core": 3.0.1 + "@docusaurus/plugin-content-blog": 3.0.1 + "@docusaurus/plugin-content-docs": 3.0.1 + "@docusaurus/plugin-content-pages": 3.0.1 + "@docusaurus/plugin-debug": 3.0.1 + "@docusaurus/plugin-google-analytics": 3.0.1 + "@docusaurus/plugin-google-gtag": 3.0.1 + "@docusaurus/plugin-google-tag-manager": 3.0.1 + "@docusaurus/plugin-sitemap": 3.0.1 + "@docusaurus/theme-classic": 3.0.1 + "@docusaurus/theme-common": 3.0.1 + "@docusaurus/theme-search-algolia": 3.0.1 + "@docusaurus/types": 3.0.1 + peerDependencies: + react: ^18.0.0 + react-dom: ^18.0.0 + checksum: 03e75324c92a70aea9980f29a993e79967e5ca85d2db1b18bcb00e6c3d8711fec1a1728f92247d4d35a119ae5c3fb5b5d728ea33591f36e8bd43fa6acb1c791c + languageName: node + linkType: hard + +"@docusaurus/react-loadable@npm:5.5.2, react-loadable@npm:@docusaurus/react-loadable@5.5.2": + version: 5.5.2 + resolution: "@docusaurus/react-loadable@npm:5.5.2" + dependencies: + "@types/react": "*" + prop-types: ^15.6.2 + peerDependencies: + react: "*" + checksum: 
930fb9e2936412a12461f210acdc154a433283921ca43ac3fc3b84cb6c12eb738b3a3719373022bf68004efeb1a928dbe36c467d7a1f86454ed6241576d936e7 + languageName: node + linkType: hard + +"@docusaurus/theme-classic@npm:3.0.1": + version: 3.0.1 + resolution: "@docusaurus/theme-classic@npm:3.0.1" + dependencies: + "@docusaurus/core": 3.0.1 + "@docusaurus/mdx-loader": 3.0.1 + "@docusaurus/module-type-aliases": 3.0.1 + "@docusaurus/plugin-content-blog": 3.0.1 + "@docusaurus/plugin-content-docs": 3.0.1 + "@docusaurus/plugin-content-pages": 3.0.1 + "@docusaurus/theme-common": 3.0.1 + "@docusaurus/theme-translations": 3.0.1 + "@docusaurus/types": 3.0.1 + "@docusaurus/utils": 3.0.1 + "@docusaurus/utils-common": 3.0.1 + "@docusaurus/utils-validation": 3.0.1 + "@mdx-js/react": ^3.0.0 + clsx: ^2.0.0 + copy-text-to-clipboard: ^3.2.0 + infima: 0.2.0-alpha.43 + lodash: ^4.17.21 + nprogress: ^0.2.0 + postcss: ^8.4.26 + prism-react-renderer: ^2.3.0 + prismjs: ^1.29.0 + react-router-dom: ^5.3.4 + rtlcss: ^4.1.0 + tslib: ^2.6.0 + utility-types: ^3.10.0 + peerDependencies: + react: ^18.0.0 + react-dom: ^18.0.0 + checksum: 86cef28b5f93d01f15cb134283b8d1006466d661cc39c09c585e56a6a98b09816f8e7cef24b164e8a378b6deb4ed8984fdc329d09fdcbe83fa51529091ccfad8 + languageName: node + linkType: hard + +"@docusaurus/theme-common@npm:3.0.1": + version: 3.0.1 + resolution: "@docusaurus/theme-common@npm:3.0.1" + dependencies: + "@docusaurus/mdx-loader": 3.0.1 + "@docusaurus/module-type-aliases": 3.0.1 + "@docusaurus/plugin-content-blog": 3.0.1 + "@docusaurus/plugin-content-docs": 3.0.1 + "@docusaurus/plugin-content-pages": 3.0.1 + "@docusaurus/utils": 3.0.1 + "@docusaurus/utils-common": 3.0.1 + "@types/history": ^4.7.11 + "@types/react": "*" + "@types/react-router-config": "*" + clsx: ^2.0.0 + parse-numeric-range: ^1.3.0 + prism-react-renderer: ^2.3.0 + tslib: ^2.6.0 + utility-types: ^3.10.0 + peerDependencies: + react: ^18.0.0 + react-dom: ^18.0.0 + checksum: 99fb138fd2fb499d53ee81ae717768b5cb6556ddd337b6d1a399815cb428eed2c04d2823e2040fd4db27bc79681f6333ac1ea78d760ff7fc4475d16d1790552a + languageName: node + linkType: hard + +"@docusaurus/theme-search-algolia@npm:3.0.1": + version: 3.0.1 + resolution: "@docusaurus/theme-search-algolia@npm:3.0.1" + dependencies: + "@docsearch/react": ^3.5.2 + "@docusaurus/core": 3.0.1 + "@docusaurus/logger": 3.0.1 + "@docusaurus/plugin-content-docs": 3.0.1 + "@docusaurus/theme-common": 3.0.1 + "@docusaurus/theme-translations": 3.0.1 + "@docusaurus/utils": 3.0.1 + "@docusaurus/utils-validation": 3.0.1 + algoliasearch: ^4.18.0 + algoliasearch-helper: ^3.13.3 + clsx: ^2.0.0 + eta: ^2.2.0 + fs-extra: ^11.1.1 + lodash: ^4.17.21 + tslib: ^2.6.0 + utility-types: ^3.10.0 + peerDependencies: + react: ^18.0.0 + react-dom: ^18.0.0 + checksum: 24a38dbd7085ea78c412e50c94dda7e0ecb80046dd18c1fdb515d81b21be5cdbc706705a5155600510b0814698abb234885a576d90e0db9cf3c5983d0bf51462 + languageName: node + linkType: hard + +"@docusaurus/theme-translations@npm:3.0.1": + version: 3.0.1 + resolution: "@docusaurus/theme-translations@npm:3.0.1" + dependencies: + fs-extra: ^11.1.1 + tslib: ^2.6.0 + checksum: a1df314ddaeb7f453867c5ee5424b36d31c6d6541f86b3927881b77333e997b87e720c0285f3be507283cb851537ff154ce0ddbd5e771c184c8aa10af721d7c2 + languageName: node + linkType: hard + +"@docusaurus/theme-translations@npm:^2.0.0-rc.1": + version: 2.4.3 + resolution: "@docusaurus/theme-translations@npm:2.4.3" + dependencies: + fs-extra: ^10.1.0 + tslib: ^2.4.0 + checksum: 
8424583a130b0d32b6adf578dc5daeefaad199019c8a6a23fbd67577209be64923cde59d423ea9d41d6e7cfc2318e7fa6a17a665e8ae1c871ce0880525f9b8fd + languageName: node + linkType: hard + +"@docusaurus/tsconfig@npm:^3.0.1": + version: 3.0.1 + resolution: "@docusaurus/tsconfig@npm:3.0.1" + checksum: a191e527740ea09cc4783ab45f106989f692d100e83768a4398fc08d3d41f0645afdce83aa89a1b251d5899544105af09e795af4d0db54247403a180f3c43098 + languageName: node + linkType: hard + +"@docusaurus/types@npm:2.4.3, @docusaurus/types@npm:^2.4.1": + version: 2.4.3 + resolution: "@docusaurus/types@npm:2.4.3" + dependencies: + "@types/history": ^4.7.11 + "@types/react": "*" + commander: ^5.1.0 + joi: ^17.6.0 + react-helmet-async: ^1.3.0 + utility-types: ^3.10.0 + webpack: ^5.73.0 + webpack-merge: ^5.8.0 + peerDependencies: + react: ^16.8.4 || ^17.0.0 + react-dom: ^16.8.4 || ^17.0.0 + checksum: c123c45630e885b588f808baa06a97f8408a3381906f65cb92ae75732aedfca6ab2cada94f969c08e043b885b95298616440326259b789010e0986cbcd7a960b + languageName: node + linkType: hard + +"@docusaurus/types@npm:3.0.1, @docusaurus/types@npm:^3.0.1": + version: 3.0.1 + resolution: "@docusaurus/types@npm:3.0.1" + dependencies: + "@types/history": ^4.7.11 + "@types/react": "*" + commander: ^5.1.0 + joi: ^17.9.2 + react-helmet-async: ^1.3.0 + utility-types: ^3.10.0 + webpack: ^5.88.1 + webpack-merge: ^5.9.0 + peerDependencies: + react: ^18.0.0 + react-dom: ^18.0.0 + checksum: 1874e66435e986262ad06639b812d49aa5c81a29815b27d31370d055335cebdad77ee0276504497b1765c489e5c5faf9795df97e52649af931d1cf381c4afa77 + languageName: node + linkType: hard + +"@docusaurus/utils-common@npm:2.4.3, @docusaurus/utils-common@npm:^2.0.0-rc.1": + version: 2.4.3 + resolution: "@docusaurus/utils-common@npm:2.4.3" + dependencies: + tslib: ^2.4.0 + peerDependencies: + "@docusaurus/types": "*" + peerDependenciesMeta: + "@docusaurus/types": + optional: true + checksum: 1ae315d8d8ce7a0163a698ffdca55b734d21f336512138c128bc0fa2a8d224edbaad0c8dbd7a3de2e8ef734dc2656c505d09066dee4fc84819d153593abb8984 + languageName: node + linkType: hard + +"@docusaurus/utils-common@npm:3.0.1": + version: 3.0.1 + resolution: "@docusaurus/utils-common@npm:3.0.1" + dependencies: + tslib: ^2.6.0 + peerDependencies: + "@docusaurus/types": "*" + peerDependenciesMeta: + "@docusaurus/types": + optional: true + checksum: 7d4eb39258539d594cf1432d07be0325de5a02c2a00418e022b0cd2d4374788a7cc5dd3febad6f34744e5a1e76646ae909ffbdf2024284f31c579d1f1ff055d8 + languageName: node + linkType: hard + +"@docusaurus/utils-validation@npm:2.4.3, @docusaurus/utils-validation@npm:^2.0.0-rc.1": + version: 2.4.3 + resolution: "@docusaurus/utils-validation@npm:2.4.3" + dependencies: + "@docusaurus/logger": 2.4.3 + "@docusaurus/utils": 2.4.3 + joi: ^17.6.0 + js-yaml: ^4.1.0 + tslib: ^2.4.0 + checksum: d3472b3f7a0a029c2cef1f00bc9db403d5f7e74e2091eccbc45d06f5776a84fd73bd1a18cf3a8a3cc0348ce49f753a1300deac670c2a82c56070cc40ca9df06e + languageName: node + linkType: hard + +"@docusaurus/utils-validation@npm:3.0.1": + version: 3.0.1 + resolution: "@docusaurus/utils-validation@npm:3.0.1" + dependencies: + "@docusaurus/logger": 3.0.1 + "@docusaurus/utils": 3.0.1 + joi: ^17.9.2 + js-yaml: ^4.1.0 + tslib: ^2.6.0 + checksum: c52edd61906ee004cea95ca33f81ec10a40276cad29f1aef505220cea4b922c1734b765d9c55b0889822351876ea545a73f7f3a4fbbb574f625fe455f8387033 + languageName: node + linkType: hard + +"@docusaurus/utils@npm:2.4.3, @docusaurus/utils@npm:^2.0.0-rc.1": + version: 2.4.3 + resolution: "@docusaurus/utils@npm:2.4.3" + dependencies: + "@docusaurus/logger": 
2.4.3 + "@svgr/webpack": ^6.2.1 + escape-string-regexp: ^4.0.0 + file-loader: ^6.2.0 + fs-extra: ^10.1.0 + github-slugger: ^1.4.0 + globby: ^11.1.0 + gray-matter: ^4.0.3 + js-yaml: ^4.1.0 + lodash: ^4.17.21 + micromatch: ^4.0.5 + resolve-pathname: ^3.0.0 + shelljs: ^0.8.5 + tslib: ^2.4.0 + url-loader: ^4.1.1 + webpack: ^5.73.0 + peerDependencies: + "@docusaurus/types": "*" + peerDependenciesMeta: + "@docusaurus/types": + optional: true + checksum: dd1aa7688d1a4b2775e13a91d528608ceab33c57a921404d9a989867c31c8ef17fe3892e4f5680dfb4a783da7b9973e2077e907ff4ac172927433e606e8fa9b9 + languageName: node + linkType: hard + +"@docusaurus/utils@npm:3.0.1": + version: 3.0.1 + resolution: "@docusaurus/utils@npm:3.0.1" + dependencies: + "@docusaurus/logger": 3.0.1 + "@svgr/webpack": ^6.5.1 + escape-string-regexp: ^4.0.0 + file-loader: ^6.2.0 + fs-extra: ^11.1.1 + github-slugger: ^1.5.0 + globby: ^11.1.0 + gray-matter: ^4.0.3 + jiti: ^1.20.0 + js-yaml: ^4.1.0 + lodash: ^4.17.21 + micromatch: ^4.0.5 + resolve-pathname: ^3.0.0 + shelljs: ^0.8.5 + tslib: ^2.6.0 + url-loader: ^4.1.1 + webpack: ^5.88.1 + peerDependencies: + "@docusaurus/types": "*" + peerDependenciesMeta: + "@docusaurus/types": + optional: true + checksum: 5a8c5d8dd9cf1ad9ed1cecff3be3cbe041ebf8f51e2744af8aa006df67367f24d0888181566ed9ab2837b931a4fb135d943eadfde99708468f90f18795d413b5 + languageName: node + linkType: hard + +"@easyops-cn/autocomplete.js@npm:^0.38.1": + version: 0.38.1 + resolution: "@easyops-cn/autocomplete.js@npm:0.38.1" + dependencies: + cssesc: ^3.0.0 + immediate: ^3.2.3 + checksum: d88b61f12c383856b8d5cedf176a6d07a21e013dc2c78be029af81e2e026ece2bb988c6ea7f9951a2759c2e6f806ea1d1c9627bf36d9cbe376e897a94ce5da09 + languageName: node + linkType: hard + +"@easyops-cn/docusaurus-search-local@npm:^0.35.0": + version: 0.35.0 + resolution: "@easyops-cn/docusaurus-search-local@npm:0.35.0" + dependencies: + "@docusaurus/plugin-content-docs": ^2.0.0-rc.1 + "@docusaurus/theme-translations": ^2.0.0-rc.1 + "@docusaurus/utils": ^2.0.0-rc.1 + "@docusaurus/utils-common": ^2.0.0-rc.1 + "@docusaurus/utils-validation": ^2.0.0-rc.1 + "@easyops-cn/autocomplete.js": ^0.38.1 + "@node-rs/jieba": ^1.6.0 + cheerio: ^1.0.0-rc.3 + clsx: ^1.1.1 + debug: ^4.2.0 + fs-extra: ^10.0.0 + klaw-sync: ^6.0.0 + lunr: ^2.3.9 + lunr-languages: ^1.4.0 + mark.js: ^8.11.1 + tslib: ^2.4.0 + peerDependencies: + "@docusaurus/theme-common": ^2.0.0-rc.1 + react: ^16.14.0 || ^17.0.0 || ^18.0.0 + react-dom: ^16.14.0 || ^17.0.0 || ^18.0.0 + checksum: 726b7b5d52f3fd01286e5a97bb2f5d27ae0ace2be7a7742c92b0bf11d56f2f44a16f6b7af556f5676ffa2a3b94c244d17ea9804894553f94ba66b98249e1e10f + languageName: node + linkType: hard + +"@esbuild/android-arm64@npm:0.17.19": + version: 0.17.19 + resolution: "@esbuild/android-arm64@npm:0.17.19" + conditions: os=android & cpu=arm64 + languageName: node + linkType: hard + +"@esbuild/android-arm64@npm:0.18.20": + version: 0.18.20 + resolution: "@esbuild/android-arm64@npm:0.18.20" + conditions: os=android & cpu=arm64 + languageName: node + linkType: hard + +"@esbuild/android-arm@npm:0.17.19": + version: 0.17.19 + resolution: "@esbuild/android-arm@npm:0.17.19" + conditions: os=android & cpu=arm + languageName: node + linkType: hard + +"@esbuild/android-arm@npm:0.18.20": + version: 0.18.20 + resolution: "@esbuild/android-arm@npm:0.18.20" + conditions: os=android & cpu=arm + languageName: node + linkType: hard + +"@esbuild/android-x64@npm:0.17.19": + version: 0.17.19 + resolution: "@esbuild/android-x64@npm:0.17.19" + conditions: os=android & cpu=x64 + 
languageName: node + linkType: hard + +"@esbuild/android-x64@npm:0.18.20": + version: 0.18.20 + resolution: "@esbuild/android-x64@npm:0.18.20" + conditions: os=android & cpu=x64 + languageName: node + linkType: hard + +"@esbuild/darwin-arm64@npm:0.17.19": + version: 0.17.19 + resolution: "@esbuild/darwin-arm64@npm:0.17.19" + conditions: os=darwin & cpu=arm64 + languageName: node + linkType: hard + +"@esbuild/darwin-arm64@npm:0.18.20": + version: 0.18.20 + resolution: "@esbuild/darwin-arm64@npm:0.18.20" + conditions: os=darwin & cpu=arm64 + languageName: node + linkType: hard + +"@esbuild/darwin-x64@npm:0.17.19": + version: 0.17.19 + resolution: "@esbuild/darwin-x64@npm:0.17.19" + conditions: os=darwin & cpu=x64 + languageName: node + linkType: hard + +"@esbuild/darwin-x64@npm:0.18.20": + version: 0.18.20 + resolution: "@esbuild/darwin-x64@npm:0.18.20" + conditions: os=darwin & cpu=x64 + languageName: node + linkType: hard + +"@esbuild/freebsd-arm64@npm:0.17.19": + version: 0.17.19 + resolution: "@esbuild/freebsd-arm64@npm:0.17.19" + conditions: os=freebsd & cpu=arm64 + languageName: node + linkType: hard + +"@esbuild/freebsd-arm64@npm:0.18.20": + version: 0.18.20 + resolution: "@esbuild/freebsd-arm64@npm:0.18.20" + conditions: os=freebsd & cpu=arm64 + languageName: node + linkType: hard + +"@esbuild/freebsd-x64@npm:0.17.19": + version: 0.17.19 + resolution: "@esbuild/freebsd-x64@npm:0.17.19" + conditions: os=freebsd & cpu=x64 + languageName: node + linkType: hard + +"@esbuild/freebsd-x64@npm:0.18.20": + version: 0.18.20 + resolution: "@esbuild/freebsd-x64@npm:0.18.20" + conditions: os=freebsd & cpu=x64 + languageName: node + linkType: hard + +"@esbuild/linux-arm64@npm:0.17.19": + version: 0.17.19 + resolution: "@esbuild/linux-arm64@npm:0.17.19" + conditions: os=linux & cpu=arm64 + languageName: node + linkType: hard + +"@esbuild/linux-arm64@npm:0.18.20": + version: 0.18.20 + resolution: "@esbuild/linux-arm64@npm:0.18.20" + conditions: os=linux & cpu=arm64 + languageName: node + linkType: hard + +"@esbuild/linux-arm@npm:0.17.19": + version: 0.17.19 + resolution: "@esbuild/linux-arm@npm:0.17.19" + conditions: os=linux & cpu=arm + languageName: node + linkType: hard + +"@esbuild/linux-arm@npm:0.18.20": + version: 0.18.20 + resolution: "@esbuild/linux-arm@npm:0.18.20" + conditions: os=linux & cpu=arm + languageName: node + linkType: hard + +"@esbuild/linux-ia32@npm:0.17.19": + version: 0.17.19 + resolution: "@esbuild/linux-ia32@npm:0.17.19" + conditions: os=linux & cpu=ia32 + languageName: node + linkType: hard + +"@esbuild/linux-ia32@npm:0.18.20": + version: 0.18.20 + resolution: "@esbuild/linux-ia32@npm:0.18.20" + conditions: os=linux & cpu=ia32 + languageName: node + linkType: hard + +"@esbuild/linux-loong64@npm:0.17.19": + version: 0.17.19 + resolution: "@esbuild/linux-loong64@npm:0.17.19" + conditions: os=linux & cpu=loong64 + languageName: node + linkType: hard + +"@esbuild/linux-loong64@npm:0.18.20": + version: 0.18.20 + resolution: "@esbuild/linux-loong64@npm:0.18.20" + conditions: os=linux & cpu=loong64 + languageName: node + linkType: hard + +"@esbuild/linux-mips64el@npm:0.17.19": + version: 0.17.19 + resolution: "@esbuild/linux-mips64el@npm:0.17.19" + conditions: os=linux & cpu=mips64el + languageName: node + linkType: hard + +"@esbuild/linux-mips64el@npm:0.18.20": + version: 0.18.20 + resolution: "@esbuild/linux-mips64el@npm:0.18.20" + conditions: os=linux & cpu=mips64el + languageName: node + linkType: hard + +"@esbuild/linux-ppc64@npm:0.17.19": + version: 0.17.19 + resolution: 
"@esbuild/linux-ppc64@npm:0.17.19" + conditions: os=linux & cpu=ppc64 + languageName: node + linkType: hard + +"@esbuild/linux-ppc64@npm:0.18.20": + version: 0.18.20 + resolution: "@esbuild/linux-ppc64@npm:0.18.20" + conditions: os=linux & cpu=ppc64 + languageName: node + linkType: hard + +"@esbuild/linux-riscv64@npm:0.17.19": + version: 0.17.19 + resolution: "@esbuild/linux-riscv64@npm:0.17.19" + conditions: os=linux & cpu=riscv64 + languageName: node + linkType: hard + +"@esbuild/linux-riscv64@npm:0.18.20": + version: 0.18.20 + resolution: "@esbuild/linux-riscv64@npm:0.18.20" + conditions: os=linux & cpu=riscv64 + languageName: node + linkType: hard + +"@esbuild/linux-s390x@npm:0.17.19": + version: 0.17.19 + resolution: "@esbuild/linux-s390x@npm:0.17.19" + conditions: os=linux & cpu=s390x + languageName: node + linkType: hard + +"@esbuild/linux-s390x@npm:0.18.20": + version: 0.18.20 + resolution: "@esbuild/linux-s390x@npm:0.18.20" + conditions: os=linux & cpu=s390x + languageName: node + linkType: hard + +"@esbuild/linux-x64@npm:0.17.19": + version: 0.17.19 + resolution: "@esbuild/linux-x64@npm:0.17.19" + conditions: os=linux & cpu=x64 + languageName: node + linkType: hard + +"@esbuild/linux-x64@npm:0.18.20": + version: 0.18.20 + resolution: "@esbuild/linux-x64@npm:0.18.20" + conditions: os=linux & cpu=x64 + languageName: node + linkType: hard + +"@esbuild/netbsd-x64@npm:0.17.19": + version: 0.17.19 + resolution: "@esbuild/netbsd-x64@npm:0.17.19" + conditions: os=netbsd & cpu=x64 + languageName: node + linkType: hard + +"@esbuild/netbsd-x64@npm:0.18.20": + version: 0.18.20 + resolution: "@esbuild/netbsd-x64@npm:0.18.20" + conditions: os=netbsd & cpu=x64 + languageName: node + linkType: hard + +"@esbuild/openbsd-x64@npm:0.17.19": + version: 0.17.19 + resolution: "@esbuild/openbsd-x64@npm:0.17.19" + conditions: os=openbsd & cpu=x64 + languageName: node + linkType: hard + +"@esbuild/openbsd-x64@npm:0.18.20": + version: 0.18.20 + resolution: "@esbuild/openbsd-x64@npm:0.18.20" + conditions: os=openbsd & cpu=x64 + languageName: node + linkType: hard + +"@esbuild/sunos-x64@npm:0.17.19": + version: 0.17.19 + resolution: "@esbuild/sunos-x64@npm:0.17.19" + conditions: os=sunos & cpu=x64 + languageName: node + linkType: hard + +"@esbuild/sunos-x64@npm:0.18.20": + version: 0.18.20 + resolution: "@esbuild/sunos-x64@npm:0.18.20" + conditions: os=sunos & cpu=x64 + languageName: node + linkType: hard + +"@esbuild/win32-arm64@npm:0.17.19": + version: 0.17.19 + resolution: "@esbuild/win32-arm64@npm:0.17.19" + conditions: os=win32 & cpu=arm64 + languageName: node + linkType: hard + +"@esbuild/win32-arm64@npm:0.18.20": + version: 0.18.20 + resolution: "@esbuild/win32-arm64@npm:0.18.20" + conditions: os=win32 & cpu=arm64 + languageName: node + linkType: hard + +"@esbuild/win32-ia32@npm:0.17.19": + version: 0.17.19 + resolution: "@esbuild/win32-ia32@npm:0.17.19" + conditions: os=win32 & cpu=ia32 + languageName: node + linkType: hard + +"@esbuild/win32-ia32@npm:0.18.20": + version: 0.18.20 + resolution: "@esbuild/win32-ia32@npm:0.18.20" + conditions: os=win32 & cpu=ia32 + languageName: node + linkType: hard + +"@esbuild/win32-x64@npm:0.17.19": + version: 0.17.19 + resolution: "@esbuild/win32-x64@npm:0.17.19" + conditions: os=win32 & cpu=x64 + languageName: node + linkType: hard + +"@esbuild/win32-x64@npm:0.18.20": + version: 0.18.20 + resolution: "@esbuild/win32-x64@npm:0.18.20" + conditions: os=win32 & cpu=x64 + languageName: node + linkType: hard + +"@eslint-community/eslint-utils@npm:^4.2.0, 
@eslint-community/eslint-utils@npm:^4.4.0": + version: 4.4.0 + resolution: "@eslint-community/eslint-utils@npm:4.4.0" + dependencies: + eslint-visitor-keys: ^3.3.0 + peerDependencies: + eslint: ^6.0.0 || ^7.0.0 || >=8.0.0 + checksum: cdfe3ae42b4f572cbfb46d20edafe6f36fc5fb52bf2d90875c58aefe226892b9677fef60820e2832caf864a326fe4fc225714c46e8389ccca04d5f9288aabd22 + languageName: node + linkType: hard + +"@eslint-community/regexpp@npm:^4.5.1, @eslint-community/regexpp@npm:^4.6.1": + version: 4.10.0 + resolution: "@eslint-community/regexpp@npm:4.10.0" + checksum: 2a6e345429ea8382aaaf3a61f865cae16ed44d31ca917910033c02dc00d505d939f10b81e079fa14d43b51499c640138e153b7e40743c4c094d9df97d4e56f7b + languageName: node + linkType: hard + +"@eslint/eslintrc@npm:^2.1.4": + version: 2.1.4 + resolution: "@eslint/eslintrc@npm:2.1.4" + dependencies: + ajv: ^6.12.4 + debug: ^4.3.2 + espree: ^9.6.0 + globals: ^13.19.0 + ignore: ^5.2.0 + import-fresh: ^3.2.1 + js-yaml: ^4.1.0 + minimatch: ^3.1.2 + strip-json-comments: ^3.1.1 + checksum: 10957c7592b20ca0089262d8c2a8accbad14b4f6507e35416c32ee6b4dbf9cad67dfb77096bbd405405e9ada2b107f3797fe94362e1c55e0b09d6e90dd149127 + languageName: node + linkType: hard + +"@eslint/js@npm:8.55.0": + version: 8.55.0 + resolution: "@eslint/js@npm:8.55.0" + checksum: fa33ef619f0646ed15649b0c2e313e4d9ccee8425884bdbfc78020d6b6b64c0c42fa9d83061d0e6158e1d4274f03f0f9008786540e2efab8fcdc48082259908c + languageName: node + linkType: hard + +"@esm-bundle/chai@npm:^4.3.4-fix.0": + version: 4.3.4 + resolution: "@esm-bundle/chai@npm:4.3.4" + dependencies: + "@types/chai": ^4.2.12 + checksum: 6d1237e9b8309b31ca55d12abe03642ab58550fdac24d0acbfeae6ab14182f72cedf646c6e858fd7ef592b4034ddd23ce5882ff22b8ab9b7952327e9f3f8c3f5 + languageName: node + linkType: hard + +"@ethersproject/abi@npm:5.7.0, @ethersproject/abi@npm:^5.1.2, @ethersproject/abi@npm:^5.7.0": + version: 5.7.0 + resolution: "@ethersproject/abi@npm:5.7.0" + dependencies: + "@ethersproject/address": ^5.7.0 + "@ethersproject/bignumber": ^5.7.0 + "@ethersproject/bytes": ^5.7.0 + "@ethersproject/constants": ^5.7.0 + "@ethersproject/hash": ^5.7.0 + "@ethersproject/keccak256": ^5.7.0 + "@ethersproject/logger": ^5.7.0 + "@ethersproject/properties": ^5.7.0 + "@ethersproject/strings": ^5.7.0 + checksum: bc6962bb6cb854e4d2a4d65b2c49c716477675b131b1363312234bdbb7e19badb7d9ce66f4ca2a70ae2ea84f7123dbc4e300a1bfe5d58864a7eafabc1466627e + languageName: node + linkType: hard + +"@ethersproject/abstract-provider@npm:5.7.0, @ethersproject/abstract-provider@npm:^5.7.0": + version: 5.7.0 + resolution: "@ethersproject/abstract-provider@npm:5.7.0" + dependencies: + "@ethersproject/bignumber": ^5.7.0 + "@ethersproject/bytes": ^5.7.0 + "@ethersproject/logger": ^5.7.0 + "@ethersproject/networks": ^5.7.0 + "@ethersproject/properties": ^5.7.0 + "@ethersproject/transactions": ^5.7.0 + "@ethersproject/web": ^5.7.0 + checksum: 74cf4696245cf03bb7cc5b6cbf7b4b89dd9a79a1c4688126d214153a938126d4972d42c93182198653ce1de35f2a2cad68be40337d4774b3698a39b28f0228a8 + languageName: node + linkType: hard + +"@ethersproject/abstract-signer@npm:5.7.0, @ethersproject/abstract-signer@npm:^5.7.0": + version: 5.7.0 + resolution: "@ethersproject/abstract-signer@npm:5.7.0" + dependencies: + "@ethersproject/abstract-provider": ^5.7.0 + "@ethersproject/bignumber": ^5.7.0 + "@ethersproject/bytes": ^5.7.0 + "@ethersproject/logger": ^5.7.0 + "@ethersproject/properties": ^5.7.0 + checksum: 
a823dac9cfb761e009851050ebebd5b229d1b1cc4a75b125c2da130ff37e8218208f7f9d1386f77407705b889b23d4a230ad67185f8872f083143e0073cbfbe3 + languageName: node + linkType: hard + +"@ethersproject/address@npm:5.7.0, @ethersproject/address@npm:^5.7.0": + version: 5.7.0 + resolution: "@ethersproject/address@npm:5.7.0" + dependencies: + "@ethersproject/bignumber": ^5.7.0 + "@ethersproject/bytes": ^5.7.0 + "@ethersproject/keccak256": ^5.7.0 + "@ethersproject/logger": ^5.7.0 + "@ethersproject/rlp": ^5.7.0 + checksum: 64ea5ebea9cc0e845c413e6cb1e54e157dd9fc0dffb98e239d3a3efc8177f2ff798cd4e3206cf3660ee8faeb7bef1a47dc0ebef0d7b132c32e61e550c7d4c843 + languageName: node + linkType: hard + +"@ethersproject/base64@npm:5.7.0, @ethersproject/base64@npm:^5.7.0": + version: 5.7.0 + resolution: "@ethersproject/base64@npm:5.7.0" + dependencies: + "@ethersproject/bytes": ^5.7.0 + checksum: 7dd5d734d623582f08f665434f53685041a3d3b334a0e96c0c8afa8bbcaab934d50e5b6b980e826a8fde8d353e0b18f11e61faf17468177274b8e7c69cd9742b + languageName: node + linkType: hard + +"@ethersproject/basex@npm:5.7.0, @ethersproject/basex@npm:^5.7.0": + version: 5.7.0 + resolution: "@ethersproject/basex@npm:5.7.0" + dependencies: + "@ethersproject/bytes": ^5.7.0 + "@ethersproject/properties": ^5.7.0 + checksum: 326087b7e1f3787b5fe6cd1cf2b4b5abfafbc355a45e88e22e5e9d6c845b613ffc5301d629b28d5c4d5e2bfe9ec424e6782c804956dff79be05f0098cb5817de + languageName: node + linkType: hard + +"@ethersproject/bignumber@npm:5.7.0, @ethersproject/bignumber@npm:^5.7.0": + version: 5.7.0 + resolution: "@ethersproject/bignumber@npm:5.7.0" + dependencies: + "@ethersproject/bytes": ^5.7.0 + "@ethersproject/logger": ^5.7.0 + bn.js: ^5.2.1 + checksum: 8c9a134b76f3feb4ec26a5a27379efb4e156b8fb2de0678a67788a91c7f4e30abe9d948638458e4b20f2e42380da0adacc7c9389d05fce070692edc6ae9b4904 + languageName: node + linkType: hard + +"@ethersproject/bytes@npm:5.7.0, @ethersproject/bytes@npm:^5.7.0": + version: 5.7.0 + resolution: "@ethersproject/bytes@npm:5.7.0" + dependencies: + "@ethersproject/logger": ^5.7.0 + checksum: 66ad365ceaab5da1b23b72225c71dce472cf37737af5118181fa8ab7447d696bea15ca22e3a0e8836fdd8cfac161afe321a7c67d0dde96f9f645ddd759676621 + languageName: node + linkType: hard + +"@ethersproject/constants@npm:5.7.0, @ethersproject/constants@npm:^5.7.0": + version: 5.7.0 + resolution: "@ethersproject/constants@npm:5.7.0" + dependencies: + "@ethersproject/bignumber": ^5.7.0 + checksum: 6d4b1355747cce837b3e76ec3bde70e4732736f23b04f196f706ebfa5d4d9c2be50904a390d4d40ce77803b98d03d16a9b6898418e04ba63491933ce08c4ba8a + languageName: node + linkType: hard + +"@ethersproject/contracts@npm:5.7.0": + version: 5.7.0 + resolution: "@ethersproject/contracts@npm:5.7.0" + dependencies: + "@ethersproject/abi": ^5.7.0 + "@ethersproject/abstract-provider": ^5.7.0 + "@ethersproject/abstract-signer": ^5.7.0 + "@ethersproject/address": ^5.7.0 + "@ethersproject/bignumber": ^5.7.0 + "@ethersproject/bytes": ^5.7.0 + "@ethersproject/constants": ^5.7.0 + "@ethersproject/logger": ^5.7.0 + "@ethersproject/properties": ^5.7.0 + "@ethersproject/transactions": ^5.7.0 + checksum: 6ccf1121cba01b31e02f8c507cb971ab6bfed85706484a9ec09878ef1594a62215f43c4fdef8f4a4875b99c4a800bc95e3be69b1803f8ce479e07634b5a740c0 + languageName: node + linkType: hard + +"@ethersproject/hash@npm:5.7.0, @ethersproject/hash@npm:^5.7.0": + version: 5.7.0 + resolution: "@ethersproject/hash@npm:5.7.0" + dependencies: + "@ethersproject/abstract-signer": ^5.7.0 + "@ethersproject/address": ^5.7.0 + "@ethersproject/base64": ^5.7.0 + 
"@ethersproject/bignumber": ^5.7.0 + "@ethersproject/bytes": ^5.7.0 + "@ethersproject/keccak256": ^5.7.0 + "@ethersproject/logger": ^5.7.0 + "@ethersproject/properties": ^5.7.0 + "@ethersproject/strings": ^5.7.0 + checksum: 6e9fa8d14eb08171cd32f17f98cc108ec2aeca74a427655f0d689c550fee0b22a83b3b400fad7fb3f41cf14d4111f87f170aa7905bcbcd1173a55f21b06262ef + languageName: node + linkType: hard + +"@ethersproject/hdnode@npm:5.7.0, @ethersproject/hdnode@npm:^5.7.0": + version: 5.7.0 + resolution: "@ethersproject/hdnode@npm:5.7.0" + dependencies: + "@ethersproject/abstract-signer": ^5.7.0 + "@ethersproject/basex": ^5.7.0 + "@ethersproject/bignumber": ^5.7.0 + "@ethersproject/bytes": ^5.7.0 + "@ethersproject/logger": ^5.7.0 + "@ethersproject/pbkdf2": ^5.7.0 + "@ethersproject/properties": ^5.7.0 + "@ethersproject/sha2": ^5.7.0 + "@ethersproject/signing-key": ^5.7.0 + "@ethersproject/strings": ^5.7.0 + "@ethersproject/transactions": ^5.7.0 + "@ethersproject/wordlists": ^5.7.0 + checksum: bfe5ca2d89a42de73655f853170ef4766b933c5f481cddad709b3aca18823275b096e572f92d1602a052f80b426edde44ad6b9d028799775a7dad4a5bbed2133 + languageName: node + linkType: hard + +"@ethersproject/json-wallets@npm:5.7.0, @ethersproject/json-wallets@npm:^5.7.0": + version: 5.7.0 + resolution: "@ethersproject/json-wallets@npm:5.7.0" + dependencies: + "@ethersproject/abstract-signer": ^5.7.0 + "@ethersproject/address": ^5.7.0 + "@ethersproject/bytes": ^5.7.0 + "@ethersproject/hdnode": ^5.7.0 + "@ethersproject/keccak256": ^5.7.0 + "@ethersproject/logger": ^5.7.0 + "@ethersproject/pbkdf2": ^5.7.0 + "@ethersproject/properties": ^5.7.0 + "@ethersproject/random": ^5.7.0 + "@ethersproject/strings": ^5.7.0 + "@ethersproject/transactions": ^5.7.0 + aes-js: 3.0.0 + scrypt-js: 3.0.1 + checksum: f583458d22db62efaaf94d38dd243482776a45bf90f9f3882fbad5aa0b8fd288b41eb7c1ff8ec0b99c9b751088e43d6173530db64dd33c59f9d8daa8d7ad5aa2 + languageName: node + linkType: hard + +"@ethersproject/keccak256@npm:5.7.0, @ethersproject/keccak256@npm:^5.7.0": + version: 5.7.0 + resolution: "@ethersproject/keccak256@npm:5.7.0" + dependencies: + "@ethersproject/bytes": ^5.7.0 + js-sha3: 0.8.0 + checksum: ff70950d82203aab29ccda2553422cbac2e7a0c15c986bd20a69b13606ed8bb6e4fdd7b67b8d3b27d4f841e8222cbaccd33ed34be29f866fec7308f96ed244c6 + languageName: node + linkType: hard + +"@ethersproject/logger@npm:5.7.0, @ethersproject/logger@npm:^5.7.0": + version: 5.7.0 + resolution: "@ethersproject/logger@npm:5.7.0" + checksum: 075ab2f605f1fd0813f2e39c3308f77b44a67732b36e712d9bc085f22a84aac4da4f71b39bee50fe78da3e1c812673fadc41180c9970fe5e486e91ea17befe0d + languageName: node + linkType: hard + +"@ethersproject/networks@npm:5.7.1, @ethersproject/networks@npm:^5.7.0": + version: 5.7.1 + resolution: "@ethersproject/networks@npm:5.7.1" + dependencies: + "@ethersproject/logger": ^5.7.0 + checksum: 0339f312304c17d9a0adce550edb825d4d2c8c9468c1634c44172c67a9ed256f594da62c4cda5c3837a0f28b7fabc03aca9b492f68ff1fdad337ee861b27bd5d + languageName: node + linkType: hard + +"@ethersproject/pbkdf2@npm:5.7.0, @ethersproject/pbkdf2@npm:^5.7.0": + version: 5.7.0 + resolution: "@ethersproject/pbkdf2@npm:5.7.0" + dependencies: + "@ethersproject/bytes": ^5.7.0 + "@ethersproject/sha2": ^5.7.0 + checksum: b895adb9e35a8a127e794f7aadc31a2424ef355a70e51cde10d457e3e888bb8102373199a540cf61f2d6b9a32e47358f9c65b47d559f42bf8e596b5fd67901e9 + languageName: node + linkType: hard + +"@ethersproject/properties@npm:5.7.0, @ethersproject/properties@npm:^5.7.0": + version: 5.7.0 + resolution: 
"@ethersproject/properties@npm:5.7.0" + dependencies: + "@ethersproject/logger": ^5.7.0 + checksum: 6ab0ccf0c3aadc9221e0cdc5306ce6cd0df7f89f77d77bccdd1277182c9ead0202cd7521329ba3acde130820bf8af299e17cf567d0d497c736ee918207bbf59f + languageName: node + linkType: hard + +"@ethersproject/providers@npm:5.7.2, @ethersproject/providers@npm:^5.7.1, @ethersproject/providers@npm:^5.7.2": + version: 5.7.2 + resolution: "@ethersproject/providers@npm:5.7.2" + dependencies: + "@ethersproject/abstract-provider": ^5.7.0 + "@ethersproject/abstract-signer": ^5.7.0 + "@ethersproject/address": ^5.7.0 + "@ethersproject/base64": ^5.7.0 + "@ethersproject/basex": ^5.7.0 + "@ethersproject/bignumber": ^5.7.0 + "@ethersproject/bytes": ^5.7.0 + "@ethersproject/constants": ^5.7.0 + "@ethersproject/hash": ^5.7.0 + "@ethersproject/logger": ^5.7.0 + "@ethersproject/networks": ^5.7.0 + "@ethersproject/properties": ^5.7.0 + "@ethersproject/random": ^5.7.0 + "@ethersproject/rlp": ^5.7.0 + "@ethersproject/sha2": ^5.7.0 + "@ethersproject/strings": ^5.7.0 + "@ethersproject/transactions": ^5.7.0 + "@ethersproject/web": ^5.7.0 + bech32: 1.1.4 + ws: 7.4.6 + checksum: 1754c731a5ca6782ae9677f4a9cd8b6246c4ef21a966c9a01b133750f3c578431ec43ec254e699969c4a0f87e84463ded50f96b415600aabd37d2056aee58c19 + languageName: node + linkType: hard + +"@ethersproject/random@npm:5.7.0, @ethersproject/random@npm:^5.7.0": + version: 5.7.0 + resolution: "@ethersproject/random@npm:5.7.0" + dependencies: + "@ethersproject/bytes": ^5.7.0 + "@ethersproject/logger": ^5.7.0 + checksum: 017829c91cff6c76470852855108115b0b52c611b6be817ed1948d56ba42d6677803ec2012aa5ae298a7660024156a64c11fcf544e235e239ab3f89f0fff7345 + languageName: node + linkType: hard + +"@ethersproject/rlp@npm:5.7.0, @ethersproject/rlp@npm:^5.7.0": + version: 5.7.0 + resolution: "@ethersproject/rlp@npm:5.7.0" + dependencies: + "@ethersproject/bytes": ^5.7.0 + "@ethersproject/logger": ^5.7.0 + checksum: bce165b0f7e68e4d091c9d3cf47b247cac33252df77a095ca4281d32d5eeaaa3695d9bc06b2b057c5015353a68df89f13a4a54a72e888e4beeabbe56b15dda6e + languageName: node + linkType: hard + +"@ethersproject/sha2@npm:5.7.0, @ethersproject/sha2@npm:^5.7.0": + version: 5.7.0 + resolution: "@ethersproject/sha2@npm:5.7.0" + dependencies: + "@ethersproject/bytes": ^5.7.0 + "@ethersproject/logger": ^5.7.0 + hash.js: 1.1.7 + checksum: 09321057c022effbff4cc2d9b9558228690b5dd916329d75c4b1ffe32ba3d24b480a367a7cc92d0f0c0b1c896814d03351ae4630e2f1f7160be2bcfbde435dbc + languageName: node + linkType: hard + +"@ethersproject/signing-key@npm:5.7.0, @ethersproject/signing-key@npm:^5.7.0": + version: 5.7.0 + resolution: "@ethersproject/signing-key@npm:5.7.0" + dependencies: + "@ethersproject/bytes": ^5.7.0 + "@ethersproject/logger": ^5.7.0 + "@ethersproject/properties": ^5.7.0 + bn.js: ^5.2.1 + elliptic: 6.5.4 + hash.js: 1.1.7 + checksum: 8f8de09b0aac709683bbb49339bc0a4cd2f95598f3546436c65d6f3c3a847ffa98e06d35e9ed2b17d8030bd2f02db9b7bd2e11c5cf8a71aad4537487ab4cf03a + languageName: node + linkType: hard + +"@ethersproject/solidity@npm:5.7.0": + version: 5.7.0 + resolution: "@ethersproject/solidity@npm:5.7.0" + dependencies: + "@ethersproject/bignumber": ^5.7.0 + "@ethersproject/bytes": ^5.7.0 + "@ethersproject/keccak256": ^5.7.0 + "@ethersproject/logger": ^5.7.0 + "@ethersproject/sha2": ^5.7.0 + "@ethersproject/strings": ^5.7.0 + checksum: 9a02f37f801c96068c3e7721f83719d060175bc4e80439fe060e92bd7acfcb6ac1330c7e71c49f4c2535ca1308f2acdcb01e00133129aac00581724c2d6293f3 + languageName: node + linkType: hard + 
+"@ethersproject/strings@npm:5.7.0, @ethersproject/strings@npm:^5.7.0": + version: 5.7.0 + resolution: "@ethersproject/strings@npm:5.7.0" + dependencies: + "@ethersproject/bytes": ^5.7.0 + "@ethersproject/constants": ^5.7.0 + "@ethersproject/logger": ^5.7.0 + checksum: 5ff78693ae3fdf3cf23e1f6dc047a61e44c8197d2408c42719fef8cb7b7b3613a4eec88ac0ed1f9f5558c74fe0de7ae3195a29ca91a239c74b9f444d8e8b50df + languageName: node + linkType: hard + +"@ethersproject/transactions@npm:5.7.0, @ethersproject/transactions@npm:^5.7.0": + version: 5.7.0 + resolution: "@ethersproject/transactions@npm:5.7.0" + dependencies: + "@ethersproject/address": ^5.7.0 + "@ethersproject/bignumber": ^5.7.0 + "@ethersproject/bytes": ^5.7.0 + "@ethersproject/constants": ^5.7.0 + "@ethersproject/keccak256": ^5.7.0 + "@ethersproject/logger": ^5.7.0 + "@ethersproject/properties": ^5.7.0 + "@ethersproject/rlp": ^5.7.0 + "@ethersproject/signing-key": ^5.7.0 + checksum: a31b71996d2b283f68486241bff0d3ea3f1ba0e8f1322a8fffc239ccc4f4a7eb2ea9994b8fd2f093283fd75f87bae68171e01b6265261f821369aca319884a79 + languageName: node + linkType: hard + +"@ethersproject/units@npm:5.7.0": + version: 5.7.0 + resolution: "@ethersproject/units@npm:5.7.0" + dependencies: + "@ethersproject/bignumber": ^5.7.0 + "@ethersproject/constants": ^5.7.0 + "@ethersproject/logger": ^5.7.0 + checksum: 304714f848cd32e57df31bf545f7ad35c2a72adae957198b28cbc62166daa929322a07bff6e9c9ac4577ab6aa0de0546b065ed1b2d20b19e25748b7d475cb0fc + languageName: node + linkType: hard + +"@ethersproject/wallet@npm:5.7.0": + version: 5.7.0 + resolution: "@ethersproject/wallet@npm:5.7.0" + dependencies: + "@ethersproject/abstract-provider": ^5.7.0 + "@ethersproject/abstract-signer": ^5.7.0 + "@ethersproject/address": ^5.7.0 + "@ethersproject/bignumber": ^5.7.0 + "@ethersproject/bytes": ^5.7.0 + "@ethersproject/hash": ^5.7.0 + "@ethersproject/hdnode": ^5.7.0 + "@ethersproject/json-wallets": ^5.7.0 + "@ethersproject/keccak256": ^5.7.0 + "@ethersproject/logger": ^5.7.0 + "@ethersproject/properties": ^5.7.0 + "@ethersproject/random": ^5.7.0 + "@ethersproject/signing-key": ^5.7.0 + "@ethersproject/transactions": ^5.7.0 + "@ethersproject/wordlists": ^5.7.0 + checksum: a4009bf7331eddab38e3015b5e9101ef92de7f705b00a6196b997db0e5635b6d83561674d46c90c6f77b87c0500fe4a6b0183ba13749efc22db59c99deb82fbd + languageName: node + linkType: hard + +"@ethersproject/web@npm:5.7.1, @ethersproject/web@npm:^5.7.0": + version: 5.7.1 + resolution: "@ethersproject/web@npm:5.7.1" + dependencies: + "@ethersproject/base64": ^5.7.0 + "@ethersproject/bytes": ^5.7.0 + "@ethersproject/logger": ^5.7.0 + "@ethersproject/properties": ^5.7.0 + "@ethersproject/strings": ^5.7.0 + checksum: 7028c47103f82fd2e2c197ce0eecfacaa9180ffeec7de7845b1f4f9b19d84081b7a48227aaddde05a4aaa526af574a9a0ce01cc0fc75e3e371f84b38b5b16b2b + languageName: node + linkType: hard + +"@ethersproject/wordlists@npm:5.7.0, @ethersproject/wordlists@npm:^5.7.0": + version: 5.7.0 + resolution: "@ethersproject/wordlists@npm:5.7.0" + dependencies: + "@ethersproject/bytes": ^5.7.0 + "@ethersproject/hash": ^5.7.0 + "@ethersproject/logger": ^5.7.0 + "@ethersproject/properties": ^5.7.0 + "@ethersproject/strings": ^5.7.0 + checksum: 30eb6eb0731f9ef5faa44bf9c0c6e950bcaaef61e4d2d9ce0ae6d341f4e2d6d1f4ab4f8880bfce03b7aac4b862fb740e1421170cfbf8e2aafc359277d49e6e97 + languageName: node + linkType: hard + +"@fastify/busboy@npm:^2.0.0": + version: 2.1.0 + resolution: "@fastify/busboy@npm:2.1.0" + checksum: 
3233abd10f73e50668cb4bb278a79b7b3fadd30215ac6458299b0e5a09a29c3586ec07597aae6bd93f5cbedfcef43a8aeea51829cd28fc13850cdbcd324c28d5 + languageName: node + linkType: hard + +"@hapi/hoek@npm:^9.0.0": + version: 9.3.0 + resolution: "@hapi/hoek@npm:9.3.0" + checksum: 4771c7a776242c3c022b168046af4e324d116a9d2e1d60631ee64f474c6e38d1bb07092d898bf95c7bc5d334c5582798a1456321b2e53ca817d4e7c88bc25b43 + languageName: node + linkType: hard + +"@hapi/topo@npm:^5.0.0": + version: 5.1.0 + resolution: "@hapi/topo@npm:5.1.0" + dependencies: + "@hapi/hoek": ^9.0.0 + checksum: 604dfd5dde76d5c334bd03f9001fce69c7ce529883acf92da96f4fe7e51221bf5e5110e964caca287a6a616ba027c071748ab636ff178ad750547fba611d6014 + languageName: node + linkType: hard + +"@humanwhocodes/config-array@npm:^0.11.13": + version: 0.11.13 + resolution: "@humanwhocodes/config-array@npm:0.11.13" + dependencies: + "@humanwhocodes/object-schema": ^2.0.1 + debug: ^4.1.1 + minimatch: ^3.0.5 + checksum: f8ea57b0d7ed7f2d64cd3944654976829d9da91c04d9c860e18804729a33f7681f78166ef4c761850b8c324d362f7d53f14c5c44907a6b38b32c703ff85e4805 + languageName: node + linkType: hard + +"@humanwhocodes/module-importer@npm:^1.0.1": + version: 1.0.1 + resolution: "@humanwhocodes/module-importer@npm:1.0.1" + checksum: 0fd22007db8034a2cdf2c764b140d37d9020bbfce8a49d3ec5c05290e77d4b0263b1b972b752df8c89e5eaa94073408f2b7d977aed131faf6cf396ebb5d7fb61 + languageName: node + linkType: hard + +"@humanwhocodes/object-schema@npm:^2.0.1": + version: 2.0.1 + resolution: "@humanwhocodes/object-schema@npm:2.0.1" + checksum: 24929487b1ed48795d2f08346a0116cc5ee4634848bce64161fb947109352c562310fd159fc64dda0e8b853307f5794605191a9547f7341158559ca3c8262a45 + languageName: node + linkType: hard + +"@isaacs/cliui@npm:^8.0.2": + version: 8.0.2 + resolution: "@isaacs/cliui@npm:8.0.2" + dependencies: + string-width: ^5.1.2 + string-width-cjs: "npm:string-width@^4.2.0" + strip-ansi: ^7.0.1 + strip-ansi-cjs: "npm:strip-ansi@^6.0.1" + wrap-ansi: ^8.1.0 + wrap-ansi-cjs: "npm:wrap-ansi@^7.0.0" + checksum: 4a473b9b32a7d4d3cfb7a614226e555091ff0c5a29a1734c28c72a182c2f6699b26fc6b5c2131dfd841e86b185aea714c72201d7c98c2fba5f17709333a67aeb + languageName: node + linkType: hard + +"@jest/schemas@npm:^29.6.3": + version: 29.6.3 + resolution: "@jest/schemas@npm:29.6.3" + dependencies: + "@sinclair/typebox": ^0.27.8 + checksum: 910040425f0fc93cd13e68c750b7885590b8839066dfa0cd78e7def07bbb708ad869381f725945d66f2284de5663bbecf63e8fdd856e2ae6e261ba30b1687e93 + languageName: node + linkType: hard + +"@jest/types@npm:^29.6.3": + version: 29.6.3 + resolution: "@jest/types@npm:29.6.3" + dependencies: + "@jest/schemas": ^29.6.3 + "@types/istanbul-lib-coverage": ^2.0.0 + "@types/istanbul-reports": ^3.0.0 + "@types/node": "*" + "@types/yargs": ^17.0.8 + chalk: ^4.0.0 + checksum: a0bcf15dbb0eca6bdd8ce61a3fb055349d40268622a7670a3b2eb3c3dbafe9eb26af59938366d520b86907b9505b0f9b29b85cec11579a9e580694b87cd90fcc + languageName: node + linkType: hard + +"@jridgewell/gen-mapping@npm:^0.3.0, @jridgewell/gen-mapping@npm:^0.3.2": + version: 0.3.3 + resolution: "@jridgewell/gen-mapping@npm:0.3.3" + dependencies: + "@jridgewell/set-array": ^1.0.1 + "@jridgewell/sourcemap-codec": ^1.4.10 + "@jridgewell/trace-mapping": ^0.3.9 + checksum: 4a74944bd31f22354fc01c3da32e83c19e519e3bbadafa114f6da4522ea77dd0c2842607e923a591d60a76699d819a2fbb6f3552e277efdb9b58b081390b60ab + languageName: node + linkType: hard + +"@jridgewell/resolve-uri@npm:^3.0.3, @jridgewell/resolve-uri@npm:^3.1.0": + version: 3.1.1 + resolution: 
"@jridgewell/resolve-uri@npm:3.1.1" + checksum: f5b441fe7900eab4f9155b3b93f9800a916257f4e8563afbcd3b5a5337b55e52bd8ae6735453b1b745457d9f6cdb16d74cd6220bbdd98cf153239e13f6cbb653 + languageName: node + linkType: hard + +"@jridgewell/set-array@npm:^1.0.1": + version: 1.1.2 + resolution: "@jridgewell/set-array@npm:1.1.2" + checksum: 69a84d5980385f396ff60a175f7177af0b8da4ddb81824cb7016a9ef914eee9806c72b6b65942003c63f7983d4f39a5c6c27185bbca88eb4690b62075602e28e + languageName: node + linkType: hard + +"@jridgewell/source-map@npm:^0.3.3": + version: 0.3.5 + resolution: "@jridgewell/source-map@npm:0.3.5" + dependencies: + "@jridgewell/gen-mapping": ^0.3.0 + "@jridgewell/trace-mapping": ^0.3.9 + checksum: 1ad4dec0bdafbade57920a50acec6634f88a0eb735851e0dda906fa9894e7f0549c492678aad1a10f8e144bfe87f238307bf2a914a1bc85b7781d345417e9f6f + languageName: node + linkType: hard + +"@jridgewell/sourcemap-codec@npm:^1.4.10, @jridgewell/sourcemap-codec@npm:^1.4.14": + version: 1.4.15 + resolution: "@jridgewell/sourcemap-codec@npm:1.4.15" + checksum: b881c7e503db3fc7f3c1f35a1dd2655a188cc51a3612d76efc8a6eb74728bef5606e6758ee77423e564092b4a518aba569bbb21c9bac5ab7a35b0c6ae7e344c8 + languageName: node + linkType: hard + +"@jridgewell/trace-mapping@npm:0.3.9": + version: 0.3.9 + resolution: "@jridgewell/trace-mapping@npm:0.3.9" + dependencies: + "@jridgewell/resolve-uri": ^3.0.3 + "@jridgewell/sourcemap-codec": ^1.4.10 + checksum: d89597752fd88d3f3480845691a05a44bd21faac18e2185b6f436c3b0fd0c5a859fbbd9aaa92050c4052caf325ad3e10e2e1d1b64327517471b7d51babc0ddef + languageName: node + linkType: hard + +"@jridgewell/trace-mapping@npm:^0.3.12, @jridgewell/trace-mapping@npm:^0.3.17, @jridgewell/trace-mapping@npm:^0.3.9": + version: 0.3.20 + resolution: "@jridgewell/trace-mapping@npm:0.3.20" + dependencies: + "@jridgewell/resolve-uri": ^3.1.0 + "@jridgewell/sourcemap-codec": ^1.4.14 + checksum: cd1a7353135f385909468ff0cf20bdd37e59f2ee49a13a966dedf921943e222082c583ade2b579ff6cd0d8faafcb5461f253e1bf2a9f48fec439211fdbe788f5 + languageName: node + linkType: hard + +"@leichtgewicht/ip-codec@npm:^2.0.1": + version: 2.0.4 + resolution: "@leichtgewicht/ip-codec@npm:2.0.4" + checksum: 468de1f04d33de6d300892683d7c8aecbf96d1e2c5fe084f95f816e50a054d45b7c1ebfb141a1447d844b86a948733f6eebd92234da8581c84a1ad4de2946a2d + languageName: node + linkType: hard + +"@ltd/j-toml@npm:^1.38.0": + version: 1.38.0 + resolution: "@ltd/j-toml@npm:1.38.0" + checksum: 34f5d0ec652e790a7a733f0d3a8d9957d63997bd0efc13a61beb9d772bae75519453884fbc3fd6a2d5fe15674834bdd57ca1824bb1de8f829e5ce195fc5fa3ea + languageName: node + linkType: hard + +"@mdn/browser-compat-data@npm:^4.0.0": + version: 4.2.1 + resolution: "@mdn/browser-compat-data@npm:4.2.1" + checksum: 76eaa7dafed154040e769ba6d23f2dcb58e805ed3ccb376a5c4b76326c92643753c20194faed363870800dc3c1af26c107b8562710c8bb37aaee8c5ffe2a89cd + languageName: node + linkType: hard + +"@mdx-js/mdx@npm:^1.6.22": + version: 1.6.22 + resolution: "@mdx-js/mdx@npm:1.6.22" + dependencies: + "@babel/core": 7.12.9 + "@babel/plugin-syntax-jsx": 7.12.1 + "@babel/plugin-syntax-object-rest-spread": 7.8.3 + "@mdx-js/util": 1.6.22 + babel-plugin-apply-mdx-type-prop: 1.6.22 + babel-plugin-extract-import-names: 1.6.22 + camelcase-css: 2.0.1 + detab: 2.0.4 + hast-util-raw: 6.0.1 + lodash.uniq: 4.5.0 + mdast-util-to-hast: 10.0.1 + remark-footnotes: 2.0.0 + remark-mdx: 1.6.22 + remark-parse: 8.0.3 + remark-squeeze-paragraphs: 4.0.0 + style-to-object: 0.3.0 + unified: 9.2.0 + unist-builder: 2.0.3 + unist-util-visit: 2.0.3 + checksum: 
0839b4a3899416326ea6578fe9e470af319da559bc6d3669c60942e456b49a98eebeb3358c623007b4786a2175a450d2c51cd59df64639013c5a3d22366931a6 + languageName: node + linkType: hard + +"@mdx-js/mdx@npm:^3.0.0": + version: 3.0.0 + resolution: "@mdx-js/mdx@npm:3.0.0" + dependencies: + "@types/estree": ^1.0.0 + "@types/estree-jsx": ^1.0.0 + "@types/hast": ^3.0.0 + "@types/mdx": ^2.0.0 + collapse-white-space: ^2.0.0 + devlop: ^1.0.0 + estree-util-build-jsx: ^3.0.0 + estree-util-is-identifier-name: ^3.0.0 + estree-util-to-js: ^2.0.0 + estree-walker: ^3.0.0 + hast-util-to-estree: ^3.0.0 + hast-util-to-jsx-runtime: ^2.0.0 + markdown-extensions: ^2.0.0 + periscopic: ^3.0.0 + remark-mdx: ^3.0.0 + remark-parse: ^11.0.0 + remark-rehype: ^11.0.0 + source-map: ^0.7.0 + unified: ^11.0.0 + unist-util-position-from-estree: ^2.0.0 + unist-util-stringify-position: ^4.0.0 + unist-util-visit: ^5.0.0 + vfile: ^6.0.0 + checksum: da4305dcfd9012521170e0ed439eb336900fb84a5784e5e3dac2144855fa603325477855e17a04b7c673cc24699cf2bfd611c958f591bb3a9afb5608c259bbd3 + languageName: node + linkType: hard + +"@mdx-js/react@npm:^3.0.0": + version: 3.0.0 + resolution: "@mdx-js/react@npm:3.0.0" + dependencies: + "@types/mdx": ^2.0.0 + peerDependencies: + "@types/react": ">=16" + react: ">=16" + checksum: a780cff9d7f7639d6fc21c9d4e0a6ac1370c3209ea0db176923df7f9145785309591cf871f227f5135d1fe2accce0d5df9a22fc0530e5dda0c7b4b105705f20d + languageName: node + linkType: hard + +"@mdx-js/util@npm:1.6.22": + version: 1.6.22 + resolution: "@mdx-js/util@npm:1.6.22" + checksum: 4b393907e39a1a75214f0314bf72a0adfa5e5adffd050dd5efe9c055b8549481a3cfc9f308c16dfb33311daf3ff63added7d5fd1fe52db614c004f886e0e559a + languageName: node + linkType: hard + +"@metamask/eth-sig-util@npm:^4.0.0": + version: 4.0.1 + resolution: "@metamask/eth-sig-util@npm:4.0.1" + dependencies: + ethereumjs-abi: ^0.6.8 + ethereumjs-util: ^6.2.1 + ethjs-util: ^0.1.6 + tweetnacl: ^1.0.3 + tweetnacl-util: ^0.15.1 + checksum: 740df4c92a1282e6be4c00c86c1a8ccfb93e767596e43f6da895aa5bab4a28fc3c2209f0327db34924a4a1e9db72bc4d3dddfcfc45cca0b218c9ccbf7d1b1445 + languageName: node + linkType: hard + +"@monaco-editor/loader@npm:^1.4.0": + version: 1.4.0 + resolution: "@monaco-editor/loader@npm:1.4.0" + dependencies: + state-local: ^1.0.6 + peerDependencies: + monaco-editor: ">= 0.21.0 < 1" + checksum: 374ec0ea872ee15b33310e105a43217148161480d3955c5cece87d0f801754cd2c45a3f6c539a75da18a066c1615756fb87eaf1003f1df6a64a0cbce5d2c3749 + languageName: node + linkType: hard + +"@monaco-editor/react@npm:4.6.0": + version: 4.6.0 + resolution: "@monaco-editor/react@npm:4.6.0" + dependencies: + "@monaco-editor/loader": ^1.4.0 + peerDependencies: + monaco-editor: ">= 0.25.0 < 1" + react: ^16.8.0 || ^17.0.0 || ^18.0.0 + react-dom: ^16.8.0 || ^17.0.0 || ^18.0.0 + checksum: 9d44e76c5baad6db5f84c90a5540fbd3c9af691b97d76cf2a99b3c8273004d0efe44c2572d80e9d975c9af10022c21e4a66923924950a5201e82017c8b20428c + languageName: node + linkType: hard + +"@noble/curves@npm:1.2.0": + version: 1.2.0 + resolution: "@noble/curves@npm:1.2.0" + dependencies: + "@noble/hashes": 1.3.2 + checksum: bb798d7a66d8e43789e93bc3c2ddff91a1e19fdb79a99b86cd98f1e5eff0ee2024a2672902c2576ef3577b6f282f3b5c778bebd55761ddbb30e36bf275e83dd0 + languageName: node + linkType: hard + +"@noble/hashes@npm:1.2.0, @noble/hashes@npm:~1.2.0": + version: 1.2.0 + resolution: "@noble/hashes@npm:1.2.0" + checksum: 8ca080ce557b8f40fb2f78d3aedffd95825a415ac8e13d7ffe3643f8626a8c2d99a3e5975b555027ac24316d8b3c02a35b8358567c0c23af681e6573602aa434 + languageName: node + 
linkType: hard + +"@noble/hashes@npm:1.3.2": + version: 1.3.2 + resolution: "@noble/hashes@npm:1.3.2" + checksum: fe23536b436539d13f90e4b9be843cc63b1b17666a07634a2b1259dded6f490be3d050249e6af98076ea8f2ea0d56f578773c2197f2aa0eeaa5fba5bc18ba474 + languageName: node + linkType: hard + +"@noble/secp256k1@npm:1.7.1, @noble/secp256k1@npm:~1.7.0": + version: 1.7.1 + resolution: "@noble/secp256k1@npm:1.7.1" + checksum: d2301f1f7690368d8409a3152450458f27e54df47e3f917292de3de82c298770890c2de7c967d237eff9c95b70af485389a9695f73eb05a43e2bd562d18b18cb + languageName: node + linkType: hard + +"@node-rs/jieba-android-arm-eabi@npm:1.7.2": + version: 1.7.2 + resolution: "@node-rs/jieba-android-arm-eabi@npm:1.7.2" + conditions: os=android & cpu=arm + languageName: node + linkType: hard + +"@node-rs/jieba-android-arm64@npm:1.7.2": + version: 1.7.2 + resolution: "@node-rs/jieba-android-arm64@npm:1.7.2" + conditions: os=android & cpu=arm64 + languageName: node + linkType: hard + +"@node-rs/jieba-darwin-arm64@npm:1.7.2": + version: 1.7.2 + resolution: "@node-rs/jieba-darwin-arm64@npm:1.7.2" + conditions: os=darwin & cpu=arm64 + languageName: node + linkType: hard + +"@node-rs/jieba-darwin-x64@npm:1.7.2": + version: 1.7.2 + resolution: "@node-rs/jieba-darwin-x64@npm:1.7.2" + conditions: os=darwin & cpu=x64 + languageName: node + linkType: hard + +"@node-rs/jieba-freebsd-x64@npm:1.7.2": + version: 1.7.2 + resolution: "@node-rs/jieba-freebsd-x64@npm:1.7.2" + conditions: os=freebsd & cpu=x64 + languageName: node + linkType: hard + +"@node-rs/jieba-linux-arm-gnueabihf@npm:1.7.2": + version: 1.7.2 + resolution: "@node-rs/jieba-linux-arm-gnueabihf@npm:1.7.2" + conditions: os=linux & cpu=arm + languageName: node + linkType: hard + +"@node-rs/jieba-linux-arm64-gnu@npm:1.7.2": + version: 1.7.2 + resolution: "@node-rs/jieba-linux-arm64-gnu@npm:1.7.2" + conditions: os=linux & cpu=arm64 & libc=glibc + languageName: node + linkType: hard + +"@node-rs/jieba-linux-arm64-musl@npm:1.7.2": + version: 1.7.2 + resolution: "@node-rs/jieba-linux-arm64-musl@npm:1.7.2" + conditions: os=linux & cpu=arm64 & libc=musl + languageName: node + linkType: hard + +"@node-rs/jieba-linux-x64-gnu@npm:1.7.2": + version: 1.7.2 + resolution: "@node-rs/jieba-linux-x64-gnu@npm:1.7.2" + conditions: os=linux & cpu=x64 & libc=glibc + languageName: node + linkType: hard + +"@node-rs/jieba-linux-x64-musl@npm:1.7.2": + version: 1.7.2 + resolution: "@node-rs/jieba-linux-x64-musl@npm:1.7.2" + conditions: os=linux & cpu=x64 & libc=musl + languageName: node + linkType: hard + +"@node-rs/jieba-win32-arm64-msvc@npm:1.7.2": + version: 1.7.2 + resolution: "@node-rs/jieba-win32-arm64-msvc@npm:1.7.2" + conditions: os=win32 & cpu=arm64 + languageName: node + linkType: hard + +"@node-rs/jieba-win32-ia32-msvc@npm:1.7.2": + version: 1.7.2 + resolution: "@node-rs/jieba-win32-ia32-msvc@npm:1.7.2" + conditions: os=win32 & cpu=ia32 + languageName: node + linkType: hard + +"@node-rs/jieba-win32-x64-msvc@npm:1.7.2": + version: 1.7.2 + resolution: "@node-rs/jieba-win32-x64-msvc@npm:1.7.2" + conditions: os=win32 & cpu=x64 + languageName: node + linkType: hard + +"@node-rs/jieba@npm:^1.6.0": + version: 1.7.2 + resolution: "@node-rs/jieba@npm:1.7.2" + dependencies: + "@node-rs/jieba-android-arm-eabi": 1.7.2 + "@node-rs/jieba-android-arm64": 1.7.2 + "@node-rs/jieba-darwin-arm64": 1.7.2 + "@node-rs/jieba-darwin-x64": 1.7.2 + "@node-rs/jieba-freebsd-x64": 1.7.2 + "@node-rs/jieba-linux-arm-gnueabihf": 1.7.2 + "@node-rs/jieba-linux-arm64-gnu": 1.7.2 + "@node-rs/jieba-linux-arm64-musl": 
1.7.2 + "@node-rs/jieba-linux-x64-gnu": 1.7.2 + "@node-rs/jieba-linux-x64-musl": 1.7.2 + "@node-rs/jieba-win32-arm64-msvc": 1.7.2 + "@node-rs/jieba-win32-ia32-msvc": 1.7.2 + "@node-rs/jieba-win32-x64-msvc": 1.7.2 + dependenciesMeta: + "@node-rs/jieba-android-arm-eabi": + optional: true + "@node-rs/jieba-android-arm64": + optional: true + "@node-rs/jieba-darwin-arm64": + optional: true + "@node-rs/jieba-darwin-x64": + optional: true + "@node-rs/jieba-freebsd-x64": + optional: true + "@node-rs/jieba-linux-arm-gnueabihf": + optional: true + "@node-rs/jieba-linux-arm64-gnu": + optional: true + "@node-rs/jieba-linux-arm64-musl": + optional: true + "@node-rs/jieba-linux-x64-gnu": + optional: true + "@node-rs/jieba-linux-x64-musl": + optional: true + "@node-rs/jieba-win32-arm64-msvc": + optional: true + "@node-rs/jieba-win32-ia32-msvc": + optional: true + "@node-rs/jieba-win32-x64-msvc": + optional: true + checksum: 47108c5e84993ba2ddbc1d922ecb840b74668d61ff8ec83137a300c02e6c97748e64d98f23484dce7d1aee9d00dcf3d1921ccb5375b83ed8474650d0fc1e7e60 + languageName: node + linkType: hard + +"@nodelib/fs.scandir@npm:2.1.5": + version: 2.1.5 + resolution: "@nodelib/fs.scandir@npm:2.1.5" + dependencies: + "@nodelib/fs.stat": 2.0.5 + run-parallel: ^1.1.9 + checksum: a970d595bd23c66c880e0ef1817791432dbb7acbb8d44b7e7d0e7a22f4521260d4a83f7f9fd61d44fda4610105577f8f58a60718105fb38352baed612fd79e59 + languageName: node + linkType: hard + +"@nodelib/fs.stat@npm:2.0.5, @nodelib/fs.stat@npm:^2.0.2": + version: 2.0.5 + resolution: "@nodelib/fs.stat@npm:2.0.5" + checksum: 012480b5ca9d97bff9261571dbbec7bbc6033f69cc92908bc1ecfad0792361a5a1994bc48674b9ef76419d056a03efadfce5a6cf6dbc0a36559571a7a483f6f0 + languageName: node + linkType: hard + +"@nodelib/fs.walk@npm:^1.2.3, @nodelib/fs.walk@npm:^1.2.8": + version: 1.2.8 + resolution: "@nodelib/fs.walk@npm:1.2.8" + dependencies: + "@nodelib/fs.scandir": 2.1.5 + fastq: ^1.6.0 + checksum: 190c643f156d8f8f277bf2a6078af1ffde1fd43f498f187c2db24d35b4b4b5785c02c7dc52e356497b9a1b65b13edc996de08de0b961c32844364da02986dc53 + languageName: node + linkType: hard + +"@noir-lang/acvm_js@npm:0.35.0": + version: 0.35.0 + resolution: "@noir-lang/acvm_js@npm:0.35.0" + checksum: 0a2209f60242d9dbef31e72b738d70ff802da71f12d4b4f278144d373941f30844c8d041518970e71ba82eb9337cf4cc3a40fbd4b2c50acff45d84d2ee7dd435 + languageName: node + linkType: hard + +"@noir-lang/acvm_js@workspace:*, @noir-lang/acvm_js@workspace:acvm-repo/acvm_js": + version: 0.0.0-use.local + resolution: "@noir-lang/acvm_js@workspace:acvm-repo/acvm_js" + dependencies: + "@esm-bundle/chai": ^4.3.4-fix.0 + "@web/dev-server-esbuild": ^0.3.6 + "@web/test-runner": ^0.15.3 + "@web/test-runner-playwright": ^0.10.0 + chai: ^4.3.7 + eslint: ^8.50.0 + eslint-plugin-prettier: ^5.0.0 + mocha: ^10.2.0 + prettier: 3.0.3 + ts-node: ^10.9.1 + typescript: ^5.0.4 + languageName: unknown + linkType: soft + +"@noir-lang/backend_barretenberg@npm:^0.19.2": + version: 0.19.4 + resolution: "@noir-lang/backend_barretenberg@npm:0.19.4" + dependencies: + "@aztec/bb.js": 0.16.0 + "@noir-lang/types": 0.19.4 + fflate: ^0.8.0 + checksum: b84382e71f29f0e10415f8c3a68eb7bb0653f56df1b3e8afd66c22df7adc950fb9016124f32253abad3d05583bccf26b1d0c0cabc601218083fd27d0601a3c02 + languageName: node + linkType: hard + +"@noir-lang/backend_barretenberg@workspace:*, @noir-lang/backend_barretenberg@workspace:tooling/noir_js_backend_barretenberg": + version: 0.0.0-use.local + resolution: "@noir-lang/backend_barretenberg@workspace:tooling/noir_js_backend_barretenberg" + 
dependencies: + "@aztec/bb.js": 0.17.0 + "@noir-lang/types": "workspace:*" + "@types/node": ^20.6.2 + "@types/prettier": ^3 + chai: ^4.3.8 + eslint: ^8.50.0 + eslint-plugin-prettier: ^5.0.0 + fflate: ^0.8.0 + mocha: ^10.2.0 + prettier: 3.0.3 + ts-node: ^10.9.1 + typescript: 5.1.5 + languageName: unknown + linkType: soft + +"@noir-lang/noir_codegen@workspace:tooling/noir_codegen": + version: 0.0.0-use.local + resolution: "@noir-lang/noir_codegen@workspace:tooling/noir_codegen" + dependencies: + "@noir-lang/noir_js": "workspace:*" + "@noir-lang/types": "workspace:*" + "@types/chai": ^4 + "@types/mocha": ^10.0.1 + "@types/node": ^20.6.2 + "@types/prettier": ^3 + chai: ^4.3.8 + eslint: ^8.50.0 + eslint-plugin-prettier: ^5.0.0 + glob: ^10.3.10 + mocha: ^10.2.0 + prettier: 3.0.3 + ts-command-line-args: ^2.5.1 + ts-node: ^10.9.1 + tsx: ^4.6.2 + typescript: ^5.2.2 + bin: + noir-codegen: lib/main.js + languageName: unknown + linkType: soft + +"@noir-lang/noir_js@npm:^0.19.2": + version: 0.19.4 + resolution: "@noir-lang/noir_js@npm:0.19.4" + dependencies: + "@noir-lang/acvm_js": 0.35.0 + "@noir-lang/noirc_abi": 0.19.4 + "@noir-lang/types": 0.19.4 + checksum: 1ee6140019861cee3a27d511951e6b3ed629925a83328b0fbe18791a534c36d26d33ae3ca3aa25252fac8932dfeefedc0d5f07169d5d08f1485d6290965d2491 + languageName: node + linkType: hard + +"@noir-lang/noir_js@workspace:*, @noir-lang/noir_js@workspace:tooling/noir_js": + version: 0.0.0-use.local + resolution: "@noir-lang/noir_js@workspace:tooling/noir_js" + dependencies: + "@noir-lang/acvm_js": "workspace:*" + "@noir-lang/noirc_abi": "workspace:*" + "@noir-lang/types": "workspace:*" + "@types/chai": ^4 + "@types/mocha": ^10.0.1 + "@types/node": ^20.6.2 + "@types/prettier": ^3 + chai: ^4.3.8 + eslint: ^8.50.0 + eslint-plugin-prettier: ^5.0.0 + mocha: ^10.2.0 + prettier: 3.0.3 + ts-node: ^10.9.1 + tsc-multi: ^1.1.0 + tsx: ^4.6.2 + typescript: ^5.2.2 + languageName: unknown + linkType: soft + +"@noir-lang/noir_wasm@npm:^0.19.2": + version: 0.19.4 + resolution: "@noir-lang/noir_wasm@npm:0.19.4" + peerDependencies: + "@noir-lang/source-resolver": 0.19.4 + checksum: b69748240e27851f19b20b4ce1d0c57da7e9055b9d0e0c16f699338394912c00277e5586afcff77d3f6f08e17bbf1bc6253ff1ad156b8f4985c4435adad0ba49 + languageName: node + linkType: hard + +"@noir-lang/noir_wasm@workspace:*, @noir-lang/noir_wasm@workspace:compiler/wasm": + version: 0.0.0-use.local + resolution: "@noir-lang/noir_wasm@workspace:compiler/wasm" + dependencies: + "@esm-bundle/chai": ^4.3.4-fix.0 + "@ltd/j-toml": ^1.38.0 + "@noir-lang/noirc_abi": "workspace:*" + "@types/adm-zip": ^0.5.0 + "@types/chai": ^4 + "@types/mocha": ^10.0.6 + "@types/mocha-each": ^2 + "@types/node": ^20.10.5 + "@types/path-browserify": ^1 + "@types/readable-stream": ^4 + "@types/sinon": ^17 + "@wasm-tool/wasm-pack-plugin": ^1.7.0 + "@web/dev-server-esbuild": ^0.3.6 + "@web/test-runner": ^0.18.0 + "@web/test-runner-playwright": ^0.11.0 + adm-zip: ^0.5.0 + assert: ^2.1.0 + browserify-fs: ^1.0.0 + chai: ^4.3.10 + copy-webpack-plugin: ^11.0.0 + html-webpack-plugin: ^5.5.4 + memfs: ^4.6.0 + mocha: ^10.2.0 + mocha-each: ^2.0.1 + path-browserify: ^1.0.1 + process: ^0.11.10 + readable-stream: ^4.4.2 + sinon: ^17.0.1 + ts-loader: ^9.5.1 + ts-node: ^10.9.1 + typescript: ~5.2.2 + unzipit: ^1.4.3 + url: ^0.11.3 + webpack: ^5.49.0 + webpack-cli: ^4.7.2 + languageName: unknown + linkType: soft + +"@noir-lang/noirc_abi@npm:0.19.4": + version: 0.19.4 + resolution: "@noir-lang/noirc_abi@npm:0.19.4" + checksum: 
7446127935c954ae1b2bfdfa6cf111a9c14a2024f13a2d7e7e0a612319b0b4cc199acdbceaa94168e3bd5d82760b9d2fd2c9eec6677d57ed76e7b666e2b28c7d + languageName: node + linkType: hard + +"@noir-lang/noirc_abi@workspace:*, @noir-lang/noirc_abi@workspace:tooling/noirc_abi_wasm": + version: 0.0.0-use.local + resolution: "@noir-lang/noirc_abi@workspace:tooling/noirc_abi_wasm" + dependencies: + "@esm-bundle/chai": ^4.3.4-fix.0 + "@web/dev-server-esbuild": ^0.3.6 + "@web/test-runner": ^0.15.3 + "@web/test-runner-playwright": ^0.10.0 + eslint: ^8.50.0 + mocha: ^10.2.0 + languageName: unknown + linkType: soft + +"@noir-lang/root@workspace:.": + version: 0.0.0-use.local + resolution: "@noir-lang/root@workspace:." + dependencies: + "@typescript-eslint/eslint-plugin": ^6.7.3 + "@typescript-eslint/parser": ^6.7.3 + chai: ^4.3.7 + cspell: ^8.3.2 + eslint: ^8.50.0 + eslint-plugin-prettier: ^5.0.0 + mocha: ^10.2.0 + prettier: 3.0.3 + ts-node: ^10.9.1 + typescript: ^5.0.4 + languageName: unknown + linkType: soft + +"@noir-lang/source-resolver@npm:^0.19.2": + version: 0.19.4 + resolution: "@noir-lang/source-resolver@npm:0.19.4" + checksum: f0e51bb54daa1197894a17e5d0d125481aa0872c49c0d955feec2629b8ff51b9b48144778def6539a87b723a9b4c5eacc79d6eb935f0da6a1893d8d403689683 + languageName: node + linkType: hard + +"@noir-lang/types@npm:0.19.4, @noir-lang/types@npm:^0.19.2": + version: 0.19.4 + resolution: "@noir-lang/types@npm:0.19.4" + dependencies: + "@noir-lang/noirc_abi": 0.19.4 + checksum: 1d0ce99895c0bc98e9daf67028132f71e30f60b2dc0799ee99f311d917e84459a0d2a37c318f5ab14e594e7dd26f5e612e5fd9a7e0a20a692a7017d7c883bb4d + languageName: node + linkType: hard + +"@noir-lang/types@workspace:*, @noir-lang/types@workspace:tooling/noir_js_types": + version: 0.0.0-use.local + resolution: "@noir-lang/types@workspace:tooling/noir_js_types" + dependencies: + "@noir-lang/noirc_abi": "workspace:*" + "@types/prettier": ^3 + eslint: ^8.50.0 + eslint-plugin-prettier: ^5.0.0 + prettier: 3.0.3 + typescript: ^5.2.2 + languageName: unknown + linkType: soft + +"@nomicfoundation/ethereumjs-block@npm:5.0.2": + version: 5.0.2 + resolution: "@nomicfoundation/ethereumjs-block@npm:5.0.2" + dependencies: + "@nomicfoundation/ethereumjs-common": 4.0.2 + "@nomicfoundation/ethereumjs-rlp": 5.0.2 + "@nomicfoundation/ethereumjs-trie": 6.0.2 + "@nomicfoundation/ethereumjs-tx": 5.0.2 + "@nomicfoundation/ethereumjs-util": 9.0.2 + ethereum-cryptography: 0.1.3 + ethers: ^5.7.1 + checksum: 7ff744f44a01f1c059ca7812a1cfc8089f87aa506af6cb39c78331dca71b32993cbd6fa05ad03f8c4f4fab73bb998a927af69e0d8ff01ae192ee5931606e09f5 + languageName: node + linkType: hard + +"@nomicfoundation/ethereumjs-blockchain@npm:7.0.2": + version: 7.0.2 + resolution: "@nomicfoundation/ethereumjs-blockchain@npm:7.0.2" + dependencies: + "@nomicfoundation/ethereumjs-block": 5.0.2 + "@nomicfoundation/ethereumjs-common": 4.0.2 + "@nomicfoundation/ethereumjs-ethash": 3.0.2 + "@nomicfoundation/ethereumjs-rlp": 5.0.2 + "@nomicfoundation/ethereumjs-trie": 6.0.2 + "@nomicfoundation/ethereumjs-tx": 5.0.2 + "@nomicfoundation/ethereumjs-util": 9.0.2 + abstract-level: ^1.0.3 + debug: ^4.3.3 + ethereum-cryptography: 0.1.3 + level: ^8.0.0 + lru-cache: ^5.1.1 + memory-level: ^1.0.0 + checksum: b7e440dcd73e32aa72d13bfd28cb472773c9c60ea808a884131bf7eb3f42286ad594a0864215f599332d800f3fe1f772fff4b138d2dcaa8f41e4d8389bff33e7 + languageName: node + linkType: hard + +"@nomicfoundation/ethereumjs-common@npm:4.0.2": + version: 4.0.2 + resolution: "@nomicfoundation/ethereumjs-common@npm:4.0.2" + dependencies: + 
"@nomicfoundation/ethereumjs-util": 9.0.2 + crc-32: ^1.2.0 + checksum: f0d84704d6254d374299c19884312bd5666974b4b6f342d3f10bc76e549de78d20e45a53d25fbdc146268a52335497127e4f069126da7c60ac933a158e704887 + languageName: node + linkType: hard + +"@nomicfoundation/ethereumjs-ethash@npm:3.0.2": + version: 3.0.2 + resolution: "@nomicfoundation/ethereumjs-ethash@npm:3.0.2" + dependencies: + "@nomicfoundation/ethereumjs-block": 5.0.2 + "@nomicfoundation/ethereumjs-rlp": 5.0.2 + "@nomicfoundation/ethereumjs-util": 9.0.2 + abstract-level: ^1.0.3 + bigint-crypto-utils: ^3.0.23 + ethereum-cryptography: 0.1.3 + checksum: e4011e4019dd9b92f7eeebfc1e6c9a9685c52d8fd0ee4f28f03e50048a23b600c714490827f59fdce497b3afb503b3fd2ebf6815ff307e9949c3efeff1403278 + languageName: node + linkType: hard + +"@nomicfoundation/ethereumjs-evm@npm:2.0.2": + version: 2.0.2 + resolution: "@nomicfoundation/ethereumjs-evm@npm:2.0.2" + dependencies: + "@ethersproject/providers": ^5.7.1 + "@nomicfoundation/ethereumjs-common": 4.0.2 + "@nomicfoundation/ethereumjs-tx": 5.0.2 + "@nomicfoundation/ethereumjs-util": 9.0.2 + debug: ^4.3.3 + ethereum-cryptography: 0.1.3 + mcl-wasm: ^0.7.1 + rustbn.js: ~0.2.0 + checksum: a23cf570836ddc147606b02df568069de946108e640f902358fef67e589f6b371d856056ee44299d9b4e3497f8ae25faa45e6b18fefd90e9b222dc6a761d85f0 + languageName: node + linkType: hard + +"@nomicfoundation/ethereumjs-rlp@npm:5.0.2": + version: 5.0.2 + resolution: "@nomicfoundation/ethereumjs-rlp@npm:5.0.2" + bin: + rlp: bin/rlp + checksum: a74434cadefca9aa8754607cc1ad7bb4bbea4ee61c6214918e60a5bbee83206850346eb64e39fd1fe97f854c7ec0163e01148c0c881dda23881938f0645a0ef2 + languageName: node + linkType: hard + +"@nomicfoundation/ethereumjs-statemanager@npm:2.0.2": + version: 2.0.2 + resolution: "@nomicfoundation/ethereumjs-statemanager@npm:2.0.2" + dependencies: + "@nomicfoundation/ethereumjs-common": 4.0.2 + "@nomicfoundation/ethereumjs-rlp": 5.0.2 + debug: ^4.3.3 + ethereum-cryptography: 0.1.3 + ethers: ^5.7.1 + js-sdsl: ^4.1.4 + checksum: 3ab6578e252e53609afd98d8ba42a99f182dcf80252f23ed9a5e0471023ffb2502130f85fc47fa7c94cd149f9be799ed9a0942ca52a143405be9267f4ad94e64 + languageName: node + linkType: hard + +"@nomicfoundation/ethereumjs-trie@npm:6.0.2": + version: 6.0.2 + resolution: "@nomicfoundation/ethereumjs-trie@npm:6.0.2" + dependencies: + "@nomicfoundation/ethereumjs-rlp": 5.0.2 + "@nomicfoundation/ethereumjs-util": 9.0.2 + "@types/readable-stream": ^2.3.13 + ethereum-cryptography: 0.1.3 + readable-stream: ^3.6.0 + checksum: d4da918d333851b9f2cce7dbd25ab5753e0accd43d562d98fd991b168b6a08d1794528f0ade40fe5617c84900378376fe6256cdbe52c8d66bf4c53293bbc7c40 + languageName: node + linkType: hard + +"@nomicfoundation/ethereumjs-tx@npm:5.0.2": + version: 5.0.2 + resolution: "@nomicfoundation/ethereumjs-tx@npm:5.0.2" + dependencies: + "@chainsafe/ssz": ^0.9.2 + "@ethersproject/providers": ^5.7.2 + "@nomicfoundation/ethereumjs-common": 4.0.2 + "@nomicfoundation/ethereumjs-rlp": 5.0.2 + "@nomicfoundation/ethereumjs-util": 9.0.2 + ethereum-cryptography: 0.1.3 + checksum: 0bbcea75786b2ccb559afe2ecc9866fb4566a9f157b6ffba4f50960d14f4b3da2e86e273f6fadda9b860e67cfcabf589970fb951b328cb5f900a585cd21842a2 + languageName: node + linkType: hard + +"@nomicfoundation/ethereumjs-util@npm:9.0.2": + version: 9.0.2 + resolution: "@nomicfoundation/ethereumjs-util@npm:9.0.2" + dependencies: + "@chainsafe/ssz": ^0.10.0 + "@nomicfoundation/ethereumjs-rlp": 5.0.2 + ethereum-cryptography: 0.1.3 + checksum: 
3a08f7b88079ef9f53b43da9bdcb8195498fd3d3911c2feee2571f4d1204656053f058b2f650471c86f7d2d0ba2f814768c7cfb0f266eede41c848356afc4900 + languageName: node + linkType: hard + +"@nomicfoundation/ethereumjs-vm@npm:7.0.2": + version: 7.0.2 + resolution: "@nomicfoundation/ethereumjs-vm@npm:7.0.2" + dependencies: + "@nomicfoundation/ethereumjs-block": 5.0.2 + "@nomicfoundation/ethereumjs-blockchain": 7.0.2 + "@nomicfoundation/ethereumjs-common": 4.0.2 + "@nomicfoundation/ethereumjs-evm": 2.0.2 + "@nomicfoundation/ethereumjs-rlp": 5.0.2 + "@nomicfoundation/ethereumjs-statemanager": 2.0.2 + "@nomicfoundation/ethereumjs-trie": 6.0.2 + "@nomicfoundation/ethereumjs-tx": 5.0.2 + "@nomicfoundation/ethereumjs-util": 9.0.2 + debug: ^4.3.3 + ethereum-cryptography: 0.1.3 + mcl-wasm: ^0.7.1 + rustbn.js: ~0.2.0 + checksum: 1c25ba4d0644cadb8a2b0241a4bb02e578bfd7f70e3492b855c2ab5c120cb159cb8f7486f84dc1597884bd1697feedbfb5feb66e91352afb51f3694fd8e4a043 + languageName: node + linkType: hard + +"@nomicfoundation/hardhat-chai-matchers@npm:^2.0.0": + version: 2.0.2 + resolution: "@nomicfoundation/hardhat-chai-matchers@npm:2.0.2" + dependencies: + "@types/chai-as-promised": ^7.1.3 + chai-as-promised: ^7.1.1 + deep-eql: ^4.0.1 + ordinal: ^1.0.3 + peerDependencies: + "@nomicfoundation/hardhat-ethers": ^3.0.0 + chai: ^4.2.0 + ethers: ^6.1.0 + hardhat: ^2.9.4 + checksum: 62d7d69f6b34a06bc43fe0dab8adc9e3b6f907f1b68bb5cf47feb78a4c7ef057b9a4aa713611abeca38df9d8fe166bbd9bbf98e42c4edbdf7aece477b3f9485a + languageName: node + linkType: hard + +"@nomicfoundation/hardhat-ethers@npm:^3.0.0": + version: 3.0.5 + resolution: "@nomicfoundation/hardhat-ethers@npm:3.0.5" + dependencies: + debug: ^4.1.1 + lodash.isequal: ^4.5.0 + peerDependencies: + ethers: ^6.1.0 + hardhat: ^2.0.0 + checksum: 34b092dfec68f8d8673c96af717660327edc814bc5c9cdb5bc1f82d5bde2b18bc9b9d3499a632784a3d4f2505ac174217e55d31b546b7eaa77a5bb30b3c80bb4 + languageName: node + linkType: hard + +"@nomicfoundation/solidity-analyzer-darwin-arm64@npm:0.1.1": + version: 0.1.1 + resolution: "@nomicfoundation/solidity-analyzer-darwin-arm64@npm:0.1.1" + conditions: os=darwin & cpu=arm64 + languageName: node + linkType: hard + +"@nomicfoundation/solidity-analyzer-darwin-x64@npm:0.1.1": + version: 0.1.1 + resolution: "@nomicfoundation/solidity-analyzer-darwin-x64@npm:0.1.1" + conditions: os=darwin & cpu=x64 + languageName: node + linkType: hard + +"@nomicfoundation/solidity-analyzer-freebsd-x64@npm:0.1.1": + version: 0.1.1 + resolution: "@nomicfoundation/solidity-analyzer-freebsd-x64@npm:0.1.1" + conditions: os=freebsd & cpu=x64 + languageName: node + linkType: hard + +"@nomicfoundation/solidity-analyzer-linux-arm64-gnu@npm:0.1.1": + version: 0.1.1 + resolution: "@nomicfoundation/solidity-analyzer-linux-arm64-gnu@npm:0.1.1" + conditions: os=linux & cpu=arm64 & libc=glibc + languageName: node + linkType: hard + +"@nomicfoundation/solidity-analyzer-linux-arm64-musl@npm:0.1.1": + version: 0.1.1 + resolution: "@nomicfoundation/solidity-analyzer-linux-arm64-musl@npm:0.1.1" + conditions: os=linux & cpu=arm64 & libc=musl + languageName: node + linkType: hard + +"@nomicfoundation/solidity-analyzer-linux-x64-gnu@npm:0.1.1": + version: 0.1.1 + resolution: "@nomicfoundation/solidity-analyzer-linux-x64-gnu@npm:0.1.1" + conditions: os=linux & cpu=x64 & libc=glibc + languageName: node + linkType: hard + +"@nomicfoundation/solidity-analyzer-linux-x64-musl@npm:0.1.1": + version: 0.1.1 + resolution: "@nomicfoundation/solidity-analyzer-linux-x64-musl@npm:0.1.1" + conditions: os=linux & cpu=x64 & 
libc=musl + languageName: node + linkType: hard + +"@nomicfoundation/solidity-analyzer-win32-arm64-msvc@npm:0.1.1": + version: 0.1.1 + resolution: "@nomicfoundation/solidity-analyzer-win32-arm64-msvc@npm:0.1.1" + conditions: os=win32 & cpu=arm64 + languageName: node + linkType: hard + +"@nomicfoundation/solidity-analyzer-win32-ia32-msvc@npm:0.1.1": + version: 0.1.1 + resolution: "@nomicfoundation/solidity-analyzer-win32-ia32-msvc@npm:0.1.1" + conditions: os=win32 & cpu=ia32 + languageName: node + linkType: hard + +"@nomicfoundation/solidity-analyzer-win32-x64-msvc@npm:0.1.1": + version: 0.1.1 + resolution: "@nomicfoundation/solidity-analyzer-win32-x64-msvc@npm:0.1.1" + conditions: os=win32 & cpu=x64 + languageName: node + linkType: hard + +"@nomicfoundation/solidity-analyzer@npm:^0.1.0": + version: 0.1.1 + resolution: "@nomicfoundation/solidity-analyzer@npm:0.1.1" + dependencies: + "@nomicfoundation/solidity-analyzer-darwin-arm64": 0.1.1 + "@nomicfoundation/solidity-analyzer-darwin-x64": 0.1.1 + "@nomicfoundation/solidity-analyzer-freebsd-x64": 0.1.1 + "@nomicfoundation/solidity-analyzer-linux-arm64-gnu": 0.1.1 + "@nomicfoundation/solidity-analyzer-linux-arm64-musl": 0.1.1 + "@nomicfoundation/solidity-analyzer-linux-x64-gnu": 0.1.1 + "@nomicfoundation/solidity-analyzer-linux-x64-musl": 0.1.1 + "@nomicfoundation/solidity-analyzer-win32-arm64-msvc": 0.1.1 + "@nomicfoundation/solidity-analyzer-win32-ia32-msvc": 0.1.1 + "@nomicfoundation/solidity-analyzer-win32-x64-msvc": 0.1.1 + dependenciesMeta: + "@nomicfoundation/solidity-analyzer-darwin-arm64": + optional: true + "@nomicfoundation/solidity-analyzer-darwin-x64": + optional: true + "@nomicfoundation/solidity-analyzer-freebsd-x64": + optional: true + "@nomicfoundation/solidity-analyzer-linux-arm64-gnu": + optional: true + "@nomicfoundation/solidity-analyzer-linux-arm64-musl": + optional: true + "@nomicfoundation/solidity-analyzer-linux-x64-gnu": + optional: true + "@nomicfoundation/solidity-analyzer-linux-x64-musl": + optional: true + "@nomicfoundation/solidity-analyzer-win32-arm64-msvc": + optional: true + "@nomicfoundation/solidity-analyzer-win32-ia32-msvc": + optional: true + "@nomicfoundation/solidity-analyzer-win32-x64-msvc": + optional: true + checksum: 038cffafd5769e25256b5b8bef88d95cc1c021274a65c020cf84aceb3237752a3b51645fdb0687f5516a2bdfebf166fcf50b08ab64857925100213e0654b266b + languageName: node + linkType: hard + +"@npmcli/agent@npm:^2.0.0": + version: 2.2.0 + resolution: "@npmcli/agent@npm:2.2.0" + dependencies: + agent-base: ^7.1.0 + http-proxy-agent: ^7.0.0 + https-proxy-agent: ^7.0.1 + lru-cache: ^10.0.1 + socks-proxy-agent: ^8.0.1 + checksum: 3b25312edbdfaa4089af28e2d423b6f19838b945e47765b0c8174c1395c79d43c3ad6d23cb364b43f59fd3acb02c93e3b493f72ddbe3dfea04c86843a7311fc4 + languageName: node + linkType: hard + +"@npmcli/fs@npm:^3.1.0": + version: 3.1.0 + resolution: "@npmcli/fs@npm:3.1.0" + dependencies: + semver: ^7.3.5 + checksum: a50a6818de5fc557d0b0e6f50ec780a7a02ab8ad07e5ac8b16bf519e0ad60a144ac64f97d05c443c3367235d337182e1d012bbac0eb8dbae8dc7b40b193efd0e + languageName: node + linkType: hard + +"@pkgjs/parseargs@npm:^0.11.0": + version: 0.11.0 + resolution: "@pkgjs/parseargs@npm:0.11.0" + checksum: 6ad6a00fc4f2f2cfc6bff76fb1d88b8ee20bc0601e18ebb01b6d4be583733a860239a521a7fbca73b612e66705078809483549d2b18f370eb346c5155c8e4a0f + languageName: node + linkType: hard + +"@pkgr/utils@npm:^2.4.2": + version: 2.4.2 + resolution: "@pkgr/utils@npm:2.4.2" + dependencies: + cross-spawn: ^7.0.3 + fast-glob: ^3.3.0 + is-glob: ^4.0.3 + 
open: ^9.1.0 + picocolors: ^1.0.0 + tslib: ^2.6.0 + checksum: 24e04c121269317d259614cd32beea3af38277151c4002df5883c4be920b8e3490bb897748e844f9d46bf68230f86dabd4e8f093773130e7e60529a769a132fc + languageName: node + linkType: hard + +"@pnpm/config.env-replace@npm:^1.1.0": + version: 1.1.0 + resolution: "@pnpm/config.env-replace@npm:1.1.0" + checksum: a3d2b57e35eec9543d9eb085854f6e33e8102dac99fdef2fad2eebdbbfc345e93299f0c20e8eb61c1b4c7aa123bfd47c175678626f161cda65dd147c2b6e1fa0 + languageName: node + linkType: hard + +"@pnpm/network.ca-file@npm:^1.0.1": + version: 1.0.2 + resolution: "@pnpm/network.ca-file@npm:1.0.2" + dependencies: + graceful-fs: 4.2.10 + checksum: d8d0884646500576bd5390464d13db1bb9a62e32a1069293e5bddb2ad8354b354b7e2d2a35e12850025651e795e6a80ce9e601c66312504667b7e3ee7b52becc + languageName: node + linkType: hard + +"@pnpm/npm-conf@npm:^2.1.0": + version: 2.2.2 + resolution: "@pnpm/npm-conf@npm:2.2.2" + dependencies: + "@pnpm/config.env-replace": ^1.1.0 + "@pnpm/network.ca-file": ^1.0.1 + config-chain: ^1.1.11 + checksum: d64aa4464be584caa855eafa8f109509390489997e36d602d6215784e2973b896bef3968426bb00896cf4ae7d440fed2cee7bb4e0dbc90362f024ea3f9e27ab1 + languageName: node + linkType: hard + +"@polka/url@npm:^1.0.0-next.20": + version: 1.0.0-next.24 + resolution: "@polka/url@npm:1.0.0-next.24" + checksum: 00baec4458ac86ca27edf7ce807ccfad97cd1d4b67bdedaf3401a9e755757588f3331e891290d1deea52d88df2bf2387caf8d94a6835b614d5b37b638a688273 + languageName: node + linkType: hard + +"@puppeteer/browsers@npm:0.5.0": + version: 0.5.0 + resolution: "@puppeteer/browsers@npm:0.5.0" + dependencies: + debug: 4.3.4 + extract-zip: 2.0.1 + https-proxy-agent: 5.0.1 + progress: 2.0.3 + proxy-from-env: 1.1.0 + tar-fs: 2.1.1 + unbzip2-stream: 1.4.3 + yargs: 17.7.1 + peerDependencies: + typescript: ">= 4.7.4" + peerDependenciesMeta: + typescript: + optional: true + bin: + browsers: lib/cjs/main-cli.js + checksum: d75fde03be4be106ca907834739251c2bb0b33a09fa23315c5dbe8b8b4cfed2f1b26af62e1dbe5fccc227e9bc87b51da0815461b982477eb01439bfdd6e7b01a + languageName: node + linkType: hard + +"@puppeteer/browsers@npm:1.4.6": + version: 1.4.6 + resolution: "@puppeteer/browsers@npm:1.4.6" + dependencies: + debug: 4.3.4 + extract-zip: 2.0.1 + progress: 2.0.3 + proxy-agent: 6.3.0 + tar-fs: 3.0.4 + unbzip2-stream: 1.4.3 + yargs: 17.7.1 + peerDependencies: + typescript: ">= 4.7.4" + peerDependenciesMeta: + typescript: + optional: true + bin: + browsers: lib/cjs/main-cli.js + checksum: 29569dd8a8a41737bb0dd40cce6279cfc8764afc6242d2f9d8ae610bed7e466fc77eeb27b9b3ac53dd04927a1a0e26389f282f6ba057210979b36ab455009d64 + languageName: node + linkType: hard + +"@rollup/plugin-node-resolve@npm:^13.0.4": + version: 13.3.0 + resolution: "@rollup/plugin-node-resolve@npm:13.3.0" + dependencies: + "@rollup/pluginutils": ^3.1.0 + "@types/resolve": 1.17.1 + deepmerge: ^4.2.2 + is-builtin-module: ^3.1.0 + is-module: ^1.0.0 + resolve: ^1.19.0 + peerDependencies: + rollup: ^2.42.0 + checksum: ec5418e6b3c23a9e30683056b3010e9d325316dcfae93fbc673ae64dad8e56a2ce761c15c48f5e2dcfe0c822fdc4a4905ee6346e3dcf90603ba2260afef5a5e6 + languageName: node + linkType: hard + +"@rollup/plugin-node-resolve@npm:^15.0.1": + version: 15.2.3 + resolution: "@rollup/plugin-node-resolve@npm:15.2.3" + dependencies: + "@rollup/pluginutils": ^5.0.1 + "@types/resolve": 1.20.2 + deepmerge: ^4.2.2 + is-builtin-module: ^3.2.1 + is-module: ^1.0.0 + resolve: ^1.22.1 + peerDependencies: + rollup: ^2.78.0||^3.0.0||^4.0.0 + peerDependenciesMeta: + rollup: + optional: true + 
checksum: 730f32c2f8fdddff07cf0fca86a5dac7c475605fb96930197a868c066e62eb6388c557545e4f7d99b7a283411754c9fbf98944ab086b6074e04fc1292e234aa8 + languageName: node + linkType: hard + +"@rollup/pluginutils@npm:^3.1.0": + version: 3.1.0 + resolution: "@rollup/pluginutils@npm:3.1.0" + dependencies: + "@types/estree": 0.0.39 + estree-walker: ^1.0.1 + picomatch: ^2.2.2 + peerDependencies: + rollup: ^1.20.0||^2.0.0 + checksum: 8be16e27863c219edbb25a4e6ec2fe0e1e451d9e917b6a43cf2ae5bc025a6b8faaa40f82a6e53b66d0de37b58ff472c6c3d57a83037ae635041f8df959d6d9aa + languageName: node + linkType: hard + +"@rollup/pluginutils@npm:^5.0.1": + version: 5.1.0 + resolution: "@rollup/pluginutils@npm:5.1.0" + dependencies: + "@types/estree": ^1.0.0 + estree-walker: ^2.0.2 + picomatch: ^2.3.1 + peerDependencies: + rollup: ^1.20.0||^2.0.0||^3.0.0||^4.0.0 + peerDependenciesMeta: + rollup: + optional: true + checksum: 3cc5a6d91452a6eabbfd1ae79b4dd1f1e809d2eecda6e175deb784e75b0911f47e9ecce73f8dd315d6a8b3f362582c91d3c0f66908b6ced69345b3cbe28f8ce8 + languageName: node + linkType: hard + +"@rollup/rollup-android-arm-eabi@npm:4.9.4": + version: 4.9.4 + resolution: "@rollup/rollup-android-arm-eabi@npm:4.9.4" + conditions: os=android & cpu=arm + languageName: node + linkType: hard + +"@rollup/rollup-android-arm64@npm:4.9.4": + version: 4.9.4 + resolution: "@rollup/rollup-android-arm64@npm:4.9.4" + conditions: os=android & cpu=arm64 + languageName: node + linkType: hard + +"@rollup/rollup-darwin-arm64@npm:4.9.4": + version: 4.9.4 + resolution: "@rollup/rollup-darwin-arm64@npm:4.9.4" + conditions: os=darwin & cpu=arm64 + languageName: node + linkType: hard + +"@rollup/rollup-darwin-x64@npm:4.9.4": + version: 4.9.4 + resolution: "@rollup/rollup-darwin-x64@npm:4.9.4" + conditions: os=darwin & cpu=x64 + languageName: node + linkType: hard + +"@rollup/rollup-linux-arm-gnueabihf@npm:4.9.4": + version: 4.9.4 + resolution: "@rollup/rollup-linux-arm-gnueabihf@npm:4.9.4" + conditions: os=linux & cpu=arm + languageName: node + linkType: hard + +"@rollup/rollup-linux-arm64-gnu@npm:4.9.4": + version: 4.9.4 + resolution: "@rollup/rollup-linux-arm64-gnu@npm:4.9.4" + conditions: os=linux & cpu=arm64 & libc=glibc + languageName: node + linkType: hard + +"@rollup/rollup-linux-arm64-musl@npm:4.9.4": + version: 4.9.4 + resolution: "@rollup/rollup-linux-arm64-musl@npm:4.9.4" + conditions: os=linux & cpu=arm64 & libc=musl + languageName: node + linkType: hard + +"@rollup/rollup-linux-riscv64-gnu@npm:4.9.4": + version: 4.9.4 + resolution: "@rollup/rollup-linux-riscv64-gnu@npm:4.9.4" + conditions: os=linux & cpu=riscv64 & libc=glibc + languageName: node + linkType: hard + +"@rollup/rollup-linux-x64-gnu@npm:4.9.4": + version: 4.9.4 + resolution: "@rollup/rollup-linux-x64-gnu@npm:4.9.4" + conditions: os=linux & cpu=x64 & libc=glibc + languageName: node + linkType: hard + +"@rollup/rollup-linux-x64-musl@npm:4.9.4": + version: 4.9.4 + resolution: "@rollup/rollup-linux-x64-musl@npm:4.9.4" + conditions: os=linux & cpu=x64 & libc=musl + languageName: node + linkType: hard + +"@rollup/rollup-win32-arm64-msvc@npm:4.9.4": + version: 4.9.4 + resolution: "@rollup/rollup-win32-arm64-msvc@npm:4.9.4" + conditions: os=win32 & cpu=arm64 + languageName: node + linkType: hard + +"@rollup/rollup-win32-ia32-msvc@npm:4.9.4": + version: 4.9.4 + resolution: "@rollup/rollup-win32-ia32-msvc@npm:4.9.4" + conditions: os=win32 & cpu=ia32 + languageName: node + linkType: hard + +"@rollup/rollup-win32-x64-msvc@npm:4.9.4": + version: 4.9.4 + resolution: 
"@rollup/rollup-win32-x64-msvc@npm:4.9.4" + conditions: os=win32 & cpu=x64 + languageName: node + linkType: hard + +"@scure/base@npm:~1.1.0": + version: 1.1.3 + resolution: "@scure/base@npm:1.1.3" + checksum: 1606ab8a4db898cb3a1ada16c15437c3bce4e25854fadc8eb03ae93cbbbac1ed90655af4b0be3da37e12056fef11c0374499f69b9e658c9e5b7b3e06353c630c + languageName: node + linkType: hard + +"@scure/bip32@npm:1.1.5": + version: 1.1.5 + resolution: "@scure/bip32@npm:1.1.5" + dependencies: + "@noble/hashes": ~1.2.0 + "@noble/secp256k1": ~1.7.0 + "@scure/base": ~1.1.0 + checksum: b08494ab0d2b1efee7226d1b5100db5157ebea22a78bb87126982a76a186cb3048413e8be0ba2622d00d048a20acbba527af730de86c132a77de616eb9907a3b + languageName: node + linkType: hard + +"@scure/bip39@npm:1.1.1": + version: 1.1.1 + resolution: "@scure/bip39@npm:1.1.1" + dependencies: + "@noble/hashes": ~1.2.0 + "@scure/base": ~1.1.0 + checksum: fbb594c50696fa9c14e891d872f382e50a3f919b6c96c55ef2fb10c7102c546dafb8f099a62bd114c12a00525b595dcf7381846f383f0ddcedeaa6e210747d2f + languageName: node + linkType: hard + +"@sentry/core@npm:5.30.0": + version: 5.30.0 + resolution: "@sentry/core@npm:5.30.0" + dependencies: + "@sentry/hub": 5.30.0 + "@sentry/minimal": 5.30.0 + "@sentry/types": 5.30.0 + "@sentry/utils": 5.30.0 + tslib: ^1.9.3 + checksum: 8a2b22687e70d76fa4381bce215d770b6c08561c5ff5d6afe39c8c3c509c18ee7384ad0be3aee18d3a858a3c88e1d2821cf10eb5e05646376a33200903b56da2 + languageName: node + linkType: hard + +"@sentry/hub@npm:5.30.0": + version: 5.30.0 + resolution: "@sentry/hub@npm:5.30.0" + dependencies: + "@sentry/types": 5.30.0 + "@sentry/utils": 5.30.0 + tslib: ^1.9.3 + checksum: 09f778cc78765213f1e35a3ee6da3a8e02a706e8a7e5b7f84614707f4b665c7297b700a1849ab2ca1f02ede5884fd9ae893e58dc65f04f35ccdfee17e99ee93d + languageName: node + linkType: hard + +"@sentry/minimal@npm:5.30.0": + version: 5.30.0 + resolution: "@sentry/minimal@npm:5.30.0" + dependencies: + "@sentry/hub": 5.30.0 + "@sentry/types": 5.30.0 + tslib: ^1.9.3 + checksum: 934650f6989ce51f425c7c4b4d4d9bfecface8162a36d21df8a241f780ab1716dd47b81e2170e4cc624797ed1eebe10f71e4876c1e25b787860daaef75ca7a0c + languageName: node + linkType: hard + +"@sentry/node@npm:^5.18.1": + version: 5.30.0 + resolution: "@sentry/node@npm:5.30.0" + dependencies: + "@sentry/core": 5.30.0 + "@sentry/hub": 5.30.0 + "@sentry/tracing": 5.30.0 + "@sentry/types": 5.30.0 + "@sentry/utils": 5.30.0 + cookie: ^0.4.1 + https-proxy-agent: ^5.0.0 + lru_map: ^0.3.3 + tslib: ^1.9.3 + checksum: 5f0367cc52f9d716c64ba727e2a5c8592364494c8fdadfb3df2d0ee9d7956b886fb3ec674370292d2a7b7e1d9a8e1b84c69c06e8a4a064be8d4687698df0090c + languageName: node + linkType: hard + +"@sentry/tracing@npm:5.30.0": + version: 5.30.0 + resolution: "@sentry/tracing@npm:5.30.0" + dependencies: + "@sentry/hub": 5.30.0 + "@sentry/minimal": 5.30.0 + "@sentry/types": 5.30.0 + "@sentry/utils": 5.30.0 + tslib: ^1.9.3 + checksum: 720c07b111e8128e70a939ab4e9f9cfd13dc23303b27575afddabab08d08f9b94499017c76a9ffe253bf3ca40833e8f9262cf6dc546ba24da6eb74fedae5f92b + languageName: node + linkType: hard + +"@sentry/types@npm:5.30.0": + version: 5.30.0 + resolution: "@sentry/types@npm:5.30.0" + checksum: de7df777824c8e311f143c6fd7de220b24f25b5018312fe8f67d93bebf0f3cdd32bbca9f155846f5c31441d940eebe27c8338000321559a743264c7e41dda560 + languageName: node + linkType: hard + +"@sentry/utils@npm:5.30.0": + version: 5.30.0 + resolution: "@sentry/utils@npm:5.30.0" + dependencies: + "@sentry/types": 5.30.0 + tslib: ^1.9.3 + checksum: 
27b259a136c664427641dd32ee3dc490553f3b5e92986accfa829d14063ebc69b191e92209ac9c40fbc367f74cfa17dc93b4c40981d666711fd57b4d51a82062 + languageName: node + linkType: hard + +"@sideway/address@npm:^4.1.3": + version: 4.1.4 + resolution: "@sideway/address@npm:4.1.4" + dependencies: + "@hapi/hoek": ^9.0.0 + checksum: b9fca2a93ac2c975ba12e0a6d97853832fb1f4fb02393015e012b47fa916a75ca95102d77214b2a29a2784740df2407951af8c5dde054824c65577fd293c4cdb + languageName: node + linkType: hard + +"@sideway/formula@npm:^3.0.1": + version: 3.0.1 + resolution: "@sideway/formula@npm:3.0.1" + checksum: e4beeebc9dbe2ff4ef0def15cec0165e00d1612e3d7cea0bc9ce5175c3263fc2c818b679bd558957f49400ee7be9d4e5ac90487e1625b4932e15c4aa7919c57a + languageName: node + linkType: hard + +"@sideway/pinpoint@npm:^2.0.0": + version: 2.0.0 + resolution: "@sideway/pinpoint@npm:2.0.0" + checksum: 0f4491e5897fcf5bf02c46f5c359c56a314e90ba243f42f0c100437935daa2488f20482f0f77186bd6bf43345095a95d8143ecf8b1f4d876a7bc0806aba9c3d2 + languageName: node + linkType: hard + +"@signorecello/noir_playground@npm:^0.6.0": + version: 0.6.0 + resolution: "@signorecello/noir_playground@npm:0.6.0" + dependencies: + "@monaco-editor/react": 4.6.0 + "@noir-lang/backend_barretenberg": ^0.19.2 + "@noir-lang/noir_js": ^0.19.2 + "@noir-lang/noir_wasm": ^0.19.2 + "@noir-lang/source-resolver": ^0.19.2 + "@noir-lang/types": ^0.19.2 + fflate: ^0.8.1 + js-base64: ^3.7.5 + monaco-editor: ^0.44.0 + monaco-editor-textmate: ^4.0.0 + monaco-textmate: ^3.0.1 + onigasm: ^2.2.5 + react-toastify: ^9.1.3 + checksum: a1fa6f5b6aded80ad8ecdf8841dbbcd70e2a3faa479bc39f8077d3e96d65ceab53488f278db1a991a6cc8c6ce63206cc1627fe87e683187941359251e67c48ed + languageName: node + linkType: hard + +"@sinclair/typebox@npm:^0.27.8": + version: 0.27.8 + resolution: "@sinclair/typebox@npm:0.27.8" + checksum: 00bd7362a3439021aa1ea51b0e0d0a0e8ca1351a3d54c606b115fdcc49b51b16db6e5f43b4fe7a28c38688523e22a94d49dd31168868b655f0d4d50f032d07a1 + languageName: node + linkType: hard + +"@sindresorhus/is@npm:^0.14.0": + version: 0.14.0 + resolution: "@sindresorhus/is@npm:0.14.0" + checksum: 971e0441dd44ba3909b467219a5e242da0fc584048db5324cfb8048148fa8dcc9d44d71e3948972c4f6121d24e5da402ef191420d1266a95f713bb6d6e59c98a + languageName: node + linkType: hard + +"@sindresorhus/is@npm:^4.6.0": + version: 4.6.0 + resolution: "@sindresorhus/is@npm:4.6.0" + checksum: 83839f13da2c29d55c97abc3bc2c55b250d33a0447554997a85c539e058e57b8da092da396e252b11ec24a0279a0bed1f537fa26302209327060643e327f81d2 + languageName: node + linkType: hard + +"@sindresorhus/is@npm:^5.2.0": + version: 5.6.0 + resolution: "@sindresorhus/is@npm:5.6.0" + checksum: 2e6e0c3acf188dcd9aea0f324ac1b6ad04c9fc672392a7b5a1218512fcde066965797eba8b9fe2108657a504388bd4a6664e6e6602555168e828a6df08b9f10e + languageName: node + linkType: hard + +"@sinonjs/commons@npm:^2.0.0": + version: 2.0.0 + resolution: "@sinonjs/commons@npm:2.0.0" + dependencies: + type-detect: 4.0.8 + checksum: 5023ba17edf2b85ed58262313b8e9b59e23c6860681a9af0200f239fe939e2b79736d04a260e8270ddd57196851dde3ba754d7230be5c5234e777ae2ca8af137 + languageName: node + linkType: hard + +"@sinonjs/commons@npm:^3.0.0": + version: 3.0.0 + resolution: "@sinonjs/commons@npm:3.0.0" + dependencies: + type-detect: 4.0.8 + checksum: b4b5b73d4df4560fb8c0c7b38c7ad4aeabedd362f3373859d804c988c725889cde33550e4bcc7cd316a30f5152a2d1d43db71b6d0c38f5feef71fd8d016763f8 + languageName: node + linkType: hard + +"@sinonjs/fake-timers@npm:^10.0.2": + version: 10.3.0 + resolution: "@sinonjs/fake-timers@npm:10.3.0" + 
dependencies: + "@sinonjs/commons": ^3.0.0 + checksum: 614d30cb4d5201550c940945d44c9e0b6d64a888ff2cd5b357f95ad6721070d6b8839cd10e15b76bf5e14af0bcc1d8f9ec00d49a46318f1f669a4bec1d7f3148 + languageName: node + linkType: hard + +"@sinonjs/fake-timers@npm:^11.2.2": + version: 11.2.2 + resolution: "@sinonjs/fake-timers@npm:11.2.2" + dependencies: + "@sinonjs/commons": ^3.0.0 + checksum: 68c29b0e1856fdc280df03ddbf57c726420b78e9f943a241b471edc018fb14ff36fdc1daafd6026cba08c3c7f50c976fb7ae11b88ff44cd7f609692ca7d25158 + languageName: node + linkType: hard + +"@sinonjs/samsam@npm:^8.0.0": + version: 8.0.0 + resolution: "@sinonjs/samsam@npm:8.0.0" + dependencies: + "@sinonjs/commons": ^2.0.0 + lodash.get: ^4.4.2 + type-detect: ^4.0.8 + checksum: 95e40d0bb9f7288e27c379bee1b03c3dc51e7e78b9d5ea6aef66a690da7e81efc4715145b561b449cefc5361a171791e3ce30fb1a46ab247d4c0766024c60a60 + languageName: node + linkType: hard + +"@sinonjs/text-encoding@npm:^0.7.1": + version: 0.7.2 + resolution: "@sinonjs/text-encoding@npm:0.7.2" + checksum: fe690002a32ba06906cf87e2e8fe84d1590294586f2a7fd180a65355b53660c155c3273d8011a5f2b77209b819aa7306678ae6e4aea0df014bd7ffd4bbbcf1ab + languageName: node + linkType: hard + +"@slorber/remark-comment@npm:^1.0.0": + version: 1.0.0 + resolution: "@slorber/remark-comment@npm:1.0.0" + dependencies: + micromark-factory-space: ^1.0.0 + micromark-util-character: ^1.1.0 + micromark-util-symbol: ^1.0.1 + checksum: c96f1533d09913c57381859966f10a706afd8eb680923924af1c451f3b72f22c31e394028d7535131c10f8682d3c60206da95c50fb4f016fbbd04218c853cc88 + languageName: node + linkType: hard + +"@slorber/static-site-generator-webpack-plugin@npm:^4.0.7": + version: 4.0.7 + resolution: "@slorber/static-site-generator-webpack-plugin@npm:4.0.7" + dependencies: + eval: ^0.1.8 + p-map: ^4.0.0 + webpack-sources: ^3.2.2 + checksum: a1e1d8b22dd51059524993f3fdd6861db10eb950debc389e5dd650702287fa2004eace03e6bc8f25b977bd7bc01d76a50aa271cbb73c58a8ec558945d728f307 + languageName: node + linkType: hard + +"@svgr/babel-plugin-add-jsx-attribute@npm:^6.5.1": + version: 6.5.1 + resolution: "@svgr/babel-plugin-add-jsx-attribute@npm:6.5.1" + peerDependencies: + "@babel/core": ^7.0.0-0 + checksum: cab83832830a57735329ed68f67c03b57ca21fa037b0134847b0c5c0ef4beca89956d7dacfbf7b2a10fd901e7009e877512086db2ee918b8c69aee7742ae32c0 + languageName: node + linkType: hard + +"@svgr/babel-plugin-remove-jsx-attribute@npm:*": + version: 8.0.0 + resolution: "@svgr/babel-plugin-remove-jsx-attribute@npm:8.0.0" + peerDependencies: + "@babel/core": ^7.0.0-0 + checksum: ff992893c6c4ac802713ba3a97c13be34e62e6d981c813af40daabcd676df68a72a61bd1e692bb1eda3587f1b1d700ea462222ae2153bb0f46886632d4f88d08 + languageName: node + linkType: hard + +"@svgr/babel-plugin-remove-jsx-empty-expression@npm:*": + version: 8.0.0 + resolution: "@svgr/babel-plugin-remove-jsx-empty-expression@npm:8.0.0" + peerDependencies: + "@babel/core": ^7.0.0-0 + checksum: 0fb691b63a21bac00da3aa2dccec50d0d5a5b347ff408d60803b84410d8af168f2656e4ba1ee1f24dab0ae4e4af77901f2928752bb0434c1f6788133ec599ec8 + languageName: node + linkType: hard + +"@svgr/babel-plugin-replace-jsx-attribute-value@npm:^6.5.1": + version: 6.5.1 + resolution: "@svgr/babel-plugin-replace-jsx-attribute-value@npm:6.5.1" + peerDependencies: + "@babel/core": ^7.0.0-0 + checksum: b7d2125758e766e1ebd14b92216b800bdc976959bc696dbfa1e28682919147c1df4bb8b1b5fd037d7a83026e27e681fea3b8d3741af8d3cf4c9dfa3d412125df + languageName: node + linkType: hard + +"@svgr/babel-plugin-svg-dynamic-title@npm:^6.5.1": + version: 6.5.1 + 
resolution: "@svgr/babel-plugin-svg-dynamic-title@npm:6.5.1" + peerDependencies: + "@babel/core": ^7.0.0-0 + checksum: 0fd42ebf127ae9163ef341e84972daa99bdcb9e6ed3f83aabd95ee173fddc43e40e02fa847fbc0a1058cf5549f72b7960a2c5e22c3e4ac18f7e3ac81277852ae + languageName: node + linkType: hard + +"@svgr/babel-plugin-svg-em-dimensions@npm:^6.5.1": + version: 6.5.1 + resolution: "@svgr/babel-plugin-svg-em-dimensions@npm:6.5.1" + peerDependencies: + "@babel/core": ^7.0.0-0 + checksum: c1550ee9f548526fa66fd171e3ffb5696bfc4e4cd108a631d39db492c7410dc10bba4eb5a190e9df824bf806130ccc586ae7d2e43c547e6a4f93bbb29a18f344 + languageName: node + linkType: hard + +"@svgr/babel-plugin-transform-react-native-svg@npm:^6.5.1": + version: 6.5.1 + resolution: "@svgr/babel-plugin-transform-react-native-svg@npm:6.5.1" + peerDependencies: + "@babel/core": ^7.0.0-0 + checksum: 4c924af22b948b812629e80efb90ad1ec8faae26a232d8ca8a06b46b53e966a2c415a57806a3ff0ea806a622612e546422719b69ec6839717a7755dac19171d9 + languageName: node + linkType: hard + +"@svgr/babel-plugin-transform-svg-component@npm:^6.5.1": + version: 6.5.1 + resolution: "@svgr/babel-plugin-transform-svg-component@npm:6.5.1" + peerDependencies: + "@babel/core": ^7.0.0-0 + checksum: e496bb5ee871feb6bcab250b6e067322da7dd5c9c2b530b41e5586fe090f86611339b49d0a909c334d9b24cbca0fa755c949a2526c6ad03c6b5885666874cf5f + languageName: node + linkType: hard + +"@svgr/babel-preset@npm:^6.5.1": + version: 6.5.1 + resolution: "@svgr/babel-preset@npm:6.5.1" + dependencies: + "@svgr/babel-plugin-add-jsx-attribute": ^6.5.1 + "@svgr/babel-plugin-remove-jsx-attribute": "*" + "@svgr/babel-plugin-remove-jsx-empty-expression": "*" + "@svgr/babel-plugin-replace-jsx-attribute-value": ^6.5.1 + "@svgr/babel-plugin-svg-dynamic-title": ^6.5.1 + "@svgr/babel-plugin-svg-em-dimensions": ^6.5.1 + "@svgr/babel-plugin-transform-react-native-svg": ^6.5.1 + "@svgr/babel-plugin-transform-svg-component": ^6.5.1 + peerDependencies: + "@babel/core": ^7.0.0-0 + checksum: 9f124be39a8e64f909162f925b3a63ddaa5a342a5e24fc0b7f7d9d4d7f7e3b916596c754fb557dc259928399cad5366a27cb231627a0d2dcc4b13ac521cf05af + languageName: node + linkType: hard + +"@svgr/core@npm:^6.5.1": + version: 6.5.1 + resolution: "@svgr/core@npm:6.5.1" + dependencies: + "@babel/core": ^7.19.6 + "@svgr/babel-preset": ^6.5.1 + "@svgr/plugin-jsx": ^6.5.1 + camelcase: ^6.2.0 + cosmiconfig: ^7.0.1 + checksum: fd6d6d5da5aeb956703310480b626c1fb3e3973ad9fe8025efc1dcf3d895f857b70d100c63cf32cebb20eb83c9607bafa464c9436e18fe6fe4fafdc73ed6b1a5 + languageName: node + linkType: hard + +"@svgr/hast-util-to-babel-ast@npm:^6.5.1": + version: 6.5.1 + resolution: "@svgr/hast-util-to-babel-ast@npm:6.5.1" dependencies: - "@ethersproject/bignumber": ^5.7.0 - "@ethersproject/bytes": ^5.7.0 - "@ethersproject/keccak256": ^5.7.0 - "@ethersproject/logger": ^5.7.0 - "@ethersproject/rlp": ^5.7.0 - checksum: 64ea5ebea9cc0e845c413e6cb1e54e157dd9fc0dffb98e239d3a3efc8177f2ff798cd4e3206cf3660ee8faeb7bef1a47dc0ebef0d7b132c32e61e550c7d4c843 + "@babel/types": ^7.20.0 + entities: ^4.4.0 + checksum: 37923cce1b3f4e2039077b0c570b6edbabe37d1cf1a6ee35e71e0fe00f9cffac450eec45e9720b1010418131a999cb0047331ba1b6d1d2c69af1b92ac785aacf languageName: node linkType: hard -"@ethersproject/base64@npm:5.7.0, @ethersproject/base64@npm:^5.7.0": - version: 5.7.0 - resolution: "@ethersproject/base64@npm:5.7.0" +"@svgr/plugin-jsx@npm:^6.5.1": + version: 6.5.1 + resolution: "@svgr/plugin-jsx@npm:6.5.1" dependencies: - "@ethersproject/bytes": ^5.7.0 - checksum: 
7dd5d734d623582f08f665434f53685041a3d3b334a0e96c0c8afa8bbcaab934d50e5b6b980e826a8fde8d353e0b18f11e61faf17468177274b8e7c69cd9742b + "@babel/core": ^7.19.6 + "@svgr/babel-preset": ^6.5.1 + "@svgr/hast-util-to-babel-ast": ^6.5.1 + svg-parser: ^2.0.4 + peerDependencies: + "@svgr/core": ^6.0.0 + checksum: 42f22847a6bdf930514d7bedd3c5e1fd8d53eb3594779f9db16cb94c762425907c375cd8ec789114e100a4d38068aca6c7ab5efea4c612fba63f0630c44cc859 languageName: node linkType: hard -"@ethersproject/basex@npm:5.7.0, @ethersproject/basex@npm:^5.7.0": - version: 5.7.0 - resolution: "@ethersproject/basex@npm:5.7.0" +"@svgr/plugin-svgo@npm:^6.5.1": + version: 6.5.1 + resolution: "@svgr/plugin-svgo@npm:6.5.1" dependencies: - "@ethersproject/bytes": ^5.7.0 - "@ethersproject/properties": ^5.7.0 - checksum: 326087b7e1f3787b5fe6cd1cf2b4b5abfafbc355a45e88e22e5e9d6c845b613ffc5301d629b28d5c4d5e2bfe9ec424e6782c804956dff79be05f0098cb5817de + cosmiconfig: ^7.0.1 + deepmerge: ^4.2.2 + svgo: ^2.8.0 + peerDependencies: + "@svgr/core": "*" + checksum: cd2833530ac0485221adc2146fd992ab20d79f4b12eebcd45fa859721dd779483158e11dfd9a534858fe468416b9412416e25cbe07ac7932c44ed5fa2021c72e + languageName: node + linkType: hard + +"@svgr/webpack@npm:^6.2.1, @svgr/webpack@npm:^6.5.1": + version: 6.5.1 + resolution: "@svgr/webpack@npm:6.5.1" + dependencies: + "@babel/core": ^7.19.6 + "@babel/plugin-transform-react-constant-elements": ^7.18.12 + "@babel/preset-env": ^7.19.4 + "@babel/preset-react": ^7.18.6 + "@babel/preset-typescript": ^7.18.6 + "@svgr/core": ^6.5.1 + "@svgr/plugin-jsx": ^6.5.1 + "@svgr/plugin-svgo": ^6.5.1 + checksum: d10582eb4fa82a5b6d314cb49f2c640af4fd3a60f5b76095d2b14e383ef6a43a6f4674b68774a21787dbde69dec0a251cfcfc3f9a96c82754ba5d5c6daf785f0 + languageName: node + linkType: hard + +"@szmarczak/http-timer@npm:^1.1.2": + version: 1.1.2 + resolution: "@szmarczak/http-timer@npm:1.1.2" + dependencies: + defer-to-connect: ^1.0.1 + checksum: 4d9158061c5f397c57b4988cde33a163244e4f02df16364f103971957a32886beb104d6180902cbe8b38cb940e234d9f98a4e486200deca621923f62f50a06fe + languageName: node + linkType: hard + +"@szmarczak/http-timer@npm:^5.0.1": + version: 5.0.1 + resolution: "@szmarczak/http-timer@npm:5.0.1" + dependencies: + defer-to-connect: ^2.0.1 + checksum: fc9cb993e808806692e4a3337c90ece0ec00c89f4b67e3652a356b89730da98bc824273a6d67ca84d5f33cd85f317dcd5ce39d8cc0a2f060145a608a7cb8ce92 + languageName: node + linkType: hard + +"@tootallnate/quickjs-emscripten@npm:^0.23.0": + version: 0.23.0 + resolution: "@tootallnate/quickjs-emscripten@npm:0.23.0" + checksum: c350a2947ffb80b22e14ff35099fd582d1340d65723384a0fd0515e905e2534459ad2f301a43279a37308a27c99273c932e64649abd57d0bb3ca8c557150eccc + languageName: node + linkType: hard + +"@trysound/sax@npm:0.2.0": + version: 0.2.0 + resolution: "@trysound/sax@npm:0.2.0" + checksum: 11226c39b52b391719a2a92e10183e4260d9651f86edced166da1d95f39a0a1eaa470e44d14ac685ccd6d3df7e2002433782872c0feeb260d61e80f21250e65c + languageName: node + linkType: hard + +"@tsconfig/node10@npm:^1.0.7": + version: 1.0.9 + resolution: "@tsconfig/node10@npm:1.0.9" + checksum: a33ae4dc2a621c0678ac8ac4bceb8e512ae75dac65417a2ad9b022d9b5411e863c4c198b6ba9ef659e14b9fb609bbec680841a2e84c1172df7a5ffcf076539df + languageName: node + linkType: hard + +"@tsconfig/node12@npm:^1.0.7": + version: 1.0.11 + resolution: "@tsconfig/node12@npm:1.0.11" + checksum: 5ce29a41b13e7897a58b8e2df11269c5395999e588b9a467386f99d1d26f6c77d1af2719e407621412520ea30517d718d5192a32403b8dfcc163bf33e40a338a + languageName: node + linkType: hard + 
+"@tsconfig/node14@npm:^1.0.0": + version: 1.0.3 + resolution: "@tsconfig/node14@npm:1.0.3" + checksum: 19275fe80c4c8d0ad0abed6a96dbf00642e88b220b090418609c4376e1cef81bf16237bf170ad1b341452feddb8115d8dd2e5acdfdea1b27422071163dc9ba9d + languageName: node + linkType: hard + +"@tsconfig/node16@npm:^1.0.2": + version: 1.0.4 + resolution: "@tsconfig/node16@npm:1.0.4" + checksum: 202319785901f942a6e1e476b872d421baec20cf09f4b266a1854060efbf78cde16a4d256e8bc949d31e6cd9a90f1e8ef8fb06af96a65e98338a2b6b0de0a0ff + languageName: node + linkType: hard + +"@types/accepts@npm:*": + version: 1.3.7 + resolution: "@types/accepts@npm:1.3.7" + dependencies: + "@types/node": "*" + checksum: 7678cf74976e16093aff6e6f9755826faf069ac1e30179276158ce46ea246348ff22ca6bdd46cef08428881337d9ceefbf00bab08a7731646eb9fc9449d6a1e7 + languageName: node + linkType: hard + +"@types/acorn@npm:^4.0.0": + version: 4.0.6 + resolution: "@types/acorn@npm:4.0.6" + dependencies: + "@types/estree": "*" + checksum: 60e1fd28af18d6cb54a93a7231c7c18774a9a8739c9b179e9e8750dca631e10cbef2d82b02830ea3f557b1d121e6406441e9e1250bd492dc81d4b3456e76e4d4 + languageName: node + linkType: hard + +"@types/adm-zip@npm:^0.5.0": + version: 0.5.5 + resolution: "@types/adm-zip@npm:0.5.5" + dependencies: + "@types/node": "*" + checksum: 808c25b8a1c2e1c594cf9b1514e7953105cf96e19e38aa7dc109ff2537bda7345b950ef1f4e54a6e824e5503e29d24b0ff6d0aa1ff9bd4afb79ef0ef2df9ebab + languageName: node + linkType: hard + +"@types/babel__code-frame@npm:^7.0.2": + version: 7.0.6 + resolution: "@types/babel__code-frame@npm:7.0.6" + checksum: 5325ab85d95e58fe84279757788ddb0de68bfd6814bc636e868f9ff7b5229915873f28847c4baf48fd3a4a460a73b4ea87bc9e1d78a3a5a60cfc7ca627a722c5 + languageName: node + linkType: hard + +"@types/bn.js@npm:^4.11.3": + version: 4.11.6 + resolution: "@types/bn.js@npm:4.11.6" + dependencies: + "@types/node": "*" + checksum: 7f66f2c7b7b9303b3205a57184261974b114495736b77853af5b18d857c0b33e82ce7146911e86e87a87837de8acae28986716fd381ac7c301fd6e8d8b6c811f + languageName: node + linkType: hard + +"@types/bn.js@npm:^5.1.0": + version: 5.1.5 + resolution: "@types/bn.js@npm:5.1.5" + dependencies: + "@types/node": "*" + checksum: c87b28c4af74545624f8a3dae5294b16aa190c222626e8d4b2e327b33b1a3f1eeb43e7a24d914a9774bca43d8cd6e1cb0325c1f4b3a244af6693a024e1d918e6 + languageName: node + linkType: hard + +"@types/body-parser@npm:*": + version: 1.19.5 + resolution: "@types/body-parser@npm:1.19.5" + dependencies: + "@types/connect": "*" + "@types/node": "*" + checksum: 1e251118c4b2f61029cc43b0dc028495f2d1957fe8ee49a707fb940f86a9bd2f9754230805598278fe99958b49e9b7e66eec8ef6a50ab5c1f6b93e1ba2aaba82 + languageName: node + linkType: hard + +"@types/bonjour@npm:^3.5.9": + version: 3.5.13 + resolution: "@types/bonjour@npm:3.5.13" + dependencies: + "@types/node": "*" + checksum: e827570e097bd7d625a673c9c208af2d1a22fa3885c0a1646533cf24394c839c3e5f60ac1bc60c0ddcc69c0615078c9fb2c01b42596c7c582d895d974f2409ee + languageName: node + linkType: hard + +"@types/chai-as-promised@npm:^7.1.3": + version: 7.1.8 + resolution: "@types/chai-as-promised@npm:7.1.8" + dependencies: + "@types/chai": "*" + checksum: f0e5eab451b91bc1e289ed89519faf6591932e8a28d2ec9bbe95826eb73d28fe43713633e0c18706f3baa560a7d97e7c7c20dc53ce639e5d75bac46b2a50bf21 + languageName: node + linkType: hard + +"@types/chai@npm:*, @types/chai@npm:^4, @types/chai@npm:^4.2.12": + version: 4.3.11 + resolution: "@types/chai@npm:4.3.11" + checksum: 
d0c05fe5d02b2e6bbca2bd4866a2ab20a59cf729bc04af0060e7a3277eaf2fb65651b90d4c74b0ebf1d152b4b1d49fa8e44143acef276a2bbaa7785fbe5642d3 + languageName: node + linkType: hard + +"@types/co-body@npm:^6.1.0": + version: 6.1.3 + resolution: "@types/co-body@npm:6.1.3" + dependencies: + "@types/node": "*" + "@types/qs": "*" + checksum: e93fdc177f69ee0535cf401783258e4255f5eb8235c58b5a2a5a8958cf341fadf3d0bf2c75907ed6b7d188ce2c2f2cf9593a71d4eef12900beba54ebbbdd5cc1 + languageName: node + linkType: hard + +"@types/command-line-args@npm:^5.0.0": + version: 5.2.3 + resolution: "@types/command-line-args@npm:5.2.3" + checksum: 3d90db5b4bbaabd049654a0d12fa378989ab0d76a0f98d4c606761b5a08ce76458df0f9bb175219e187b4cd57e285e6f836d23e86b2c3d997820854cc3ed9121 + languageName: node + linkType: hard + +"@types/connect-history-api-fallback@npm:^1.3.5": + version: 1.5.4 + resolution: "@types/connect-history-api-fallback@npm:1.5.4" + dependencies: + "@types/express-serve-static-core": "*" + "@types/node": "*" + checksum: e1dee43b8570ffac02d2d47a2b4ba80d3ca0dd1840632dafb221da199e59dbe3778d3d7303c9e23c6b401f37c076935a5bc2aeae1c4e5feaefe1c371fe2073fd + languageName: node + linkType: hard + +"@types/connect@npm:*": + version: 3.4.38 + resolution: "@types/connect@npm:3.4.38" + dependencies: + "@types/node": "*" + checksum: 7eb1bc5342a9604facd57598a6c62621e244822442976c443efb84ff745246b10d06e8b309b6e80130026a396f19bf6793b7cecd7380169f369dac3bfc46fb99 + languageName: node + linkType: hard + +"@types/content-disposition@npm:*": + version: 0.5.8 + resolution: "@types/content-disposition@npm:0.5.8" + checksum: eeea868fb510ae7a32aa2d7de680fba79d59001f3e758a334621e10bc0a6496d3a42bb79243a5e53b9c63cb524522853ccc144fe1ab160c4247d37cdb81146c4 + languageName: node + linkType: hard + +"@types/convert-source-map@npm:^2.0.0": + version: 2.0.3 + resolution: "@types/convert-source-map@npm:2.0.3" + checksum: 411cf9a02cf5dbe204e325dd5ebf50de00b58b38d1d2a3064c6ea28417c23bae956206eaa9ed3a75a994909b4ab3f9c6389073d0636a62500fa6d6333c64d45a + languageName: node + linkType: hard + +"@types/cookies@npm:*": + version: 0.7.10 + resolution: "@types/cookies@npm:0.7.10" + dependencies: + "@types/connect": "*" + "@types/express": "*" + "@types/keygrip": "*" + "@types/node": "*" + checksum: 99cd44a193398932ff7926cfaac1eb4441d3dc47c3f64fdfb28861acbeb290b6db6a20376f993defc9d302db92bb1d36189b89ba447a633f960535f3f0d34e2d + languageName: node + linkType: hard + +"@types/debounce@npm:^1.2.0": + version: 1.2.4 + resolution: "@types/debounce@npm:1.2.4" + checksum: decef3eee65d681556d50f7fac346f1b33134f6b21f806d41326f9dfb362fa66b0282ff0640ae6791b690694c9dc3dad4e146e909e707e6f96650f3aa325b9da + languageName: node + linkType: hard + +"@types/debug@npm:^4.0.0": + version: 4.1.12 + resolution: "@types/debug@npm:4.1.12" + dependencies: + "@types/ms": "*" + checksum: 47876a852de8240bfdaf7481357af2b88cb660d30c72e73789abf00c499d6bc7cd5e52f41c915d1b9cd8ec9fef5b05688d7b7aef17f7f272c2d04679508d1053 + languageName: node + linkType: hard + +"@types/eslint-scope@npm:^3.7.3": + version: 3.7.7 + resolution: "@types/eslint-scope@npm:3.7.7" + dependencies: + "@types/eslint": "*" + "@types/estree": "*" + checksum: e2889a124aaab0b89af1bab5959847c5bec09809209255de0e63b9f54c629a94781daa04adb66bffcdd742f5e25a17614fb933965093c0eea64aacda4309380e + languageName: node + linkType: hard + +"@types/eslint@npm:*": + version: 8.44.8 + resolution: "@types/eslint@npm:8.44.8" + dependencies: + "@types/estree": "*" + "@types/json-schema": "*" + checksum: 
c3bc70166075e6e9f7fb43978882b9ac0b22596b519900b08dc8a1d761bbbddec4c48a60cc4eb674601266223c6f11db30f3fb6ceaae96c23c54b35ad88022bc + languageName: node + linkType: hard + +"@types/estree-jsx@npm:^1.0.0": + version: 1.0.3 + resolution: "@types/estree-jsx@npm:1.0.3" + dependencies: + "@types/estree": "*" + checksum: 6887a134308b6db4a33a147b56c9d0a47c17ea7e810bdd7c498c306a0fd00bcf2619cb0f57f74009d03dda974b3cd7e414767f85332b1d1b2be30a3ef9e1cca9 + languageName: node + linkType: hard + +"@types/estree@npm:*, @types/estree@npm:1.0.5, @types/estree@npm:^1.0.0": + version: 1.0.5 + resolution: "@types/estree@npm:1.0.5" + checksum: dd8b5bed28e6213b7acd0fb665a84e693554d850b0df423ac8076cc3ad5823a6bc26b0251d080bdc545af83179ede51dd3f6fa78cad2c46ed1f29624ddf3e41a + languageName: node + linkType: hard + +"@types/estree@npm:0.0.39": + version: 0.0.39 + resolution: "@types/estree@npm:0.0.39" + checksum: 412fb5b9868f2c418126451821833414189b75cc6bf84361156feed733e3d92ec220b9d74a89e52722e03d5e241b2932732711b7497374a404fad49087adc248 languageName: node linkType: hard -"@ethersproject/bignumber@npm:5.7.0, @ethersproject/bignumber@npm:^5.7.0": - version: 5.7.0 - resolution: "@ethersproject/bignumber@npm:5.7.0" +"@types/express-serve-static-core@npm:*, @types/express-serve-static-core@npm:^4.17.33": + version: 4.17.41 + resolution: "@types/express-serve-static-core@npm:4.17.41" dependencies: - "@ethersproject/bytes": ^5.7.0 - "@ethersproject/logger": ^5.7.0 - bn.js: ^5.2.1 - checksum: 8c9a134b76f3feb4ec26a5a27379efb4e156b8fb2de0678a67788a91c7f4e30abe9d948638458e4b20f2e42380da0adacc7c9389d05fce070692edc6ae9b4904 + "@types/node": "*" + "@types/qs": "*" + "@types/range-parser": "*" + "@types/send": "*" + checksum: 12750f6511dd870bbaccfb8208ad1e79361cf197b147f62a3bedc19ec642f3a0f9926ace96705f4bc88ec2ae56f61f7ca8c2438e6b22f5540842b5569c28a121 languageName: node linkType: hard -"@ethersproject/bytes@npm:5.7.0, @ethersproject/bytes@npm:^5.7.0": - version: 5.7.0 - resolution: "@ethersproject/bytes@npm:5.7.0" +"@types/express@npm:*, @types/express@npm:^4.17.13": + version: 4.17.21 + resolution: "@types/express@npm:4.17.21" dependencies: - "@ethersproject/logger": ^5.7.0 - checksum: 66ad365ceaab5da1b23b72225c71dce472cf37737af5118181fa8ab7447d696bea15ca22e3a0e8836fdd8cfac161afe321a7c67d0dde96f9f645ddd759676621 + "@types/body-parser": "*" + "@types/express-serve-static-core": ^4.17.33 + "@types/qs": "*" + "@types/serve-static": "*" + checksum: fb238298630370a7392c7abdc80f495ae6c716723e114705d7e3fb67e3850b3859bbfd29391463a3fb8c0b32051847935933d99e719c0478710f8098ee7091c5 languageName: node linkType: hard -"@ethersproject/constants@npm:5.7.0, @ethersproject/constants@npm:^5.7.0": - version: 5.7.0 - resolution: "@ethersproject/constants@npm:5.7.0" +"@types/fs-extra@npm:^9.0.13": + version: 9.0.13 + resolution: "@types/fs-extra@npm:9.0.13" dependencies: - "@ethersproject/bignumber": ^5.7.0 - checksum: 6d4b1355747cce837b3e76ec3bde70e4732736f23b04f196f706ebfa5d4d9c2be50904a390d4d40ce77803b98d03d16a9b6898418e04ba63491933ce08c4ba8a + "@types/node": "*" + checksum: add79e212acd5ac76b97b9045834e03a7996aef60a814185e0459088fd290519a3c1620865d588fa36c4498bf614210d2a703af5cf80aa1dbc125db78f6edac3 languageName: node linkType: hard -"@ethersproject/contracts@npm:5.7.0": - version: 5.7.0 - resolution: "@ethersproject/contracts@npm:5.7.0" - dependencies: - "@ethersproject/abi": ^5.7.0 - "@ethersproject/abstract-provider": ^5.7.0 - "@ethersproject/abstract-signer": ^5.7.0 - "@ethersproject/address": ^5.7.0 - "@ethersproject/bignumber": ^5.7.0 - 
"@ethersproject/bytes": ^5.7.0 - "@ethersproject/constants": ^5.7.0 - "@ethersproject/logger": ^5.7.0 - "@ethersproject/properties": ^5.7.0 - "@ethersproject/transactions": ^5.7.0 - checksum: 6ccf1121cba01b31e02f8c507cb971ab6bfed85706484a9ec09878ef1594a62215f43c4fdef8f4a4875b99c4a800bc95e3be69b1803f8ce479e07634b5a740c0 +"@types/gtag.js@npm:^0.0.12": + version: 0.0.12 + resolution: "@types/gtag.js@npm:0.0.12" + checksum: 34efc27fbfd0013255b8bfd4af38ded9d5a6ba761130c76f17fd3a9585d83acc88d8005aab667cfec4bdec0e7c7217f689739799a8f61aed0edb929be58b162e languageName: node linkType: hard -"@ethersproject/hash@npm:5.7.0, @ethersproject/hash@npm:^5.7.0": - version: 5.7.0 - resolution: "@ethersproject/hash@npm:5.7.0" +"@types/hast@npm:^2.0.0": + version: 2.3.8 + resolution: "@types/hast@npm:2.3.8" dependencies: - "@ethersproject/abstract-signer": ^5.7.0 - "@ethersproject/address": ^5.7.0 - "@ethersproject/base64": ^5.7.0 - "@ethersproject/bignumber": ^5.7.0 - "@ethersproject/bytes": ^5.7.0 - "@ethersproject/keccak256": ^5.7.0 - "@ethersproject/logger": ^5.7.0 - "@ethersproject/properties": ^5.7.0 - "@ethersproject/strings": ^5.7.0 - checksum: 6e9fa8d14eb08171cd32f17f98cc108ec2aeca74a427655f0d689c550fee0b22a83b3b400fad7fb3f41cf14d4111f87f170aa7905bcbcd1173a55f21b06262ef + "@types/unist": ^2 + checksum: 4c3b3efb7067d32a568a9bf5d2a7599f99ec08c2eaade3aaeb579b7a31bcdf8f6475f56c1ac5bc3f4e4e07b84a93a9b1cf1ef9a8b52b39e3deabea7989e5dd4b languageName: node linkType: hard -"@ethersproject/hdnode@npm:5.7.0, @ethersproject/hdnode@npm:^5.7.0": - version: 5.7.0 - resolution: "@ethersproject/hdnode@npm:5.7.0" +"@types/hast@npm:^3.0.0": + version: 3.0.3 + resolution: "@types/hast@npm:3.0.3" dependencies: - "@ethersproject/abstract-signer": ^5.7.0 - "@ethersproject/basex": ^5.7.0 - "@ethersproject/bignumber": ^5.7.0 - "@ethersproject/bytes": ^5.7.0 - "@ethersproject/logger": ^5.7.0 - "@ethersproject/pbkdf2": ^5.7.0 - "@ethersproject/properties": ^5.7.0 - "@ethersproject/sha2": ^5.7.0 - "@ethersproject/signing-key": ^5.7.0 - "@ethersproject/strings": ^5.7.0 - "@ethersproject/transactions": ^5.7.0 - "@ethersproject/wordlists": ^5.7.0 - checksum: bfe5ca2d89a42de73655f853170ef4766b933c5f481cddad709b3aca18823275b096e572f92d1602a052f80b426edde44ad6b9d028799775a7dad4a5bbed2133 + "@types/unist": "*" + checksum: ca204207550fd6848ee20b5ba2018fd54f515d59a8b80375cdbe392ba2b4b130dac25fdfbaf9f2a70d2aec9d074a34dc14d4d59d31fa3ede80ef9850afad5d3c languageName: node linkType: hard -"@ethersproject/json-wallets@npm:5.7.0, @ethersproject/json-wallets@npm:^5.7.0": - version: 5.7.0 - resolution: "@ethersproject/json-wallets@npm:5.7.0" - dependencies: - "@ethersproject/abstract-signer": ^5.7.0 - "@ethersproject/address": ^5.7.0 - "@ethersproject/bytes": ^5.7.0 - "@ethersproject/hdnode": ^5.7.0 - "@ethersproject/keccak256": ^5.7.0 - "@ethersproject/logger": ^5.7.0 - "@ethersproject/pbkdf2": ^5.7.0 - "@ethersproject/properties": ^5.7.0 - "@ethersproject/random": ^5.7.0 - "@ethersproject/strings": ^5.7.0 - "@ethersproject/transactions": ^5.7.0 - aes-js: 3.0.0 - scrypt-js: 3.0.1 - checksum: f583458d22db62efaaf94d38dd243482776a45bf90f9f3882fbad5aa0b8fd288b41eb7c1ff8ec0b99c9b751088e43d6173530db64dd33c59f9d8daa8d7ad5aa2 +"@types/history@npm:^4.7.11": + version: 4.7.11 + resolution: "@types/history@npm:4.7.11" + checksum: c92e2ba407dcab0581a9afdf98f533aa41b61a71133420a6d92b1ca9839f741ab1f9395b17454ba5b88cb86020b70b22d74a1950ccfbdfd9beeaa5459fdc3464 languageName: node linkType: hard -"@ethersproject/keccak256@npm:5.7.0, 
@ethersproject/keccak256@npm:^5.7.0": - version: 5.7.0 - resolution: "@ethersproject/keccak256@npm:5.7.0" - dependencies: - "@ethersproject/bytes": ^5.7.0 - js-sha3: 0.8.0 - checksum: ff70950d82203aab29ccda2553422cbac2e7a0c15c986bd20a69b13606ed8bb6e4fdd7b67b8d3b27d4f841e8222cbaccd33ed34be29f866fec7308f96ed244c6 +"@types/html-minifier-terser@npm:^6.0.0": + version: 6.1.0 + resolution: "@types/html-minifier-terser@npm:6.1.0" + checksum: eb843f6a8d662d44fb18ec61041117734c6aae77aa38df1be3b4712e8e50ffaa35f1e1c92fdd0fde14a5675fecf457abcd0d15a01fae7506c91926176967f452 languageName: node linkType: hard -"@ethersproject/logger@npm:5.7.0, @ethersproject/logger@npm:^5.7.0": - version: 5.7.0 - resolution: "@ethersproject/logger@npm:5.7.0" - checksum: 075ab2f605f1fd0813f2e39c3308f77b44a67732b36e712d9bc085f22a84aac4da4f71b39bee50fe78da3e1c812673fadc41180c9970fe5e486e91ea17befe0d +"@types/http-assert@npm:*": + version: 1.5.5 + resolution: "@types/http-assert@npm:1.5.5" + checksum: cd6bb7fd42cc6e2a702cb55370b8b25231954ad74c04bcd185b943a74ded3d4c28099c30f77b26951df2426441baff41718816c60b5af80efe2b8888d900bf93 languageName: node linkType: hard -"@ethersproject/networks@npm:5.7.1, @ethersproject/networks@npm:^5.7.0": - version: 5.7.1 - resolution: "@ethersproject/networks@npm:5.7.1" - dependencies: - "@ethersproject/logger": ^5.7.0 - checksum: 0339f312304c17d9a0adce550edb825d4d2c8c9468c1634c44172c67a9ed256f594da62c4cda5c3837a0f28b7fabc03aca9b492f68ff1fdad337ee861b27bd5d +"@types/http-cache-semantics@npm:^4.0.2": + version: 4.0.4 + resolution: "@types/http-cache-semantics@npm:4.0.4" + checksum: 7f4dd832e618bc1e271be49717d7b4066d77c2d4eed5b81198eb987e532bb3e1c7e02f45d77918185bad936f884b700c10cebe06305f50400f382ab75055f9e8 languageName: node linkType: hard -"@ethersproject/pbkdf2@npm:5.7.0, @ethersproject/pbkdf2@npm:^5.7.0": - version: 5.7.0 - resolution: "@ethersproject/pbkdf2@npm:5.7.0" - dependencies: - "@ethersproject/bytes": ^5.7.0 - "@ethersproject/sha2": ^5.7.0 - checksum: b895adb9e35a8a127e794f7aadc31a2424ef355a70e51cde10d457e3e888bb8102373199a540cf61f2d6b9a32e47358f9c65b47d559f42bf8e596b5fd67901e9 +"@types/http-errors@npm:*": + version: 2.0.4 + resolution: "@types/http-errors@npm:2.0.4" + checksum: 1f3d7c3b32c7524811a45690881736b3ef741bf9849ae03d32ad1ab7062608454b150a4e7f1351f83d26a418b2d65af9bdc06198f1c079d75578282884c4e8e3 languageName: node linkType: hard -"@ethersproject/properties@npm:5.7.0, @ethersproject/properties@npm:^5.7.0": - version: 5.7.0 - resolution: "@ethersproject/properties@npm:5.7.0" +"@types/http-proxy@npm:^1.17.8": + version: 1.17.14 + resolution: "@types/http-proxy@npm:1.17.14" dependencies: - "@ethersproject/logger": ^5.7.0 - checksum: 6ab0ccf0c3aadc9221e0cdc5306ce6cd0df7f89f77d77bccdd1277182c9ead0202cd7521329ba3acde130820bf8af299e17cf567d0d497c736ee918207bbf59f + "@types/node": "*" + checksum: 491320bce3565bbb6c7d39d25b54bce626237cfb6b09e60ee7f77b56ae7c6cbad76f08d47fe01eaa706781124ee3dfad9bb737049254491efd98ed1f014c4e83 languageName: node linkType: hard -"@ethersproject/providers@npm:5.7.2, @ethersproject/providers@npm:^5.7.1, @ethersproject/providers@npm:^5.7.2": - version: 5.7.2 - resolution: "@ethersproject/providers@npm:5.7.2" - dependencies: - "@ethersproject/abstract-provider": ^5.7.0 - "@ethersproject/abstract-signer": ^5.7.0 - "@ethersproject/address": ^5.7.0 - "@ethersproject/base64": ^5.7.0 - "@ethersproject/basex": ^5.7.0 - "@ethersproject/bignumber": ^5.7.0 - "@ethersproject/bytes": ^5.7.0 - "@ethersproject/constants": ^5.7.0 - "@ethersproject/hash": ^5.7.0 - 
"@ethersproject/logger": ^5.7.0 - "@ethersproject/networks": ^5.7.0 - "@ethersproject/properties": ^5.7.0 - "@ethersproject/random": ^5.7.0 - "@ethersproject/rlp": ^5.7.0 - "@ethersproject/sha2": ^5.7.0 - "@ethersproject/strings": ^5.7.0 - "@ethersproject/transactions": ^5.7.0 - "@ethersproject/web": ^5.7.0 - bech32: 1.1.4 - ws: 7.4.6 - checksum: 1754c731a5ca6782ae9677f4a9cd8b6246c4ef21a966c9a01b133750f3c578431ec43ec254e699969c4a0f87e84463ded50f96b415600aabd37d2056aee58c19 +"@types/istanbul-lib-coverage@npm:*, @types/istanbul-lib-coverage@npm:^2.0.0, @types/istanbul-lib-coverage@npm:^2.0.1, @types/istanbul-lib-coverage@npm:^2.0.3": + version: 2.0.6 + resolution: "@types/istanbul-lib-coverage@npm:2.0.6" + checksum: 3feac423fd3e5449485afac999dcfcb3d44a37c830af898b689fadc65d26526460bedb889db278e0d4d815a670331796494d073a10ee6e3a6526301fe7415778 languageName: node linkType: hard -"@ethersproject/random@npm:5.7.0, @ethersproject/random@npm:^5.7.0": - version: 5.7.0 - resolution: "@ethersproject/random@npm:5.7.0" +"@types/istanbul-lib-report@npm:*": + version: 3.0.3 + resolution: "@types/istanbul-lib-report@npm:3.0.3" dependencies: - "@ethersproject/bytes": ^5.7.0 - "@ethersproject/logger": ^5.7.0 - checksum: 017829c91cff6c76470852855108115b0b52c611b6be817ed1948d56ba42d6677803ec2012aa5ae298a7660024156a64c11fcf544e235e239ab3f89f0fff7345 + "@types/istanbul-lib-coverage": "*" + checksum: b91e9b60f865ff08cb35667a427b70f6c2c63e88105eadd29a112582942af47ed99c60610180aa8dcc22382fa405033f141c119c69b95db78c4c709fbadfeeb4 languageName: node linkType: hard -"@ethersproject/rlp@npm:5.7.0, @ethersproject/rlp@npm:^5.7.0": - version: 5.7.0 - resolution: "@ethersproject/rlp@npm:5.7.0" +"@types/istanbul-reports@npm:^3.0.0": + version: 3.0.4 + resolution: "@types/istanbul-reports@npm:3.0.4" dependencies: - "@ethersproject/bytes": ^5.7.0 - "@ethersproject/logger": ^5.7.0 - checksum: bce165b0f7e68e4d091c9d3cf47b247cac33252df77a095ca4281d32d5eeaaa3695d9bc06b2b057c5015353a68df89f13a4a54a72e888e4beeabbe56b15dda6e + "@types/istanbul-lib-report": "*" + checksum: 93eb18835770b3431f68ae9ac1ca91741ab85f7606f310a34b3586b5a34450ec038c3eed7ab19266635499594de52ff73723a54a72a75b9f7d6a956f01edee95 languageName: node linkType: hard -"@ethersproject/sha2@npm:5.7.0, @ethersproject/sha2@npm:^5.7.0": - version: 5.7.0 - resolution: "@ethersproject/sha2@npm:5.7.0" - dependencies: - "@ethersproject/bytes": ^5.7.0 - "@ethersproject/logger": ^5.7.0 - hash.js: 1.1.7 - checksum: 09321057c022effbff4cc2d9b9558228690b5dd916329d75c4b1ffe32ba3d24b480a367a7cc92d0f0c0b1c896814d03351ae4630e2f1f7160be2bcfbde435dbc +"@types/json-schema@npm:*, @types/json-schema@npm:^7.0.12, @types/json-schema@npm:^7.0.4, @types/json-schema@npm:^7.0.5, @types/json-schema@npm:^7.0.8, @types/json-schema@npm:^7.0.9": + version: 7.0.15 + resolution: "@types/json-schema@npm:7.0.15" + checksum: 97ed0cb44d4070aecea772b7b2e2ed971e10c81ec87dd4ecc160322ffa55ff330dace1793489540e3e318d90942064bb697cc0f8989391797792d919737b3b98 languageName: node linkType: hard -"@ethersproject/signing-key@npm:5.7.0, @ethersproject/signing-key@npm:^5.7.0": - version: 5.7.0 - resolution: "@ethersproject/signing-key@npm:5.7.0" - dependencies: - "@ethersproject/bytes": ^5.7.0 - "@ethersproject/logger": ^5.7.0 - "@ethersproject/properties": ^5.7.0 - bn.js: ^5.2.1 - elliptic: 6.5.4 - hash.js: 1.1.7 - checksum: 8f8de09b0aac709683bbb49339bc0a4cd2f95598f3546436c65d6f3c3a847ffa98e06d35e9ed2b17d8030bd2f02db9b7bd2e11c5cf8a71aad4537487ab4cf03a +"@types/katex@npm:^0.16.0": + version: 0.16.7 + resolution: 
"@types/katex@npm:0.16.7" + checksum: 4fd15d93553be97c02c064e16be18d7ccbabf66ec72a9dc7fd5bfa47f0c7581da2f942f693c7cb59499de4c843c2189796e49c9647d336cbd52b777b6722a95a languageName: node linkType: hard -"@ethersproject/solidity@npm:5.7.0": - version: 5.7.0 - resolution: "@ethersproject/solidity@npm:5.7.0" +"@types/keygrip@npm:*": + version: 1.0.6 + resolution: "@types/keygrip@npm:1.0.6" + checksum: d157f60bf920492347791d2b26d530d5069ce05796549fbacd4c24d66ffbebbcb0ab67b21e7a1b80a593b9fd4b67dc4843dec04c12bbc2e0fddfb8577a826c41 + languageName: node + linkType: hard + +"@types/keyv@npm:^3.1.1": + version: 3.1.4 + resolution: "@types/keyv@npm:3.1.4" dependencies: - "@ethersproject/bignumber": ^5.7.0 - "@ethersproject/bytes": ^5.7.0 - "@ethersproject/keccak256": ^5.7.0 - "@ethersproject/logger": ^5.7.0 - "@ethersproject/sha2": ^5.7.0 - "@ethersproject/strings": ^5.7.0 - checksum: 9a02f37f801c96068c3e7721f83719d060175bc4e80439fe060e92bd7acfcb6ac1330c7e71c49f4c2535ca1308f2acdcb01e00133129aac00581724c2d6293f3 + "@types/node": "*" + checksum: e009a2bfb50e90ca9b7c6e8f648f8464067271fd99116f881073fa6fa76dc8d0133181dd65e6614d5fb1220d671d67b0124aef7d97dc02d7e342ab143a47779d languageName: node linkType: hard -"@ethersproject/strings@npm:5.7.0, @ethersproject/strings@npm:^5.7.0": - version: 5.7.0 - resolution: "@ethersproject/strings@npm:5.7.0" +"@types/koa-compose@npm:*": + version: 3.2.8 + resolution: "@types/koa-compose@npm:3.2.8" dependencies: - "@ethersproject/bytes": ^5.7.0 - "@ethersproject/constants": ^5.7.0 - "@ethersproject/logger": ^5.7.0 - checksum: 5ff78693ae3fdf3cf23e1f6dc047a61e44c8197d2408c42719fef8cb7b7b3613a4eec88ac0ed1f9f5558c74fe0de7ae3195a29ca91a239c74b9f444d8e8b50df + "@types/koa": "*" + checksum: 95c32bdee738ac7c10439bbf6342ca3b9f0aafd7e8118739eac7fb0fa703a23cfe4c88f63e13a69a16fbde702e0bcdc62b272aa734325fc8efa7e5625479752e languageName: node linkType: hard -"@ethersproject/transactions@npm:5.7.0, @ethersproject/transactions@npm:^5.7.0": - version: 5.7.0 - resolution: "@ethersproject/transactions@npm:5.7.0" +"@types/koa@npm:*, @types/koa@npm:^2.11.6": + version: 2.13.12 + resolution: "@types/koa@npm:2.13.12" dependencies: - "@ethersproject/address": ^5.7.0 - "@ethersproject/bignumber": ^5.7.0 - "@ethersproject/bytes": ^5.7.0 - "@ethersproject/constants": ^5.7.0 - "@ethersproject/keccak256": ^5.7.0 - "@ethersproject/logger": ^5.7.0 - "@ethersproject/properties": ^5.7.0 - "@ethersproject/rlp": ^5.7.0 - "@ethersproject/signing-key": ^5.7.0 - checksum: a31b71996d2b283f68486241bff0d3ea3f1ba0e8f1322a8fffc239ccc4f4a7eb2ea9994b8fd2f093283fd75f87bae68171e01b6265261f821369aca319884a79 + "@types/accepts": "*" + "@types/content-disposition": "*" + "@types/cookies": "*" + "@types/http-assert": "*" + "@types/http-errors": "*" + "@types/keygrip": "*" + "@types/koa-compose": "*" + "@types/node": "*" + checksum: 4a2e7e19bf91779d0a3c46b8838d6faba5ee0e9a888e5ef8683722c21aa8efcf35f86d1a661adb56ca9f239f7724e306055e33bfa1b2dc008b9c0fa9fcc12bee languageName: node linkType: hard -"@ethersproject/units@npm:5.7.0": - version: 5.7.0 - resolution: "@ethersproject/units@npm:5.7.0" - dependencies: - "@ethersproject/bignumber": ^5.7.0 - "@ethersproject/constants": ^5.7.0 - "@ethersproject/logger": ^5.7.0 - checksum: 304714f848cd32e57df31bf545f7ad35c2a72adae957198b28cbc62166daa929322a07bff6e9c9ac4577ab6aa0de0546b065ed1b2d20b19e25748b7d475cb0fc +"@types/lru-cache@npm:^5.1.0": + version: 5.1.1 + resolution: "@types/lru-cache@npm:5.1.1" + checksum: 
e1d6c0085f61b16ec5b3073ec76ad1be4844ea036561c3f145fc19f71f084b58a6eb600b14128aa95809d057d28f1d147c910186ae51219f58366ffd2ff2e118 languageName: node linkType: hard -"@ethersproject/wallet@npm:5.7.0": - version: 5.7.0 - resolution: "@ethersproject/wallet@npm:5.7.0" +"@types/mdast@npm:^3.0.0": + version: 3.0.15 + resolution: "@types/mdast@npm:3.0.15" dependencies: - "@ethersproject/abstract-provider": ^5.7.0 - "@ethersproject/abstract-signer": ^5.7.0 - "@ethersproject/address": ^5.7.0 - "@ethersproject/bignumber": ^5.7.0 - "@ethersproject/bytes": ^5.7.0 - "@ethersproject/hash": ^5.7.0 - "@ethersproject/hdnode": ^5.7.0 - "@ethersproject/json-wallets": ^5.7.0 - "@ethersproject/keccak256": ^5.7.0 - "@ethersproject/logger": ^5.7.0 - "@ethersproject/properties": ^5.7.0 - "@ethersproject/random": ^5.7.0 - "@ethersproject/signing-key": ^5.7.0 - "@ethersproject/transactions": ^5.7.0 - "@ethersproject/wordlists": ^5.7.0 - checksum: a4009bf7331eddab38e3015b5e9101ef92de7f705b00a6196b997db0e5635b6d83561674d46c90c6f77b87c0500fe4a6b0183ba13749efc22db59c99deb82fbd + "@types/unist": ^2 + checksum: af85042a4e3af3f879bde4059fa9e76c71cb552dffc896cdcc6cf9dc1fd38e37035c2dbd6245cfa6535b433f1f0478f5549696234ccace47a64055a10c656530 languageName: node linkType: hard -"@ethersproject/web@npm:5.7.1, @ethersproject/web@npm:^5.7.0": - version: 5.7.1 - resolution: "@ethersproject/web@npm:5.7.1" +"@types/mdast@npm:^4.0.0, @types/mdast@npm:^4.0.2": + version: 4.0.3 + resolution: "@types/mdast@npm:4.0.3" dependencies: - "@ethersproject/base64": ^5.7.0 - "@ethersproject/bytes": ^5.7.0 - "@ethersproject/logger": ^5.7.0 - "@ethersproject/properties": ^5.7.0 - "@ethersproject/strings": ^5.7.0 - checksum: 7028c47103f82fd2e2c197ce0eecfacaa9180ffeec7de7845b1f4f9b19d84081b7a48227aaddde05a4aaa526af574a9a0ce01cc0fc75e3e371f84b38b5b16b2b + "@types/unist": "*" + checksum: 345c5a22fccf05f35239ea6313ee4aaf6ebed5927c03ac79744abccb69b9ba5e692f9b771e36a012b79e17429082cada30f579e9c43b8a54e0ffb365431498b6 languageName: node linkType: hard -"@ethersproject/wordlists@npm:5.7.0, @ethersproject/wordlists@npm:^5.7.0": - version: 5.7.0 - resolution: "@ethersproject/wordlists@npm:5.7.0" - dependencies: - "@ethersproject/bytes": ^5.7.0 - "@ethersproject/hash": ^5.7.0 - "@ethersproject/logger": ^5.7.0 - "@ethersproject/properties": ^5.7.0 - "@ethersproject/strings": ^5.7.0 - checksum: 30eb6eb0731f9ef5faa44bf9c0c6e950bcaaef61e4d2d9ce0ae6d341f4e2d6d1f4ab4f8880bfce03b7aac4b862fb740e1421170cfbf8e2aafc359277d49e6e97 +"@types/mdx@npm:^2.0.0": + version: 2.0.10 + resolution: "@types/mdx@npm:2.0.10" + checksum: 3e2fb24b7bfae739a59573344171292b6c31256ad9afddc00232e9de4fbc97b270e1a11d13cb935cba0d9bbb9bc7348793eda82ee752233c5d2289f4b897f719 languageName: node linkType: hard -"@fastify/busboy@npm:^2.0.0": - version: 2.0.0 - resolution: "@fastify/busboy@npm:2.0.0" - checksum: 41879937ce1dee6421ef9cd4da53239830617e1f0bb7a0e843940772cd72827205d05e518af6adabe6e1ea19301285fff432b9d11bad01a531e698bea95c781b +"@types/mime@npm:*": + version: 3.0.4 + resolution: "@types/mime@npm:3.0.4" + checksum: a6139c8e1f705ef2b064d072f6edc01f3c099023ad7c4fce2afc6c2bf0231888202adadbdb48643e8e20da0ce409481a49922e737eca52871b3dc08017455843 languageName: node linkType: hard -"@hapi/hoek@npm:^9.0.0": - version: 9.3.0 - resolution: "@hapi/hoek@npm:9.3.0" - checksum: 4771c7a776242c3c022b168046af4e324d116a9d2e1d60631ee64f474c6e38d1bb07092d898bf95c7bc5d334c5582798a1456321b2e53ca817d4e7c88bc25b43 +"@types/mime@npm:^1": + version: 1.3.5 + resolution: "@types/mime@npm:1.3.5" + checksum: 
e29a5f9c4776f5229d84e525b7cd7dd960b51c30a0fb9a028c0821790b82fca9f672dab56561e2acd9e8eed51d431bde52eafdfef30f643586c4162f1aecfc78 languageName: node linkType: hard -"@hapi/topo@npm:^5.0.0": - version: 5.1.0 - resolution: "@hapi/topo@npm:5.1.0" - dependencies: - "@hapi/hoek": ^9.0.0 - checksum: 604dfd5dde76d5c334bd03f9001fce69c7ce529883acf92da96f4fe7e51221bf5e5110e964caca287a6a616ba027c071748ab636ff178ad750547fba611d6014 +"@types/minimist@npm:^1.2.2": + version: 1.2.5 + resolution: "@types/minimist@npm:1.2.5" + checksum: 477047b606005058ab0263c4f58097136268007f320003c348794f74adedc3166ffc47c80ec3e94687787f2ab7f4e72c468223946e79892cf0fd9e25e9970a90 languageName: node linkType: hard -"@humanwhocodes/config-array@npm:^0.11.11": - version: 0.11.11 - resolution: "@humanwhocodes/config-array@npm:0.11.11" +"@types/mocha-each@npm:^2": + version: 2.0.4 + resolution: "@types/mocha-each@npm:2.0.4" dependencies: - "@humanwhocodes/object-schema": ^1.2.1 - debug: ^4.1.1 - minimatch: ^3.0.5 - checksum: db84507375ab77b8ffdd24f498a5b49ad6b64391d30dd2ac56885501d03964d29637e05b1ed5aefa09d57ac667e28028bc22d2da872bfcd619652fbdb5f4ca19 + "@types/mocha": "*" + checksum: 2588284db079e2d0a17735c8fb5c12cba9feabf2de55c9ab49e1f3b38cc522691d30ed3abb1bcb21c087b27f373e3f4123ef7bd8d9a4f95cef38f6c8045c71f3 languageName: node linkType: hard -"@humanwhocodes/module-importer@npm:^1.0.1": - version: 1.0.1 - resolution: "@humanwhocodes/module-importer@npm:1.0.1" - checksum: 0fd22007db8034a2cdf2c764b140d37d9020bbfce8a49d3ec5c05290e77d4b0263b1b972b752df8c89e5eaa94073408f2b7d977aed131faf6cf396ebb5d7fb61 +"@types/mocha@npm:*, @types/mocha@npm:^10.0.1, @types/mocha@npm:^10.0.6": + version: 10.0.6 + resolution: "@types/mocha@npm:10.0.6" + checksum: f7c836cf6cf27dc0f5970d262591b56f2a3caeaec8cfdc612c12e1cfbb207f601f710ece207e935164d4e3343b93be5054d0db5544f31f453b3923775d82099f languageName: node linkType: hard -"@humanwhocodes/object-schema@npm:^1.2.1": - version: 1.2.1 - resolution: "@humanwhocodes/object-schema@npm:1.2.1" - checksum: a824a1ec31591231e4bad5787641f59e9633827d0a2eaae131a288d33c9ef0290bd16fda8da6f7c0fcb014147865d12118df10db57f27f41e20da92369fcb3f1 +"@types/mocha@npm:^8.2.0": + version: 8.2.3 + resolution: "@types/mocha@npm:8.2.3" + checksum: b43ed1b642a2ee62bf10792a07d5d21d66ab8b4d2cf5d822c8a7643e77b90009aecc000eefab5f6ddc9eb69004192f84119a6f97a8499e1a13ea082e7a5e71bf languageName: node linkType: hard -"@isaacs/cliui@npm:^8.0.2": - version: 8.0.2 - resolution: "@isaacs/cliui@npm:8.0.2" - dependencies: - string-width: ^5.1.2 - string-width-cjs: "npm:string-width@^4.2.0" - strip-ansi: ^7.0.1 - strip-ansi-cjs: "npm:strip-ansi@^6.0.1" - wrap-ansi: ^8.1.0 - wrap-ansi-cjs: "npm:wrap-ansi@^7.0.0" - checksum: 4a473b9b32a7d4d3cfb7a614226e555091ff0c5a29a1734c28c72a182c2f6699b26fc6b5c2131dfd841e86b185aea714c72201d7c98c2fba5f17709333a67aeb +"@types/ms@npm:*": + version: 0.7.34 + resolution: "@types/ms@npm:0.7.34" + checksum: f38d36e7b6edecd9badc9cf50474159e9da5fa6965a75186cceaf883278611b9df6669dc3a3cc122b7938d317b68a9e3d573d316fcb35d1be47ec9e468c6bd8a languageName: node linkType: hard -"@jest/schemas@npm:^29.6.3": - version: 29.6.3 - resolution: "@jest/schemas@npm:29.6.3" +"@types/node-forge@npm:^1.3.0": + version: 1.3.10 + resolution: "@types/node-forge@npm:1.3.10" dependencies: - "@sinclair/typebox": ^0.27.8 - checksum: 910040425f0fc93cd13e68c750b7885590b8839066dfa0cd78e7def07bbb708ad869381f725945d66f2284de5663bbecf63e8fdd856e2ae6e261ba30b1687e93 + "@types/node": "*" + checksum: 
363af42c83956c7e2483a71e398a02101ef6a55b4d86386c276315ca98bad02d6050b99fdbe13debcd1bcda250086b4a5b5c15a6fb2953d32420d269865ca7f8 languageName: node linkType: hard -"@jest/types@npm:^29.6.3": - version: 29.6.3 - resolution: "@jest/types@npm:29.6.3" +"@types/node@npm:*, @types/node@npm:^20.6.2": + version: 20.10.4 + resolution: "@types/node@npm:20.10.4" dependencies: - "@jest/schemas": ^29.6.3 - "@types/istanbul-lib-coverage": ^2.0.0 - "@types/istanbul-reports": ^3.0.0 - "@types/node": "*" - "@types/yargs": ^17.0.8 - chalk: ^4.0.0 - checksum: a0bcf15dbb0eca6bdd8ce61a3fb055349d40268622a7670a3b2eb3c3dbafe9eb26af59938366d520b86907b9505b0f9b29b85cec11579a9e580694b87cd90fcc + undici-types: ~5.26.4 + checksum: 054b296417e771ab524bea63cf3289559c6bdf290d45428f7cc68e9b00030ff7a0ece47b8c99a26b4f47a443919813bcf42beadff2f0bea7d8125fa541d92eb0 languageName: node linkType: hard -"@jridgewell/gen-mapping@npm:^0.3.0, @jridgewell/gen-mapping@npm:^0.3.2": - version: 0.3.3 - resolution: "@jridgewell/gen-mapping@npm:0.3.3" - dependencies: - "@jridgewell/set-array": ^1.0.1 - "@jridgewell/sourcemap-codec": ^1.4.10 - "@jridgewell/trace-mapping": ^0.3.9 - checksum: 4a74944bd31f22354fc01c3da32e83c19e519e3bbadafa114f6da4522ea77dd0c2842607e923a591d60a76699d819a2fbb6f3552e277efdb9b58b081390b60ab +"@types/node@npm:18.15.13": + version: 18.15.13 + resolution: "@types/node@npm:18.15.13" + checksum: 79cc5a2b5f98e8973061a4260a781425efd39161a0e117a69cd089603964816c1a14025e1387b4590c8e82d05133b7b4154fa53a7dffb3877890a66145e76515 languageName: node linkType: hard -"@jridgewell/resolve-uri@npm:^3.0.3, @jridgewell/resolve-uri@npm:^3.1.0": - version: 3.1.1 - resolution: "@jridgewell/resolve-uri@npm:3.1.1" - checksum: f5b441fe7900eab4f9155b3b93f9800a916257f4e8563afbcd3b5a5337b55e52bd8ae6735453b1b745457d9f6cdb16d74cd6220bbdd98cf153239e13f6cbb653 +"@types/node@npm:^17.0.5": + version: 17.0.45 + resolution: "@types/node@npm:17.0.45" + checksum: aa04366b9103b7d6cfd6b2ef64182e0eaa7d4462c3f817618486ea0422984c51fc69fd0d436eae6c9e696ddfdbec9ccaa27a917f7c2e8c75c5d57827fe3d95e8 languageName: node linkType: hard -"@jridgewell/set-array@npm:^1.0.1": - version: 1.1.2 - resolution: "@jridgewell/set-array@npm:1.1.2" - checksum: 69a84d5980385f396ff60a175f7177af0b8da4ddb81824cb7016a9ef914eee9806c72b6b65942003c63f7983d4f39a5c6c27185bbca88eb4690b62075602e28e +"@types/node@npm:^18.7.20": + version: 18.19.3 + resolution: "@types/node@npm:18.19.3" + dependencies: + undici-types: ~5.26.4 + checksum: 58c4fa45a78fcec75c78182a4b266395905957633654eb0311c5f9c30ac15c179ea2287ab1af034e46c2db7bb0589ef0000ee64c1de8f568a0aad29eaadb100c languageName: node linkType: hard -"@jridgewell/source-map@npm:^0.3.3": - version: 0.3.5 - resolution: "@jridgewell/source-map@npm:0.3.5" +"@types/node@npm:^20.10.5": + version: 20.10.5 + resolution: "@types/node@npm:20.10.5" dependencies: - "@jridgewell/gen-mapping": ^0.3.0 - "@jridgewell/trace-mapping": ^0.3.9 - checksum: 1ad4dec0bdafbade57920a50acec6634f88a0eb735851e0dda906fa9894e7f0549c492678aad1a10f8e144bfe87f238307bf2a914a1bc85b7781d345417e9f6f + undici-types: ~5.26.4 + checksum: e216b679f545a8356960ce985a0e53c3a58fff0eacd855e180b9e223b8db2b5bd07b744a002b8c1f0c37f9194648ab4578533b5c12df2ec10cc02f61d20948d2 languageName: node linkType: hard -"@jridgewell/sourcemap-codec@npm:^1.4.10, @jridgewell/sourcemap-codec@npm:^1.4.14": - version: 1.4.15 - resolution: "@jridgewell/sourcemap-codec@npm:1.4.15" - checksum: 
b881c7e503db3fc7f3c1f35a1dd2655a188cc51a3612d76efc8a6eb74728bef5606e6758ee77423e564092b4a518aba569bbb21c9bac5ab7a35b0c6ae7e344c8 +"@types/parse-json@npm:^4.0.0": + version: 4.0.2 + resolution: "@types/parse-json@npm:4.0.2" + checksum: 5bf62eec37c332ad10059252fc0dab7e7da730764869c980b0714777ad3d065e490627be9f40fc52f238ffa3ac4199b19de4127196910576c2fe34dd47c7a470 languageName: node linkType: hard -"@jridgewell/trace-mapping@npm:0.3.9": - version: 0.3.9 - resolution: "@jridgewell/trace-mapping@npm:0.3.9" - dependencies: - "@jridgewell/resolve-uri": ^3.0.3 - "@jridgewell/sourcemap-codec": ^1.4.10 - checksum: d89597752fd88d3f3480845691a05a44bd21faac18e2185b6f436c3b0fd0c5a859fbbd9aaa92050c4052caf325ad3e10e2e1d1b64327517471b7d51babc0ddef +"@types/parse5@npm:^5.0.0": + version: 5.0.3 + resolution: "@types/parse5@npm:5.0.3" + checksum: d6b7495cb1850f9f2e9c5e103ede9f2d30a5320669707b105c403868adc9e4bf8d3a7ff314cc23f67826bbbbbc0e6147346ce9062ab429f099dba7a01f463919 languageName: node linkType: hard -"@jridgewell/trace-mapping@npm:^0.3.12, @jridgewell/trace-mapping@npm:^0.3.17, @jridgewell/trace-mapping@npm:^0.3.9": - version: 0.3.19 - resolution: "@jridgewell/trace-mapping@npm:0.3.19" - dependencies: - "@jridgewell/resolve-uri": ^3.1.0 - "@jridgewell/sourcemap-codec": ^1.4.14 - checksum: 956a6f0f6fec060fb48c6bf1f5ec2064e13cd38c8be3873877d4b92b4a27ba58289a34071752671262a3e3c202abcc3fa2aac64d8447b4b0fa1ba3c9047f1c20 +"@types/parse5@npm:^6.0.1": + version: 6.0.3 + resolution: "@types/parse5@npm:6.0.3" + checksum: ddb59ee4144af5dfcc508a8dcf32f37879d11e12559561e65788756b95b33e6f03ea027d88e1f5408f9b7bfb656bf630ace31a2169edf44151daaf8dd58df1b7 languageName: node linkType: hard -"@leichtgewicht/ip-codec@npm:^2.0.1": - version: 2.0.4 - resolution: "@leichtgewicht/ip-codec@npm:2.0.4" - checksum: 468de1f04d33de6d300892683d7c8aecbf96d1e2c5fe084f95f816e50a054d45b7c1ebfb141a1447d844b86a948733f6eebd92234da8581c84a1ad4de2946a2d +"@types/path-browserify@npm:^1": + version: 1.0.2 + resolution: "@types/path-browserify@npm:1.0.2" + checksum: 7b26df1a32827f7e588009386f62e0f900e8d349722b1b8b8c100cd2c4cc1e95c64099f5ec879cd131e69869088f336998a3ddb17085b68312414431a4eace01 languageName: node linkType: hard -"@mdn/browser-compat-data@npm:^4.0.0": - version: 4.2.1 - resolution: "@mdn/browser-compat-data@npm:4.2.1" - checksum: 76eaa7dafed154040e769ba6d23f2dcb58e805ed3ccb376a5c4b76326c92643753c20194faed363870800dc3c1af26c107b8562710c8bb37aaee8c5ffe2a89cd +"@types/pbkdf2@npm:^3.0.0": + version: 3.1.2 + resolution: "@types/pbkdf2@npm:3.1.2" + dependencies: + "@types/node": "*" + checksum: bebe1e596cbbe5f7d2726a58859e61986c5a42459048e29cb7f2d4d764be6bbb0844572fd5d70ca8955a8a17e8b4ed80984fc4903e165d9efb8807a3fbb051aa languageName: node linkType: hard -"@mdx-js/mdx@npm:^1.6.22": - version: 1.6.22 - resolution: "@mdx-js/mdx@npm:1.6.22" +"@types/prettier@npm:^3": + version: 3.0.0 + resolution: "@types/prettier@npm:3.0.0" dependencies: - "@babel/core": 7.12.9 - "@babel/plugin-syntax-jsx": 7.12.1 - "@babel/plugin-syntax-object-rest-spread": 7.8.3 - "@mdx-js/util": 1.6.22 - babel-plugin-apply-mdx-type-prop: 1.6.22 - babel-plugin-extract-import-names: 1.6.22 - camelcase-css: 2.0.1 - detab: 2.0.4 - hast-util-raw: 6.0.1 - lodash.uniq: 4.5.0 - mdast-util-to-hast: 10.0.1 - remark-footnotes: 2.0.0 - remark-mdx: 1.6.22 - remark-parse: 8.0.3 - remark-squeeze-paragraphs: 4.0.0 - style-to-object: 0.3.0 - unified: 9.2.0 - unist-builder: 2.0.3 - unist-util-visit: 2.0.3 - checksum: 
0839b4a3899416326ea6578fe9e470af319da559bc6d3669c60942e456b49a98eebeb3358c623007b4786a2175a450d2c51cd59df64639013c5a3d22366931a6 + prettier: "*" + checksum: a2a512d304e5bcf78f38089dc88ad19215e6ab871d435a17aef3ce538a63b07c0e359c18db23989dc1ed9fff96d99eee1f680416080184df5c7e0e3bf767e165 languageName: node linkType: hard -"@mdx-js/react@npm:^1.6.22": - version: 1.6.22 - resolution: "@mdx-js/react@npm:1.6.22" - peerDependencies: - react: ^16.13.1 || ^17.0.0 - checksum: bc84bd514bc127f898819a0c6f1a6915d9541011bd8aefa1fcc1c9bea8939f31051409e546bdec92babfa5b56092a16d05ef6d318304ac029299df5181dc94c8 +"@types/prismjs@npm:^1.26.0": + version: 1.26.3 + resolution: "@types/prismjs@npm:1.26.3" + checksum: c627fa9d9f4277ce413bb8347944152cddfc892702e34ff4b099dc1cf3f00c09514d36349c23529b903b0e57f3b2e0dc91ee66e98af07fbbe1e3fe8346b23370 languageName: node linkType: hard -"@mdx-js/util@npm:1.6.22": - version: 1.6.22 - resolution: "@mdx-js/util@npm:1.6.22" - checksum: 4b393907e39a1a75214f0314bf72a0adfa5e5adffd050dd5efe9c055b8549481a3cfc9f308c16dfb33311daf3ff63added7d5fd1fe52db614c004f886e0e559a +"@types/prop-types@npm:*": + version: 15.7.11 + resolution: "@types/prop-types@npm:15.7.11" + checksum: 7519ff11d06fbf6b275029fe03fff9ec377b4cb6e864cac34d87d7146c7f5a7560fd164bdc1d2dbe00b60c43713631251af1fd3d34d46c69cd354602bc0c7c54 languageName: node linkType: hard -"@metamask/eth-sig-util@npm:^4.0.0": - version: 4.0.1 - resolution: "@metamask/eth-sig-util@npm:4.0.1" - dependencies: - ethereumjs-abi: ^0.6.8 - ethereumjs-util: ^6.2.1 - ethjs-util: ^0.1.6 - tweetnacl: ^1.0.3 - tweetnacl-util: ^0.15.1 - checksum: 740df4c92a1282e6be4c00c86c1a8ccfb93e767596e43f6da895aa5bab4a28fc3c2209f0327db34924a4a1e9db72bc4d3dddfcfc45cca0b218c9ccbf7d1b1445 +"@types/ps-tree@npm:^1.1.2": + version: 1.1.6 + resolution: "@types/ps-tree@npm:1.1.6" + checksum: bf5b7bb9bd11b8762a8302b93c335728ecb19c85a74c640a3888d476368a03733f11612b9a87b1ad9ea56f95720db23a824c78113b16024dc59264b7f9008df5 languageName: node linkType: hard -"@noble/hashes@npm:1.1.2": - version: 1.1.2 - resolution: "@noble/hashes@npm:1.1.2" - checksum: 3c2a8cb7c2e053811032f242155d870c5eb98844d924d69702244d48804cb03b42d4a666c49c2b71164420d8229cb9a6f242b972d50d5bb2f1d673b98b041de2 +"@types/qs@npm:*": + version: 6.9.10 + resolution: "@types/qs@npm:6.9.10" + checksum: 3e479ee056bd2b60894baa119d12ecd33f20a25231b836af04654e784c886f28a356477630430152a86fba253da65d7ecd18acffbc2a8877a336e75aa0272c67 languageName: node linkType: hard -"@noble/hashes@npm:1.2.0, @noble/hashes@npm:~1.2.0": - version: 1.2.0 - resolution: "@noble/hashes@npm:1.2.0" - checksum: 8ca080ce557b8f40fb2f78d3aedffd95825a415ac8e13d7ffe3643f8626a8c2d99a3e5975b555027ac24316d8b3c02a35b8358567c0c23af681e6573602aa434 +"@types/range-parser@npm:*": + version: 1.2.7 + resolution: "@types/range-parser@npm:1.2.7" + checksum: 95640233b689dfbd85b8c6ee268812a732cf36d5affead89e806fe30da9a430767af8ef2cd661024fd97e19d61f3dec75af2df5e80ec3bea000019ab7028629a languageName: node linkType: hard -"@noble/secp256k1@npm:1.7.1, @noble/secp256k1@npm:~1.7.0": - version: 1.7.1 - resolution: "@noble/secp256k1@npm:1.7.1" - checksum: d2301f1f7690368d8409a3152450458f27e54df47e3f917292de3de82c298770890c2de7c967d237eff9c95b70af485389a9695f73eb05a43e2bd562d18b18cb +"@types/react-router-config@npm:*, @types/react-router-config@npm:^5.0.6, @types/react-router-config@npm:^5.0.7": + version: 5.0.11 + resolution: "@types/react-router-config@npm:5.0.11" + dependencies: + "@types/history": ^4.7.11 + "@types/react": "*" + "@types/react-router": ^5.1.0 + 
checksum: 4b72d9b71e0576e193c11e5085bbdac43f31debfa3b6ebc24666f3d646ef25c1f57f16c29b1ddd3051c881e85f8e0d4ab5a7bbd5fc215b9377f57675b210be7c + languageName: node + linkType: hard + +"@types/react-router-dom@npm:*": + version: 5.3.3 + resolution: "@types/react-router-dom@npm:5.3.3" + dependencies: + "@types/history": ^4.7.11 + "@types/react": "*" + "@types/react-router": "*" + checksum: 28c4ea48909803c414bf5a08502acbb8ba414669b4b43bb51297c05fe5addc4df0b8fd00e0a9d1e3535ec4073ef38aaafac2c4a2b95b787167d113bc059beff3 languageName: node linkType: hard -"@node-rs/jieba-android-arm-eabi@npm:1.7.2": - version: 1.7.2 - resolution: "@node-rs/jieba-android-arm-eabi@npm:1.7.2" - conditions: os=android & cpu=arm +"@types/react-router@npm:*, @types/react-router@npm:^5.1.0": + version: 5.1.20 + resolution: "@types/react-router@npm:5.1.20" + dependencies: + "@types/history": ^4.7.11 + "@types/react": "*" + checksum: 128764143473a5e9457ddc715436b5d49814b1c214dde48939b9bef23f0e77f52ffcdfa97eb8d3cc27e2c229869c0cdd90f637d887b62f2c9f065a87d6425419 languageName: node linkType: hard -"@node-rs/jieba-android-arm64@npm:1.7.2": - version: 1.7.2 - resolution: "@node-rs/jieba-android-arm64@npm:1.7.2" - conditions: os=android & cpu=arm64 +"@types/react@npm:*": + version: 18.2.43 + resolution: "@types/react@npm:18.2.43" + dependencies: + "@types/prop-types": "*" + "@types/scheduler": "*" + csstype: ^3.0.2 + checksum: e7e4da18c7b14e4b8b1ea630d1d8224835698f2bb2485766455bfb9dea93b8ee6cc549a9fe1cb10c984e62685db44fcfb61e8fac913b008cd30a92087f25b9df languageName: node linkType: hard -"@node-rs/jieba-darwin-arm64@npm:1.7.2": - version: 1.7.2 - resolution: "@node-rs/jieba-darwin-arm64@npm:1.7.2" - conditions: os=darwin & cpu=arm64 +"@types/readable-stream@npm:^2.3.13": + version: 2.3.15 + resolution: "@types/readable-stream@npm:2.3.15" + dependencies: + "@types/node": "*" + safe-buffer: ~5.1.1 + checksum: ec36f525cad09b6c65a1dafcb5ad99b9e2ed824ec49b7aa23180ac427e5d35b8a0706193ecd79ab4253a283ad485ba03d5917a98daaaa144f0ea34f4823e9d82 languageName: node linkType: hard -"@node-rs/jieba-darwin-x64@npm:1.7.2": - version: 1.7.2 - resolution: "@node-rs/jieba-darwin-x64@npm:1.7.2" - conditions: os=darwin & cpu=x64 +"@types/readable-stream@npm:^4": + version: 4.0.10 + resolution: "@types/readable-stream@npm:4.0.10" + dependencies: + "@types/node": "*" + safe-buffer: ~5.1.1 + checksum: dc7cb95be737d442770fcaf1fbe3860c011f5f024c6f09c8b032f95b7ef9cce0031302715afe13e18d264b3a162e5838ce78873ec8d63eb238db49bc70906a21 languageName: node linkType: hard -"@node-rs/jieba-freebsd-x64@npm:1.7.2": - version: 1.7.2 - resolution: "@node-rs/jieba-freebsd-x64@npm:1.7.2" - conditions: os=freebsd & cpu=x64 +"@types/resolve@npm:1.17.1": + version: 1.17.1 + resolution: "@types/resolve@npm:1.17.1" + dependencies: + "@types/node": "*" + checksum: dc6a6df507656004e242dcb02c784479deca516d5f4b58a1707e708022b269ae147e1da0521f3e8ad0d63638869d87e0adc023f0bd5454aa6f72ac66c7525cf5 languageName: node linkType: hard -"@node-rs/jieba-linux-arm-gnueabihf@npm:1.7.2": - version: 1.7.2 - resolution: "@node-rs/jieba-linux-arm-gnueabihf@npm:1.7.2" - conditions: os=linux & cpu=arm +"@types/resolve@npm:1.20.2": + version: 1.20.2 + resolution: "@types/resolve@npm:1.20.2" + checksum: 61c2cad2499ffc8eab36e3b773945d337d848d3ac6b7b0a87c805ba814bc838ef2f262fc0f109bfd8d2e0898ff8bd80ad1025f9ff64f1f71d3d4294c9f14e5f6 languageName: node linkType: hard -"@node-rs/jieba-linux-arm64-gnu@npm:1.7.2": - version: 1.7.2 - resolution: "@node-rs/jieba-linux-arm64-gnu@npm:1.7.2" - conditions: os=linux 
& cpu=arm64 & libc=glibc +"@types/responselike@npm:^1.0.0": + version: 1.0.3 + resolution: "@types/responselike@npm:1.0.3" + dependencies: + "@types/node": "*" + checksum: 6ac4b35723429b11b117e813c7acc42c3af8b5554caaf1fc750404c1ae59f9b7376bc69b9e9e194a5a97357a597c2228b7173d317320f0360d617b6425212f58 languageName: node linkType: hard -"@node-rs/jieba-linux-arm64-musl@npm:1.7.2": - version: 1.7.2 - resolution: "@node-rs/jieba-linux-arm64-musl@npm:1.7.2" - conditions: os=linux & cpu=arm64 & libc=musl +"@types/retry@npm:0.12.0": + version: 0.12.0 + resolution: "@types/retry@npm:0.12.0" + checksum: 61a072c7639f6e8126588bf1eb1ce8835f2cb9c2aba795c4491cf6310e013267b0c8488039857c261c387e9728c1b43205099223f160bb6a76b4374f741b5603 languageName: node linkType: hard -"@node-rs/jieba-linux-x64-gnu@npm:1.7.2": - version: 1.7.2 - resolution: "@node-rs/jieba-linux-x64-gnu@npm:1.7.2" - conditions: os=linux & cpu=x64 & libc=glibc +"@types/sax@npm:^1.2.1": + version: 1.2.7 + resolution: "@types/sax@npm:1.2.7" + dependencies: + "@types/node": "*" + checksum: 7ece5fbb5d9c8fc76ab0de2f99d705edf92f18e701d4f9d9b0647275e32eb65e656c1badf9dfaa12f4e1ff3e250561c8c9cfe79e8b5f33dd1417ac0f1804f6cc languageName: node linkType: hard -"@node-rs/jieba-linux-x64-musl@npm:1.7.2": - version: 1.7.2 - resolution: "@node-rs/jieba-linux-x64-musl@npm:1.7.2" - conditions: os=linux & cpu=x64 & libc=musl +"@types/scheduler@npm:*": + version: 0.16.8 + resolution: "@types/scheduler@npm:0.16.8" + checksum: 6c091b096daa490093bf30dd7947cd28e5b2cd612ec93448432b33f724b162587fed9309a0acc104d97b69b1d49a0f3fc755a62282054d62975d53d7fd13472d languageName: node linkType: hard -"@node-rs/jieba-win32-arm64-msvc@npm:1.7.2": - version: 1.7.2 - resolution: "@node-rs/jieba-win32-arm64-msvc@npm:1.7.2" - conditions: os=win32 & cpu=arm64 +"@types/secp256k1@npm:^4.0.1": + version: 4.0.6 + resolution: "@types/secp256k1@npm:4.0.6" + dependencies: + "@types/node": "*" + checksum: 984494caf49a4ce99fda2b9ea1840eb47af946b8c2737314108949bcc0c06b4880e871296bd49ed6ea4c8423e3a302ad79fec43abfc987330e7eb98f0c4e8ba4 languageName: node linkType: hard -"@node-rs/jieba-win32-ia32-msvc@npm:1.7.2": - version: 1.7.2 - resolution: "@node-rs/jieba-win32-ia32-msvc@npm:1.7.2" - conditions: os=win32 & cpu=ia32 +"@types/semver@npm:^7.5.0": + version: 7.5.6 + resolution: "@types/semver@npm:7.5.6" + checksum: 563a0120ec0efcc326567db2ed920d5d98346f3638b6324ea6b50222b96f02a8add3c51a916b6897b51523aad8ac227d21d3dcf8913559f1bfc6c15b14d23037 languageName: node linkType: hard -"@node-rs/jieba-win32-x64-msvc@npm:1.7.2": - version: 1.7.2 - resolution: "@node-rs/jieba-win32-x64-msvc@npm:1.7.2" - conditions: os=win32 & cpu=x64 +"@types/send@npm:*": + version: 0.17.4 + resolution: "@types/send@npm:0.17.4" + dependencies: + "@types/mime": ^1 + "@types/node": "*" + checksum: cf4db48251bbb03cd6452b4de6e8e09e2d75390a92fd798eca4a803df06444adc94ed050246c94c7ed46fb97be1f63607f0e1f13c3ce83d71788b3e08640e5e0 languageName: node linkType: hard -"@node-rs/jieba@npm:^1.6.0": - version: 1.7.2 - resolution: "@node-rs/jieba@npm:1.7.2" +"@types/serve-index@npm:^1.9.1": + version: 1.9.4 + resolution: "@types/serve-index@npm:1.9.4" dependencies: - "@node-rs/jieba-android-arm-eabi": 1.7.2 - "@node-rs/jieba-android-arm64": 1.7.2 - "@node-rs/jieba-darwin-arm64": 1.7.2 - "@node-rs/jieba-darwin-x64": 1.7.2 - "@node-rs/jieba-freebsd-x64": 1.7.2 - "@node-rs/jieba-linux-arm-gnueabihf": 1.7.2 - "@node-rs/jieba-linux-arm64-gnu": 1.7.2 - "@node-rs/jieba-linux-arm64-musl": 1.7.2 - "@node-rs/jieba-linux-x64-gnu": 1.7.2 - 
"@node-rs/jieba-linux-x64-musl": 1.7.2 - "@node-rs/jieba-win32-arm64-msvc": 1.7.2 - "@node-rs/jieba-win32-ia32-msvc": 1.7.2 - "@node-rs/jieba-win32-x64-msvc": 1.7.2 - dependenciesMeta: - "@node-rs/jieba-android-arm-eabi": - optional: true - "@node-rs/jieba-android-arm64": - optional: true - "@node-rs/jieba-darwin-arm64": - optional: true - "@node-rs/jieba-darwin-x64": - optional: true - "@node-rs/jieba-freebsd-x64": - optional: true - "@node-rs/jieba-linux-arm-gnueabihf": - optional: true - "@node-rs/jieba-linux-arm64-gnu": - optional: true - "@node-rs/jieba-linux-arm64-musl": - optional: true - "@node-rs/jieba-linux-x64-gnu": - optional: true - "@node-rs/jieba-linux-x64-musl": - optional: true - "@node-rs/jieba-win32-arm64-msvc": - optional: true - "@node-rs/jieba-win32-ia32-msvc": - optional: true - "@node-rs/jieba-win32-x64-msvc": - optional: true - checksum: 47108c5e84993ba2ddbc1d922ecb840b74668d61ff8ec83137a300c02e6c97748e64d98f23484dce7d1aee9d00dcf3d1921ccb5375b83ed8474650d0fc1e7e60 + "@types/express": "*" + checksum: 72727c88d54da5b13275ebfb75dcdc4aa12417bbe9da1939e017c4c5f0c906fae843aa4e0fbfe360e7ee9df2f3d388c21abfc488f77ce58693fb57809f8ded92 languageName: node linkType: hard -"@nodelib/fs.scandir@npm:2.1.5": - version: 2.1.5 - resolution: "@nodelib/fs.scandir@npm:2.1.5" +"@types/serve-static@npm:*, @types/serve-static@npm:^1.13.10": + version: 1.15.5 + resolution: "@types/serve-static@npm:1.15.5" dependencies: - "@nodelib/fs.stat": 2.0.5 - run-parallel: ^1.1.9 - checksum: a970d595bd23c66c880e0ef1817791432dbb7acbb8d44b7e7d0e7a22f4521260d4a83f7f9fd61d44fda4610105577f8f58a60718105fb38352baed612fd79e59 + "@types/http-errors": "*" + "@types/mime": "*" + "@types/node": "*" + checksum: 0ff4b3703cf20ba89c9f9e345bc38417860a88e85863c8d6fe274a543220ab7f5f647d307c60a71bb57dc9559f0890a661e8dc771a6ec5ef195d91c8afc4a893 languageName: node linkType: hard -"@nodelib/fs.stat@npm:2.0.5, @nodelib/fs.stat@npm:^2.0.2": - version: 2.0.5 - resolution: "@nodelib/fs.stat@npm:2.0.5" - checksum: 012480b5ca9d97bff9261571dbbec7bbc6033f69cc92908bc1ecfad0792361a5a1994bc48674b9ef76419d056a03efadfce5a6cf6dbc0a36559571a7a483f6f0 +"@types/sinon@npm:^17": + version: 17.0.2 + resolution: "@types/sinon@npm:17.0.2" + dependencies: + "@types/sinonjs__fake-timers": "*" + checksum: 3a56615f2dc7d0b67d3e4b8ae358df2ff2164d89fabb22e9b46e6afe7d4df844a354ea65d409af9baf29ac0103ea562ab44dd0176405a9cf82a4ff183939f22f languageName: node linkType: hard -"@nodelib/fs.walk@npm:^1.2.3, @nodelib/fs.walk@npm:^1.2.8": - version: 1.2.8 - resolution: "@nodelib/fs.walk@npm:1.2.8" - dependencies: - "@nodelib/fs.scandir": 2.1.5 - fastq: ^1.6.0 - checksum: 190c643f156d8f8f277bf2a6078af1ffde1fd43f498f187c2db24d35b4b4b5785c02c7dc52e356497b9a1b65b13edc996de08de0b961c32844364da02986dc53 +"@types/sinonjs__fake-timers@npm:*": + version: 8.1.5 + resolution: "@types/sinonjs__fake-timers@npm:8.1.5" + checksum: 7e3c08f6c13df44f3ea7d9a5155ddf77e3f7314c156fa1c5a829a4f3763bafe2f75b1283b887f06e6b4296996a2f299b70f64ff82625f9af5885436e2524d10c languageName: node linkType: hard -"@noir-lang/acvm_js@workspace:*, @noir-lang/acvm_js@workspace:acvm-repo/acvm_js": - version: 0.0.0-use.local - resolution: "@noir-lang/acvm_js@workspace:acvm-repo/acvm_js" - dependencies: - "@esm-bundle/chai": ^4.3.4-fix.0 - "@web/dev-server-esbuild": ^0.3.6 - "@web/test-runner": ^0.15.3 - "@web/test-runner-playwright": ^0.10.0 - chai: ^4.3.7 - eslint: ^8.50.0 - eslint-plugin-prettier: ^5.0.0 - mocha: ^10.2.0 - prettier: 3.0.3 - ts-node: ^10.9.1 - typescript: ^5.0.4 - languageName: 
unknown - linkType: soft - -"@noir-lang/backend_barretenberg@workspace:*, @noir-lang/backend_barretenberg@workspace:tooling/noir_js_backend_barretenberg": - version: 0.0.0-use.local - resolution: "@noir-lang/backend_barretenberg@workspace:tooling/noir_js_backend_barretenberg" +"@types/sockjs@npm:^0.3.33": + version: 0.3.36 + resolution: "@types/sockjs@npm:0.3.36" dependencies: - "@aztec/bb.js": 0.12.0 - "@noir-lang/types": "workspace:*" - "@types/node": ^20.6.2 - "@types/prettier": ^3 - eslint: ^8.50.0 - eslint-plugin-prettier: ^5.0.0 - fflate: ^0.8.0 - prettier: 3.0.3 - typescript: 5.1.5 - languageName: unknown - linkType: soft + "@types/node": "*" + checksum: b4b5381122465d80ea8b158537c00bc82317222d3fb31fd7229ff25b31fa89134abfbab969118da55622236bf3d8fee75759f3959908b5688991f492008f29bc + languageName: node + linkType: hard -"@noir-lang/noir_codegen@workspace:tooling/noir_codegen": - version: 0.0.0-use.local - resolution: "@noir-lang/noir_codegen@workspace:tooling/noir_codegen" - dependencies: - "@noir-lang/noir_js": "workspace:*" - "@noir-lang/types": "workspace:*" - "@types/chai": ^4 - "@types/lodash": ^4 - "@types/mocha": ^10.0.1 - "@types/node": ^20.6.2 - "@types/prettier": ^3 - chai: ^4.3.8 - eslint: ^8.50.0 - eslint-plugin-prettier: ^5.0.0 - glob: ^10.3.10 - lodash: ^4.17.21 - mocha: ^10.2.0 - prettier: 3.0.3 - ts-command-line-args: ^2.5.1 - ts-node: ^10.9.1 - typescript: ^5.2.2 - bin: - noir-codegen: lib/main.js - languageName: unknown - linkType: soft +"@types/unist@npm:*, @types/unist@npm:^3.0.0": + version: 3.0.2 + resolution: "@types/unist@npm:3.0.2" + checksum: 3d04d0be69316e5f14599a0d993a208606c12818cf631fd399243d1dc7a9bd8a3917d6066baa6abc290814afbd744621484756803c80cba892c39cd4b4a85616 + languageName: node + linkType: hard -"@noir-lang/noir_js@workspace:*, @noir-lang/noir_js@workspace:tooling/noir_js": - version: 0.0.0-use.local - resolution: "@noir-lang/noir_js@workspace:tooling/noir_js" - dependencies: - "@noir-lang/acvm_js": "workspace:*" - "@noir-lang/noirc_abi": "workspace:*" - "@noir-lang/types": "workspace:*" - "@types/chai": ^4 - "@types/mocha": ^10.0.1 - "@types/node": ^20.6.2 - "@types/prettier": ^3 - chai: ^4.3.8 - eslint: ^8.50.0 - eslint-plugin-prettier: ^5.0.0 - mocha: ^10.2.0 - prettier: 3.0.3 - ts-node: ^10.9.1 - tsc-multi: ^1.1.0 - typescript: ^5.2.2 - languageName: unknown - linkType: soft +"@types/unist@npm:^2, @types/unist@npm:^2.0.0, @types/unist@npm:^2.0.2, @types/unist@npm:^2.0.3": + version: 2.0.10 + resolution: "@types/unist@npm:2.0.10" + checksum: e2924e18dedf45f68a5c6ccd6015cd62f1643b1b43baac1854efa21ae9e70505db94290434a23da1137d9e31eb58e54ca175982005698ac37300a1c889f6c4aa + languageName: node + linkType: hard -"@noir-lang/noir_wasm@workspace:*, @noir-lang/noir_wasm@workspace:compiler/wasm": - version: 0.0.0-use.local - resolution: "@noir-lang/noir_wasm@workspace:compiler/wasm" - dependencies: - "@esm-bundle/chai": ^4.3.4-fix.0 - "@web/dev-server-esbuild": ^0.3.6 - "@web/test-runner": ^0.15.3 - "@web/test-runner-playwright": ^0.10.0 - mocha: ^10.2.0 - peerDependencies: - "@noir-lang/source-resolver": "workspace:*" - languageName: unknown - linkType: soft +"@types/which@npm:^2.0.1": + version: 2.0.2 + resolution: "@types/which@npm:2.0.2" + checksum: 8626a3c2f6db676c449142e1082e33ea0c9d88b8a2bd796366b944891e6da0088b2aa83d3fa9c79e6696f7381a851fc76d43bd353eb6c4d98a7775b4ae0a96a5 + languageName: node + linkType: hard -"@noir-lang/noirc_abi@workspace:*, @noir-lang/noirc_abi@workspace:tooling/noirc_abi_wasm": - version: 0.0.0-use.local - resolution: 
"@noir-lang/noirc_abi@workspace:tooling/noirc_abi_wasm" +"@types/ws@npm:^7.4.0": + version: 7.4.7 + resolution: "@types/ws@npm:7.4.7" dependencies: - "@esm-bundle/chai": ^4.3.4-fix.0 - "@web/dev-server-esbuild": ^0.3.6 - "@web/test-runner": ^0.15.3 - "@web/test-runner-playwright": ^0.10.0 - eslint: ^8.50.0 - mocha: ^10.2.0 - languageName: unknown - linkType: soft + "@types/node": "*" + checksum: b4c9b8ad209620c9b21e78314ce4ff07515c0cadab9af101c1651e7bfb992d7fd933bd8b9c99d110738fd6db523ed15f82f29f50b45510288da72e964dedb1a3 + languageName: node + linkType: hard -"@noir-lang/root@workspace:.": - version: 0.0.0-use.local - resolution: "@noir-lang/root@workspace:." +"@types/ws@npm:^8.5.5": + version: 8.5.10 + resolution: "@types/ws@npm:8.5.10" dependencies: - "@typescript-eslint/eslint-plugin": ^6.7.3 - "@typescript-eslint/parser": ^6.7.3 - chai: ^4.3.7 - eslint: ^8.50.0 - eslint-plugin-prettier: ^5.0.0 - mocha: ^10.2.0 - prettier: 3.0.3 - ts-node: ^10.9.1 - typescript: ^5.0.4 - languageName: unknown - linkType: soft + "@types/node": "*" + checksum: 3ec416ea2be24042ebd677932a462cf16d2080393d8d7d0b1b3f5d6eaa4a7387aaf0eefb99193c0bfd29444857cf2e0c3ac89899e130550dc6c14ada8a46d25e + languageName: node + linkType: hard -"@noir-lang/source-resolver@workspace:*, @noir-lang/source-resolver@workspace:compiler/source-resolver": - version: 0.0.0-use.local - resolution: "@noir-lang/source-resolver@workspace:compiler/source-resolver" +"@types/yargs-parser@npm:*": + version: 21.0.3 + resolution: "@types/yargs-parser@npm:21.0.3" + checksum: ef236c27f9432983e91432d974243e6c4cdae227cb673740320eff32d04d853eed59c92ca6f1142a335cfdc0e17cccafa62e95886a8154ca8891cc2dec4ee6fc + languageName: node + linkType: hard + +"@types/yargs@npm:^17.0.8": + version: 17.0.32 + resolution: "@types/yargs@npm:17.0.32" dependencies: - "@types/node": ^20.5.7 - ava: ^5.2.0 - typescript: 4.9.4 - languageName: unknown - linkType: soft + "@types/yargs-parser": "*" + checksum: 4505bdebe8716ff383640c6e928f855b5d337cb3c68c81f7249fc6b983d0aa48de3eee26062b84f37e0d75a5797bc745e0c6e76f42f81771252a758c638f36ba + languageName: node + linkType: hard -"@noir-lang/types@workspace:*, @noir-lang/types@workspace:tooling/noir_js_types": - version: 0.0.0-use.local - resolution: "@noir-lang/types@workspace:tooling/noir_js_types" +"@types/yauzl@npm:^2.9.1": + version: 2.10.3 + resolution: "@types/yauzl@npm:2.10.3" dependencies: - "@noir-lang/noirc_abi": "workspace:*" - "@types/prettier": ^3 - eslint: ^8.50.0 - eslint-plugin-prettier: ^5.0.0 - prettier: 3.0.3 - typescript: ^5.2.2 - languageName: unknown - linkType: soft + "@types/node": "*" + checksum: 5ee966ea7bd6b2802f31ad4281c92c4c0b6dfa593c378a2582c58541fa113bec3d70eb0696b34ad95e8e6861a884cba6c3e351285816693ed176222f840a8c08 + languageName: node + linkType: hard -"@nomicfoundation/ethereumjs-block@npm:5.0.2": - version: 5.0.2 - resolution: "@nomicfoundation/ethereumjs-block@npm:5.0.2" +"@typescript-eslint/eslint-plugin@npm:^6.7.3": + version: 6.13.2 + resolution: "@typescript-eslint/eslint-plugin@npm:6.13.2" dependencies: - "@nomicfoundation/ethereumjs-common": 4.0.2 - "@nomicfoundation/ethereumjs-rlp": 5.0.2 - "@nomicfoundation/ethereumjs-trie": 6.0.2 - "@nomicfoundation/ethereumjs-tx": 5.0.2 - "@nomicfoundation/ethereumjs-util": 9.0.2 - ethereum-cryptography: 0.1.3 - ethers: ^5.7.1 - checksum: 7ff744f44a01f1c059ca7812a1cfc8089f87aa506af6cb39c78331dca71b32993cbd6fa05ad03f8c4f4fab73bb998a927af69e0d8ff01ae192ee5931606e09f5 + "@eslint-community/regexpp": ^4.5.1 + "@typescript-eslint/scope-manager": 6.13.2 + 
"@typescript-eslint/type-utils": 6.13.2 + "@typescript-eslint/utils": 6.13.2 + "@typescript-eslint/visitor-keys": 6.13.2 + debug: ^4.3.4 + graphemer: ^1.4.0 + ignore: ^5.2.4 + natural-compare: ^1.4.0 + semver: ^7.5.4 + ts-api-utils: ^1.0.1 + peerDependencies: + "@typescript-eslint/parser": ^6.0.0 || ^6.0.0-alpha + eslint: ^7.0.0 || ^8.0.0 + peerDependenciesMeta: + typescript: + optional: true + checksum: e50cbbe7104eecef59faf3355ab981d9f353b19327f0b4607dfd829b4726f9e694b536fe43ab55f50bb00fbfdd2e4268a7e2a568b28d5fcd0d2a32a8d2466218 languageName: node linkType: hard -"@nomicfoundation/ethereumjs-blockchain@npm:7.0.2": - version: 7.0.2 - resolution: "@nomicfoundation/ethereumjs-blockchain@npm:7.0.2" +"@typescript-eslint/parser@npm:^6.7.3": + version: 6.13.2 + resolution: "@typescript-eslint/parser@npm:6.13.2" dependencies: - "@nomicfoundation/ethereumjs-block": 5.0.2 - "@nomicfoundation/ethereumjs-common": 4.0.2 - "@nomicfoundation/ethereumjs-ethash": 3.0.2 - "@nomicfoundation/ethereumjs-rlp": 5.0.2 - "@nomicfoundation/ethereumjs-trie": 6.0.2 - "@nomicfoundation/ethereumjs-tx": 5.0.2 - "@nomicfoundation/ethereumjs-util": 9.0.2 - abstract-level: ^1.0.3 - debug: ^4.3.3 - ethereum-cryptography: 0.1.3 - level: ^8.0.0 - lru-cache: ^5.1.1 - memory-level: ^1.0.0 - checksum: b7e440dcd73e32aa72d13bfd28cb472773c9c60ea808a884131bf7eb3f42286ad594a0864215f599332d800f3fe1f772fff4b138d2dcaa8f41e4d8389bff33e7 + "@typescript-eslint/scope-manager": 6.13.2 + "@typescript-eslint/types": 6.13.2 + "@typescript-eslint/typescript-estree": 6.13.2 + "@typescript-eslint/visitor-keys": 6.13.2 + debug: ^4.3.4 + peerDependencies: + eslint: ^7.0.0 || ^8.0.0 + peerDependenciesMeta: + typescript: + optional: true + checksum: aeafc414d295d7855384f10d57abb4f5f2ff35b57991b5c8854f43268761b3cc995e62af585dea1dc48295d762f466b565b5ae5699bfe642585d3f83ba8e1515 languageName: node linkType: hard -"@nomicfoundation/ethereumjs-common@npm:4.0.2": - version: 4.0.2 - resolution: "@nomicfoundation/ethereumjs-common@npm:4.0.2" +"@typescript-eslint/scope-manager@npm:6.13.2": + version: 6.13.2 + resolution: "@typescript-eslint/scope-manager@npm:6.13.2" dependencies: - "@nomicfoundation/ethereumjs-util": 9.0.2 - crc-32: ^1.2.0 - checksum: f0d84704d6254d374299c19884312bd5666974b4b6f342d3f10bc76e549de78d20e45a53d25fbdc146268a52335497127e4f069126da7c60ac933a158e704887 + "@typescript-eslint/types": 6.13.2 + "@typescript-eslint/visitor-keys": 6.13.2 + checksum: ff8fd64ddf324e296e2e0e34a8f73149c9a5f14d1761ea8e8665fc5998faa2b0bbbd1a5d416aa10d725f13c804032d532f68e39a0ca6cc36d1c9b9c0aea94311 languageName: node linkType: hard -"@nomicfoundation/ethereumjs-ethash@npm:3.0.2": - version: 3.0.2 - resolution: "@nomicfoundation/ethereumjs-ethash@npm:3.0.2" +"@typescript-eslint/type-utils@npm:6.13.2": + version: 6.13.2 + resolution: "@typescript-eslint/type-utils@npm:6.13.2" dependencies: - "@nomicfoundation/ethereumjs-block": 5.0.2 - "@nomicfoundation/ethereumjs-rlp": 5.0.2 - "@nomicfoundation/ethereumjs-util": 9.0.2 - abstract-level: ^1.0.3 - bigint-crypto-utils: ^3.0.23 - ethereum-cryptography: 0.1.3 - checksum: e4011e4019dd9b92f7eeebfc1e6c9a9685c52d8fd0ee4f28f03e50048a23b600c714490827f59fdce497b3afb503b3fd2ebf6815ff307e9949c3efeff1403278 + "@typescript-eslint/typescript-estree": 6.13.2 + "@typescript-eslint/utils": 6.13.2 + debug: ^4.3.4 + ts-api-utils: ^1.0.1 + peerDependencies: + eslint: ^7.0.0 || ^8.0.0 + peerDependenciesMeta: + typescript: + optional: true + checksum: 
ba54e5746139f778c35e4058e523ec8c20b68cf6472b3a7784170328e48c228f0761d2fc7e43dab053ca7d85ac4378b6965567774e6afedf551e600638404215 languageName: node linkType: hard -"@nomicfoundation/ethereumjs-evm@npm:2.0.2": - version: 2.0.2 - resolution: "@nomicfoundation/ethereumjs-evm@npm:2.0.2" +"@typescript-eslint/types@npm:6.13.2": + version: 6.13.2 + resolution: "@typescript-eslint/types@npm:6.13.2" + checksum: 4493ff06fa07c68c5adbcbd842f6dd6f5c88f14d160b53c3379b6b703e6f62808fab7fdebcc06ff06a56f20ab432b6ceeb0afb8931dc97d4061cb417e787f2c1 + languageName: node + linkType: hard + +"@typescript-eslint/typescript-estree@npm:6.13.2": + version: 6.13.2 + resolution: "@typescript-eslint/typescript-estree@npm:6.13.2" dependencies: - "@ethersproject/providers": ^5.7.1 - "@nomicfoundation/ethereumjs-common": 4.0.2 - "@nomicfoundation/ethereumjs-tx": 5.0.2 - "@nomicfoundation/ethereumjs-util": 9.0.2 - debug: ^4.3.3 - ethereum-cryptography: 0.1.3 - mcl-wasm: ^0.7.1 - rustbn.js: ~0.2.0 - checksum: a23cf570836ddc147606b02df568069de946108e640f902358fef67e589f6b371d856056ee44299d9b4e3497f8ae25faa45e6b18fefd90e9b222dc6a761d85f0 + "@typescript-eslint/types": 6.13.2 + "@typescript-eslint/visitor-keys": 6.13.2 + debug: ^4.3.4 + globby: ^11.1.0 + is-glob: ^4.0.3 + semver: ^7.5.4 + ts-api-utils: ^1.0.1 + peerDependenciesMeta: + typescript: + optional: true + checksum: 0c18ee5ef594a2411a788fe9d7bc6d51a03bce38d9d764bcb24ab557e5bc1942c2ddf9bd6fb4877eb102b0ae488974fb7b7fe72daa70a2054bf04d3cc6803546 languageName: node linkType: hard -"@nomicfoundation/ethereumjs-rlp@npm:5.0.2": - version: 5.0.2 - resolution: "@nomicfoundation/ethereumjs-rlp@npm:5.0.2" - bin: - rlp: bin/rlp - checksum: a74434cadefca9aa8754607cc1ad7bb4bbea4ee61c6214918e60a5bbee83206850346eb64e39fd1fe97f854c7ec0163e01148c0c881dda23881938f0645a0ef2 +"@typescript-eslint/utils@npm:6.13.2": + version: 6.13.2 + resolution: "@typescript-eslint/utils@npm:6.13.2" + dependencies: + "@eslint-community/eslint-utils": ^4.4.0 + "@types/json-schema": ^7.0.12 + "@types/semver": ^7.5.0 + "@typescript-eslint/scope-manager": 6.13.2 + "@typescript-eslint/types": 6.13.2 + "@typescript-eslint/typescript-estree": 6.13.2 + semver: ^7.5.4 + peerDependencies: + eslint: ^7.0.0 || ^8.0.0 + checksum: b66bcf2a945e9c55f3dccb48af49565863d974837ee23b2f01ce7f3fb2462eb8a5871784d4a2fcc80dac7d5cd4ed90c8d01431cd177c0249de89a448f6663fc8 languageName: node linkType: hard -"@nomicfoundation/ethereumjs-statemanager@npm:2.0.2": - version: 2.0.2 - resolution: "@nomicfoundation/ethereumjs-statemanager@npm:2.0.2" +"@typescript-eslint/visitor-keys@npm:6.13.2": + version: 6.13.2 + resolution: "@typescript-eslint/visitor-keys@npm:6.13.2" dependencies: - "@nomicfoundation/ethereumjs-common": 4.0.2 - "@nomicfoundation/ethereumjs-rlp": 5.0.2 - debug: ^4.3.3 - ethereum-cryptography: 0.1.3 - ethers: ^5.7.1 - js-sdsl: ^4.1.4 - checksum: 3ab6578e252e53609afd98d8ba42a99f182dcf80252f23ed9a5e0471023ffb2502130f85fc47fa7c94cd149f9be799ed9a0942ca52a143405be9267f4ad94e64 + "@typescript-eslint/types": 6.13.2 + eslint-visitor-keys: ^3.4.1 + checksum: 4b4def7acd7451e6a18dab3ee13f06504b3d23e51f195fced7c544f2203ee8a83426c82fa57ab6b58725c70fdedaf7a3eccb69793180be35756eed0f2c69fe04 languageName: node linkType: hard -"@nomicfoundation/ethereumjs-trie@npm:6.0.2": - version: 6.0.2 - resolution: "@nomicfoundation/ethereumjs-trie@npm:6.0.2" +"@ungap/structured-clone@npm:^1.0.0, @ungap/structured-clone@npm:^1.2.0": + version: 1.2.0 + resolution: "@ungap/structured-clone@npm:1.2.0" + checksum: 
4f656b7b4672f2ce6e272f2427d8b0824ed11546a601d8d5412b9d7704e83db38a8d9f402ecdf2b9063fc164af842ad0ec4a55819f621ed7e7ea4d1efcc74524 + languageName: node + linkType: hard + +"@wasm-tool/wasm-pack-plugin@npm:^1.7.0": + version: 1.7.0 + resolution: "@wasm-tool/wasm-pack-plugin@npm:1.7.0" dependencies: - "@nomicfoundation/ethereumjs-rlp": 5.0.2 - "@nomicfoundation/ethereumjs-util": 9.0.2 - "@types/readable-stream": ^2.3.13 - ethereum-cryptography: 0.1.3 - readable-stream: ^3.6.0 - checksum: d4da918d333851b9f2cce7dbd25ab5753e0accd43d562d98fd991b168b6a08d1794528f0ade40fe5617c84900378376fe6256cdbe52c8d66bf4c53293bbc7c40 + chalk: ^2.4.1 + command-exists: ^1.2.7 + watchpack: ^2.1.1 + which: ^2.0.2 + checksum: b0a09f70827b262c4b4689296aae712cc7d9fb742e32eb7e7b720a5b45be1d256864c84c37c8f97de4c762cdae9c35be4a37cdea6832281056921edf4944aea4 languageName: node linkType: hard -"@nomicfoundation/ethereumjs-tx@npm:5.0.2": - version: 5.0.2 - resolution: "@nomicfoundation/ethereumjs-tx@npm:5.0.2" +"@web/browser-logs@npm:^0.2.6": + version: 0.2.6 + resolution: "@web/browser-logs@npm:0.2.6" dependencies: - "@chainsafe/ssz": ^0.9.2 - "@ethersproject/providers": ^5.7.2 - "@nomicfoundation/ethereumjs-common": 4.0.2 - "@nomicfoundation/ethereumjs-rlp": 5.0.2 - "@nomicfoundation/ethereumjs-util": 9.0.2 - ethereum-cryptography: 0.1.3 - checksum: 0bbcea75786b2ccb559afe2ecc9866fb4566a9f157b6ffba4f50960d14f4b3da2e86e273f6fadda9b860e67cfcabf589970fb951b328cb5f900a585cd21842a2 + errorstacks: ^2.2.0 + checksum: 82693e37a7e5a3c3df255e1e4feef6e6c2f2b7f5f883e1a9fd233d09a22c4f3e9e3dfd2ec809d7a02f0894156f26b89f1759bf4e9317640ee3630e9a3d9ec2a8 languageName: node linkType: hard -"@nomicfoundation/ethereumjs-util@npm:9.0.2": - version: 9.0.2 - resolution: "@nomicfoundation/ethereumjs-util@npm:9.0.2" +"@web/browser-logs@npm:^0.3.4": + version: 0.3.4 + resolution: "@web/browser-logs@npm:0.3.4" dependencies: - "@chainsafe/ssz": ^0.10.0 - "@nomicfoundation/ethereumjs-rlp": 5.0.2 - ethereum-cryptography: 0.1.3 - checksum: 3a08f7b88079ef9f53b43da9bdcb8195498fd3d3911c2feee2571f4d1204656053f058b2f650471c86f7d2d0ba2f814768c7cfb0f266eede41c848356afc4900 + errorstacks: ^2.2.0 + checksum: fe212c91c26deada3458b6562a8d7d2ae98b7b51c7099e1cdb972e9f799c63f6cd170776b2eadbe43c47531cb6d9b06f48282113a5944f4394270a0076f8565e languageName: node linkType: hard -"@nomicfoundation/ethereumjs-vm@npm:7.0.2": - version: 7.0.2 - resolution: "@nomicfoundation/ethereumjs-vm@npm:7.0.2" +"@web/browser-logs@npm:^0.4.0": + version: 0.4.0 + resolution: "@web/browser-logs@npm:0.4.0" dependencies: - "@nomicfoundation/ethereumjs-block": 5.0.2 - "@nomicfoundation/ethereumjs-blockchain": 7.0.2 - "@nomicfoundation/ethereumjs-common": 4.0.2 - "@nomicfoundation/ethereumjs-evm": 2.0.2 - "@nomicfoundation/ethereumjs-rlp": 5.0.2 - "@nomicfoundation/ethereumjs-statemanager": 2.0.2 - "@nomicfoundation/ethereumjs-trie": 6.0.2 - "@nomicfoundation/ethereumjs-tx": 5.0.2 - "@nomicfoundation/ethereumjs-util": 9.0.2 - debug: ^4.3.3 - ethereum-cryptography: 0.1.3 - mcl-wasm: ^0.7.1 - rustbn.js: ~0.2.0 - checksum: 1c25ba4d0644cadb8a2b0241a4bb02e578bfd7f70e3492b855c2ab5c120cb159cb8f7486f84dc1597884bd1697feedbfb5feb66e91352afb51f3694fd8e4a043 + errorstacks: ^2.2.0 + checksum: 65c6c4312b1ff00ff40fd15c07708f5e113bb8b4f87c972356010546a0664287dd4f64c5475f8be27b6abb29b9272ecc4162aada90ffb732f1779ceec5fd8ad6 languageName: node linkType: hard -"@nomicfoundation/hardhat-chai-matchers@npm:^2.0.0": - version: 2.0.2 - resolution: "@nomicfoundation/hardhat-chai-matchers@npm:2.0.2" 
+"@web/config-loader@npm:^0.1.3": + version: 0.1.3 + resolution: "@web/config-loader@npm:0.1.3" dependencies: - "@types/chai-as-promised": ^7.1.3 - chai-as-promised: ^7.1.1 - deep-eql: ^4.0.1 - ordinal: ^1.0.3 - peerDependencies: - "@nomicfoundation/hardhat-ethers": ^3.0.0 - chai: ^4.2.0 - ethers: ^6.1.0 - hardhat: ^2.9.4 - checksum: 62d7d69f6b34a06bc43fe0dab8adc9e3b6f907f1b68bb5cf47feb78a4c7ef057b9a4aa713611abeca38df9d8fe166bbd9bbf98e42c4edbdf7aece477b3f9485a + semver: ^7.3.4 + checksum: 278554bd00b757eaf296ba904a224c61d4698df1a5d6c04931c40bc6bb308e81e767055cbf283b763cc530aae6b200bb950aa19eb41aa8979a3a2b29e5f0ac7a languageName: node linkType: hard -"@nomicfoundation/hardhat-ethers@npm:^3.0.0": - version: 3.0.4 - resolution: "@nomicfoundation/hardhat-ethers@npm:3.0.4" +"@web/config-loader@npm:^0.3.0": + version: 0.3.1 + resolution: "@web/config-loader@npm:0.3.1" + checksum: 3ff87dc5cf44a98be97477b1d4c36673d282e7567b61e554aff56f8674a187938c51cd8045f868ca5f4b6ff4d52c72fbbc176c26f27df4335f2341d105d2b535 + languageName: node + linkType: hard + +"@web/dev-server-core@npm:^0.4.1": + version: 0.4.1 + resolution: "@web/dev-server-core@npm:0.4.1" dependencies: - debug: ^4.1.1 - lodash.isequal: ^4.5.0 - peerDependencies: - ethers: ^6.1.0 - hardhat: ^2.0.0 - checksum: 57cbb13682cf0e14cf5bb17b47a2004f69765358fffba7d660ab277a6aff38583216637aa3c5c68410a649a9cd41f774a8df2cb89098f6f616f6108c0f6e5d7a + "@types/koa": ^2.11.6 + "@types/ws": ^7.4.0 + "@web/parse5-utils": ^1.3.1 + chokidar: ^3.4.3 + clone: ^2.1.2 + es-module-lexer: ^1.0.0 + get-stream: ^6.0.0 + is-stream: ^2.0.0 + isbinaryfile: ^5.0.0 + koa: ^2.13.0 + koa-etag: ^4.0.0 + koa-send: ^5.0.1 + koa-static: ^5.0.0 + lru-cache: ^6.0.0 + mime-types: ^2.1.27 + parse5: ^6.0.1 + picomatch: ^2.2.2 + ws: ^7.4.2 + checksum: 4cf728ac781c7831c9c59ffaa1bd2dca1f1e8a6553bedd0d80e47d946ea427067eb1d07b028fc8296a36930c1dd5631e0bc1ccf8f0f4b9203da362c958c1833e languageName: node linkType: hard -"@nomicfoundation/solidity-analyzer-darwin-arm64@npm:0.1.1": - version: 0.1.1 - resolution: "@nomicfoundation/solidity-analyzer-darwin-arm64@npm:0.1.1" - conditions: os=darwin & cpu=arm64 +"@web/dev-server-core@npm:^0.6.2": + version: 0.6.3 + resolution: "@web/dev-server-core@npm:0.6.3" + dependencies: + "@types/koa": ^2.11.6 + "@types/ws": ^7.4.0 + "@web/parse5-utils": ^2.0.2 + chokidar: ^3.4.3 + clone: ^2.1.2 + es-module-lexer: ^1.0.0 + get-stream: ^6.0.0 + is-stream: ^2.0.0 + isbinaryfile: ^5.0.0 + koa: ^2.13.0 + koa-etag: ^4.0.0 + koa-send: ^5.0.1 + koa-static: ^5.0.0 + lru-cache: ^8.0.4 + mime-types: ^2.1.27 + parse5: ^6.0.1 + picomatch: ^2.2.2 + ws: ^7.4.2 + checksum: 98ba42df5eb865828c223bd1de098d013efd8e89983efff28e26ecd9d08c8b35fd29b4c1256ed08b05ecb365abe1aa80d2854e1953bdebbbe230a7e2a597dd8f languageName: node linkType: hard -"@nomicfoundation/solidity-analyzer-darwin-x64@npm:0.1.1": - version: 0.1.1 - resolution: "@nomicfoundation/solidity-analyzer-darwin-x64@npm:0.1.1" - conditions: os=darwin & cpu=x64 +"@web/dev-server-core@npm:^0.7.0": + version: 0.7.0 + resolution: "@web/dev-server-core@npm:0.7.0" + dependencies: + "@types/koa": ^2.11.6 + "@types/ws": ^7.4.0 + "@web/parse5-utils": ^2.1.0 + chokidar: ^3.4.3 + clone: ^2.1.2 + es-module-lexer: ^1.0.0 + get-stream: ^6.0.0 + is-stream: ^2.0.0 + isbinaryfile: ^5.0.0 + koa: ^2.13.0 + koa-etag: ^4.0.0 + koa-send: ^5.0.1 + koa-static: ^5.0.0 + lru-cache: ^8.0.4 + mime-types: ^2.1.27 + parse5: ^6.0.1 + picomatch: ^2.2.2 + ws: ^7.4.2 + checksum: 
68c4440c75eca686626182af94d40f8ffa2e7848074692abd930ae69490af1866c5d10f13e8ad6745fb7ae386ba91377b4bcdbd74604a9600ce4ab2c8df1576c languageName: node linkType: hard -"@nomicfoundation/solidity-analyzer-freebsd-x64@npm:0.1.1": - version: 0.1.1 - resolution: "@nomicfoundation/solidity-analyzer-freebsd-x64@npm:0.1.1" - conditions: os=freebsd & cpu=x64 +"@web/dev-server-esbuild@npm:^0.3.6": + version: 0.3.6 + resolution: "@web/dev-server-esbuild@npm:0.3.6" + dependencies: + "@mdn/browser-compat-data": ^4.0.0 + "@web/dev-server-core": ^0.4.1 + esbuild: ^0.16 || ^0.17 + parse5: ^6.0.1 + ua-parser-js: ^1.0.33 + checksum: ed29357d8a832c695f129de62bd658744b46b1e17a5aab24ad6d7cd09a90f27714d83fc6ece2471c35bff55f4f09435a18af65b37d65709019bfe09c10f4f9eb languageName: node linkType: hard -"@nomicfoundation/solidity-analyzer-linux-arm64-gnu@npm:0.1.1": - version: 0.1.1 - resolution: "@nomicfoundation/solidity-analyzer-linux-arm64-gnu@npm:0.1.1" - conditions: os=linux & cpu=arm64 & libc=glibc +"@web/dev-server-rollup@npm:^0.4.1": + version: 0.4.1 + resolution: "@web/dev-server-rollup@npm:0.4.1" + dependencies: + "@rollup/plugin-node-resolve": ^13.0.4 + "@web/dev-server-core": ^0.4.1 + nanocolors: ^0.2.1 + parse5: ^6.0.1 + rollup: ^2.67.0 + whatwg-url: ^11.0.0 + checksum: a0c3566f67b5a5ead3822431302ddcaa9d043b18fdcf1190056a4e0539e5d5b545ebfecaf6021412eb4b5b6e074c2b1eff35c71e859195623c7c07e065f9df58 languageName: node linkType: hard -"@nomicfoundation/solidity-analyzer-linux-arm64-musl@npm:0.1.1": - version: 0.1.1 - resolution: "@nomicfoundation/solidity-analyzer-linux-arm64-musl@npm:0.1.1" - conditions: os=linux & cpu=arm64 & libc=musl +"@web/dev-server-rollup@npm:^0.6.1": + version: 0.6.1 + resolution: "@web/dev-server-rollup@npm:0.6.1" + dependencies: + "@rollup/plugin-node-resolve": ^15.0.1 + "@web/dev-server-core": ^0.7.0 + nanocolors: ^0.2.1 + parse5: ^6.0.1 + rollup: ^4.4.0 + whatwg-url: ^11.0.0 + checksum: 59536e38d8519bc1edbf4e11adf36c4f5273c02f5fbc5171962cb7acadea2cfd8c44df057ff4422282f34964668280b3564f6b053bfee5327dc0368a382f2e00 languageName: node linkType: hard -"@nomicfoundation/solidity-analyzer-linux-x64-gnu@npm:0.1.1": - version: 0.1.1 - resolution: "@nomicfoundation/solidity-analyzer-linux-x64-gnu@npm:0.1.1" - conditions: os=linux & cpu=x64 & libc=glibc +"@web/dev-server@npm:^0.1.38": + version: 0.1.38 + resolution: "@web/dev-server@npm:0.1.38" + dependencies: + "@babel/code-frame": ^7.12.11 + "@types/command-line-args": ^5.0.0 + "@web/config-loader": ^0.1.3 + "@web/dev-server-core": ^0.4.1 + "@web/dev-server-rollup": ^0.4.1 + camelcase: ^6.2.0 + command-line-args: ^5.1.1 + command-line-usage: ^7.0.1 + debounce: ^1.2.0 + deepmerge: ^4.2.2 + ip: ^1.1.5 + nanocolors: ^0.2.1 + open: ^8.0.2 + portfinder: ^1.0.32 + bin: + wds: dist/bin.js + web-dev-server: dist/bin.js + checksum: eeaf34f8744f58cfb9493155ad8548a87cae4e445a2fa894610b070f66cb303614d247bb609e378b9df342935ad980a259630317c444d19f9796abfcfb20bb13 languageName: node linkType: hard -"@nomicfoundation/solidity-analyzer-linux-x64-musl@npm:0.1.1": - version: 0.1.1 - resolution: "@nomicfoundation/solidity-analyzer-linux-x64-musl@npm:0.1.1" - conditions: os=linux & cpu=x64 & libc=musl +"@web/dev-server@npm:^0.4.0": + version: 0.4.1 + resolution: "@web/dev-server@npm:0.4.1" + dependencies: + "@babel/code-frame": ^7.12.11 + "@types/command-line-args": ^5.0.0 + "@web/config-loader": ^0.3.0 + "@web/dev-server-core": ^0.7.0 + "@web/dev-server-rollup": ^0.6.1 + camelcase: ^6.2.0 + command-line-args: ^5.1.1 + command-line-usage: ^7.0.1 + debounce: 
^1.2.0 + deepmerge: ^4.2.2 + ip: ^1.1.5 + nanocolors: ^0.2.1 + open: ^8.0.2 + portfinder: ^1.0.32 + bin: + wds: dist/bin.js + web-dev-server: dist/bin.js + checksum: 2d9f2fcef4511e9965de24fef0ff4430a525ccc7bdc2069e51a96d1aaab4a50a935e32725dd128bb3641be4d6a2acb870f0d645208b59036bfd9d7f55c0a33a4 languageName: node linkType: hard -"@nomicfoundation/solidity-analyzer-win32-arm64-msvc@npm:0.1.1": - version: 0.1.1 - resolution: "@nomicfoundation/solidity-analyzer-win32-arm64-msvc@npm:0.1.1" - conditions: os=win32 & cpu=arm64 +"@web/parse5-utils@npm:^1.3.1": + version: 1.3.1 + resolution: "@web/parse5-utils@npm:1.3.1" + dependencies: + "@types/parse5": ^6.0.1 + parse5: ^6.0.1 + checksum: 3320b2c4ea1e6a2ff1e57086b0c697a71dac7a6e54da86b2fb8e6c1d5a673bf9b911a743a10daa8cd62a571719edf3e66c9b17e87d79cc982f234bf141e2e178 languageName: node linkType: hard -"@nomicfoundation/solidity-analyzer-win32-ia32-msvc@npm:0.1.1": - version: 0.1.1 - resolution: "@nomicfoundation/solidity-analyzer-win32-ia32-msvc@npm:0.1.1" - conditions: os=win32 & cpu=ia32 +"@web/parse5-utils@npm:^2.0.2, @web/parse5-utils@npm:^2.1.0": + version: 2.1.0 + resolution: "@web/parse5-utils@npm:2.1.0" + dependencies: + "@types/parse5": ^6.0.1 + parse5: ^6.0.1 + checksum: 0faa93c51d61934e0006bebc2e257036f8cedeb455c7bf22b8fdbc17919929518c2cc99ced3769f8eb3b1d6694dd4a7186d66ad2b3c4330140fd2ce03dc6c4d2 languageName: node linkType: hard -"@nomicfoundation/solidity-analyzer-win32-x64-msvc@npm:0.1.1": - version: 0.1.1 - resolution: "@nomicfoundation/solidity-analyzer-win32-x64-msvc@npm:0.1.1" - conditions: os=win32 & cpu=x64 +"@web/test-runner-chrome@npm:^0.12.1": + version: 0.12.1 + resolution: "@web/test-runner-chrome@npm:0.12.1" + dependencies: + "@web/test-runner-core": ^0.10.29 + "@web/test-runner-coverage-v8": ^0.5.0 + chrome-launcher: ^0.15.0 + puppeteer-core: ^19.8.1 + checksum: 08964e4c22c286231a6bdc003393316a7e0c7878560a9394d8bb9212c3c2e37799bc0aaf4732921213647c3dc89169146f65746d381feaa9c281c0373bf9da59 languageName: node linkType: hard -"@nomicfoundation/solidity-analyzer@npm:^0.1.0": - version: 0.1.1 - resolution: "@nomicfoundation/solidity-analyzer@npm:0.1.1" +"@web/test-runner-chrome@npm:^0.15.0": + version: 0.15.0 + resolution: "@web/test-runner-chrome@npm:0.15.0" dependencies: - "@nomicfoundation/solidity-analyzer-darwin-arm64": 0.1.1 - "@nomicfoundation/solidity-analyzer-darwin-x64": 0.1.1 - "@nomicfoundation/solidity-analyzer-freebsd-x64": 0.1.1 - "@nomicfoundation/solidity-analyzer-linux-arm64-gnu": 0.1.1 - "@nomicfoundation/solidity-analyzer-linux-arm64-musl": 0.1.1 - "@nomicfoundation/solidity-analyzer-linux-x64-gnu": 0.1.1 - "@nomicfoundation/solidity-analyzer-linux-x64-musl": 0.1.1 - "@nomicfoundation/solidity-analyzer-win32-arm64-msvc": 0.1.1 - "@nomicfoundation/solidity-analyzer-win32-ia32-msvc": 0.1.1 - "@nomicfoundation/solidity-analyzer-win32-x64-msvc": 0.1.1 - dependenciesMeta: - "@nomicfoundation/solidity-analyzer-darwin-arm64": - optional: true - "@nomicfoundation/solidity-analyzer-darwin-x64": - optional: true - "@nomicfoundation/solidity-analyzer-freebsd-x64": - optional: true - "@nomicfoundation/solidity-analyzer-linux-arm64-gnu": - optional: true - "@nomicfoundation/solidity-analyzer-linux-arm64-musl": - optional: true - "@nomicfoundation/solidity-analyzer-linux-x64-gnu": - optional: true - "@nomicfoundation/solidity-analyzer-linux-x64-musl": - optional: true - "@nomicfoundation/solidity-analyzer-win32-arm64-msvc": - optional: true - "@nomicfoundation/solidity-analyzer-win32-ia32-msvc": - optional: true - 
"@nomicfoundation/solidity-analyzer-win32-x64-msvc": - optional: true - checksum: 038cffafd5769e25256b5b8bef88d95cc1c021274a65c020cf84aceb3237752a3b51645fdb0687f5516a2bdfebf166fcf50b08ab64857925100213e0654b266b + "@web/test-runner-core": ^0.13.0 + "@web/test-runner-coverage-v8": ^0.8.0 + async-mutex: 0.4.0 + chrome-launcher: ^0.15.0 + puppeteer-core: ^20.0.0 + checksum: 091aa83707aa1a6ade8074c37050f9a0fae2729f223b5e7d756f86ccdadcd85e738cc47d0a4ae8ac6ea930142cc20e341f5d3ad30a3a81d6666b353a7e8c2dd4 + languageName: node + linkType: hard + +"@web/test-runner-commands@npm:^0.6.6": + version: 0.6.6 + resolution: "@web/test-runner-commands@npm:0.6.6" + dependencies: + "@web/test-runner-core": ^0.10.29 + mkdirp: ^1.0.4 + checksum: b25533edd9ec59aeec28756a52ae4c6730388c336fa94e0c21eedf208012efd9aedf96c47ebad9a98cce9d87c4ee539b318a571e1e2bfb1bf8e5f1f6889c98e4 languageName: node linkType: hard -"@npmcli/fs@npm:^3.1.0": - version: 3.1.0 - resolution: "@npmcli/fs@npm:3.1.0" +"@web/test-runner-commands@npm:^0.9.0": + version: 0.9.0 + resolution: "@web/test-runner-commands@npm:0.9.0" dependencies: - semver: ^7.3.5 - checksum: a50a6818de5fc557d0b0e6f50ec780a7a02ab8ad07e5ac8b16bf519e0ad60a144ac64f97d05c443c3367235d337182e1d012bbac0eb8dbae8dc7b40b193efd0e + "@web/test-runner-core": ^0.13.0 + mkdirp: ^1.0.4 + checksum: df226f76148c5967df68c2589549b10ffe75f3d34a31d63bea132447271cdf073de7350aa680fcbf4315737b909fc44faad23d9f8e7e3ce37e93e05e67a7f295 languageName: node linkType: hard -"@pkgjs/parseargs@npm:^0.11.0": - version: 0.11.0 - resolution: "@pkgjs/parseargs@npm:0.11.0" - checksum: 6ad6a00fc4f2f2cfc6bff76fb1d88b8ee20bc0601e18ebb01b6d4be583733a860239a521a7fbca73b612e66705078809483549d2b18f370eb346c5155c8e4a0f +"@web/test-runner-core@npm:^0.10.20, @web/test-runner-core@npm:^0.10.29": + version: 0.10.29 + resolution: "@web/test-runner-core@npm:0.10.29" + dependencies: + "@babel/code-frame": ^7.12.11 + "@types/babel__code-frame": ^7.0.2 + "@types/co-body": ^6.1.0 + "@types/convert-source-map": ^2.0.0 + "@types/debounce": ^1.2.0 + "@types/istanbul-lib-coverage": ^2.0.3 + "@types/istanbul-reports": ^3.0.0 + "@web/browser-logs": ^0.2.6 + "@web/dev-server-core": ^0.4.1 + chokidar: ^3.4.3 + cli-cursor: ^3.1.0 + co-body: ^6.1.0 + convert-source-map: ^2.0.0 + debounce: ^1.2.0 + dependency-graph: ^0.11.0 + globby: ^11.0.1 + ip: ^1.1.5 + istanbul-lib-coverage: ^3.0.0 + istanbul-lib-report: ^3.0.0 + istanbul-reports: ^3.0.2 + log-update: ^4.0.0 + nanocolors: ^0.2.1 + nanoid: ^3.1.25 + open: ^8.0.2 + picomatch: ^2.2.2 + source-map: ^0.7.3 + checksum: 635a510442bea3bce97596a2aed1c58a6154b4b83a44bf3e9c9497a751f42426cae5f67555916c4fd63064a4e91a5e26755e3090887ebac38ec0ab2691e1fe6c languageName: node linkType: hard -"@pkgr/utils@npm:^2.3.1": - version: 2.4.2 - resolution: "@pkgr/utils@npm:2.4.2" +"@web/test-runner-core@npm:^0.12.0": + version: 0.12.0 + resolution: "@web/test-runner-core@npm:0.12.0" dependencies: - cross-spawn: ^7.0.3 - fast-glob: ^3.3.0 - is-glob: ^4.0.3 - open: ^9.1.0 - picocolors: ^1.0.0 - tslib: ^2.6.0 - checksum: 24e04c121269317d259614cd32beea3af38277151c4002df5883c4be920b8e3490bb897748e844f9d46bf68230f86dabd4e8f093773130e7e60529a769a132fc + "@babel/code-frame": ^7.12.11 + "@types/babel__code-frame": ^7.0.2 + "@types/co-body": ^6.1.0 + "@types/convert-source-map": ^2.0.0 + "@types/debounce": ^1.2.0 + "@types/istanbul-lib-coverage": ^2.0.3 + "@types/istanbul-reports": ^3.0.0 + "@web/browser-logs": ^0.3.4 + "@web/dev-server-core": ^0.6.2 + chokidar: ^3.4.3 + cli-cursor: ^3.1.0 + co-body: ^6.1.0 + 
convert-source-map: ^2.0.0 + debounce: ^1.2.0 + dependency-graph: ^0.11.0 + globby: ^11.0.1 + ip: ^1.1.5 + istanbul-lib-coverage: ^3.0.0 + istanbul-lib-report: ^3.0.1 + istanbul-reports: ^3.0.2 + log-update: ^4.0.0 + nanocolors: ^0.2.1 + nanoid: ^3.1.25 + open: ^8.0.2 + picomatch: ^2.2.2 + source-map: ^0.7.3 + checksum: e71afa227f9dc2ea4ec67838b1bc4c8af2c61d3e6002b78e37724e3dc09be466e7f7aa5e6795d5431dca1a0b13b94765a880103f98c5497c97943c2f708327eb languageName: node linkType: hard -"@polka/url@npm:^1.0.0-next.20": - version: 1.0.0-next.23 - resolution: "@polka/url@npm:1.0.0-next.23" - checksum: 4b0330de1ceecd1002c7e7449094d0c41f2ed0e21765f4835ccc7b003f2f024ac557d503b9ffdf0918cf50b80d5b8c99dfc5a91927e7b3c468b09c6bb42a3c41 +"@web/test-runner-core@npm:^0.13.0": + version: 0.13.0 + resolution: "@web/test-runner-core@npm:0.13.0" + dependencies: + "@babel/code-frame": ^7.12.11 + "@types/babel__code-frame": ^7.0.2 + "@types/co-body": ^6.1.0 + "@types/convert-source-map": ^2.0.0 + "@types/debounce": ^1.2.0 + "@types/istanbul-lib-coverage": ^2.0.3 + "@types/istanbul-reports": ^3.0.0 + "@web/browser-logs": ^0.4.0 + "@web/dev-server-core": ^0.7.0 + chokidar: ^3.4.3 + cli-cursor: ^3.1.0 + co-body: ^6.1.0 + convert-source-map: ^2.0.0 + debounce: ^1.2.0 + dependency-graph: ^0.11.0 + globby: ^11.0.1 + ip: ^1.1.5 + istanbul-lib-coverage: ^3.0.0 + istanbul-lib-report: ^3.0.1 + istanbul-reports: ^3.0.2 + log-update: ^4.0.0 + nanocolors: ^0.2.1 + nanoid: ^3.1.25 + open: ^8.0.2 + picomatch: ^2.2.2 + source-map: ^0.7.3 + checksum: 6c2bd4962da51521dc0d2cf660bf4e1fa3a9a92166a8c71e85a9fe9160710bbe83658dfceae6108123dcf85bf41fbabd8d3b7294f350e2f732f3713fcaa9a024 languageName: node linkType: hard -"@puppeteer/browsers@npm:0.5.0": +"@web/test-runner-coverage-v8@npm:^0.5.0": version: 0.5.0 - resolution: "@puppeteer/browsers@npm:0.5.0" + resolution: "@web/test-runner-coverage-v8@npm:0.5.0" dependencies: - debug: 4.3.4 - extract-zip: 2.0.1 - https-proxy-agent: 5.0.1 - progress: 2.0.3 - proxy-from-env: 1.1.0 - tar-fs: 2.1.1 - unbzip2-stream: 1.4.3 - yargs: 17.7.1 - peerDependencies: - typescript: ">= 4.7.4" - peerDependenciesMeta: - typescript: - optional: true - bin: - browsers: lib/cjs/main-cli.js - checksum: d75fde03be4be106ca907834739251c2bb0b33a09fa23315c5dbe8b8b4cfed2f1b26af62e1dbe5fccc227e9bc87b51da0815461b982477eb01439bfdd6e7b01a + "@web/test-runner-core": ^0.10.20 + istanbul-lib-coverage: ^3.0.0 + picomatch: ^2.2.2 + v8-to-istanbul: ^9.0.1 + checksum: e69dc6379cff24f28bd21cc37a661945fbf7f3fd532da813e74f4042efe17fc191cdb7c09f1e1ea276167952b0116478ba0fe7af0966fa4867278c3a2cd772df languageName: node linkType: hard -"@rollup/plugin-node-resolve@npm:^13.0.4": - version: 13.3.0 - resolution: "@rollup/plugin-node-resolve@npm:13.3.0" +"@web/test-runner-coverage-v8@npm:^0.7.3": + version: 0.7.3 + resolution: "@web/test-runner-coverage-v8@npm:0.7.3" dependencies: - "@rollup/pluginutils": ^3.1.0 - "@types/resolve": 1.17.1 - deepmerge: ^4.2.2 - is-builtin-module: ^3.1.0 - is-module: ^1.0.0 - resolve: ^1.19.0 - peerDependencies: - rollup: ^2.42.0 - checksum: ec5418e6b3c23a9e30683056b3010e9d325316dcfae93fbc673ae64dad8e56a2ce761c15c48f5e2dcfe0c822fdc4a4905ee6346e3dcf90603ba2260afef5a5e6 + "@web/test-runner-core": ^0.12.0 + istanbul-lib-coverage: ^3.0.0 + lru-cache: ^8.0.4 + picomatch: ^2.2.2 + v8-to-istanbul: ^9.0.1 + checksum: 05d7a9a4df8ca30991307a8d69ac9388a6572a9c6585887a925e7bdb158a0430f213c81cb356b8dcb7bf9cd3423d0071030b481c29358562bd344da8ea814daa languageName: node linkType: hard -"@rollup/pluginutils@npm:^3.1.0": - 
version: 3.1.0 - resolution: "@rollup/pluginutils@npm:3.1.0" +"@web/test-runner-coverage-v8@npm:^0.8.0": + version: 0.8.0 + resolution: "@web/test-runner-coverage-v8@npm:0.8.0" dependencies: - "@types/estree": 0.0.39 - estree-walker: ^1.0.1 + "@web/test-runner-core": ^0.13.0 + istanbul-lib-coverage: ^3.0.0 + lru-cache: ^8.0.4 picomatch: ^2.2.2 - peerDependencies: - rollup: ^1.20.0||^2.0.0 - checksum: 8be16e27863c219edbb25a4e6ec2fe0e1e451d9e917b6a43cf2ae5bc025a6b8faaa40f82a6e53b66d0de37b58ff472c6c3d57a83037ae635041f8df959d6d9aa + v8-to-istanbul: ^9.0.1 + checksum: 343f834372b3aeb2c24f4b03ce956d8ad851ef2a85b94507651c2a65321fcdff1b26a2c44d7516e97d9c42786bb003b9c245ad0798a414a814d0264fdbe0761e languageName: node linkType: hard -"@scure/base@npm:~1.1.0": - version: 1.1.3 - resolution: "@scure/base@npm:1.1.3" - checksum: 1606ab8a4db898cb3a1ada16c15437c3bce4e25854fadc8eb03ae93cbbbac1ed90655af4b0be3da37e12056fef11c0374499f69b9e658c9e5b7b3e06353c630c +"@web/test-runner-mocha@npm:^0.7.5": + version: 0.7.5 + resolution: "@web/test-runner-mocha@npm:0.7.5" + dependencies: + "@types/mocha": ^8.2.0 + "@web/test-runner-core": ^0.10.20 + checksum: 12f87299945d230815bb783de2953ac4239306c1a67145ef5b78cfb0b361ae7f659e5d3e5150af2cedc6f2c55adf10652b761f016430a7ac2d7f77b91ecb9cd1 languageName: node linkType: hard -"@scure/bip32@npm:1.1.5": - version: 1.1.5 - resolution: "@scure/bip32@npm:1.1.5" +"@web/test-runner-mocha@npm:^0.9.0": + version: 0.9.0 + resolution: "@web/test-runner-mocha@npm:0.9.0" dependencies: - "@noble/hashes": ~1.2.0 - "@noble/secp256k1": ~1.7.0 - "@scure/base": ~1.1.0 - checksum: b08494ab0d2b1efee7226d1b5100db5157ebea22a78bb87126982a76a186cb3048413e8be0ba2622d00d048a20acbba527af730de86c132a77de616eb9907a3b + "@web/test-runner-core": ^0.13.0 + checksum: bcc9410ac9d679e7bb804fc5720b2a0ed3b4d08f2b49c03f2157f5b54c7f525a432712e1da644f04e5190c2480af2dc46a4c736cdba3fda3ba5fa98fd0f01a94 languageName: node linkType: hard -"@scure/bip39@npm:1.1.1": - version: 1.1.1 - resolution: "@scure/bip39@npm:1.1.1" +"@web/test-runner-playwright@npm:^0.10.0": + version: 0.10.3 + resolution: "@web/test-runner-playwright@npm:0.10.3" dependencies: - "@noble/hashes": ~1.2.0 - "@scure/base": ~1.1.0 - checksum: fbb594c50696fa9c14e891d872f382e50a3f919b6c96c55ef2fb10c7102c546dafb8f099a62bd114c12a00525b595dcf7381846f383f0ddcedeaa6e210747d2f + "@web/test-runner-core": ^0.12.0 + "@web/test-runner-coverage-v8": ^0.7.3 + playwright: ^1.22.2 + checksum: 7c765d34482f2e299742c3ffe80790229d0825569016ccfccbb1a0c915f89551a3cc14a1454ed7c6895aaa03605ea444f7c1846eeab82bf02702e87a60628b3c languageName: node linkType: hard -"@sentry/core@npm:5.30.0": - version: 5.30.0 - resolution: "@sentry/core@npm:5.30.0" +"@web/test-runner-playwright@npm:^0.11.0": + version: 0.11.0 + resolution: "@web/test-runner-playwright@npm:0.11.0" dependencies: - "@sentry/hub": 5.30.0 - "@sentry/minimal": 5.30.0 - "@sentry/types": 5.30.0 - "@sentry/utils": 5.30.0 - tslib: ^1.9.3 - checksum: 8a2b22687e70d76fa4381bce215d770b6c08561c5ff5d6afe39c8c3c509c18ee7384ad0be3aee18d3a858a3c88e1d2821cf10eb5e05646376a33200903b56da2 + "@web/test-runner-core": ^0.13.0 + "@web/test-runner-coverage-v8": ^0.8.0 + playwright: ^1.22.2 + checksum: 3618b0b559d865af1211b3d86ec57487b32722a0dd640c9a3faca6c692cceec75176d742d0db73c823d300fbe761e5c15b2fad0c5096c3dbfedab313ed1aa7fe languageName: node linkType: hard -"@sentry/hub@npm:5.30.0": - version: 5.30.0 - resolution: "@sentry/hub@npm:5.30.0" +"@web/test-runner@npm:^0.15.3": + version: 0.15.3 + resolution: "@web/test-runner@npm:0.15.3" 
dependencies: - "@sentry/types": 5.30.0 - "@sentry/utils": 5.30.0 - tslib: ^1.9.3 - checksum: 09f778cc78765213f1e35a3ee6da3a8e02a706e8a7e5b7f84614707f4b665c7297b700a1849ab2ca1f02ede5884fd9ae893e58dc65f04f35ccdfee17e99ee93d + "@web/browser-logs": ^0.2.6 + "@web/config-loader": ^0.1.3 + "@web/dev-server": ^0.1.38 + "@web/test-runner-chrome": ^0.12.1 + "@web/test-runner-commands": ^0.6.6 + "@web/test-runner-core": ^0.10.29 + "@web/test-runner-mocha": ^0.7.5 + camelcase: ^6.2.0 + command-line-args: ^5.1.1 + command-line-usage: ^7.0.1 + convert-source-map: ^2.0.0 + diff: ^5.0.0 + globby: ^11.0.1 + nanocolors: ^0.2.1 + portfinder: ^1.0.32 + source-map: ^0.7.3 + bin: + web-test-runner: dist/bin.js + wtr: dist/bin.js + checksum: 75d00d4f15f9977ff4e8fca84e1c7f9d834073688df06d9e4b62bf43cad65a36b2ae21b9ebab5706e4d3b07fc639bb90758b1be1df036c8b80137ec3407a8f08 + languageName: node + linkType: hard + +"@web/test-runner@npm:^0.18.0": + version: 0.18.0 + resolution: "@web/test-runner@npm:0.18.0" + dependencies: + "@web/browser-logs": ^0.4.0 + "@web/config-loader": ^0.3.0 + "@web/dev-server": ^0.4.0 + "@web/test-runner-chrome": ^0.15.0 + "@web/test-runner-commands": ^0.9.0 + "@web/test-runner-core": ^0.13.0 + "@web/test-runner-mocha": ^0.9.0 + camelcase: ^6.2.0 + command-line-args: ^5.1.1 + command-line-usage: ^7.0.1 + convert-source-map: ^2.0.0 + diff: ^5.0.0 + globby: ^11.0.1 + nanocolors: ^0.2.1 + portfinder: ^1.0.32 + source-map: ^0.7.3 + bin: + web-test-runner: dist/bin.js + wtr: dist/bin.js + checksum: d5e410f08cb954f9854a3d837f5f704b578376ee8b0452cff66aeca2eb3cb98e50556ca3b958bda567b42af2ef2cd0a7424eaea40f9b3e80362ae788fbd33118 + languageName: node + linkType: hard + +"@webassemblyjs/ast@npm:1.11.6, @webassemblyjs/ast@npm:^1.11.5": + version: 1.11.6 + resolution: "@webassemblyjs/ast@npm:1.11.6" + dependencies: + "@webassemblyjs/helper-numbers": 1.11.6 + "@webassemblyjs/helper-wasm-bytecode": 1.11.6 + checksum: 38ef1b526ca47c210f30975b06df2faf1a8170b1636ce239fc5738fc231ce28389dd61ecedd1bacfc03cbe95b16d1af848c805652080cb60982836eb4ed2c6cf + languageName: node + linkType: hard + +"@webassemblyjs/floating-point-hex-parser@npm:1.11.6": + version: 1.11.6 + resolution: "@webassemblyjs/floating-point-hex-parser@npm:1.11.6" + checksum: 29b08758841fd8b299c7152eda36b9eb4921e9c584eb4594437b5cd90ed6b920523606eae7316175f89c20628da14326801090167cc7fbffc77af448ac84b7e2 languageName: node linkType: hard -"@sentry/minimal@npm:5.30.0": - version: 5.30.0 - resolution: "@sentry/minimal@npm:5.30.0" - dependencies: - "@sentry/hub": 5.30.0 - "@sentry/types": 5.30.0 - tslib: ^1.9.3 - checksum: 934650f6989ce51f425c7c4b4d4d9bfecface8162a36d21df8a241f780ab1716dd47b81e2170e4cc624797ed1eebe10f71e4876c1e25b787860daaef75ca7a0c +"@webassemblyjs/helper-api-error@npm:1.11.6": + version: 1.11.6 + resolution: "@webassemblyjs/helper-api-error@npm:1.11.6" + checksum: e8563df85161096343008f9161adb138a6e8f3c2cc338d6a36011aa55eabb32f2fd138ffe63bc278d009ada001cc41d263dadd1c0be01be6c2ed99076103689f languageName: node linkType: hard -"@sentry/node@npm:^5.18.1": - version: 5.30.0 - resolution: "@sentry/node@npm:5.30.0" - dependencies: - "@sentry/core": 5.30.0 - "@sentry/hub": 5.30.0 - "@sentry/tracing": 5.30.0 - "@sentry/types": 5.30.0 - "@sentry/utils": 5.30.0 - cookie: ^0.4.1 - https-proxy-agent: ^5.0.0 - lru_map: ^0.3.3 - tslib: ^1.9.3 - checksum: 5f0367cc52f9d716c64ba727e2a5c8592364494c8fdadfb3df2d0ee9d7956b886fb3ec674370292d2a7b7e1d9a8e1b84c69c06e8a4a064be8d4687698df0090c +"@webassemblyjs/helper-buffer@npm:1.11.6": + version: 1.11.6 + 
resolution: "@webassemblyjs/helper-buffer@npm:1.11.6" + checksum: b14d0573bf680d22b2522e8a341ec451fddd645d1f9c6bd9012ccb7e587a2973b86ab7b89fe91e1c79939ba96095f503af04369a3b356c8023c13a5893221644 languageName: node linkType: hard -"@sentry/tracing@npm:5.30.0": - version: 5.30.0 - resolution: "@sentry/tracing@npm:5.30.0" +"@webassemblyjs/helper-numbers@npm:1.11.6": + version: 1.11.6 + resolution: "@webassemblyjs/helper-numbers@npm:1.11.6" dependencies: - "@sentry/hub": 5.30.0 - "@sentry/minimal": 5.30.0 - "@sentry/types": 5.30.0 - "@sentry/utils": 5.30.0 - tslib: ^1.9.3 - checksum: 720c07b111e8128e70a939ab4e9f9cfd13dc23303b27575afddabab08d08f9b94499017c76a9ffe253bf3ca40833e8f9262cf6dc546ba24da6eb74fedae5f92b + "@webassemblyjs/floating-point-hex-parser": 1.11.6 + "@webassemblyjs/helper-api-error": 1.11.6 + "@xtuc/long": 4.2.2 + checksum: f4b562fa219f84368528339e0f8d273ad44e047a07641ffcaaec6f93e5b76fd86490a009aa91a294584e1436d74b0a01fa9fde45e333a4c657b58168b04da424 languageName: node linkType: hard -"@sentry/types@npm:5.30.0": - version: 5.30.0 - resolution: "@sentry/types@npm:5.30.0" - checksum: de7df777824c8e311f143c6fd7de220b24f25b5018312fe8f67d93bebf0f3cdd32bbca9f155846f5c31441d940eebe27c8338000321559a743264c7e41dda560 +"@webassemblyjs/helper-wasm-bytecode@npm:1.11.6": + version: 1.11.6 + resolution: "@webassemblyjs/helper-wasm-bytecode@npm:1.11.6" + checksum: 3535ef4f1fba38de3475e383b3980f4bbf3de72bbb631c2b6584c7df45be4eccd62c6ff48b5edd3f1bcff275cfd605a37679ec199fc91fd0a7705d7f1e3972dc languageName: node linkType: hard -"@sentry/utils@npm:5.30.0": - version: 5.30.0 - resolution: "@sentry/utils@npm:5.30.0" +"@webassemblyjs/helper-wasm-section@npm:1.11.6": + version: 1.11.6 + resolution: "@webassemblyjs/helper-wasm-section@npm:1.11.6" dependencies: - "@sentry/types": 5.30.0 - tslib: ^1.9.3 - checksum: 27b259a136c664427641dd32ee3dc490553f3b5e92986accfa829d14063ebc69b191e92209ac9c40fbc367f74cfa17dc93b4c40981d666711fd57b4d51a82062 + "@webassemblyjs/ast": 1.11.6 + "@webassemblyjs/helper-buffer": 1.11.6 + "@webassemblyjs/helper-wasm-bytecode": 1.11.6 + "@webassemblyjs/wasm-gen": 1.11.6 + checksum: b2cf751bf4552b5b9999d27bbb7692d0aca75260140195cb58ea6374d7b9c2dc69b61e10b211a0e773f66209c3ddd612137ed66097e3684d7816f854997682e9 languageName: node linkType: hard -"@sideway/address@npm:^4.1.3": - version: 4.1.4 - resolution: "@sideway/address@npm:4.1.4" +"@webassemblyjs/ieee754@npm:1.11.6": + version: 1.11.6 + resolution: "@webassemblyjs/ieee754@npm:1.11.6" dependencies: - "@hapi/hoek": ^9.0.0 - checksum: b9fca2a93ac2c975ba12e0a6d97853832fb1f4fb02393015e012b47fa916a75ca95102d77214b2a29a2784740df2407951af8c5dde054824c65577fd293c4cdb + "@xtuc/ieee754": ^1.2.0 + checksum: 13574b8e41f6ca39b700e292d7edf102577db5650fe8add7066a320aa4b7a7c09a5056feccac7a74eb68c10dea9546d4461412af351f13f6b24b5f32379b49de languageName: node linkType: hard -"@sideway/formula@npm:^3.0.1": - version: 3.0.1 - resolution: "@sideway/formula@npm:3.0.1" - checksum: e4beeebc9dbe2ff4ef0def15cec0165e00d1612e3d7cea0bc9ce5175c3263fc2c818b679bd558957f49400ee7be9d4e5ac90487e1625b4932e15c4aa7919c57a +"@webassemblyjs/leb128@npm:1.11.6": + version: 1.11.6 + resolution: "@webassemblyjs/leb128@npm:1.11.6" + dependencies: + "@xtuc/long": 4.2.2 + checksum: 7ea942dc9777d4b18a5ebfa3a937b30ae9e1d2ce1fee637583ed7f376334dd1d4274f813d2e250056cca803e0952def4b954913f1a3c9068bcd4ab4ee5143bf0 languageName: node linkType: hard -"@sideway/pinpoint@npm:^2.0.0": - version: 2.0.0 - resolution: "@sideway/pinpoint@npm:2.0.0" - checksum: 
0f4491e5897fcf5bf02c46f5c359c56a314e90ba243f42f0c100437935daa2488f20482f0f77186bd6bf43345095a95d8143ecf8b1f4d876a7bc0806aba9c3d2 +"@webassemblyjs/utf8@npm:1.11.6": + version: 1.11.6 + resolution: "@webassemblyjs/utf8@npm:1.11.6" + checksum: 807fe5b5ce10c390cfdd93e0fb92abda8aebabb5199980681e7c3743ee3306a75729bcd1e56a3903980e96c885ee53ef901fcbaac8efdfa480f9c0dae1d08713 languageName: node linkType: hard -"@sinclair/typebox@npm:^0.27.8": - version: 0.27.8 - resolution: "@sinclair/typebox@npm:0.27.8" - checksum: 00bd7362a3439021aa1ea51b0e0d0a0e8ca1351a3d54c606b115fdcc49b51b16db6e5f43b4fe7a28c38688523e22a94d49dd31168868b655f0d4d50f032d07a1 +"@webassemblyjs/wasm-edit@npm:^1.11.5": + version: 1.11.6 + resolution: "@webassemblyjs/wasm-edit@npm:1.11.6" + dependencies: + "@webassemblyjs/ast": 1.11.6 + "@webassemblyjs/helper-buffer": 1.11.6 + "@webassemblyjs/helper-wasm-bytecode": 1.11.6 + "@webassemblyjs/helper-wasm-section": 1.11.6 + "@webassemblyjs/wasm-gen": 1.11.6 + "@webassemblyjs/wasm-opt": 1.11.6 + "@webassemblyjs/wasm-parser": 1.11.6 + "@webassemblyjs/wast-printer": 1.11.6 + checksum: 29ce75870496d6fad864d815ebb072395a8a3a04dc9c3f4e1ffdc63fc5fa58b1f34304a1117296d8240054cfdbc38aca88e71fb51483cf29ffab0a61ef27b481 languageName: node linkType: hard -"@sindresorhus/is@npm:^0.14.0": - version: 0.14.0 - resolution: "@sindresorhus/is@npm:0.14.0" - checksum: 971e0441dd44ba3909b467219a5e242da0fc584048db5324cfb8048148fa8dcc9d44d71e3948972c4f6121d24e5da402ef191420d1266a95f713bb6d6e59c98a +"@webassemblyjs/wasm-gen@npm:1.11.6": + version: 1.11.6 + resolution: "@webassemblyjs/wasm-gen@npm:1.11.6" + dependencies: + "@webassemblyjs/ast": 1.11.6 + "@webassemblyjs/helper-wasm-bytecode": 1.11.6 + "@webassemblyjs/ieee754": 1.11.6 + "@webassemblyjs/leb128": 1.11.6 + "@webassemblyjs/utf8": 1.11.6 + checksum: a645a2eecbea24833c3260a249704a7f554ef4a94c6000984728e94bb2bc9140a68dfd6fd21d5e0bbb09f6dfc98e083a45760a83ae0417b41a0196ff6d45a23a languageName: node linkType: hard -"@slorber/static-site-generator-webpack-plugin@npm:^4.0.7": - version: 4.0.7 - resolution: "@slorber/static-site-generator-webpack-plugin@npm:4.0.7" +"@webassemblyjs/wasm-opt@npm:1.11.6": + version: 1.11.6 + resolution: "@webassemblyjs/wasm-opt@npm:1.11.6" dependencies: - eval: ^0.1.8 - p-map: ^4.0.0 - webpack-sources: ^3.2.2 - checksum: a1e1d8b22dd51059524993f3fdd6861db10eb950debc389e5dd650702287fa2004eace03e6bc8f25b977bd7bc01d76a50aa271cbb73c58a8ec558945d728f307 + "@webassemblyjs/ast": 1.11.6 + "@webassemblyjs/helper-buffer": 1.11.6 + "@webassemblyjs/wasm-gen": 1.11.6 + "@webassemblyjs/wasm-parser": 1.11.6 + checksum: b4557f195487f8e97336ddf79f7bef40d788239169aac707f6eaa2fa5fe243557c2d74e550a8e57f2788e70c7ae4e7d32f7be16101afe183d597b747a3bdd528 languageName: node linkType: hard -"@svgr/babel-plugin-add-jsx-attribute@npm:^6.5.1": - version: 6.5.1 - resolution: "@svgr/babel-plugin-add-jsx-attribute@npm:6.5.1" - peerDependencies: - "@babel/core": ^7.0.0-0 - checksum: cab83832830a57735329ed68f67c03b57ca21fa037b0134847b0c5c0ef4beca89956d7dacfbf7b2a10fd901e7009e877512086db2ee918b8c69aee7742ae32c0 +"@webassemblyjs/wasm-parser@npm:1.11.6, @webassemblyjs/wasm-parser@npm:^1.11.5": + version: 1.11.6 + resolution: "@webassemblyjs/wasm-parser@npm:1.11.6" + dependencies: + "@webassemblyjs/ast": 1.11.6 + "@webassemblyjs/helper-api-error": 1.11.6 + "@webassemblyjs/helper-wasm-bytecode": 1.11.6 + "@webassemblyjs/ieee754": 1.11.6 + "@webassemblyjs/leb128": 1.11.6 + "@webassemblyjs/utf8": 1.11.6 + checksum: 
8200a8d77c15621724a23fdabe58d5571415cda98a7058f542e670ea965dd75499f5e34a48675184947c66f3df23adf55df060312e6d72d57908e3f049620d8a languageName: node linkType: hard -"@svgr/babel-plugin-remove-jsx-attribute@npm:*": - version: 8.0.0 - resolution: "@svgr/babel-plugin-remove-jsx-attribute@npm:8.0.0" - peerDependencies: - "@babel/core": ^7.0.0-0 - checksum: ff992893c6c4ac802713ba3a97c13be34e62e6d981c813af40daabcd676df68a72a61bd1e692bb1eda3587f1b1d700ea462222ae2153bb0f46886632d4f88d08 +"@webassemblyjs/wast-printer@npm:1.11.6": + version: 1.11.6 + resolution: "@webassemblyjs/wast-printer@npm:1.11.6" + dependencies: + "@webassemblyjs/ast": 1.11.6 + "@xtuc/long": 4.2.2 + checksum: d2fa6a4c427325ec81463e9c809aa6572af6d47f619f3091bf4c4a6fc34f1da3df7caddaac50b8e7a457f8784c62cd58c6311b6cb69b0162ccd8d4c072f79cf8 languageName: node linkType: hard -"@svgr/babel-plugin-remove-jsx-empty-expression@npm:*": - version: 8.0.0 - resolution: "@svgr/babel-plugin-remove-jsx-empty-expression@npm:8.0.0" +"@webpack-cli/configtest@npm:^1.2.0": + version: 1.2.0 + resolution: "@webpack-cli/configtest@npm:1.2.0" peerDependencies: - "@babel/core": ^7.0.0-0 - checksum: 0fb691b63a21bac00da3aa2dccec50d0d5a5b347ff408d60803b84410d8af168f2656e4ba1ee1f24dab0ae4e4af77901f2928752bb0434c1f6788133ec599ec8 + webpack: 4.x.x || 5.x.x + webpack-cli: 4.x.x + checksum: a2726cd9ec601d2b57e5fc15e0ebf5200a8892065e735911269ac2038e62be4bfc176ea1f88c2c46ff09b4d05d4c10ae045e87b3679372483d47da625a327e28 languageName: node linkType: hard -"@svgr/babel-plugin-replace-jsx-attribute-value@npm:^6.5.1": - version: 6.5.1 - resolution: "@svgr/babel-plugin-replace-jsx-attribute-value@npm:6.5.1" +"@webpack-cli/info@npm:^1.5.0": + version: 1.5.0 + resolution: "@webpack-cli/info@npm:1.5.0" + dependencies: + envinfo: ^7.7.3 peerDependencies: - "@babel/core": ^7.0.0-0 - checksum: b7d2125758e766e1ebd14b92216b800bdc976959bc696dbfa1e28682919147c1df4bb8b1b5fd037d7a83026e27e681fea3b8d3741af8d3cf4c9dfa3d412125df + webpack-cli: 4.x.x + checksum: 7f56fe037cd7d1fd5c7428588519fbf04a0cad33925ee4202ffbafd00f8ec1f2f67d991245e687d50e0f3e23f7b7814273d56cb9f7da4b05eed47c8d815c6296 languageName: node linkType: hard -"@svgr/babel-plugin-svg-dynamic-title@npm:^6.5.1": - version: 6.5.1 - resolution: "@svgr/babel-plugin-svg-dynamic-title@npm:6.5.1" +"@webpack-cli/serve@npm:^1.7.0": + version: 1.7.0 + resolution: "@webpack-cli/serve@npm:1.7.0" peerDependencies: - "@babel/core": ^7.0.0-0 - checksum: 0fd42ebf127ae9163ef341e84972daa99bdcb9e6ed3f83aabd95ee173fddc43e40e02fa847fbc0a1058cf5549f72b7960a2c5e22c3e4ac18f7e3ac81277852ae + webpack-cli: 4.x.x + peerDependenciesMeta: + webpack-dev-server: + optional: true + checksum: d475e8effa23eb7ff9a48b14d4de425989fd82f906ce71c210921cc3852327c22873be00c35e181a25a6bd03d424ae2b83e7f3b3f410ac7ee31b128ab4ac7713 languageName: node linkType: hard -"@svgr/babel-plugin-svg-em-dimensions@npm:^6.5.1": - version: 6.5.1 - resolution: "@svgr/babel-plugin-svg-em-dimensions@npm:6.5.1" - peerDependencies: - "@babel/core": ^7.0.0-0 - checksum: c1550ee9f548526fa66fd171e3ffb5696bfc4e4cd108a631d39db492c7410dc10bba4eb5a190e9df824bf806130ccc586ae7d2e43c547e6a4f93bbb29a18f344 +"@xtuc/ieee754@npm:^1.2.0": + version: 1.2.0 + resolution: "@xtuc/ieee754@npm:1.2.0" + checksum: ac56d4ca6e17790f1b1677f978c0c6808b1900a5b138885d3da21732f62e30e8f0d9120fcf8f6edfff5100ca902b46f8dd7c1e3f903728634523981e80e2885a languageName: node linkType: hard -"@svgr/babel-plugin-transform-react-native-svg@npm:^6.5.1": - version: 6.5.1 - resolution: 
"@svgr/babel-plugin-transform-react-native-svg@npm:6.5.1" - peerDependencies: - "@babel/core": ^7.0.0-0 - checksum: 4c924af22b948b812629e80efb90ad1ec8faae26a232d8ca8a06b46b53e966a2c415a57806a3ff0ea806a622612e546422719b69ec6839717a7755dac19171d9 +"@xtuc/long@npm:4.2.2": + version: 4.2.2 + resolution: "@xtuc/long@npm:4.2.2" + checksum: 8ed0d477ce3bc9c6fe2bf6a6a2cc316bb9c4127c5a7827bae947fa8ec34c7092395c5a283cc300c05b5fa01cbbfa1f938f410a7bf75db7c7846fea41949989ec languageName: node linkType: hard -"@svgr/babel-plugin-transform-svg-component@npm:^6.5.1": - version: 6.5.1 - resolution: "@svgr/babel-plugin-transform-svg-component@npm:6.5.1" - peerDependencies: - "@babel/core": ^7.0.0-0 - checksum: e496bb5ee871feb6bcab250b6e067322da7dd5c9c2b530b41e5586fe090f86611339b49d0a909c334d9b24cbca0fa755c949a2526c6ad03c6b5885666874cf5f +"@zeit/schemas@npm:2.29.0": + version: 2.29.0 + resolution: "@zeit/schemas@npm:2.29.0" + checksum: 3cea06bb67d790336aca0cc17580fd492ff3fc66ef4d180dce7053ff7ff54ab81b56bf718ba6f537148c581161d06306a481ec218d540bff922e0e009844ffd1 languageName: node linkType: hard -"@svgr/babel-preset@npm:^6.5.1": - version: 6.5.1 - resolution: "@svgr/babel-preset@npm:6.5.1" +"abbrev@npm:^2.0.0": + version: 2.0.0 + resolution: "abbrev@npm:2.0.0" + checksum: 0e994ad2aa6575f94670d8a2149afe94465de9cedaaaac364e7fb43a40c3691c980ff74899f682f4ca58fa96b4cbd7421a015d3a6defe43a442117d7821a2f36 + languageName: node + linkType: hard + +"abort-controller@npm:^3.0.0": + version: 3.0.0 + resolution: "abort-controller@npm:3.0.0" dependencies: - "@svgr/babel-plugin-add-jsx-attribute": ^6.5.1 - "@svgr/babel-plugin-remove-jsx-attribute": "*" - "@svgr/babel-plugin-remove-jsx-empty-expression": "*" - "@svgr/babel-plugin-replace-jsx-attribute-value": ^6.5.1 - "@svgr/babel-plugin-svg-dynamic-title": ^6.5.1 - "@svgr/babel-plugin-svg-em-dimensions": ^6.5.1 - "@svgr/babel-plugin-transform-react-native-svg": ^6.5.1 - "@svgr/babel-plugin-transform-svg-component": ^6.5.1 - peerDependencies: - "@babel/core": ^7.0.0-0 - checksum: 9f124be39a8e64f909162f925b3a63ddaa5a342a5e24fc0b7f7d9d4d7f7e3b916596c754fb557dc259928399cad5366a27cb231627a0d2dcc4b13ac521cf05af + event-target-shim: ^5.0.0 + checksum: 170bdba9b47b7e65906a28c8ce4f38a7a369d78e2271706f020849c1bfe0ee2067d4261df8bbb66eb84f79208fd5b710df759d64191db58cfba7ce8ef9c54b75 languageName: node linkType: hard -"@svgr/core@npm:^6.5.1": - version: 6.5.1 - resolution: "@svgr/core@npm:6.5.1" +"abstract-level@npm:^1.0.0, abstract-level@npm:^1.0.2, abstract-level@npm:^1.0.3": + version: 1.0.3 + resolution: "abstract-level@npm:1.0.3" dependencies: - "@babel/core": ^7.19.6 - "@svgr/babel-preset": ^6.5.1 - "@svgr/plugin-jsx": ^6.5.1 - camelcase: ^6.2.0 - cosmiconfig: ^7.0.1 - checksum: fd6d6d5da5aeb956703310480b626c1fb3e3973ad9fe8025efc1dcf3d895f857b70d100c63cf32cebb20eb83c9607bafa464c9436e18fe6fe4fafdc73ed6b1a5 + buffer: ^6.0.3 + catering: ^2.1.0 + is-buffer: ^2.0.5 + level-supports: ^4.0.0 + level-transcoder: ^1.0.1 + module-error: ^1.0.1 + queue-microtask: ^1.2.3 + checksum: 70d61a3924526ebc257b138992052f9ff571a6cee5a7660836e37a1cc7081273c3acf465dd2f5e1897b38dc743a6fd9dba14a5d8a2a9d39e5787cd3da99f301d languageName: node linkType: hard -"@svgr/hast-util-to-babel-ast@npm:^6.5.1": - version: 6.5.1 - resolution: "@svgr/hast-util-to-babel-ast@npm:6.5.1" +"abstract-leveldown@npm:~0.12.0, abstract-leveldown@npm:~0.12.1": + version: 0.12.4 + resolution: "abstract-leveldown@npm:0.12.4" dependencies: - "@babel/types": ^7.20.0 - entities: ^4.4.0 - checksum: 
37923cce1b3f4e2039077b0c570b6edbabe37d1cf1a6ee35e71e0fe00f9cffac450eec45e9720b1010418131a999cb0047331ba1b6d1d2c69af1b92ac785aacf + xtend: ~3.0.0 + checksum: e300f04bb638cc9c462f6e8fa925672e51beb24c1470c39ece709e54f2f499661ac5fe0119175c7dcb6e32c843423d6960009d4d24e72526478b261163e8070b languageName: node linkType: hard -"@svgr/plugin-jsx@npm:^6.5.1": - version: 6.5.1 - resolution: "@svgr/plugin-jsx@npm:6.5.1" +"accepts@npm:^1.3.5, accepts@npm:~1.3.4, accepts@npm:~1.3.5, accepts@npm:~1.3.8": + version: 1.3.8 + resolution: "accepts@npm:1.3.8" dependencies: - "@babel/core": ^7.19.6 - "@svgr/babel-preset": ^6.5.1 - "@svgr/hast-util-to-babel-ast": ^6.5.1 - svg-parser: ^2.0.4 + mime-types: ~2.1.34 + negotiator: 0.6.3 + checksum: 50c43d32e7b50285ebe84b613ee4a3aa426715a7d131b65b786e2ead0fd76b6b60091b9916d3478a75f11f162628a2139991b6c03ab3f1d9ab7c86075dc8eab4 + languageName: node + linkType: hard + +"acorn-import-assertions@npm:^1.9.0": + version: 1.9.0 + resolution: "acorn-import-assertions@npm:1.9.0" peerDependencies: - "@svgr/core": ^6.0.0 - checksum: 42f22847a6bdf930514d7bedd3c5e1fd8d53eb3594779f9db16cb94c762425907c375cd8ec789114e100a4d38068aca6c7ab5efea4c612fba63f0630c44cc859 + acorn: ^8 + checksum: 944fb2659d0845c467066bdcda2e20c05abe3aaf11972116df457ce2627628a81764d800dd55031ba19de513ee0d43bb771bc679cc0eda66dc8b4fade143bc0c languageName: node linkType: hard -"@svgr/plugin-svgo@npm:^6.5.1": - version: 6.5.1 - resolution: "@svgr/plugin-svgo@npm:6.5.1" - dependencies: - cosmiconfig: ^7.0.1 - deepmerge: ^4.2.2 - svgo: ^2.8.0 +"acorn-jsx@npm:^5.0.0, acorn-jsx@npm:^5.3.2": + version: 5.3.2 + resolution: "acorn-jsx@npm:5.3.2" peerDependencies: - "@svgr/core": "*" - checksum: cd2833530ac0485221adc2146fd992ab20d79f4b12eebcd45fa859721dd779483158e11dfd9a534858fe468416b9412416e25cbe07ac7932c44ed5fa2021c72e + acorn: ^6.0.0 || ^7.0.0 || ^8.0.0 + checksum: c3d3b2a89c9a056b205b69530a37b972b404ee46ec8e5b341666f9513d3163e2a4f214a71f4dfc7370f5a9c07472d2fd1c11c91c3f03d093e37637d95da98950 languageName: node linkType: hard -"@svgr/webpack@npm:^6.2.1": - version: 6.5.1 - resolution: "@svgr/webpack@npm:6.5.1" - dependencies: - "@babel/core": ^7.19.6 - "@babel/plugin-transform-react-constant-elements": ^7.18.12 - "@babel/preset-env": ^7.19.4 - "@babel/preset-react": ^7.18.6 - "@babel/preset-typescript": ^7.18.6 - "@svgr/core": ^6.5.1 - "@svgr/plugin-jsx": ^6.5.1 - "@svgr/plugin-svgo": ^6.5.1 - checksum: d10582eb4fa82a5b6d314cb49f2c640af4fd3a60f5b76095d2b14e383ef6a43a6f4674b68774a21787dbde69dec0a251cfcfc3f9a96c82754ba5d5c6daf785f0 +"acorn-walk@npm:^8.0.0, acorn-walk@npm:^8.1.1": + version: 8.3.1 + resolution: "acorn-walk@npm:8.3.1" + checksum: 5c8926ddb5400bc825b6baca782931f9df4ace603ba1a517f5243290fd9cdb089d52877840687b5d5c939591ebc314e2e63721514feaa37c6829c828f2b940ce languageName: node linkType: hard -"@szmarczak/http-timer@npm:^1.1.2": - version: 1.1.2 - resolution: "@szmarczak/http-timer@npm:1.1.2" - dependencies: - defer-to-connect: ^1.0.1 - checksum: 4d9158061c5f397c57b4988cde33a163244e4f02df16364f103971957a32886beb104d6180902cbe8b38cb940e234d9f98a4e486200deca621923f62f50a06fe +"acorn@npm:^8.0.0, acorn@npm:^8.0.4, acorn@npm:^8.4.1, acorn@npm:^8.7.1, acorn@npm:^8.8.2, acorn@npm:^8.9.0": + version: 8.11.2 + resolution: "acorn@npm:8.11.2" + bin: + acorn: bin/acorn + checksum: 818450408684da89423e3daae24e4dc9b68692db8ab49ea4569c7c5abb7a3f23669438bf129cc81dfdada95e1c9b944ee1bfca2c57a05a4dc73834a612fbf6a7 languageName: node linkType: hard -"@tootallnate/once@npm:2": - version: 2.0.0 - resolution: 
"@tootallnate/once@npm:2.0.0" - checksum: ad87447820dd3f24825d2d947ebc03072b20a42bfc96cbafec16bff8bbda6c1a81fcb0be56d5b21968560c5359a0af4038a68ba150c3e1694fe4c109a063bed8 +"address@npm:^1.0.1, address@npm:^1.1.2": + version: 1.2.2 + resolution: "address@npm:1.2.2" + checksum: ace439960c1e3564d8f523aff23a841904bf33a2a7c2e064f7f60a064194075758b9690e65bd9785692a4ef698a998c57eb74d145881a1cecab8ba658ddb1607 languageName: node linkType: hard -"@trysound/sax@npm:0.2.0": - version: 0.2.0 - resolution: "@trysound/sax@npm:0.2.0" - checksum: 11226c39b52b391719a2a92e10183e4260d9651f86edced166da1d95f39a0a1eaa470e44d14ac685ccd6d3df7e2002433782872c0feeb260d61e80f21250e65c +"adm-zip@npm:^0.4.16": + version: 0.4.16 + resolution: "adm-zip@npm:0.4.16" + checksum: 5ea46664d8b3b073fffeb7f934705fea288708745e708cffc1dd732ce3d2672cecd476b243f9d051892fd12952db2b6bd061975e1ff40057246f6d0cb6534a50 languageName: node linkType: hard -"@tsconfig/node10@npm:^1.0.7": - version: 1.0.9 - resolution: "@tsconfig/node10@npm:1.0.9" - checksum: a33ae4dc2a621c0678ac8ac4bceb8e512ae75dac65417a2ad9b022d9b5411e863c4c198b6ba9ef659e14b9fb609bbec680841a2e84c1172df7a5ffcf076539df +"adm-zip@npm:^0.5.0": + version: 0.5.10 + resolution: "adm-zip@npm:0.5.10" + checksum: 07ed91cf6423bf5dca4ee63977bc7635e91b8d21829c00829d48dce4c6932e1b19e6cfcbe44f1931c956e68795ae97183fc775913883fa48ce88a1ac11fb2034 languageName: node linkType: hard -"@tsconfig/node12@npm:^1.0.7": - version: 1.0.11 - resolution: "@tsconfig/node12@npm:1.0.11" - checksum: 5ce29a41b13e7897a58b8e2df11269c5395999e588b9a467386f99d1d26f6c77d1af2719e407621412520ea30517d718d5192a32403b8dfcc163bf33e40a338a +"aes-js@npm:3.0.0": + version: 3.0.0 + resolution: "aes-js@npm:3.0.0" + checksum: 251e26d533cd1a915b44896b17d5ed68c24a02484cfdd2e74ec700a309267db96651ea4eb657bf20aac32a3baa61f6e34edf8e2fec2de440a655da9942d334b8 languageName: node linkType: hard -"@tsconfig/node14@npm:^1.0.0": - version: 1.0.3 - resolution: "@tsconfig/node14@npm:1.0.3" - checksum: 19275fe80c4c8d0ad0abed6a96dbf00642e88b220b090418609c4376e1cef81bf16237bf170ad1b341452feddb8115d8dd2e5acdfdea1b27422071163dc9ba9d +"aes-js@npm:4.0.0-beta.5": + version: 4.0.0-beta.5 + resolution: "aes-js@npm:4.0.0-beta.5" + checksum: cc2ea969d77df939c32057f7e361b6530aa6cb93cb10617a17a45cd164e6d761002f031ff6330af3e67e58b1f0a3a8fd0b63a720afd591a653b02f649470e15b languageName: node linkType: hard -"@tsconfig/node16@npm:^1.0.2": - version: 1.0.4 - resolution: "@tsconfig/node16@npm:1.0.4" - checksum: 202319785901f942a6e1e476b872d421baec20cf09f4b266a1854060efbf78cde16a4d256e8bc949d31e6cd9a90f1e8ef8fb06af96a65e98338a2b6b0de0a0ff +"agent-base@npm:6": + version: 6.0.2 + resolution: "agent-base@npm:6.0.2" + dependencies: + debug: 4 + checksum: f52b6872cc96fd5f622071b71ef200e01c7c4c454ee68bc9accca90c98cfb39f2810e3e9aa330435835eedc8c23f4f8a15267f67c6e245d2b33757575bdac49d languageName: node linkType: hard -"@types/accepts@npm:*": - version: 1.3.5 - resolution: "@types/accepts@npm:1.3.5" +"agent-base@npm:^7.0.2, agent-base@npm:^7.1.0": + version: 7.1.0 + resolution: "agent-base@npm:7.1.0" dependencies: - "@types/node": "*" - checksum: 590b7580570534a640510c071e09074cf63b5958b237a728f94322567350aea4d239f8a9d897a12b15c856b992ee4d7907e9812bb079886af2c00714e7fb3f60 + debug: ^4.3.4 + checksum: f7828f991470a0cc22cb579c86a18cbae83d8a3cbed39992ab34fc7217c4d126017f1c74d0ab66be87f71455318a8ea3e757d6a37881b8d0f2a2c6aa55e5418f languageName: node linkType: hard -"@types/babel__code-frame@npm:^7.0.2": - version: 7.0.4 - resolution: "@types/babel__code-frame@npm:7.0.4" 
- checksum: eb4adb0a79c5cbf2d88ad087b2694a1e38749dc0056693e6125c17a3967fc8d4cc1eb33eab26d3dcaaa8995236cba4a088965cc3a115967545f2a01ee430c87f +"aggregate-error@npm:^3.0.0": + version: 3.1.0 + resolution: "aggregate-error@npm:3.1.0" + dependencies: + clean-stack: ^2.0.0 + indent-string: ^4.0.0 + checksum: 1101a33f21baa27a2fa8e04b698271e64616b886795fd43c31068c07533c7b3facfcaf4e9e0cab3624bd88f729a592f1c901a1a229c9e490eafce411a8644b79 languageName: node linkType: hard -"@types/bn.js@npm:^4.11.3": - version: 4.11.6 - resolution: "@types/bn.js@npm:4.11.6" +"ajv-formats@npm:^2.1.1": + version: 2.1.1 + resolution: "ajv-formats@npm:2.1.1" dependencies: - "@types/node": "*" - checksum: 7f66f2c7b7b9303b3205a57184261974b114495736b77853af5b18d857c0b33e82ce7146911e86e87a87837de8acae28986716fd381ac7c301fd6e8d8b6c811f + ajv: ^8.0.0 + peerDependencies: + ajv: ^8.0.0 + peerDependenciesMeta: + ajv: + optional: true + checksum: 4a287d937f1ebaad4683249a4c40c0fa3beed30d9ddc0adba04859026a622da0d317851316ea64b3680dc60f5c3c708105ddd5d5db8fe595d9d0207fd19f90b7 languageName: node linkType: hard -"@types/bn.js@npm:^5.1.0": - version: 5.1.2 - resolution: "@types/bn.js@npm:5.1.2" +"ajv-keywords@npm:^3.4.1, ajv-keywords@npm:^3.5.2": + version: 3.5.2 + resolution: "ajv-keywords@npm:3.5.2" + peerDependencies: + ajv: ^6.9.1 + checksum: 7dc5e5931677a680589050f79dcbe1fefbb8fea38a955af03724229139175b433c63c68f7ae5f86cf8f65d55eb7c25f75a046723e2e58296707617ca690feae9 + languageName: node + linkType: hard + +"ajv-keywords@npm:^5.1.0": + version: 5.1.0 + resolution: "ajv-keywords@npm:5.1.0" dependencies: - "@types/node": "*" - checksum: 8d9fdb43836646c2ecd445041de03e057f9b459885be57faee64104160487a63730b9f371e8ad7d33f360b3cc6dc0e323543962fc5fa296b92b322b946732be0 + fast-deep-equal: ^3.1.3 + peerDependencies: + ajv: ^8.8.2 + checksum: c35193940b853119242c6757787f09ecf89a2c19bcd36d03ed1a615e710d19d450cb448bfda407b939aba54b002368c8bff30529cc50a0536a8e10bcce300421 languageName: node linkType: hard -"@types/body-parser@npm:*": - version: 1.19.3 - resolution: "@types/body-parser@npm:1.19.3" +"ajv@npm:8.11.0": + version: 8.11.0 + resolution: "ajv@npm:8.11.0" dependencies: - "@types/connect": "*" - "@types/node": "*" - checksum: 932fa71437c275023799123680ef26ffd90efd37f51a1abe405e6ae6e5b4ad9511b7a3a8f5a12877ed1444a02b6286c0a137a98e914b3c61932390c83643cc2c + fast-deep-equal: ^3.1.1 + json-schema-traverse: ^1.0.0 + require-from-string: ^2.0.2 + uri-js: ^4.2.2 + checksum: 5e0ff226806763be73e93dd7805b634f6f5921e3e90ca04acdf8db81eed9d8d3f0d4c5f1213047f45ebbf8047ffe0c840fa1ef2ec42c3a644899f69aa72b5bef languageName: node linkType: hard -"@types/bonjour@npm:^3.5.9": - version: 3.5.11 - resolution: "@types/bonjour@npm:3.5.11" +"ajv@npm:^6.12.2, ajv@npm:^6.12.4, ajv@npm:^6.12.5": + version: 6.12.6 + resolution: "ajv@npm:6.12.6" dependencies: - "@types/node": "*" - checksum: 12fb86a1bb4a610f16ef6d7d68f85e7c31070029f02b6622073794a271e75abcf58230ed205a2ae23c53be2c08b9e507d3b91fa0dc9dfe76c4b1f5e19e9370cb + fast-deep-equal: ^3.1.1 + fast-json-stable-stringify: ^2.0.0 + json-schema-traverse: ^0.4.1 + uri-js: ^4.2.2 + checksum: 874972efe5c4202ab0a68379481fbd3d1b5d0a7bd6d3cc21d40d3536ebff3352a2a1fabb632d4fd2cc7fe4cbdcd5ed6782084c9bbf7f32a1536d18f9da5007d4 languageName: node linkType: hard -"@types/chai-as-promised@npm:^7.1.3": - version: 7.1.6 - resolution: "@types/chai-as-promised@npm:7.1.6" +"ajv@npm:^8.0.0, ajv@npm:^8.9.0": + version: 8.12.0 + resolution: "ajv@npm:8.12.0" dependencies: - "@types/chai": "*" - checksum: 
f765dd249ae9384540f8e6402bd3a9f5e87b97f9078ef58f4b5ec15f7c3673e1f10f0089f819eceb20e00b3df40b7aae1bd44d2b8f4edbbedfcb33ce296f6791 + fast-deep-equal: ^3.1.1 + json-schema-traverse: ^1.0.0 + require-from-string: ^2.0.2 + uri-js: ^4.2.2 + checksum: 4dc13714e316e67537c8b31bc063f99a1d9d9a497eb4bbd55191ac0dcd5e4985bbb71570352ad6f1e76684fb6d790928f96ba3b2d4fd6e10024be9612fe3f001 languageName: node linkType: hard -"@types/chai@npm:*, @types/chai@npm:^4, @types/chai@npm:^4.2.12": - version: 4.3.6 - resolution: "@types/chai@npm:4.3.6" - checksum: 32a6c18bf53fb3dbd89d1bfcadb1c6fd45cc0007c34e436393cc37a0a5a556f9e6a21d1e8dd71674c40cc36589d2f30bf4d9369d7787021e54d6e997b0d7300a +"algoliasearch-helper@npm:^3.13.3": + version: 3.16.0 + resolution: "algoliasearch-helper@npm:3.16.0" + dependencies: + "@algolia/events": ^4.0.1 + peerDependencies: + algoliasearch: ">= 3.1 < 6" + checksum: e024ad33c65d1eb7a31290bcf0bec37f2485154364e90ada45e07242ac35155313fe913fabd08cdb4d69e2794cc7d09dda03878566cea286cfac8fd4cc61f33f languageName: node linkType: hard -"@types/co-body@npm:^6.1.0": - version: 6.1.1 - resolution: "@types/co-body@npm:6.1.1" +"algoliasearch@npm:^4.18.0, algoliasearch@npm:^4.19.1": + version: 4.20.0 + resolution: "algoliasearch@npm:4.20.0" dependencies: - "@types/node": "*" - "@types/qs": "*" - checksum: 38a5198c712bfb40f36c7c5fd85964cabfdac0d0aa4d703993c205ccafa113995b67846e59d5d05415dcba230e21126060e04c1287d8073fc2dc71257cb2ea0c + "@algolia/cache-browser-local-storage": 4.20.0 + "@algolia/cache-common": 4.20.0 + "@algolia/cache-in-memory": 4.20.0 + "@algolia/client-account": 4.20.0 + "@algolia/client-analytics": 4.20.0 + "@algolia/client-common": 4.20.0 + "@algolia/client-personalization": 4.20.0 + "@algolia/client-search": 4.20.0 + "@algolia/logger-common": 4.20.0 + "@algolia/logger-console": 4.20.0 + "@algolia/requester-browser-xhr": 4.20.0 + "@algolia/requester-common": 4.20.0 + "@algolia/requester-node-http": 4.20.0 + "@algolia/transporter": 4.20.0 + checksum: 078954944452f57d2e3b47c6ed4905caf797814324a4d5068a8b6685d434a885977a3e607714c5fb6eb29c7c3e717b3ee9cb01c8b2320e2c7bd73bcd8d42e70f languageName: node linkType: hard -"@types/command-line-args@npm:^5.0.0": - version: 5.2.1 - resolution: "@types/command-line-args@npm:5.2.1" - checksum: a5df8562a7a0eb7c4da218661360ff8369a63c0fd783310d1940f0ece55826d5173eeb3732bab48dbfb60b1614d61989a9d87c6cdbee04353c4df6f45387d417 +"ansi-align@npm:^3.0.0, ansi-align@npm:^3.0.1": + version: 3.0.1 + resolution: "ansi-align@npm:3.0.1" + dependencies: + string-width: ^4.1.0 + checksum: 6abfa08f2141d231c257162b15292467081fa49a208593e055c866aa0455b57f3a86b5a678c190c618faa79b4c59e254493099cb700dd9cf2293c6be2c8f5d8d languageName: node linkType: hard -"@types/connect-history-api-fallback@npm:^1.3.5": - version: 1.5.1 - resolution: "@types/connect-history-api-fallback@npm:1.5.1" +"ansi-colors@npm:4.1.1": + version: 4.1.1 + resolution: "ansi-colors@npm:4.1.1" + checksum: 138d04a51076cb085da0a7e2d000c5c0bb09f6e772ed5c65c53cb118d37f6c5f1637506d7155fb5f330f0abcf6f12fa2e489ac3f8cdab9da393bf1bb4f9a32b0 + languageName: node + linkType: hard + +"ansi-colors@npm:^4.1.1": + version: 4.1.3 + resolution: "ansi-colors@npm:4.1.3" + checksum: a9c2ec842038a1fabc7db9ece7d3177e2fe1c5dc6f0c51ecfbf5f39911427b89c00b5dc6b8bd95f82a26e9b16aaae2e83d45f060e98070ce4d1333038edceb0e + languageName: node + linkType: hard + +"ansi-escapes@npm:^4.3.0": + version: 4.3.2 + resolution: "ansi-escapes@npm:4.3.2" dependencies: - "@types/express-serve-static-core": "*" - "@types/node": "*" - checksum: 
bc5e46663300eba56eaf8ef242156256e2bdadc52bb7d6543f4b37f2945b6a810901c245711f8321fce7d94c7b588b308a86519f3e1f87a80eb87841d808dbdc + type-fest: ^0.21.3 + checksum: 93111c42189c0a6bed9cdb4d7f2829548e943827ee8479c74d6e0b22ee127b2a21d3f8b5ca57723b8ef78ce011fbfc2784350eb2bde3ccfccf2f575fa8489815 + languageName: node + linkType: hard + +"ansi-html-community@npm:^0.0.8": + version: 0.0.8 + resolution: "ansi-html-community@npm:0.0.8" + bin: + ansi-html: bin/ansi-html + checksum: 04c568e8348a636963f915e48eaa3e01218322e1169acafdd79c384f22e5558c003f79bbc480c1563865497482817c7eed025f0653ebc17642fededa5cb42089 + languageName: node + linkType: hard + +"ansi-regex@npm:^5.0.1": + version: 5.0.1 + resolution: "ansi-regex@npm:5.0.1" + checksum: 2aa4bb54caf2d622f1afdad09441695af2a83aa3fe8b8afa581d205e57ed4261c183c4d3877cee25794443fde5876417d859c108078ab788d6af7e4fe52eb66b languageName: node linkType: hard -"@types/connect@npm:*": - version: 3.4.36 - resolution: "@types/connect@npm:3.4.36" - dependencies: - "@types/node": "*" - checksum: 4dee3d966fb527b98f0cbbdcf6977c9193fc3204ed539b7522fe5e64dfa45f9017bdda4ffb1f760062262fce7701a0ee1c2f6ce2e50af36c74d4e37052303172 +"ansi-regex@npm:^6.0.1": + version: 6.0.1 + resolution: "ansi-regex@npm:6.0.1" + checksum: 1ff8b7667cded1de4fa2c9ae283e979fc87036864317da86a2e546725f96406746411d0d85e87a2d12fa5abd715d90006de7fa4fa0477c92321ad3b4c7d4e169 languageName: node linkType: hard -"@types/content-disposition@npm:*": - version: 0.5.6 - resolution: "@types/content-disposition@npm:0.5.6" - checksum: da07798d52cc8fc46a8843d768b48d54c70f1a44c861dc2c73c4c25a1e08af859709629ab0e4d23d5198107b8926bb48c593df436ba68123d87191f5e25fe4bc +"ansi-sequence-parser@npm:^1.1.0": + version: 1.1.1 + resolution: "ansi-sequence-parser@npm:1.1.1" + checksum: ead5b15c596e8e85ca02951a844366c6776769dcc9fd1bd3a0db11bb21364554822c6a439877fb599e7e1ffa0b5f039f1e5501423950457f3dcb2f480c30b188 languageName: node linkType: hard -"@types/convert-source-map@npm:^2.0.0": - version: 2.0.1 - resolution: "@types/convert-source-map@npm:2.0.1" - checksum: 200b2792b37748e89e4363ef07686b074f64e21a26f27381d51bc336222a85503cfa1266e29d3b9c9121c8156e0a3973f3adbe2f4be59516fa255c080b4ca976 +"ansi-styles@npm:^3.2.1": + version: 3.2.1 + resolution: "ansi-styles@npm:3.2.1" + dependencies: + color-convert: ^1.9.0 + checksum: d85ade01c10e5dd77b6c89f34ed7531da5830d2cb5882c645f330079975b716438cd7ebb81d0d6e6b4f9c577f19ae41ab55f07f19786b02f9dfd9e0377395665 languageName: node linkType: hard -"@types/cookies@npm:*": - version: 0.7.8 - resolution: "@types/cookies@npm:0.7.8" +"ansi-styles@npm:^4.0.0, ansi-styles@npm:^4.1.0": + version: 4.3.0 + resolution: "ansi-styles@npm:4.3.0" dependencies: - "@types/connect": "*" - "@types/express": "*" - "@types/keygrip": "*" - "@types/node": "*" - checksum: 7945b0cfe370bf1f05a1f328c9eba55333dac1bb9d7efa3148b107c260ab924263546351f9fd168daa72948d195464d395319a24477995f9f887a3a99fbcb5b5 + color-convert: ^2.0.1 + checksum: 513b44c3b2105dd14cc42a19271e80f386466c4be574bccf60b627432f9198571ebf4ab1e4c3ba17347658f4ee1711c163d574248c0c1cdc2d5917a0ad582ec4 languageName: node linkType: hard -"@types/debounce@npm:^1.2.0": - version: 1.2.1 - resolution: "@types/debounce@npm:1.2.1" - checksum: bea6d414acefbee50adfe87cee10f8a855d033e4778567ab03bdc3cb2648b6bf9237ca53f4ee76fe4be75f77f86d4688411499626fe409bc870f53631d24231f +"ansi-styles@npm:^6.1.0": + version: 6.2.1 + resolution: "ansi-styles@npm:6.2.1" + checksum: 
ef940f2f0ced1a6347398da88a91da7930c33ecac3c77b72c5905f8b8fe402c52e6fde304ff5347f616e27a742da3f1dc76de98f6866c69251ad0b07a66776d9 languageName: node linkType: hard -"@types/eslint-scope@npm:^3.7.3": - version: 3.7.5 - resolution: "@types/eslint-scope@npm:3.7.5" +"anymatch@npm:~3.1.2": + version: 3.1.3 + resolution: "anymatch@npm:3.1.3" dependencies: - "@types/eslint": "*" - "@types/estree": "*" - checksum: e91ce335c3791c2cf6084caa0073f90d5b7ae3fcf27785ade8422b7d896159fa14a5a3f1efd31ef03e9ebc1ff04983288280dfe8c9a5579a958539f59df8cc9f + normalize-path: ^3.0.0 + picomatch: ^2.0.4 + checksum: 3e044fd6d1d26545f235a9fe4d7a534e2029d8e59fa7fd9f2a6eb21230f6b5380ea1eaf55136e60cbf8e613544b3b766e7a6fa2102e2a3a117505466e3025dc2 languageName: node linkType: hard -"@types/eslint@npm:*": - version: 8.44.4 - resolution: "@types/eslint@npm:8.44.4" - dependencies: - "@types/estree": "*" - "@types/json-schema": "*" - checksum: 15bafdaba800e2995f38d3a2a929d8e9303035315e8d3535523a21cd719b6769a45884afa955f0b845ffa545a4150429b0178e2c44feeedf59ebb285eeae9825 +"arch@npm:^2.2.0": + version: 2.2.0 + resolution: "arch@npm:2.2.0" + checksum: e21b7635029fe8e9cdd5a026f9a6c659103e63fff423834323cdf836a1bb240a72d0c39ca8c470f84643385cf581bd8eda2cad8bf493e27e54bd9783abe9101f languageName: node linkType: hard -"@types/estree@npm:*, @types/estree@npm:^1.0.0": - version: 1.0.2 - resolution: "@types/estree@npm:1.0.2" - checksum: aeedb1b2fe20cbe06f44b99b562bf9703e360bfcdf5bb3d61d248182ee1dd63500f2474e12f098ffe1f5ac3202b43b3e18ec99902d9328d5374f5512fa077e45 +"arg@npm:5.0.2, arg@npm:^5.0.0, arg@npm:^5.0.2": + version: 5.0.2 + resolution: "arg@npm:5.0.2" + checksum: 6c69ada1a9943d332d9e5382393e897c500908d91d5cb735a01120d5f71daf1b339b7b8980cbeaba8fd1afc68e658a739746179e4315a26e8a28951ff9930078 languageName: node linkType: hard -"@types/estree@npm:0.0.39": - version: 0.0.39 - resolution: "@types/estree@npm:0.0.39" - checksum: 412fb5b9868f2c418126451821833414189b75cc6bf84361156feed733e3d92ec220b9d74a89e52722e03d5e241b2932732711b7497374a404fad49087adc248 +"arg@npm:^4.1.0": + version: 4.1.3 + resolution: "arg@npm:4.1.3" + checksum: 544af8dd3f60546d3e4aff084d451b96961d2267d668670199692f8d054f0415d86fc5497d0e641e91546f0aa920e7c29e5250e99fc89f5552a34b5d93b77f43 languageName: node linkType: hard -"@types/express-serve-static-core@npm:*": - version: 4.17.37 - resolution: "@types/express-serve-static-core@npm:4.17.37" +"argparse@npm:^1.0.7": + version: 1.0.10 + resolution: "argparse@npm:1.0.10" dependencies: - "@types/node": "*" - "@types/qs": "*" - "@types/range-parser": "*" - "@types/send": "*" - checksum: 2dab1380e45eb44e56ecc1be1c42c4b897364d2f2a08e03ca28fbcb1e6866e390217385435813711c046f9acd684424d088855dc32825d5cbecf72c60ecd037f + sprintf-js: ~1.0.2 + checksum: 7ca6e45583a28de7258e39e13d81e925cfa25d7d4aacbf806a382d3c02fcb13403a07fb8aeef949f10a7cfe4a62da0e2e807b348a5980554cc28ee573ef95945 languageName: node linkType: hard -"@types/express-serve-static-core@npm:^4.17.33": - version: 4.17.36 - resolution: "@types/express-serve-static-core@npm:4.17.36" - dependencies: - "@types/node": "*" - "@types/qs": "*" - "@types/range-parser": "*" - "@types/send": "*" - checksum: 410b13cbd663f18c0f8729e7f2ff54d960d96de76ebbae7cadb612972f85cc66c54051e00d32f11aa230c0a683d81a6d6fc7f7e4e383a95c0801494c517f36e1 +"argparse@npm:^2.0.1": + version: 2.0.1 + resolution: "argparse@npm:2.0.1" + checksum: 83644b56493e89a254bae05702abf3a1101b4fa4d0ca31df1c9985275a5a5bd47b3c27b7fa0b71098d41114d8ca000e6ed90cad764b306f8a503665e4d517ced languageName: node linkType: hard 
-"@types/express@npm:*": - version: 4.17.17 - resolution: "@types/express@npm:4.17.17" - dependencies: - "@types/body-parser": "*" - "@types/express-serve-static-core": ^4.17.33 - "@types/qs": "*" - "@types/serve-static": "*" - checksum: 0196dacc275ac3ce89d7364885cb08e7fb61f53ca101f65886dbf1daf9b7eb05c0943e2e4bbd01b0cc5e50f37e0eea7e4cbe97d0304094411ac73e1b7998f4da +"array-back@npm:^3.0.1, array-back@npm:^3.1.0": + version: 3.1.0 + resolution: "array-back@npm:3.1.0" + checksum: 7205004fcd0f9edd926db921af901b083094608d5b265738d0290092f9822f73accb468e677db74c7c94ef432d39e5ed75a7b1786701e182efb25bbba9734209 languageName: node linkType: hard -"@types/express@npm:^4.17.13": - version: 4.17.19 - resolution: "@types/express@npm:4.17.19" - dependencies: - "@types/body-parser": "*" - "@types/express-serve-static-core": ^4.17.33 - "@types/qs": "*" - "@types/serve-static": "*" - checksum: 3d39d0655eb0825d96fec100985a38737767ddd6da2dbda1e330a3adf36c98a9b7cd8d9539db32876d1fbb47a09343cad7b38c30c8dd7c291271fcb9b85cb21b +"array-back@npm:^4.0.1, array-back@npm:^4.0.2": + version: 4.0.2 + resolution: "array-back@npm:4.0.2" + checksum: f30603270771eeb54e5aad5f54604c62b3577a18b6db212a7272b2b6c32049121b49431f656654790ed1469411e45f387e7627c0de8fd0515995cc40df9b9294 languageName: node linkType: hard -"@types/fs-extra@npm:^9.0.13": - version: 9.0.13 - resolution: "@types/fs-extra@npm:9.0.13" - dependencies: - "@types/node": "*" - checksum: add79e212acd5ac76b97b9045834e03a7996aef60a814185e0459088fd290519a3c1620865d588fa36c4498bf614210d2a703af5cf80aa1dbc125db78f6edac3 +"array-back@npm:^6.2.2": + version: 6.2.2 + resolution: "array-back@npm:6.2.2" + checksum: baae1e3a1687300a307d3bdf09715f6415e1099b5729d3d8e397309fb1e43d90b939d694602892172aaca7e0aeed38da89d04aa4951637d31c2a21350809e003 languageName: node linkType: hard -"@types/hast@npm:^2.0.0": - version: 2.3.6 - resolution: "@types/hast@npm:2.3.6" - dependencies: - "@types/unist": ^2 - checksum: c004372f6ab919ec92a2de43e4380707e27b76fe371c7d06ab26547c1e851dfba2a7c740c544218df8c7e0a94443458793c43730ad563a39e3fdc1a48904d7f5 +"array-flatten@npm:1.1.1": + version: 1.1.1 + resolution: "array-flatten@npm:1.1.1" + checksum: a9925bf3512d9dce202112965de90c222cd59a4fbfce68a0951d25d965cf44642931f40aac72309c41f12df19afa010ecadceb07cfff9ccc1621e99d89ab5f3b languageName: node linkType: hard -"@types/history@npm:^4.7.11": - version: 4.7.11 - resolution: "@types/history@npm:4.7.11" - checksum: c92e2ba407dcab0581a9afdf98f533aa41b61a71133420a6d92b1ca9839f741ab1f9395b17454ba5b88cb86020b70b22d74a1950ccfbdfd9beeaa5459fdc3464 +"array-flatten@npm:^2.1.2": + version: 2.1.2 + resolution: "array-flatten@npm:2.1.2" + checksum: e8988aac1fbfcdaae343d08c9a06a6fddd2c6141721eeeea45c3cf523bf4431d29a46602929455ed548c7a3e0769928cdc630405427297e7081bd118fdec9262 languageName: node linkType: hard -"@types/html-minifier-terser@npm:^6.0.0": - version: 6.1.0 - resolution: "@types/html-minifier-terser@npm:6.1.0" - checksum: eb843f6a8d662d44fb18ec61041117734c6aae77aa38df1be3b4712e8e50ffaa35f1e1c92fdd0fde14a5675fecf457abcd0d15a01fae7506c91926176967f452 +"array-timsort@npm:^1.0.3": + version: 1.0.3 + resolution: "array-timsort@npm:1.0.3" + checksum: fd4b5b0911214bdc8b5699ed10d309685551b518b3819c611c967cff59b87aee01cf591a10e36a3f14dbff696984bd6682b845f6fdbf1217195e910f241a4f78 languageName: node linkType: hard -"@types/http-assert@npm:*": - version: 1.5.3 - resolution: "@types/http-assert@npm:1.5.3" - checksum: 
9553e5a0b8bcfdac4b51d3fa3b89a91b5450171861a667a5b4c47204e0f4a1ca865d97396e6ceaf220e87b64d06b7a8bad7bfba15ef97acb41a87507c9940dbc +"array-union@npm:^2.1.0": + version: 2.1.0 + resolution: "array-union@npm:2.1.0" + checksum: 5bee12395cba82da674931df6d0fea23c4aa4660cb3b338ced9f828782a65caa232573e6bf3968f23e0c5eb301764a382cef2f128b170a9dc59de0e36c39f98d languageName: node linkType: hard -"@types/http-errors@npm:*": - version: 2.0.2 - resolution: "@types/http-errors@npm:2.0.2" - checksum: d7f14045240ac4b563725130942b8e5c8080bfabc724c8ff3f166ea928ff7ae02c5194763bc8f6aaf21897e8a44049b0492493b9de3e058247e58fdfe0f86692 +"assert@npm:^2.1.0": + version: 2.1.0 + resolution: "assert@npm:2.1.0" + dependencies: + call-bind: ^1.0.2 + is-nan: ^1.3.2 + object-is: ^1.1.5 + object.assign: ^4.1.4 + util: ^0.12.5 + checksum: 1ed1cabba9abe55f4109b3f7292b4e4f3cf2953aad8dc148c0b3c3bd676675c31b1abb32ef563b7d5a19d1715bf90d1e5f09fad2a4ee655199468902da80f7c2 languageName: node linkType: hard -"@types/http-proxy@npm:^1.17.8": - version: 1.17.12 - resolution: "@types/http-proxy@npm:1.17.12" +"assertion-error@npm:^1.1.0": + version: 1.1.0 + resolution: "assertion-error@npm:1.1.0" + checksum: fd9429d3a3d4fd61782eb3962ae76b6d08aa7383123fca0596020013b3ebd6647891a85b05ce821c47d1471ed1271f00b0545cf6a4326cf2fc91efcc3b0fbecf + languageName: node + linkType: hard + +"ast-types@npm:^0.13.4": + version: 0.13.4 + resolution: "ast-types@npm:0.13.4" dependencies: - "@types/node": "*" - checksum: 89700c8e3c8f2c59c87c8db8e7a070c97a3b30a4a38223aca6b8b817e6f2ca931f5a500e16ecadc1ebcfed2676cc60e073d8f887e621d84420298728ec6fd000 + tslib: ^2.0.1 + checksum: 5a51f7b70588ecced3601845a0e203279ca2f5fdc184416a0a1640c93ec0a267241d6090a328e78eebb8de81f8754754e0a4f1558ba2a3d638f8ccbd0b1f0eff languageName: node linkType: hard -"@types/istanbul-lib-coverage@npm:*, @types/istanbul-lib-coverage@npm:^2.0.0, @types/istanbul-lib-coverage@npm:^2.0.1, @types/istanbul-lib-coverage@npm:^2.0.3": - version: 2.0.4 - resolution: "@types/istanbul-lib-coverage@npm:2.0.4" - checksum: a25d7589ee65c94d31464c16b72a9dc81dfa0bea9d3e105ae03882d616e2a0712a9c101a599ec482d297c3591e16336962878cb3eb1a0a62d5b76d277a890ce7 +"astral-regex@npm:^2.0.0": + version: 2.0.0 + resolution: "astral-regex@npm:2.0.0" + checksum: 876231688c66400473ba505731df37ea436e574dd524520294cc3bbc54ea40334865e01fa0d074d74d036ee874ee7e62f486ea38bc421ee8e6a871c06f011766 languageName: node linkType: hard -"@types/istanbul-lib-report@npm:*": - version: 3.0.0 - resolution: "@types/istanbul-lib-report@npm:3.0.0" - dependencies: - "@types/istanbul-lib-coverage": "*" - checksum: 656398b62dc288e1b5226f8880af98087233cdb90100655c989a09f3052b5775bf98ba58a16c5ae642fb66c61aba402e07a9f2bff1d1569e3b306026c59f3f36 +"astring@npm:^1.8.0": + version: 1.8.6 + resolution: "astring@npm:1.8.6" + bin: + astring: bin/astring + checksum: 6f034d2acef1dac8bb231e7cc26c573d3c14e1975ea6e04f20312b43d4f462f963209bc64187d25d477a182dc3c33277959a0156ab7a3617aa79b1eac4d88e1f languageName: node linkType: hard -"@types/istanbul-reports@npm:^3.0.0": - version: 3.0.1 - resolution: "@types/istanbul-reports@npm:3.0.1" +"async-mutex@npm:0.4.0": + version: 0.4.0 + resolution: "async-mutex@npm:0.4.0" dependencies: - "@types/istanbul-lib-report": "*" - checksum: f1ad54bc68f37f60b30c7915886b92f86b847033e597f9b34f2415acdbe5ed742fa559a0a40050d74cdba3b6a63c342cac1f3a64dba5b68b66a6941f4abd7903 + tslib: ^2.4.0 + checksum: 813a71728b35a4fbfd64dba719f04726d9133c67b577fcd951b7028c4a675a13ee34e69beb82d621f87bf81f5d4f135c4c44be0448550c7db728547244ef71fc 
languageName: node linkType: hard -"@types/json-schema@npm:*, @types/json-schema@npm:^7.0.12, @types/json-schema@npm:^7.0.4, @types/json-schema@npm:^7.0.5, @types/json-schema@npm:^7.0.8, @types/json-schema@npm:^7.0.9": - version: 7.0.13 - resolution: "@types/json-schema@npm:7.0.13" - checksum: 345df21a678fa72fb389f35f33de77833d09d4a142bb2bcb27c18690efa4cf70fc2876e43843cefb3fbdb9fcb12cd3e970a90936df30f53bbee899865ff605ab +"async@npm:^2.6.4": + version: 2.6.4 + resolution: "async@npm:2.6.4" + dependencies: + lodash: ^4.17.14 + checksum: a52083fb32e1ebe1d63e5c5624038bb30be68ff07a6c8d7dfe35e47c93fc144bd8652cbec869e0ac07d57dde387aa5f1386be3559cdee799cb1f789678d88e19 languageName: node linkType: hard -"@types/katex@npm:^0.11.0": - version: 0.11.1 - resolution: "@types/katex@npm:0.11.1" - checksum: 1e51988b4b386a1b6fa8e22826ab4705bf3e6c9fb03461f2c91d28cb31095232bdeff491069ac9bc74bc4c26110be6a11a41e12ca77a2e4169f3afd8cd349355 +"asynckit@npm:^0.4.0": + version: 0.4.0 + resolution: "asynckit@npm:0.4.0" + checksum: 7b78c451df768adba04e2d02e63e2d0bf3b07adcd6e42b4cf665cb7ce899bedd344c69a1dcbce355b5f972d597b25aaa1c1742b52cffd9caccb22f348114f6be languageName: node linkType: hard -"@types/keygrip@npm:*": - version: 1.0.3 - resolution: "@types/keygrip@npm:1.0.3" - checksum: adee9a3efda3db9c64466af1c7c91a6d049420ee50589500cfd36e3e38d6abefdd858da88e6da63ed186e588127af3e862c1dc64fb0ad45c91870e6c35fe3be0 +"at-least-node@npm:^1.0.0": + version: 1.0.0 + resolution: "at-least-node@npm:1.0.0" + checksum: 463e2f8e43384f1afb54bc68485c436d7622acec08b6fad269b421cb1d29cebb5af751426793d0961ed243146fe4dc983402f6d5a51b720b277818dbf6f2e49e languageName: node linkType: hard -"@types/keyv@npm:^3.1.1": - version: 3.1.4 - resolution: "@types/keyv@npm:3.1.4" +"autoprefixer@npm:^10.4.12, autoprefixer@npm:^10.4.14, autoprefixer@npm:^10.4.7": + version: 10.4.16 + resolution: "autoprefixer@npm:10.4.16" dependencies: - "@types/node": "*" - checksum: e009a2bfb50e90ca9b7c6e8f648f8464067271fd99116f881073fa6fa76dc8d0133181dd65e6614d5fb1220d671d67b0124aef7d97dc02d7e342ab143a47779d + browserslist: ^4.21.10 + caniuse-lite: ^1.0.30001538 + fraction.js: ^4.3.6 + normalize-range: ^0.1.2 + picocolors: ^1.0.0 + postcss-value-parser: ^4.2.0 + peerDependencies: + postcss: ^8.1.0 + bin: + autoprefixer: bin/autoprefixer + checksum: 45fad7086495048dacb14bb7b00313e70e135b5d8e8751dcc60548889400763705ab16fc2d99ea628b44c3472698fb0e39598f595ba28409c965ab159035afde languageName: node linkType: hard -"@types/koa-compose@npm:*": - version: 3.2.6 - resolution: "@types/koa-compose@npm:3.2.6" - dependencies: - "@types/koa": "*" - checksum: 1204c5bfa4c69448b692aba29c566ef6bedbdbe5842fa180450267a23d3606faa13ef209876fd0c989edb5bc381812a66610fcfeac196ce4e76364354756ba1f +"available-typed-arrays@npm:^1.0.5": + version: 1.0.5 + resolution: "available-typed-arrays@npm:1.0.5" + checksum: 20eb47b3cefd7db027b9bbb993c658abd36d4edd3fe1060e83699a03ee275b0c9b216cc076ff3f2db29073225fb70e7613987af14269ac1fe2a19803ccc97f1a languageName: node linkType: hard -"@types/koa@npm:*, @types/koa@npm:^2.11.6": - version: 2.13.9 - resolution: "@types/koa@npm:2.13.9" +"axios@npm:^0.25.0": + version: 0.25.0 + resolution: "axios@npm:0.25.0" dependencies: - "@types/accepts": "*" - "@types/content-disposition": "*" - "@types/cookies": "*" - "@types/http-assert": "*" - "@types/http-errors": "*" - "@types/keygrip": "*" - "@types/koa-compose": "*" - "@types/node": "*" - checksum: 
af9cd599c8e17e2ae0f4168a61d964e343f713d002b65fd995658d7addc6551ccadecfd32b3405cf44e4d360178ee4f972d6881533548261ae1f636a655d24b1 + follow-redirects: ^1.14.7 + checksum: 2a8a3787c05f2a0c9c3878f49782357e2a9f38945b93018fb0c4fd788171c43dceefbb577988628e09fea53952744d1ecebde234b561f1e703aa43e0a598a3ad languageName: node linkType: hard -"@types/lodash@npm:^4": - version: 4.14.200 - resolution: "@types/lodash@npm:4.14.200" - checksum: 6471f8bb5da692a6ecf03a8da4935bfbc341e67ee9bcb4f5730bfacff0c367232548f0a01e8ac5ea18c6fe78fb085d502494e33ccb47a7ee87cbdee03b47d00d +"axios@npm:^1.4.0": + version: 1.6.2 + resolution: "axios@npm:1.6.2" + dependencies: + follow-redirects: ^1.15.0 + form-data: ^4.0.0 + proxy-from-env: ^1.1.0 + checksum: 4a7429e2b784be0f2902ca2680964391eae7236faa3967715f30ea45464b98ae3f1c6f631303b13dfe721b17126b01f486c7644b9ef276bfc63112db9fd379f8 languageName: node linkType: hard -"@types/lru-cache@npm:^5.1.0": - version: 5.1.1 - resolution: "@types/lru-cache@npm:5.1.1" - checksum: e1d6c0085f61b16ec5b3073ec76ad1be4844ea036561c3f145fc19f71f084b58a6eb600b14128aa95809d057d28f1d147c910186ae51219f58366ffd2ff2e118 +"b4a@npm:^1.6.4": + version: 1.6.4 + resolution: "b4a@npm:1.6.4" + checksum: 81b086f9af1f8845fbef4476307236bda3d660c158c201db976f19cdce05f41f93110ab6b12fd7a2696602a490cc43d5410ee36a56d6eef93afb0d6ca69ac3b2 languageName: node linkType: hard -"@types/mdast@npm:^3.0.0": - version: 3.0.13 - resolution: "@types/mdast@npm:3.0.13" +"babel-loader@npm:^8.2.5": + version: 8.3.0 + resolution: "babel-loader@npm:8.3.0" dependencies: - "@types/unist": ^2 - checksum: f13fa17a2931ed1492a2f0012a3abd6de3a2d1128145981321909e03fedba80162668f284a4af92aca3732b27e933c5f4772336d96b9ae660bfde696d07abbe6 + find-cache-dir: ^3.3.1 + loader-utils: ^2.0.0 + make-dir: ^3.1.0 + schema-utils: ^2.6.5 + peerDependencies: + "@babel/core": ^7.0.0 + webpack: ">=2" + checksum: d48bcf9e030e598656ad3ff5fb85967db2eaaf38af5b4a4b99d25618a2057f9f100e6b231af2a46c1913206db506115ca7a8cbdf52c9c73d767070dae4352ab5 languageName: node linkType: hard -"@types/mime@npm:*": - version: 3.0.1 - resolution: "@types/mime@npm:3.0.1" - checksum: 4040fac73fd0cea2460e29b348c1a6173da747f3a87da0dbce80dd7a9355a3d0e51d6d9a401654f3e5550620e3718b5a899b2ec1debf18424e298a2c605346e7 +"babel-loader@npm:^9.1.3": + version: 9.1.3 + resolution: "babel-loader@npm:9.1.3" + dependencies: + find-cache-dir: ^4.0.0 + schema-utils: ^4.0.0 + peerDependencies: + "@babel/core": ^7.12.0 + webpack: ">=5" + checksum: b168dde5b8cf11206513371a79f86bb3faa7c714e6ec9fffd420876b61f3d7f5f4b976431095ef6a14bc4d324505126deb91045fd41e312ba49f4deaa166fe28 languageName: node linkType: hard -"@types/mime@npm:^1": - version: 1.3.2 - resolution: "@types/mime@npm:1.3.2" - checksum: 0493368244cced1a69cb791b485a260a422e6fcc857782e1178d1e6f219f1b161793e9f87f5fae1b219af0f50bee24fcbe733a18b4be8fdd07a38a8fb91146fd +"babel-plugin-apply-mdx-type-prop@npm:1.6.22": + version: 1.6.22 + resolution: "babel-plugin-apply-mdx-type-prop@npm:1.6.22" + dependencies: + "@babel/helper-plugin-utils": 7.10.4 + "@mdx-js/util": 1.6.22 + peerDependencies: + "@babel/core": ^7.11.6 + checksum: 43e2100164a8f3e46fddd76afcbfb1f02cbebd5612cfe63f3d344a740b0afbdc4d2bf5659cffe9323dd2554c7b86b23ebedae9dadcec353b6594f4292a1a28e2 languageName: node linkType: hard -"@types/minimist@npm:^1.2.2": - version: 1.2.2 - resolution: "@types/minimist@npm:1.2.2" - checksum: b8da83c66eb4aac0440e64674b19564d9d86c80ae273144db9681e5eeff66f238ade9515f5006ffbfa955ceff8b89ad2bd8ec577d7caee74ba101431fb07045d 
+"babel-plugin-dynamic-import-node@npm:^2.3.3": + version: 2.3.3 + resolution: "babel-plugin-dynamic-import-node@npm:2.3.3" + dependencies: + object.assign: ^4.1.0 + checksum: c9d24415bcc608d0db7d4c8540d8002ac2f94e2573d2eadced137a29d9eab7e25d2cbb4bc6b9db65cf6ee7430f7dd011d19c911a9a778f0533b4a05ce8292c9b languageName: node linkType: hard -"@types/mocha@npm:^10.0.1": - version: 10.0.1 - resolution: "@types/mocha@npm:10.0.1" - checksum: 224ea9fce7b1734ccdb9aa99a622d902a538ce1847bca7fd22c5fb38adcf3ed536f50f48f587085db988a4bb3c2eb68f4b98e1cd6a38bc5547bd3bbbedc54495 +"babel-plugin-extract-import-names@npm:1.6.22": + version: 1.6.22 + resolution: "babel-plugin-extract-import-names@npm:1.6.22" + dependencies: + "@babel/helper-plugin-utils": 7.10.4 + checksum: 145ccf09c96d36411d340e78086555f8d4d5924ea39fcb0eca461c066cfa98bc4344982bb35eb85d054ef88f8d4dfc0205ba27370c1d8fcc78191b02908d044d languageName: node linkType: hard -"@types/mocha@npm:^8.2.0": - version: 8.2.3 - resolution: "@types/mocha@npm:8.2.3" - checksum: b43ed1b642a2ee62bf10792a07d5d21d66ab8b4d2cf5d822c8a7643e77b90009aecc000eefab5f6ddc9eb69004192f84119a6f97a8499e1a13ea082e7a5e71bf +"babel-plugin-polyfill-corejs2@npm:^0.4.6": + version: 0.4.6 + resolution: "babel-plugin-polyfill-corejs2@npm:0.4.6" + dependencies: + "@babel/compat-data": ^7.22.6 + "@babel/helper-define-polyfill-provider": ^0.4.3 + semver: ^6.3.1 + peerDependencies: + "@babel/core": ^7.4.0 || ^8.0.0-0 <8.0.0 + checksum: 08896811df31530be6a9bcdd630cb9fd4b5ae5181039d18db3796efbc54e38d57a42af460845c10a04434e1bc45c0d47743c7e6c860383cc6b141083cde22030 languageName: node linkType: hard -"@types/node@npm:*, @types/node@npm:^20.5.7, @types/node@npm:^20.6.2": - version: 20.6.3 - resolution: "@types/node@npm:20.6.3" - checksum: 444a6f1f41cfa8d3e20ce0108e6e43960fb2ae0e481f233bb1c14d6252aa63a92e021de561cd317d9fdb411688f871065f40175a1f18763282dee2613a08f8a3 +"babel-plugin-polyfill-corejs3@npm:^0.8.5": + version: 0.8.6 + resolution: "babel-plugin-polyfill-corejs3@npm:0.8.6" + dependencies: + "@babel/helper-define-polyfill-provider": ^0.4.3 + core-js-compat: ^3.33.1 + peerDependencies: + "@babel/core": ^7.4.0 || ^8.0.0-0 <8.0.0 + checksum: 36951c2edac42ac0f05b200502e90d77bf66ccee5b52e2937d23496c6ef2372cce31b8c64144da374b77bd3eb65e2721703a52eac56cad16a152326c092cbf77 languageName: node linkType: hard -"@types/node@npm:18.15.13": - version: 18.15.13 - resolution: "@types/node@npm:18.15.13" - checksum: 79cc5a2b5f98e8973061a4260a781425efd39161a0e117a69cd089603964816c1a14025e1387b4590c8e82d05133b7b4154fa53a7dffb3877890a66145e76515 +"babel-plugin-polyfill-regenerator@npm:^0.5.3": + version: 0.5.3 + resolution: "babel-plugin-polyfill-regenerator@npm:0.5.3" + dependencies: + "@babel/helper-define-polyfill-provider": ^0.4.3 + peerDependencies: + "@babel/core": ^7.4.0 || ^8.0.0-0 <8.0.0 + checksum: 2bb546582cda1870d19e646a7183baeb2cccd56e0ef3e4eaeabd28e120daf17cb87399194a9ccdcf32506bcaa68d23e73440fc8ab990a7a0f8c5a77c12d5d4bc languageName: node linkType: hard -"@types/node@npm:^17.0.5": - version: 17.0.45 - resolution: "@types/node@npm:17.0.45" - checksum: aa04366b9103b7d6cfd6b2ef64182e0eaa7d4462c3f817618486ea0422984c51fc69fd0d436eae6c9e696ddfdbec9ccaa27a917f7c2e8c75c5d57827fe3d95e8 +"bail@npm:^1.0.0": + version: 1.0.5 + resolution: "bail@npm:1.0.5" + checksum: 6c334940d7eaa4e656a12fb12407b6555649b6deb6df04270fa806e0da82684ebe4a4e47815b271c794b40f8d6fa286e0c248b14ddbabb324a917fab09b7301a languageName: node linkType: hard -"@types/node@npm:^18.7.20": - version: 18.17.18 - resolution: 
"@types/node@npm:18.17.18" - checksum: 59cbd906363d37017fe9ba0c08c1446e440d4d977459609c5f90b8fb7eb41f273ce8af30c5a5b5d599d7de934c1b3702bc9fc27caf8d2270e5cdb659c5232991 +"bail@npm:^2.0.0": + version: 2.0.2 + resolution: "bail@npm:2.0.2" + checksum: aab4e8ccdc8d762bf3fdfce8e706601695620c0c2eda256dd85088dc0be3cfd7ff126f6e99c2bee1f24f5d418414aacf09d7f9702f16d6963df2fa488cda8824 languageName: node linkType: hard -"@types/parse-json@npm:^4.0.0": - version: 4.0.0 - resolution: "@types/parse-json@npm:4.0.0" - checksum: fd6bce2b674b6efc3db4c7c3d336bd70c90838e8439de639b909ce22f3720d21344f52427f1d9e57b265fcb7f6c018699b99e5e0c208a1a4823014269a6bf35b +"balanced-match@npm:^1.0.0": + version: 1.0.2 + resolution: "balanced-match@npm:1.0.2" + checksum: 9706c088a283058a8a99e0bf91b0a2f75497f185980d9ffa8b304de1d9e58ebda7c72c07ebf01dadedaac5b2907b2c6f566f660d62bd336c3468e960403b9d65 languageName: node linkType: hard -"@types/parse5@npm:^5.0.0": - version: 5.0.3 - resolution: "@types/parse5@npm:5.0.3" - checksum: d6b7495cb1850f9f2e9c5e103ede9f2d30a5320669707b105c403868adc9e4bf8d3a7ff314cc23f67826bbbbbc0e6147346ce9062ab429f099dba7a01f463919 +"base-x@npm:^3.0.2": + version: 3.0.9 + resolution: "base-x@npm:3.0.9" + dependencies: + safe-buffer: ^5.0.1 + checksum: 957101d6fd09e1903e846fd8f69fd7e5e3e50254383e61ab667c725866bec54e5ece5ba49ce385128ae48f9ec93a26567d1d5ebb91f4d56ef4a9cc0d5a5481e8 languageName: node linkType: hard -"@types/parse5@npm:^6.0.1": - version: 6.0.3 - resolution: "@types/parse5@npm:6.0.3" - checksum: ddb59ee4144af5dfcc508a8dcf32f37879d11e12559561e65788756b95b33e6f03ea027d88e1f5408f9b7bfb656bf630ace31a2169edf44151daaf8dd58df1b7 +"base64-js@npm:^1.3.1": + version: 1.5.1 + resolution: "base64-js@npm:1.5.1" + checksum: 669632eb3745404c2f822a18fc3a0122d2f9a7a13f7fb8b5823ee19d1d2ff9ee5b52c53367176ea4ad093c332fd5ab4bd0ebae5a8e27917a4105a4cfc86b1005 languageName: node linkType: hard -"@types/pbkdf2@npm:^3.0.0": - version: 3.1.0 - resolution: "@types/pbkdf2@npm:3.1.0" - dependencies: - "@types/node": "*" - checksum: d15024b1957c21cf3b8887329d9bd8dfde754cf13a09d76ae25f1391cfc62bb8b8d7b760773c5dbaa748172fba8b3e0c3dbe962af6ccbd69b76df12a48dfba40 +"basic-ftp@npm:^5.0.2": + version: 5.0.4 + resolution: "basic-ftp@npm:5.0.4" + checksum: 57725f24debd8c1b36f9bad1bfee39c5d9f5997f32a23e5c957389dcc64373a13b41711e5723b4a3b616a93530b345686119f480c27a115b2fde944c1652ceb1 languageName: node linkType: hard -"@types/prettier@npm:^3": - version: 3.0.0 - resolution: "@types/prettier@npm:3.0.0" - dependencies: - prettier: "*" - checksum: a2a512d304e5bcf78f38089dc88ad19215e6ab871d435a17aef3ce538a63b07c0e359c18db23989dc1ed9fff96d99eee1f680416080184df5c7e0e3bf767e165 +"batch@npm:0.6.1": + version: 0.6.1 + resolution: "batch@npm:0.6.1" + checksum: 61f9934c7378a51dce61b915586191078ef7f1c3eca707fdd58b96ff2ff56d9e0af2bdab66b1462301a73c73374239e6542d9821c0af787f3209a23365d07e7f languageName: node linkType: hard -"@types/prop-types@npm:*": - version: 15.7.8 - resolution: "@types/prop-types@npm:15.7.8" - checksum: 61dfad79da8b1081c450bab83b77935df487ae1cdd4660ec7df6be8e74725c15fa45cf486ce057addc956ca4ae78300b97091e2a25061133d1b9a1440bc896ae +"bech32@npm:1.1.4": + version: 1.1.4 + resolution: "bech32@npm:1.1.4" + checksum: 0e98db619191548390d6f09ff68b0253ba7ae6a55db93dfdbb070ba234c1fd3308c0606fbcc95fad50437227b10011e2698b89f0181f6e7f845c499bd14d0f4b languageName: node linkType: hard -"@types/ps-tree@npm:^1.1.2": - version: 1.1.2 - resolution: "@types/ps-tree@npm:1.1.2" - checksum: 
575c3b2b83ea8935ab296ac9e17f6a2c1a5bb155f9e30663bb7a7c741a8ca4641f0df9748866230f1d6c3f87ed4ffa3fa91f1df444ef9979a3df31114534bf25 +"big-integer@npm:^1.6.44": + version: 1.6.52 + resolution: "big-integer@npm:1.6.52" + checksum: 6e86885787a20fed96521958ae9086960e4e4b5e74d04f3ef7513d4d0ad631a9f3bde2730fc8aaa4b00419fc865f6ec573e5320234531ef37505da7da192c40b languageName: node linkType: hard -"@types/qs@npm:*": - version: 6.9.8 - resolution: "@types/qs@npm:6.9.8" - checksum: c28e07d00d07970e5134c6eed184a0189b8a4649e28fdf36d9117fe671c067a44820890de6bdecef18217647a95e9c6aebdaaae69f5fe4b0bec9345db885f77e +"big.js@npm:^5.2.2": + version: 5.2.2 + resolution: "big.js@npm:5.2.2" + checksum: b89b6e8419b097a8fb4ed2399a1931a68c612bce3cfd5ca8c214b2d017531191070f990598de2fc6f3f993d91c0f08aa82697717f6b3b8732c9731866d233c9e languageName: node linkType: hard -"@types/range-parser@npm:*": - version: 1.2.4 - resolution: "@types/range-parser@npm:1.2.4" - checksum: b7c0dfd5080a989d6c8bb0b6750fc0933d9acabeb476da6fe71d8bdf1ab65e37c136169d84148034802f48378ab94e3c37bb4ef7656b2bec2cb9c0f8d4146a95 +"bigint-crypto-utils@npm:^3.0.23": + version: 3.3.0 + resolution: "bigint-crypto-utils@npm:3.3.0" + checksum: 9598ce57b23f776c8936d44114c9f051e62b5fa654915b664784cbcbacc5aa0485f4479571c51ff58008abb1210c0d6a234853742f07cf84bda890f2a1e01000 languageName: node linkType: hard -"@types/react-router-config@npm:*, @types/react-router-config@npm:^5.0.6": - version: 5.0.8 - resolution: "@types/react-router-config@npm:5.0.8" - dependencies: - "@types/history": ^4.7.11 - "@types/react": "*" - "@types/react-router": ^5.1.0 - checksum: afbd96e526fcdd19a3c8604912496a5a7ecfdcd848632a003ef8af69701ca74f14451e4fac93d265b678ca07c82ec4a103126f64c040a4aefa1a8172619be2bd +"binary-extensions@npm:^2.0.0": + version: 2.2.0 + resolution: "binary-extensions@npm:2.2.0" + checksum: ccd267956c58d2315f5d3ea6757cf09863c5fc703e50fbeb13a7dc849b812ef76e3cf9ca8f35a0c48498776a7478d7b4a0418e1e2b8cb9cb9731f2922aaad7f8 languageName: node linkType: hard -"@types/react-router-dom@npm:*": - version: 5.3.3 - resolution: "@types/react-router-dom@npm:5.3.3" +"bl@npm:^4.0.3": + version: 4.1.0 + resolution: "bl@npm:4.1.0" dependencies: - "@types/history": ^4.7.11 - "@types/react": "*" - "@types/react-router": "*" - checksum: 28c4ea48909803c414bf5a08502acbb8ba414669b4b43bb51297c05fe5addc4df0b8fd00e0a9d1e3535ec4073ef38aaafac2c4a2b95b787167d113bc059beff3 + buffer: ^5.5.0 + inherits: ^2.0.4 + readable-stream: ^3.4.0 + checksum: 9e8521fa7e83aa9427c6f8ccdcba6e8167ef30cc9a22df26effcc5ab682ef91d2cbc23a239f945d099289e4bbcfae7a192e9c28c84c6202e710a0dfec3722662 languageName: node linkType: hard -"@types/react-router@npm:*, @types/react-router@npm:^5.1.0": - version: 5.1.20 - resolution: "@types/react-router@npm:5.1.20" +"bl@npm:~0.8.1": + version: 0.8.2 + resolution: "bl@npm:0.8.2" dependencies: - "@types/history": ^4.7.11 - "@types/react": "*" - checksum: 128764143473a5e9457ddc715436b5d49814b1c214dde48939b9bef23f0e77f52ffcdfa97eb8d3cc27e2c229869c0cdd90f637d887b62f2c9f065a87d6425419 + readable-stream: ~1.0.26 + checksum: 18767c5c861ae1cdbb000bb346e9e8e29137225e8eef97f39db78beeb236beca609f465580c5c1b177d621505f57400834fb4a17a66d264f33a0237293ec2ac5 languageName: node linkType: hard -"@types/react@npm:*": - version: 18.2.28 - resolution: "@types/react@npm:18.2.28" - dependencies: - "@types/prop-types": "*" - "@types/scheduler": "*" - csstype: ^3.0.2 - checksum: 
81381bedeba83278f4c9febb0b83e0bd3f42a25897a50b9cb36ef53651d34b3d50f87ebf11211ea57ea575131f85d31e93e496ce46478a00b0f9bf7b26b5917a +"blakejs@npm:^1.1.0": + version: 1.2.1 + resolution: "blakejs@npm:1.2.1" + checksum: d699ba116cfa21d0b01d12014a03e484dd76d483133e6dc9eb415aa70a119f08beb3bcefb8c71840106a00b542cba77383f8be60cd1f0d4589cb8afb922eefbe languageName: node linkType: hard -"@types/readable-stream@npm:^2.3.13": - version: 2.3.15 - resolution: "@types/readable-stream@npm:2.3.15" - dependencies: - "@types/node": "*" - safe-buffer: ~5.1.1 - checksum: ec36f525cad09b6c65a1dafcb5ad99b9e2ed824ec49b7aa23180ac427e5d35b8a0706193ecd79ab4253a283ad485ba03d5917a98daaaa144f0ea34f4823e9d82 +"bn.js@npm:^4.11.0, bn.js@npm:^4.11.8, bn.js@npm:^4.11.9": + version: 4.12.0 + resolution: "bn.js@npm:4.12.0" + checksum: 39afb4f15f4ea537b55eaf1446c896af28ac948fdcf47171961475724d1bb65118cca49fa6e3d67706e4790955ec0e74de584e45c8f1ef89f46c812bee5b5a12 languageName: node linkType: hard -"@types/resolve@npm:1.17.1": - version: 1.17.1 - resolution: "@types/resolve@npm:1.17.1" - dependencies: - "@types/node": "*" - checksum: dc6a6df507656004e242dcb02c784479deca516d5f4b58a1707e708022b269ae147e1da0521f3e8ad0d63638869d87e0adc023f0bd5454aa6f72ac66c7525cf5 +"bn.js@npm:^5.2.0, bn.js@npm:^5.2.1": + version: 5.2.1 + resolution: "bn.js@npm:5.2.1" + checksum: 3dd8c8d38055fedfa95c1d5fc3c99f8dd547b36287b37768db0abab3c239711f88ff58d18d155dd8ad902b0b0cee973747b7ae20ea12a09473272b0201c9edd3 languageName: node linkType: hard -"@types/responselike@npm:^1.0.0": - version: 1.0.1 - resolution: "@types/responselike@npm:1.0.1" +"body-parser@npm:1.20.1": + version: 1.20.1 + resolution: "body-parser@npm:1.20.1" dependencies: - "@types/node": "*" - checksum: ae8c36c9354aaedfa462dab655aa17613529d545a418acc54ba0214145fc1d0454be2ae107031a1b2c24768f19f2af7e4096a85d1e604010becd0bec2355cb0e - languageName: node - linkType: hard - -"@types/retry@npm:0.12.0": - version: 0.12.0 - resolution: "@types/retry@npm:0.12.0" - checksum: 61a072c7639f6e8126588bf1eb1ce8835f2cb9c2aba795c4491cf6310e013267b0c8488039857c261c387e9728c1b43205099223f160bb6a76b4374f741b5603 + bytes: 3.1.2 + content-type: ~1.0.4 + debug: 2.6.9 + depd: 2.0.0 + destroy: 1.2.0 + http-errors: 2.0.0 + iconv-lite: 0.4.24 + on-finished: 2.4.1 + qs: 6.11.0 + raw-body: 2.5.1 + type-is: ~1.6.18 + unpipe: 1.0.0 + checksum: f1050dbac3bede6a78f0b87947a8d548ce43f91ccc718a50dd774f3c81f2d8b04693e52acf62659fad23101827dd318da1fb1363444ff9a8482b886a3e4a5266 languageName: node linkType: hard -"@types/sax@npm:^1.2.1": - version: 1.2.5 - resolution: "@types/sax@npm:1.2.5" +"bonjour-service@npm:^1.0.11": + version: 1.1.1 + resolution: "bonjour-service@npm:1.1.1" dependencies: - "@types/node": "*" - checksum: a4bf27d7eb1b99030e75dea01fab2fa3959554f5c463b4f577dbbc9d8ed88a5b26b83ac84d045ae5a53e350057f120574db6e1c4e8507c011299dd023e4fa4f2 + array-flatten: ^2.1.2 + dns-equal: ^1.0.0 + fast-deep-equal: ^3.1.3 + multicast-dns: ^7.2.5 + checksum: 832d0cf78b91368fac8bb11fd7a714e46f4c4fb1bb14d7283bce614a6fb3aae2f3fe209aba5b4fa051811c1cab6921d073a83db8432fb23292f27dd4161fb0f1 languageName: node linkType: hard -"@types/scheduler@npm:*": - version: 0.16.4 - resolution: "@types/scheduler@npm:0.16.4" - checksum: a57b0f10da1b021e6bd5eeef8a1917dd3b08a8715bd8029e2ded2096d8f091bb1bb1fef2d66e139588a983c4bfbad29b59e48011141725fa83c76e986e1257d7 +"boolbase@npm:^1.0.0": + version: 1.0.0 + resolution: "boolbase@npm:1.0.0" + checksum: 
3e25c80ef626c3a3487c73dbfc70ac322ec830666c9ad915d11b701142fab25ec1e63eff2c450c74347acfd2de854ccde865cd79ef4db1683f7c7b046ea43bb0 languageName: node linkType: hard -"@types/secp256k1@npm:^4.0.1": - version: 4.0.4 - resolution: "@types/secp256k1@npm:4.0.4" +"boxen@npm:7.0.0": + version: 7.0.0 + resolution: "boxen@npm:7.0.0" dependencies: - "@types/node": "*" - checksum: 6f521a08486a98e71c8529f5c3119f99e610196a47243cc6052c6160b216dff2c85dc50a8f3208ed47028dbb470bbb6fdee47a3fdc064687e46021d5a712767c + ansi-align: ^3.0.1 + camelcase: ^7.0.0 + chalk: ^5.0.1 + cli-boxes: ^3.0.0 + string-width: ^5.1.2 + type-fest: ^2.13.0 + widest-line: ^4.0.1 + wrap-ansi: ^8.0.1 + checksum: b917cf7a168ef3149635a8c02d5c9717d66182348bd27038d85328ad12655151e3324db0f2815253846c33e5f0ddf28b6cd52d56a12b9f88617b7f8f722b946a languageName: node linkType: hard -"@types/semver@npm:^7.5.0": - version: 7.5.3 - resolution: "@types/semver@npm:7.5.3" - checksum: 349fdd1ab6c213bac5c991bac766bd07b8b12e63762462bb058740dcd2eb09c8193d068bb226f134661275f2022976214c0e727a4e5eb83ec1b131127c980d3e +"boxen@npm:^5.0.0": + version: 5.1.2 + resolution: "boxen@npm:5.1.2" + dependencies: + ansi-align: ^3.0.0 + camelcase: ^6.2.0 + chalk: ^4.1.0 + cli-boxes: ^2.2.1 + string-width: ^4.2.2 + type-fest: ^0.20.2 + widest-line: ^3.1.0 + wrap-ansi: ^7.0.0 + checksum: 82d03e42a72576ff235123f17b7c505372fe05c83f75f61e7d4fa4bcb393897ec95ce766fecb8f26b915f0f7a7227d66e5ec7cef43f5b2bd9d3aeed47ec55877 languageName: node linkType: hard -"@types/send@npm:*": - version: 0.17.1 - resolution: "@types/send@npm:0.17.1" +"boxen@npm:^6.2.1": + version: 6.2.1 + resolution: "boxen@npm:6.2.1" dependencies: - "@types/mime": ^1 - "@types/node": "*" - checksum: 10b620a5960058ef009afbc17686f680d6486277c62f640845381ec4baa0ea683fdd77c3afea4803daf5fcddd3fb2972c8aa32e078939f1d4e96f83195c89793 + ansi-align: ^3.0.1 + camelcase: ^6.2.0 + chalk: ^4.1.2 + cli-boxes: ^3.0.0 + string-width: ^5.0.1 + type-fest: ^2.5.0 + widest-line: ^4.0.1 + wrap-ansi: ^8.0.1 + checksum: 2b3226092f1ff8e149c02979098c976552afa15f9e0231c9ed2dfcaaf84604494d16a6f13b647f718439f64d3140a088e822d47c7db00d2266e9ffc8d7321774 languageName: node linkType: hard -"@types/serve-index@npm:^1.9.1": - version: 1.9.2 - resolution: "@types/serve-index@npm:1.9.2" +"boxen@npm:^7.0.0": + version: 7.1.1 + resolution: "boxen@npm:7.1.1" dependencies: - "@types/express": "*" - checksum: 4fd0a8fcdd6e2b2d7704a539b7c1e0d143e9e00be4c3992394fe2ef7e9b67283d74b43db3f92b0e0717d779aa51184168bbae617d30456357cb95ec58aa59ea8 + ansi-align: ^3.0.1 + camelcase: ^7.0.1 + chalk: ^5.2.0 + cli-boxes: ^3.0.0 + string-width: ^5.1.2 + type-fest: ^2.13.0 + widest-line: ^4.0.1 + wrap-ansi: ^8.1.0 + checksum: ad8833d5f2845b0a728fdf8a0bc1505dff0c518edcb0fd56979a08774b1f26cf48b71e66532179ccdfb9ed95b64aa008689cca26f7776f93f002b8000a683d76 languageName: node linkType: hard -"@types/serve-static@npm:*": - version: 1.15.2 - resolution: "@types/serve-static@npm:1.15.2" +"bplist-parser@npm:^0.2.0": + version: 0.2.0 + resolution: "bplist-parser@npm:0.2.0" dependencies: - "@types/http-errors": "*" - "@types/mime": "*" - "@types/node": "*" - checksum: 15c261dbfc57890f7cc17c04d5b22b418dfa0330c912b46c5d8ae2064da5d6f844ef7f41b63c7f4bbf07675e97ebe6ac804b032635ec742ae45d6f1274259b3e + big-integer: ^1.6.44 + checksum: d5339dd16afc51de6c88f88f58a45b72ed6a06aa31f5557d09877575f220b7c1d3fbe375da0b62e6a10d4b8ed80523567e351f24014f5bc886ad523758142cdd languageName: node linkType: hard -"@types/serve-static@npm:^1.13.10": - version: 1.15.3 - resolution: 
"@types/serve-static@npm:1.15.3" +"brace-expansion@npm:^1.1.7": + version: 1.1.11 + resolution: "brace-expansion@npm:1.1.11" dependencies: - "@types/http-errors": "*" - "@types/mime": "*" - "@types/node": "*" - checksum: afa52252f0ba94cdb5391e80f23e17fd629bdf2a31be8876e2c4490312ed6b0570822dd7de7cea04c9002049e207709563568b7f4ee10bb9f456321db1e83e40 + balanced-match: ^1.0.0 + concat-map: 0.0.1 + checksum: faf34a7bb0c3fcf4b59c7808bc5d2a96a40988addf2e7e09dfbb67a2251800e0d14cd2bfc1aa79174f2f5095c54ff27f46fb1289fe2d77dac755b5eb3434cc07 languageName: node linkType: hard -"@types/sockjs@npm:^0.3.33": - version: 0.3.34 - resolution: "@types/sockjs@npm:0.3.34" +"brace-expansion@npm:^2.0.1": + version: 2.0.1 + resolution: "brace-expansion@npm:2.0.1" dependencies: - "@types/node": "*" - checksum: 1d38b1976a97f5895a6be00cead1b2a59d842f023b6e35450b7eec8a3131c860c447aba3f7ea3c880c066d8277b0c76fae9d080be1aad475811b568ed6079d49 + balanced-match: ^1.0.0 + checksum: a61e7cd2e8a8505e9f0036b3b6108ba5e926b4b55089eeb5550cd04a471fe216c96d4fe7e4c7f995c728c554ae20ddfc4244cad10aef255e72b62930afd233d1 languageName: node linkType: hard -"@types/unist@npm:^2, @types/unist@npm:^2.0.0, @types/unist@npm:^2.0.2, @types/unist@npm:^2.0.3": - version: 2.0.8 - resolution: "@types/unist@npm:2.0.8" - checksum: f4852d10a6752dc70df363917ef74453e5d2fd42824c0f6d09d19d530618e1402193977b1207366af4415aaec81d4e262c64d00345402020c4ca179216e553c7 +"braces@npm:^3.0.2, braces@npm:~3.0.2": + version: 3.0.2 + resolution: "braces@npm:3.0.2" + dependencies: + fill-range: ^7.0.1 + checksum: e2a8e769a863f3d4ee887b5fe21f63193a891c68b612ddb4b68d82d1b5f3ff9073af066c343e9867a393fe4c2555dcb33e89b937195feb9c1613d259edfcd459 languageName: node linkType: hard -"@types/which@npm:^2.0.1": - version: 2.0.2 - resolution: "@types/which@npm:2.0.2" - checksum: 8626a3c2f6db676c449142e1082e33ea0c9d88b8a2bd796366b944891e6da0088b2aa83d3fa9c79e6696f7381a851fc76d43bd353eb6c4d98a7775b4ae0a96a5 +"brorand@npm:^1.1.0": + version: 1.1.0 + resolution: "brorand@npm:1.1.0" + checksum: 8a05c9f3c4b46572dec6ef71012b1946db6cae8c7bb60ccd4b7dd5a84655db49fe043ecc6272e7ef1f69dc53d6730b9e2a3a03a8310509a3d797a618cbee52be languageName: node linkType: hard -"@types/ws@npm:^7.4.0": - version: 7.4.7 - resolution: "@types/ws@npm:7.4.7" +"browser-level@npm:^1.0.1": + version: 1.0.1 + resolution: "browser-level@npm:1.0.1" dependencies: - "@types/node": "*" - checksum: b4c9b8ad209620c9b21e78314ce4ff07515c0cadab9af101c1651e7bfb992d7fd933bd8b9c99d110738fd6db523ed15f82f29f50b45510288da72e964dedb1a3 + abstract-level: ^1.0.2 + catering: ^2.1.1 + module-error: ^1.0.2 + run-parallel-limit: ^1.1.0 + checksum: 67fbc77ce832940bfa25073eccff279f512ad56f545deb996a5b23b02316f5e76f4a79d381acc27eda983f5c9a2566aaf9c97e4fdd0748288c4407307537a29b languageName: node linkType: hard -"@types/ws@npm:^8.5.5": - version: 8.5.7 - resolution: "@types/ws@npm:8.5.7" - dependencies: - "@types/node": "*" - checksum: 4502085c0f7ae6b36d5419c0fc6ce4b9002ee5e997a8708d6ed10b393e97091e1b986e8038ec604945c194f14aac150e74d6596629a2775628d122f552009c35 +"browser-stdout@npm:1.3.1": + version: 1.3.1 + resolution: "browser-stdout@npm:1.3.1" + checksum: b717b19b25952dd6af483e368f9bcd6b14b87740c3d226c2977a65e84666ffd67000bddea7d911f111a9b6ddc822b234de42d52ab6507bce4119a4cc003ef7b3 languageName: node linkType: hard -"@types/yargs-parser@npm:*": - version: 21.0.1 - resolution: "@types/yargs-parser@npm:21.0.1" - checksum: 
64e6316c2045e2d460c4fb79572f872f9d2f98fddc6d9d3949c71f0b6ad0ef8a2706cf49db26dfb02a9cb81433abb8f340f015e1d20a9692279abe9477b72c8e +"browserify-aes@npm:^1.2.0": + version: 1.2.0 + resolution: "browserify-aes@npm:1.2.0" + dependencies: + buffer-xor: ^1.0.3 + cipher-base: ^1.0.0 + create-hash: ^1.1.0 + evp_bytestokey: ^1.0.3 + inherits: ^2.0.1 + safe-buffer: ^5.0.1 + checksum: 4a17c3eb55a2aa61c934c286f34921933086bf6d67f02d4adb09fcc6f2fc93977b47d9d884c25619144fccd47b3b3a399e1ad8b3ff5a346be47270114bcf7104 languageName: node linkType: hard -"@types/yargs@npm:^17.0.8": - version: 17.0.28 - resolution: "@types/yargs@npm:17.0.28" +"browserify-fs@npm:^1.0.0": + version: 1.0.0 + resolution: "browserify-fs@npm:1.0.0" dependencies: - "@types/yargs-parser": "*" - checksum: f78c5e5c29903933c0557b4ffcd1d0b8564d66859c8ca4aa51da3714e49109ed7c2644334a1918d033df19028f4cecc91fd2e502651bb8e8451f246c371da847 + level-filesystem: ^1.0.1 + level-js: ^2.1.3 + levelup: ^0.18.2 + checksum: e0c35cf42c839c0a217048b1671d91ee6e53fd05f163db4f809e46c2f6264f784768e7c850abc200b0eaca378d42e00e01876eda21fd84fc0a4280bd6200a9c3 languageName: node linkType: hard -"@types/yauzl@npm:^2.9.1": - version: 2.10.0 - resolution: "@types/yauzl@npm:2.10.0" +"browserslist@npm:^4.0.0, browserslist@npm:^4.14.5, browserslist@npm:^4.18.1, browserslist@npm:^4.21.10, browserslist@npm:^4.21.4, browserslist@npm:^4.21.9, browserslist@npm:^4.22.2": + version: 4.22.2 + resolution: "browserslist@npm:4.22.2" dependencies: - "@types/node": "*" - checksum: 55d27ae5d346ea260e40121675c24e112ef0247649073848e5d4e03182713ae4ec8142b98f61a1c6cbe7d3b72fa99bbadb65d8b01873e5e605cdc30f1ff70ef2 + caniuse-lite: ^1.0.30001565 + electron-to-chromium: ^1.4.601 + node-releases: ^2.0.14 + update-browserslist-db: ^1.0.13 + bin: + browserslist: cli.js + checksum: 33ddfcd9145220099a7a1ac533cecfe5b7548ffeb29b313e1b57be6459000a1f8fa67e781cf4abee97268ac594d44134fcc4a6b2b4750ceddc9796e3a22076d9 languageName: node linkType: hard -"@typescript-eslint/eslint-plugin@npm:^6.7.3": - version: 6.7.3 - resolution: "@typescript-eslint/eslint-plugin@npm:6.7.3" +"bs58@npm:^4.0.0": + version: 4.0.1 + resolution: "bs58@npm:4.0.1" dependencies: - "@eslint-community/regexpp": ^4.5.1 - "@typescript-eslint/scope-manager": 6.7.3 - "@typescript-eslint/type-utils": 6.7.3 - "@typescript-eslint/utils": 6.7.3 - "@typescript-eslint/visitor-keys": 6.7.3 - debug: ^4.3.4 - graphemer: ^1.4.0 - ignore: ^5.2.4 - natural-compare: ^1.4.0 - semver: ^7.5.4 - ts-api-utils: ^1.0.1 - peerDependencies: - "@typescript-eslint/parser": ^6.0.0 || ^6.0.0-alpha - eslint: ^7.0.0 || ^8.0.0 - peerDependenciesMeta: - typescript: - optional: true - checksum: ac2790882199047abc59c0407a862f3339645623d03ea0aae5a73fd4bac6abfb753afcf9f23fd51cd1d5aa73f132ef94e2850774c4b2a3d99ebb83030b09429c + base-x: ^3.0.2 + checksum: b3c5365bb9e0c561e1a82f1a2d809a1a692059fae016be233a6127ad2f50a6b986467c3a50669ce4c18929dcccb297c5909314dd347a25a68c21b68eb3e95ac2 languageName: node linkType: hard -"@typescript-eslint/parser@npm:^6.7.3": - version: 6.7.3 - resolution: "@typescript-eslint/parser@npm:6.7.3" +"bs58check@npm:^2.1.2": + version: 2.1.2 + resolution: "bs58check@npm:2.1.2" dependencies: - "@typescript-eslint/scope-manager": 6.7.3 - "@typescript-eslint/types": 6.7.3 - "@typescript-eslint/typescript-estree": 6.7.3 - "@typescript-eslint/visitor-keys": 6.7.3 - debug: ^4.3.4 - peerDependencies: - eslint: ^7.0.0 || ^8.0.0 - peerDependenciesMeta: - typescript: - optional: true - checksum: 
658f3294b281db06ebb46884b92172d45eb402ec25c7d4a09cc2461eee359266029af7a49eb9006ee7c3e0003ba53a06f4bee84aa2e99d2d9a3507b9c84ff775 + bs58: ^4.0.0 + create-hash: ^1.1.0 + safe-buffer: ^5.1.2 + checksum: 43bdf08a5dd04581b78f040bc4169480e17008da482ffe2a6507327bbc4fc5c28de0501f7faf22901cfe57fbca79cbb202ca529003fedb4cb8dccd265b38e54d languageName: node linkType: hard -"@typescript-eslint/scope-manager@npm:6.7.3": - version: 6.7.3 - resolution: "@typescript-eslint/scope-manager@npm:6.7.3" - dependencies: - "@typescript-eslint/types": 6.7.3 - "@typescript-eslint/visitor-keys": 6.7.3 - checksum: 08215444b7c70af5c45e185ba3c31c550a0a671ab464a67058cbee680c94aa9d1a062958976d8b09f7bcabf2f63114cdc7be2e4e32e2dfdcb2d7cc79961b7b32 +"buffer-crc32@npm:~0.2.3": + version: 0.2.13 + resolution: "buffer-crc32@npm:0.2.13" + checksum: 06252347ae6daca3453b94e4b2f1d3754a3b146a111d81c68924c22d91889a40623264e95e67955b1cb4a68cbedf317abeabb5140a9766ed248973096db5ce1c languageName: node linkType: hard -"@typescript-eslint/type-utils@npm:6.7.3": - version: 6.7.3 - resolution: "@typescript-eslint/type-utils@npm:6.7.3" - dependencies: - "@typescript-eslint/typescript-estree": 6.7.3 - "@typescript-eslint/utils": 6.7.3 - debug: ^4.3.4 - ts-api-utils: ^1.0.1 - peerDependencies: - eslint: ^7.0.0 || ^8.0.0 - peerDependenciesMeta: - typescript: - optional: true - checksum: f30a5ab4f88f76457810d72e3ada79fefd94dbbb456069ac004bd7601c9b7f15689b906b66cd849c230f30ae65f6f7039fb169609177ab545b34bacab64f015e +"buffer-from@npm:^1.0.0": + version: 1.1.2 + resolution: "buffer-from@npm:1.1.2" + checksum: 0448524a562b37d4d7ed9efd91685a5b77a50672c556ea254ac9a6d30e3403a517d8981f10e565db24e8339413b43c97ca2951f10e399c6125a0d8911f5679bb languageName: node linkType: hard -"@typescript-eslint/types@npm:6.7.3": - version: 6.7.3 - resolution: "@typescript-eslint/types@npm:6.7.3" - checksum: 4adb6177ec710e7438610fee553839a7abecc498dbb36d0170786bab66c5e5415cd720ac06419fd905458ad88c39b661603af5f013adc299137ccb4c51c4c879 +"buffer-xor@npm:^1.0.3": + version: 1.0.3 + resolution: "buffer-xor@npm:1.0.3" + checksum: 10c520df29d62fa6e785e2800e586a20fc4f6dfad84bcdbd12e1e8a83856de1cb75c7ebd7abe6d036bbfab738a6cf18a3ae9c8e5a2e2eb3167ca7399ce65373a languageName: node linkType: hard -"@typescript-eslint/typescript-estree@npm:6.7.3": - version: 6.7.3 - resolution: "@typescript-eslint/typescript-estree@npm:6.7.3" +"buffer@npm:^5.2.1, buffer@npm:^5.5.0": + version: 5.7.1 + resolution: "buffer@npm:5.7.1" dependencies: - "@typescript-eslint/types": 6.7.3 - "@typescript-eslint/visitor-keys": 6.7.3 - debug: ^4.3.4 - globby: ^11.1.0 - is-glob: ^4.0.3 - semver: ^7.5.4 - ts-api-utils: ^1.0.1 - peerDependenciesMeta: - typescript: - optional: true - checksum: eaba1feb0e6882b0bad292172c118aac43ba683d1f04b940b542a20035468d030b062b036ea49eca36aa21782e9b1019e87717003b3c3db7d12dc707466b7eb7 + base64-js: ^1.3.1 + ieee754: ^1.1.13 + checksum: e2cf8429e1c4c7b8cbd30834ac09bd61da46ce35f5c22a78e6c2f04497d6d25541b16881e30a019c6fd3154150650ccee27a308eff3e26229d788bbdeb08ab84 languageName: node linkType: hard -"@typescript-eslint/utils@npm:6.7.3": - version: 6.7.3 - resolution: "@typescript-eslint/utils@npm:6.7.3" +"buffer@npm:^6.0.3": + version: 6.0.3 + resolution: "buffer@npm:6.0.3" dependencies: - "@eslint-community/eslint-utils": ^4.4.0 - "@types/json-schema": ^7.0.12 - "@types/semver": ^7.5.0 - "@typescript-eslint/scope-manager": 6.7.3 - "@typescript-eslint/types": 6.7.3 - "@typescript-eslint/typescript-estree": 6.7.3 - semver: ^7.5.4 - peerDependencies: - eslint: ^7.0.0 || ^8.0.0 - 
checksum: 685b7c9fa95ad085f30e26431dc41b3059a42a16925defe2a94b32fb46974bfc168000de7d4d9ad4a1d0568a983f9d3c01ea6bc6cfa9a798e482719af9e9165b + base64-js: ^1.3.1 + ieee754: ^1.2.1 + checksum: 5ad23293d9a731e4318e420025800b42bf0d264004c0286c8cc010af7a270c7a0f6522e84f54b9ad65cbd6db20b8badbfd8d2ebf4f80fa03dab093b89e68c3f9 languageName: node linkType: hard -"@typescript-eslint/visitor-keys@npm:6.7.3": - version: 6.7.3 - resolution: "@typescript-eslint/visitor-keys@npm:6.7.3" - dependencies: - "@typescript-eslint/types": 6.7.3 - eslint-visitor-keys: ^3.4.1 - checksum: cef64173a919107f420703e204d97d0afef0d9bd7a67570df5bdb39ac9464211c5a7b3af735d8f41e8004b443ab83e88b1d6fb951886aed4d3fe9d4778667199 +"builtin-modules@npm:^3.3.0": + version: 3.3.0 + resolution: "builtin-modules@npm:3.3.0" + checksum: db021755d7ed8be048f25668fe2117620861ef6703ea2c65ed2779c9e3636d5c3b82325bd912244293959ff3ae303afa3471f6a15bf5060c103e4cc3a839749d languageName: node linkType: hard -"@web/browser-logs@npm:^0.2.6": - version: 0.2.6 - resolution: "@web/browser-logs@npm:0.2.6" +"bundle-name@npm:^3.0.0": + version: 3.0.0 + resolution: "bundle-name@npm:3.0.0" dependencies: - errorstacks: ^2.2.0 - checksum: 82693e37a7e5a3c3df255e1e4feef6e6c2f2b7f5f883e1a9fd233d09a22c4f3e9e3dfd2ec809d7a02f0894156f26b89f1759bf4e9317640ee3630e9a3d9ec2a8 + run-applescript: ^5.0.0 + checksum: edf2b1fbe6096ed32e7566947ace2ea937ee427391744d7510a2880c4b9a5b3543d3f6c551236a29e5c87d3195f8e2912516290e638c15bcbede7b37cc375615 languageName: node linkType: hard -"@web/browser-logs@npm:^0.3.2": - version: 0.3.3 - resolution: "@web/browser-logs@npm:0.3.3" - dependencies: - errorstacks: ^2.2.0 - checksum: 08db1332f7151e8e0fb1ed61e6e10f736e44f5c9675423f3af0c24f3a764560ea115e956cf3a4d9cd7a0826c0126accf966ebdb0bfa25a72c7e79da47b2cee3b +"bytes@npm:3.0.0": + version: 3.0.0 + resolution: "bytes@npm:3.0.0" + checksum: a2b386dd8188849a5325f58eef69c3b73c51801c08ffc6963eddc9be244089ba32d19347caf6d145c86f315ae1b1fc7061a32b0c1aa6379e6a719090287ed101 languageName: node linkType: hard -"@web/config-loader@npm:^0.1.3": - version: 0.1.3 - resolution: "@web/config-loader@npm:0.1.3" - dependencies: - semver: ^7.3.4 - checksum: 278554bd00b757eaf296ba904a224c61d4698df1a5d6c04931c40bc6bb308e81e767055cbf283b763cc530aae6b200bb950aa19eb41aa8979a3a2b29e5f0ac7a +"bytes@npm:3.1.2": + version: 3.1.2 + resolution: "bytes@npm:3.1.2" + checksum: e4bcd3948d289c5127591fbedf10c0b639ccbf00243504e4e127374a15c3bc8eed0d28d4aaab08ff6f1cf2abc0cce6ba3085ed32f4f90e82a5683ce0014e1b6e languageName: node linkType: hard -"@web/dev-server-core@npm:^0.4.1": - version: 0.4.1 - resolution: "@web/dev-server-core@npm:0.4.1" +"cacache@npm:^18.0.0": + version: 18.0.1 + resolution: "cacache@npm:18.0.1" dependencies: - "@types/koa": ^2.11.6 - "@types/ws": ^7.4.0 - "@web/parse5-utils": ^1.3.1 - chokidar: ^3.4.3 - clone: ^2.1.2 - es-module-lexer: ^1.0.0 - get-stream: ^6.0.0 - is-stream: ^2.0.0 - isbinaryfile: ^5.0.0 - koa: ^2.13.0 - koa-etag: ^4.0.0 - koa-send: ^5.0.1 - koa-static: ^5.0.0 - lru-cache: ^6.0.0 - mime-types: ^2.1.27 - parse5: ^6.0.1 - picomatch: ^2.2.2 - ws: ^7.4.2 - checksum: 4cf728ac781c7831c9c59ffaa1bd2dca1f1e8a6553bedd0d80e47d946ea427067eb1d07b028fc8296a36930c1dd5631e0bc1ccf8f0f4b9203da362c958c1833e + "@npmcli/fs": ^3.1.0 + fs-minipass: ^3.0.0 + glob: ^10.2.2 + lru-cache: ^10.0.1 + minipass: ^7.0.3 + minipass-collect: ^2.0.1 + minipass-flush: ^1.0.5 + minipass-pipeline: ^1.2.4 + p-map: ^4.0.0 + ssri: ^10.0.0 + tar: ^6.1.11 + unique-filename: ^3.0.0 + checksum: 
5a0b3b2ea451a0379814dc1d3c81af48c7c6db15cd8f7d72e028501ae0036a599a99bbac9687bfec307afb2760808d1c7708e9477c8c70d2b166e7d80b162a23 languageName: node linkType: hard -"@web/dev-server-core@npm:^0.5.1": - version: 0.5.2 - resolution: "@web/dev-server-core@npm:0.5.2" +"cache-content-type@npm:^1.0.0": + version: 1.0.1 + resolution: "cache-content-type@npm:1.0.1" dependencies: - "@types/koa": ^2.11.6 - "@types/ws": ^7.4.0 - "@web/parse5-utils": ^2.0.0 - chokidar: ^3.4.3 - clone: ^2.1.2 - es-module-lexer: ^1.0.0 - get-stream: ^6.0.0 - is-stream: ^2.0.0 - isbinaryfile: ^5.0.0 - koa: ^2.13.0 - koa-etag: ^4.0.0 - koa-send: ^5.0.1 - koa-static: ^5.0.0 - lru-cache: ^8.0.4 - mime-types: ^2.1.27 - parse5: ^6.0.1 - picomatch: ^2.2.2 - ws: ^7.4.2 - checksum: 1a42f71a3d7eafd41bdb33a8cf9d37bb7e205704f3bcde9e32bd0aa639759d7a6e4448efe448be127042c46816931dd7585575e0f30a15c1b22576d578ccaacc + mime-types: ^2.1.18 + ylru: ^1.2.0 + checksum: 18db4d59452669ccbfd7146a1510a37eb28e9eccf18ca7a4eb603dff2edc5cccdca7498fc3042a2978f76f11151fba486eb9eb69d9afa3fb124957870aef4fd3 languageName: node linkType: hard -"@web/dev-server-esbuild@npm:^0.3.6": - version: 0.3.6 - resolution: "@web/dev-server-esbuild@npm:0.3.6" +"cacheable-lookup@npm:^7.0.0": + version: 7.0.0 + resolution: "cacheable-lookup@npm:7.0.0" + checksum: 9e2856763fc0a7347ab34d704c010440b819d4bb5e3593b664381b7433e942dd22e67ee5581f12256f908e79b82d30b86ebbacf40a081bfe10ee93fbfbc2d6a9 + languageName: node + linkType: hard + +"cacheable-request@npm:^10.2.8": + version: 10.2.14 + resolution: "cacheable-request@npm:10.2.14" dependencies: - "@mdn/browser-compat-data": ^4.0.0 - "@web/dev-server-core": ^0.4.1 - esbuild: ^0.16 || ^0.17 - parse5: ^6.0.1 - ua-parser-js: ^1.0.33 - checksum: ed29357d8a832c695f129de62bd658744b46b1e17a5aab24ad6d7cd09a90f27714d83fc6ece2471c35bff55f4f09435a18af65b37d65709019bfe09c10f4f9eb + "@types/http-cache-semantics": ^4.0.2 + get-stream: ^6.0.1 + http-cache-semantics: ^4.1.1 + keyv: ^4.5.3 + mimic-response: ^4.0.0 + normalize-url: ^8.0.0 + responselike: ^3.0.0 + checksum: 56f2b8e1c497c91f8391f0b099d19907a7dde25e71087e622b23e45fc8061736c2a6964ef121b16f377c3c61079cf8dc17320ab54004209d1343e4d26aba7015 languageName: node linkType: hard -"@web/dev-server-rollup@npm:^0.4.1": - version: 0.4.1 - resolution: "@web/dev-server-rollup@npm:0.4.1" +"cacheable-request@npm:^6.0.0": + version: 6.1.0 + resolution: "cacheable-request@npm:6.1.0" dependencies: - "@rollup/plugin-node-resolve": ^13.0.4 - "@web/dev-server-core": ^0.4.1 - nanocolors: ^0.2.1 - parse5: ^6.0.1 - rollup: ^2.67.0 - whatwg-url: ^11.0.0 - checksum: a0c3566f67b5a5ead3822431302ddcaa9d043b18fdcf1190056a4e0539e5d5b545ebfecaf6021412eb4b5b6e074c2b1eff35c71e859195623c7c07e065f9df58 + clone-response: ^1.0.2 + get-stream: ^5.1.0 + http-cache-semantics: ^4.0.0 + keyv: ^3.0.0 + lowercase-keys: ^2.0.0 + normalize-url: ^4.1.0 + responselike: ^1.0.2 + checksum: b510b237b18d17e89942e9ee2d2a077cb38db03f12167fd100932dfa8fc963424bfae0bfa1598df4ae16c944a5484e43e03df8f32105b04395ee9495e9e4e9f1 languageName: node linkType: hard -"@web/dev-server@npm:^0.1.38": - version: 0.1.38 - resolution: "@web/dev-server@npm:0.1.38" +"call-bind@npm:^1.0.0, call-bind@npm:^1.0.2, call-bind@npm:^1.0.4, call-bind@npm:^1.0.5": + version: 1.0.5 + resolution: "call-bind@npm:1.0.5" dependencies: - "@babel/code-frame": ^7.12.11 - "@types/command-line-args": ^5.0.0 - "@web/config-loader": ^0.1.3 - "@web/dev-server-core": ^0.4.1 - "@web/dev-server-rollup": ^0.4.1 - camelcase: ^6.2.0 - command-line-args: ^5.1.1 - command-line-usage: 
^7.0.1 - debounce: ^1.2.0 - deepmerge: ^4.2.2 - ip: ^1.1.5 - nanocolors: ^0.2.1 - open: ^8.0.2 - portfinder: ^1.0.32 - bin: - wds: dist/bin.js - web-dev-server: dist/bin.js - checksum: eeaf34f8744f58cfb9493155ad8548a87cae4e445a2fa894610b070f66cb303614d247bb609e378b9df342935ad980a259630317c444d19f9796abfcfb20bb13 + function-bind: ^1.1.2 + get-intrinsic: ^1.2.1 + set-function-length: ^1.1.1 + checksum: 449e83ecbd4ba48e7eaac5af26fea3b50f8f6072202c2dd7c5a6e7a6308f2421abe5e13a3bbd55221087f76320c5e09f25a8fdad1bab2b77c68ae74d92234ea5 languageName: node linkType: hard -"@web/parse5-utils@npm:^1.3.1": - version: 1.3.1 - resolution: "@web/parse5-utils@npm:1.3.1" +"callsites@npm:^3.0.0, callsites@npm:^3.1.0": + version: 3.1.0 + resolution: "callsites@npm:3.1.0" + checksum: 072d17b6abb459c2ba96598918b55868af677154bec7e73d222ef95a8fdb9bbf7dae96a8421085cdad8cd190d86653b5b6dc55a4484f2e5b2e27d5e0c3fc15b3 + languageName: node + linkType: hard + +"camel-case@npm:^4.1.2": + version: 4.1.2 + resolution: "camel-case@npm:4.1.2" dependencies: - "@types/parse5": ^6.0.1 - parse5: ^6.0.1 - checksum: 3320b2c4ea1e6a2ff1e57086b0c697a71dac7a6e54da86b2fb8e6c1d5a673bf9b911a743a10daa8cd62a571719edf3e66c9b17e87d79cc982f234bf141e2e178 + pascal-case: ^3.1.2 + tslib: ^2.0.3 + checksum: bcbd25cd253b3cbc69be3f535750137dbf2beb70f093bdc575f73f800acc8443d34fd52ab8f0a2413c34f1e8203139ffc88428d8863e4dfe530cfb257a379ad6 languageName: node linkType: hard -"@web/parse5-utils@npm:^2.0.0": +"camelcase-css@npm:2.0.1": version: 2.0.1 - resolution: "@web/parse5-utils@npm:2.0.1" - dependencies: - "@types/parse5": ^6.0.1 - parse5: ^6.0.1 - checksum: d3993b0dee350e040bf5b13749283df88c97a48b063f8938fb21623422cd5f03b641393ca03bc724936e9f17d8bfd7939f501de6eb1c33ad82f6ac5112bf6cff + resolution: "camelcase-css@npm:2.0.1" + checksum: 1cec2b3b3dcb5026688a470b00299a8db7d904c4802845c353dbd12d9d248d3346949a814d83bfd988d4d2e5b9904c07efe76fecd195a1d4f05b543e7c0b56b1 languageName: node linkType: hard -"@web/test-runner-chrome@npm:^0.12.1": - version: 0.12.1 - resolution: "@web/test-runner-chrome@npm:0.12.1" - dependencies: - "@web/test-runner-core": ^0.10.29 - "@web/test-runner-coverage-v8": ^0.5.0 - chrome-launcher: ^0.15.0 - puppeteer-core: ^19.8.1 - checksum: 08964e4c22c286231a6bdc003393316a7e0c7878560a9394d8bb9212c3c2e37799bc0aaf4732921213647c3dc89169146f65746d381feaa9c281c0373bf9da59 +"camelcase@npm:^6.0.0, camelcase@npm:^6.2.0": + version: 6.3.0 + resolution: "camelcase@npm:6.3.0" + checksum: 8c96818a9076434998511251dcb2761a94817ea17dbdc37f47ac080bd088fc62c7369429a19e2178b993497132c8cbcf5cc1f44ba963e76782ba469c0474938d languageName: node linkType: hard -"@web/test-runner-commands@npm:^0.6.6": - version: 0.6.6 - resolution: "@web/test-runner-commands@npm:0.6.6" - dependencies: - "@web/test-runner-core": ^0.10.29 - mkdirp: ^1.0.4 - checksum: b25533edd9ec59aeec28756a52ae4c6730388c336fa94e0c21eedf208012efd9aedf96c47ebad9a98cce9d87c4ee539b318a571e1e2bfb1bf8e5f1f6889c98e4 +"camelcase@npm:^7.0.0, camelcase@npm:^7.0.1": + version: 7.0.1 + resolution: "camelcase@npm:7.0.1" + checksum: 86ab8f3ebf08bcdbe605a211a242f00ed30d8bfb77dab4ebb744dd36efbc84432d1c4adb28975ba87a1b8be40a80fbd1e60e2f06565315918fa7350011a26d3d languageName: node linkType: hard -"@web/test-runner-core@npm:^0.10.20, @web/test-runner-core@npm:^0.10.29": - version: 0.10.29 - resolution: "@web/test-runner-core@npm:0.10.29" +"caniuse-api@npm:^3.0.0": + version: 3.0.0 + resolution: "caniuse-api@npm:3.0.0" dependencies: - "@babel/code-frame": ^7.12.11 - "@types/babel__code-frame": ^7.0.2 - 
"@types/co-body": ^6.1.0 - "@types/convert-source-map": ^2.0.0 - "@types/debounce": ^1.2.0 - "@types/istanbul-lib-coverage": ^2.0.3 - "@types/istanbul-reports": ^3.0.0 - "@web/browser-logs": ^0.2.6 - "@web/dev-server-core": ^0.4.1 - chokidar: ^3.4.3 - cli-cursor: ^3.1.0 - co-body: ^6.1.0 - convert-source-map: ^2.0.0 - debounce: ^1.2.0 - dependency-graph: ^0.11.0 - globby: ^11.0.1 - ip: ^1.1.5 - istanbul-lib-coverage: ^3.0.0 - istanbul-lib-report: ^3.0.0 - istanbul-reports: ^3.0.2 - log-update: ^4.0.0 - nanocolors: ^0.2.1 - nanoid: ^3.1.25 - open: ^8.0.2 - picomatch: ^2.2.2 - source-map: ^0.7.3 - checksum: 635a510442bea3bce97596a2aed1c58a6154b4b83a44bf3e9c9497a751f42426cae5f67555916c4fd63064a4e91a5e26755e3090887ebac38ec0ab2691e1fe6c + browserslist: ^4.0.0 + caniuse-lite: ^1.0.0 + lodash.memoize: ^4.1.2 + lodash.uniq: ^4.5.0 + checksum: db2a229383b20d0529b6b589dde99d7b6cb56ba371366f58cbbfa2929c9f42c01f873e2b6ef641d4eda9f0b4118de77dbb2805814670bdad4234bf08e720b0b4 languageName: node linkType: hard -"@web/test-runner-core@npm:^0.11.0": - version: 0.11.4 - resolution: "@web/test-runner-core@npm:0.11.4" - dependencies: - "@babel/code-frame": ^7.12.11 - "@types/babel__code-frame": ^7.0.2 - "@types/co-body": ^6.1.0 - "@types/convert-source-map": ^2.0.0 - "@types/debounce": ^1.2.0 - "@types/istanbul-lib-coverage": ^2.0.3 - "@types/istanbul-reports": ^3.0.0 - "@web/browser-logs": ^0.3.2 - "@web/dev-server-core": ^0.5.1 - chokidar: ^3.4.3 - cli-cursor: ^3.1.0 - co-body: ^6.1.0 - convert-source-map: ^2.0.0 - debounce: ^1.2.0 - dependency-graph: ^0.11.0 - globby: ^11.0.1 - ip: ^1.1.5 - istanbul-lib-coverage: ^3.0.0 - istanbul-lib-report: ^3.0.1 - istanbul-reports: ^3.0.2 - log-update: ^4.0.0 - nanocolors: ^0.2.1 - nanoid: ^3.1.25 - open: ^8.0.2 - picomatch: ^2.2.2 - source-map: ^0.7.3 - checksum: 45aea23ae1b0d112e2d331c15e1e581b5d208c278e6ecab746cffba81d0d11a8fd7b3fb0da488dd3c3c01e49debf7f3e8261031c2b86a7c8716fb3cc3e1a172c +"caniuse-lite@npm:^1.0.0, caniuse-lite@npm:^1.0.30001538, caniuse-lite@npm:^1.0.30001565": + version: 1.0.30001568 + resolution: "caniuse-lite@npm:1.0.30001568" + checksum: 7092aaa246dc8531fbca5b47be91e92065db7e5c04cc9e3d864e848f8f1be769ac6754429e843a5e939f7331a771e8b0a1bc3b13495c66b748c65e2f5bdb1220 languageName: node linkType: hard -"@web/test-runner-coverage-v8@npm:^0.5.0": - version: 0.5.0 - resolution: "@web/test-runner-coverage-v8@npm:0.5.0" - dependencies: - "@web/test-runner-core": ^0.10.20 - istanbul-lib-coverage: ^3.0.0 - picomatch: ^2.2.2 - v8-to-istanbul: ^9.0.1 - checksum: e69dc6379cff24f28bd21cc37a661945fbf7f3fd532da813e74f4042efe17fc191cdb7c09f1e1ea276167952b0116478ba0fe7af0966fa4867278c3a2cd772df +"case@npm:^1.6.3": + version: 1.6.3 + resolution: "case@npm:1.6.3" + checksum: febe73278f910b0d28aab7efd6f51c235f9aa9e296148edb56dfb83fd58faa88308c30ce9a0122b6e53e0362c44f4407105bd5ef89c46860fc2b184e540fd68d languageName: node linkType: hard -"@web/test-runner-coverage-v8@npm:^0.7.0": - version: 0.7.1 - resolution: "@web/test-runner-coverage-v8@npm:0.7.1" - dependencies: - "@web/test-runner-core": ^0.11.0 - istanbul-lib-coverage: ^3.0.0 - lru-cache: ^8.0.4 - picomatch: ^2.2.2 - v8-to-istanbul: ^9.0.1 - checksum: 317b11502b1378097598458a54054353fdb94d3bf9e0954daa9a3b1dcc1f0664032cd18f48292e763b890a132a980f5a5ead6555c33e9fb064d0ca960a884292 +"catering@npm:^2.1.0, catering@npm:^2.1.1": + version: 2.1.1 + resolution: "catering@npm:2.1.1" + checksum: 205daefa69c935b0c19f3d8f2e0a520dd69aebe9bda55902958003f7c9cff8f967dfb90071b421bd6eb618576f657a89d2bc0986872c9bc04bbd66655e9d4bd6 
languageName: node linkType: hard -"@web/test-runner-mocha@npm:^0.7.5": - version: 0.7.5 - resolution: "@web/test-runner-mocha@npm:0.7.5" +"ccount@npm:^1.0.0": + version: 1.1.0 + resolution: "ccount@npm:1.1.0" + checksum: b335a79d0aa4308919cf7507babcfa04ac63d389ebed49dbf26990d4607c8a4713cde93cc83e707d84571ddfe1e7615dad248be9bc422ae4c188210f71b08b78 + languageName: node + linkType: hard + +"ccount@npm:^2.0.0": + version: 2.0.1 + resolution: "ccount@npm:2.0.1" + checksum: 48193dada54c9e260e0acf57fc16171a225305548f9ad20d5471e0f7a8c026aedd8747091dccb0d900cde7df4e4ddbd235df0d8de4a64c71b12f0d3303eeafd4 + languageName: node + linkType: hard + +"chai-as-promised@npm:^7.1.1": + version: 7.1.1 + resolution: "chai-as-promised@npm:7.1.1" dependencies: - "@types/mocha": ^8.2.0 - "@web/test-runner-core": ^0.10.20 - checksum: 12f87299945d230815bb783de2953ac4239306c1a67145ef5b78cfb0b361ae7f659e5d3e5150af2cedc6f2c55adf10652b761f016430a7ac2d7f77b91ecb9cd1 + check-error: ^1.0.2 + peerDependencies: + chai: ">= 2.1.2 < 5" + checksum: 7262868a5b51a12af4e432838ddf97a893109266a505808e1868ba63a12de7ee1166e9d43b5c501a190c377c1b11ecb9ff8e093c89f097ad96c397e8ec0f8d6a languageName: node linkType: hard -"@web/test-runner-playwright@npm:^0.10.0": - version: 0.10.1 - resolution: "@web/test-runner-playwright@npm:0.10.1" +"chai@npm:^4.3.10, chai@npm:^4.3.7, chai@npm:^4.3.8": + version: 4.3.10 + resolution: "chai@npm:4.3.10" dependencies: - "@web/test-runner-core": ^0.11.0 - "@web/test-runner-coverage-v8": ^0.7.0 - playwright: ^1.22.2 - checksum: d2aebc54c0444fb434671dc0d3a3912f76570eae4a909b7912cf46b9535232c7f7829729554e8629f546abce48433f9f5b21d583011582753fbb40e288c28e80 + assertion-error: ^1.1.0 + check-error: ^1.0.3 + deep-eql: ^4.1.3 + get-func-name: ^2.0.2 + loupe: ^2.3.6 + pathval: ^1.1.1 + type-detect: ^4.0.8 + checksum: 536668c60a0d985a0fbd94418028e388d243a925d7c5e858c7443e334753511614a3b6a124bac9ca077dfc4c37acc367d62f8c294960f440749536dc181dfc6d languageName: node linkType: hard -"@web/test-runner@npm:^0.15.3": - version: 0.15.3 - resolution: "@web/test-runner@npm:0.15.3" +"chalk-template@npm:0.4.0, chalk-template@npm:^0.4.0": + version: 0.4.0 + resolution: "chalk-template@npm:0.4.0" dependencies: - "@web/browser-logs": ^0.2.6 - "@web/config-loader": ^0.1.3 - "@web/dev-server": ^0.1.38 - "@web/test-runner-chrome": ^0.12.1 - "@web/test-runner-commands": ^0.6.6 - "@web/test-runner-core": ^0.10.29 - "@web/test-runner-mocha": ^0.7.5 - camelcase: ^6.2.0 - command-line-args: ^5.1.1 - command-line-usage: ^7.0.1 - convert-source-map: ^2.0.0 - diff: ^5.0.0 - globby: ^11.0.1 - nanocolors: ^0.2.1 - portfinder: ^1.0.32 - source-map: ^0.7.3 - bin: - web-test-runner: dist/bin.js - wtr: dist/bin.js - checksum: 75d00d4f15f9977ff4e8fca84e1c7f9d834073688df06d9e4b62bf43cad65a36b2ae21b9ebab5706e4d3b07fc639bb90758b1be1df036c8b80137ec3407a8f08 + chalk: ^4.1.2 + checksum: 6c706802a79a7963cbce18f022b046fe86e438a67843151868852f80ea7346e975a6a9749991601e7e5d3b6a6c4852a04c53dc966a9a3d04031bd0e0ed53c819 languageName: node linkType: hard -"@webassemblyjs/ast@npm:1.11.6, @webassemblyjs/ast@npm:^1.11.5": - version: 1.11.6 - resolution: "@webassemblyjs/ast@npm:1.11.6" +"chalk-template@npm:^1.1.0": + version: 1.1.0 + resolution: "chalk-template@npm:1.1.0" dependencies: - "@webassemblyjs/helper-numbers": 1.11.6 - "@webassemblyjs/helper-wasm-bytecode": 1.11.6 - checksum: 38ef1b526ca47c210f30975b06df2faf1a8170b1636ce239fc5738fc231ce28389dd61ecedd1bacfc03cbe95b16d1af848c805652080cb60982836eb4ed2c6cf + chalk: ^5.2.0 + checksum: 
868aae8d4e7556ad2f35de4e04fe65dbe1ea6c5c80ad783f1c156d0a5c33f444c6814f49cbb68fe348c78e99daf2bcf566b47ad7e13603e4691ca78b2f422824 languageName: node linkType: hard -"@webassemblyjs/floating-point-hex-parser@npm:1.11.6": - version: 1.11.6 - resolution: "@webassemblyjs/floating-point-hex-parser@npm:1.11.6" - checksum: 29b08758841fd8b299c7152eda36b9eb4921e9c584eb4594437b5cd90ed6b920523606eae7316175f89c20628da14326801090167cc7fbffc77af448ac84b7e2 +"chalk@npm:5.0.1": + version: 5.0.1 + resolution: "chalk@npm:5.0.1" + checksum: 7b45300372b908f0471fbf7389ce2f5de8d85bb949026fd51a1b95b10d0ed32c7ed5aab36dd5e9d2bf3191867909b4404cef75c5f4d2d1daeeacd301dd280b76 languageName: node linkType: hard -"@webassemblyjs/helper-api-error@npm:1.11.6": - version: 1.11.6 - resolution: "@webassemblyjs/helper-api-error@npm:1.11.6" - checksum: e8563df85161096343008f9161adb138a6e8f3c2cc338d6a36011aa55eabb32f2fd138ffe63bc278d009ada001cc41d263dadd1c0be01be6c2ed99076103689f +"chalk@npm:^2.4.1, chalk@npm:^2.4.2": + version: 2.4.2 + resolution: "chalk@npm:2.4.2" + dependencies: + ansi-styles: ^3.2.1 + escape-string-regexp: ^1.0.5 + supports-color: ^5.3.0 + checksum: ec3661d38fe77f681200f878edbd9448821924e0f93a9cefc0e26a33b145f1027a2084bf19967160d11e1f03bfe4eaffcabf5493b89098b2782c3fe0b03d80c2 languageName: node linkType: hard -"@webassemblyjs/helper-buffer@npm:1.11.6": - version: 1.11.6 - resolution: "@webassemblyjs/helper-buffer@npm:1.11.6" - checksum: b14d0573bf680d22b2522e8a341ec451fddd645d1f9c6bd9012ccb7e587a2973b86ab7b89fe91e1c79939ba96095f503af04369a3b356c8023c13a5893221644 +"chalk@npm:^4.0.0, chalk@npm:^4.1.0, chalk@npm:^4.1.2": + version: 4.1.2 + resolution: "chalk@npm:4.1.2" + dependencies: + ansi-styles: ^4.1.0 + supports-color: ^7.1.0 + checksum: fe75c9d5c76a7a98d45495b91b2172fa3b7a09e0cc9370e5c8feb1c567b85c4288e2b3fded7cfdd7359ac28d6b3844feb8b82b8686842e93d23c827c417e83fc languageName: node linkType: hard -"@webassemblyjs/helper-numbers@npm:1.11.6": - version: 1.11.6 - resolution: "@webassemblyjs/helper-numbers@npm:1.11.6" - dependencies: - "@webassemblyjs/floating-point-hex-parser": 1.11.6 - "@webassemblyjs/helper-api-error": 1.11.6 - "@xtuc/long": 4.2.2 - checksum: f4b562fa219f84368528339e0f8d273ad44e047a07641ffcaaec6f93e5b76fd86490a009aa91a294584e1436d74b0a01fa9fde45e333a4c657b58168b04da424 +"chalk@npm:^5.0.1, chalk@npm:^5.2.0, chalk@npm:^5.3.0": + version: 5.3.0 + resolution: "chalk@npm:5.3.0" + checksum: 623922e077b7d1e9dedaea6f8b9e9352921f8ae3afe739132e0e00c275971bdd331268183b2628cf4ab1727c45ea1f28d7e24ac23ce1db1eb653c414ca8a5a80 languageName: node linkType: hard -"@webassemblyjs/helper-wasm-bytecode@npm:1.11.6": - version: 1.11.6 - resolution: "@webassemblyjs/helper-wasm-bytecode@npm:1.11.6" - checksum: 3535ef4f1fba38de3475e383b3980f4bbf3de72bbb631c2b6584c7df45be4eccd62c6ff48b5edd3f1bcff275cfd605a37679ec199fc91fd0a7705d7f1e3972dc +"char-regex@npm:^1.0.2": + version: 1.0.2 + resolution: "char-regex@npm:1.0.2" + checksum: b563e4b6039b15213114626621e7a3d12f31008bdce20f9c741d69987f62aeaace7ec30f6018890ad77b2e9b4d95324c9f5acfca58a9441e3b1dcdd1e2525d17 languageName: node linkType: hard -"@webassemblyjs/helper-wasm-section@npm:1.11.6": - version: 1.11.6 - resolution: "@webassemblyjs/helper-wasm-section@npm:1.11.6" - dependencies: - "@webassemblyjs/ast": 1.11.6 - "@webassemblyjs/helper-buffer": 1.11.6 - "@webassemblyjs/helper-wasm-bytecode": 1.11.6 - "@webassemblyjs/wasm-gen": 1.11.6 - checksum: 
b2cf751bf4552b5b9999d27bbb7692d0aca75260140195cb58ea6374d7b9c2dc69b61e10b211a0e773f66209c3ddd612137ed66097e3684d7816f854997682e9 +"character-entities-html4@npm:^2.0.0": + version: 2.1.0 + resolution: "character-entities-html4@npm:2.1.0" + checksum: 7034aa7c7fa90309667f6dd50499c8a760c3d3a6fb159adb4e0bada0107d194551cdbad0714302f62d06ce4ed68565c8c2e15fdef2e8f8764eb63fa92b34b11d languageName: node linkType: hard -"@webassemblyjs/ieee754@npm:1.11.6": - version: 1.11.6 - resolution: "@webassemblyjs/ieee754@npm:1.11.6" - dependencies: - "@xtuc/ieee754": ^1.2.0 - checksum: 13574b8e41f6ca39b700e292d7edf102577db5650fe8add7066a320aa4b7a7c09a5056feccac7a74eb68c10dea9546d4461412af351f13f6b24b5f32379b49de +"character-entities-legacy@npm:^1.0.0": + version: 1.1.4 + resolution: "character-entities-legacy@npm:1.1.4" + checksum: fe03a82c154414da3a0c8ab3188e4237ec68006cbcd681cf23c7cfb9502a0e76cd30ab69a2e50857ca10d984d57de3b307680fff5328ccd427f400e559c3a811 languageName: node linkType: hard -"@webassemblyjs/leb128@npm:1.11.6": - version: 1.11.6 - resolution: "@webassemblyjs/leb128@npm:1.11.6" - dependencies: - "@xtuc/long": 4.2.2 - checksum: 7ea942dc9777d4b18a5ebfa3a937b30ae9e1d2ce1fee637583ed7f376334dd1d4274f813d2e250056cca803e0952def4b954913f1a3c9068bcd4ab4ee5143bf0 +"character-entities-legacy@npm:^3.0.0": + version: 3.0.0 + resolution: "character-entities-legacy@npm:3.0.0" + checksum: 7582af055cb488b626d364b7d7a4e46b06abd526fb63c0e4eb35bcb9c9799cc4f76b39f34fdccef2d1174ac95e53e9ab355aae83227c1a2505877893fce77731 languageName: node linkType: hard -"@webassemblyjs/utf8@npm:1.11.6": - version: 1.11.6 - resolution: "@webassemblyjs/utf8@npm:1.11.6" - checksum: 807fe5b5ce10c390cfdd93e0fb92abda8aebabb5199980681e7c3743ee3306a75729bcd1e56a3903980e96c885ee53ef901fcbaac8efdfa480f9c0dae1d08713 +"character-entities@npm:^1.0.0": + version: 1.2.4 + resolution: "character-entities@npm:1.2.4" + checksum: e1545716571ead57beac008433c1ff69517cd8ca5b336889321c5b8ff4a99c29b65589a701e9c086cda8a5e346a67295e2684f6c7ea96819fe85cbf49bf8686d languageName: node linkType: hard -"@webassemblyjs/wasm-edit@npm:^1.11.5": - version: 1.11.6 - resolution: "@webassemblyjs/wasm-edit@npm:1.11.6" - dependencies: - "@webassemblyjs/ast": 1.11.6 - "@webassemblyjs/helper-buffer": 1.11.6 - "@webassemblyjs/helper-wasm-bytecode": 1.11.6 - "@webassemblyjs/helper-wasm-section": 1.11.6 - "@webassemblyjs/wasm-gen": 1.11.6 - "@webassemblyjs/wasm-opt": 1.11.6 - "@webassemblyjs/wasm-parser": 1.11.6 - "@webassemblyjs/wast-printer": 1.11.6 - checksum: 29ce75870496d6fad864d815ebb072395a8a3a04dc9c3f4e1ffdc63fc5fa58b1f34304a1117296d8240054cfdbc38aca88e71fb51483cf29ffab0a61ef27b481 +"character-entities@npm:^2.0.0": + version: 2.0.2 + resolution: "character-entities@npm:2.0.2" + checksum: cf1643814023697f725e47328fcec17923b8f1799102a8a79c1514e894815651794a2bffd84bb1b3a4b124b050154e4529ed6e81f7c8068a734aecf07a6d3def languageName: node linkType: hard -"@webassemblyjs/wasm-gen@npm:1.11.6": - version: 1.11.6 - resolution: "@webassemblyjs/wasm-gen@npm:1.11.6" - dependencies: - "@webassemblyjs/ast": 1.11.6 - "@webassemblyjs/helper-wasm-bytecode": 1.11.6 - "@webassemblyjs/ieee754": 1.11.6 - "@webassemblyjs/leb128": 1.11.6 - "@webassemblyjs/utf8": 1.11.6 - checksum: a645a2eecbea24833c3260a249704a7f554ef4a94c6000984728e94bb2bc9140a68dfd6fd21d5e0bbb09f6dfc98e083a45760a83ae0417b41a0196ff6d45a23a +"character-reference-invalid@npm:^1.0.0": + version: 1.1.4 + resolution: "character-reference-invalid@npm:1.1.4" + checksum: 
20274574c70e05e2f81135f3b93285536bc8ff70f37f0809b0d17791a832838f1e49938382899ed4cb444e5bbd4314ca1415231344ba29f4222ce2ccf24fea0b languageName: node linkType: hard -"@webassemblyjs/wasm-opt@npm:1.11.6": - version: 1.11.6 - resolution: "@webassemblyjs/wasm-opt@npm:1.11.6" - dependencies: - "@webassemblyjs/ast": 1.11.6 - "@webassemblyjs/helper-buffer": 1.11.6 - "@webassemblyjs/wasm-gen": 1.11.6 - "@webassemblyjs/wasm-parser": 1.11.6 - checksum: b4557f195487f8e97336ddf79f7bef40d788239169aac707f6eaa2fa5fe243557c2d74e550a8e57f2788e70c7ae4e7d32f7be16101afe183d597b747a3bdd528 +"character-reference-invalid@npm:^2.0.0": + version: 2.0.1 + resolution: "character-reference-invalid@npm:2.0.1" + checksum: 98d3b1a52ae510b7329e6ee7f6210df14f1e318c5415975d4c9e7ee0ef4c07875d47c6e74230c64551f12f556b4a8ccc24d9f3691a2aa197019e72a95e9297ee languageName: node linkType: hard -"@webassemblyjs/wasm-parser@npm:1.11.6, @webassemblyjs/wasm-parser@npm:^1.11.5": - version: 1.11.6 - resolution: "@webassemblyjs/wasm-parser@npm:1.11.6" +"check-error@npm:^1.0.2, check-error@npm:^1.0.3": + version: 1.0.3 + resolution: "check-error@npm:1.0.3" dependencies: - "@webassemblyjs/ast": 1.11.6 - "@webassemblyjs/helper-api-error": 1.11.6 - "@webassemblyjs/helper-wasm-bytecode": 1.11.6 - "@webassemblyjs/ieee754": 1.11.6 - "@webassemblyjs/leb128": 1.11.6 - "@webassemblyjs/utf8": 1.11.6 - checksum: 8200a8d77c15621724a23fdabe58d5571415cda98a7058f542e670ea965dd75499f5e34a48675184947c66f3df23adf55df060312e6d72d57908e3f049620d8a + get-func-name: ^2.0.2 + checksum: e2131025cf059b21080f4813e55b3c480419256914601750b0fee3bd9b2b8315b531e551ef12560419b8b6d92a3636511322752b1ce905703239e7cc451b6399 languageName: node linkType: hard -"@webassemblyjs/wast-printer@npm:1.11.6": - version: 1.11.6 - resolution: "@webassemblyjs/wast-printer@npm:1.11.6" +"cheerio-select@npm:^2.1.0": + version: 2.1.0 + resolution: "cheerio-select@npm:2.1.0" dependencies: - "@webassemblyjs/ast": 1.11.6 - "@xtuc/long": 4.2.2 - checksum: d2fa6a4c427325ec81463e9c809aa6572af6d47f619f3091bf4c4a6fc34f1da3df7caddaac50b8e7a457f8784c62cd58c6311b6cb69b0162ccd8d4c072f79cf8 + boolbase: ^1.0.0 + css-select: ^5.1.0 + css-what: ^6.1.0 + domelementtype: ^2.3.0 + domhandler: ^5.0.3 + domutils: ^3.0.1 + checksum: 843d6d479922f28a6c5342c935aff1347491156814de63c585a6eb73baf7bb4185c1b4383a1195dca0f12e3946d737c7763bcef0b9544c515d905c5c44c5308b languageName: node linkType: hard -"@xtuc/ieee754@npm:^1.2.0": - version: 1.2.0 - resolution: "@xtuc/ieee754@npm:1.2.0" - checksum: ac56d4ca6e17790f1b1677f978c0c6808b1900a5b138885d3da21732f62e30e8f0d9120fcf8f6edfff5100ca902b46f8dd7c1e3f903728634523981e80e2885a +"cheerio@npm:^1.0.0-rc.12, cheerio@npm:^1.0.0-rc.3": + version: 1.0.0-rc.12 + resolution: "cheerio@npm:1.0.0-rc.12" + dependencies: + cheerio-select: ^2.1.0 + dom-serializer: ^2.0.0 + domhandler: ^5.0.3 + domutils: ^3.0.1 + htmlparser2: ^8.0.1 + parse5: ^7.0.0 + parse5-htmlparser2-tree-adapter: ^7.0.0 + checksum: 5d4c1b7a53cf22d3a2eddc0aff70cf23cbb30d01a4c79013e703a012475c02461aa1fcd99127e8d83a02216386ed6942b2c8103845fd0812300dd199e6e7e054 languageName: node linkType: hard -"@xtuc/long@npm:4.2.2": - version: 4.2.2 - resolution: "@xtuc/long@npm:4.2.2" - checksum: 8ed0d477ce3bc9c6fe2bf6a6a2cc316bb9c4127c5a7827bae947fa8ec34c7092395c5a283cc300c05b5fa01cbbfa1f938f410a7bf75db7c7846fea41949989ec +"chokidar@npm:3.5.3, chokidar@npm:^3.4.0, chokidar@npm:^3.4.2, chokidar@npm:^3.4.3, chokidar@npm:^3.5.3": + version: 3.5.3 + resolution: "chokidar@npm:3.5.3" + dependencies: + anymatch: ~3.1.2 + braces: ~3.0.2 + 
fsevents: ~2.3.2 + glob-parent: ~5.1.2 + is-binary-path: ~2.1.0 + is-glob: ~4.0.1 + normalize-path: ~3.0.0 + readdirp: ~3.6.0 + dependenciesMeta: + fsevents: + optional: true + checksum: b49fcde40176ba007ff361b198a2d35df60d9bb2a5aab228279eb810feae9294a6b4649ab15981304447afe1e6ffbf4788ad5db77235dc770ab777c6e771980c languageName: node linkType: hard -"abbrev@npm:^1.0.0": - version: 1.1.1 - resolution: "abbrev@npm:1.1.1" - checksum: a4a97ec07d7ea112c517036882b2ac22f3109b7b19077dc656316d07d308438aac28e4d9746dc4d84bf6b1e75b4a7b0a5f3cb30592419f128ca9a8cee3bcfa17 +"chownr@npm:^1.1.1": + version: 1.1.4 + resolution: "chownr@npm:1.1.4" + checksum: 115648f8eb38bac5e41c3857f3e663f9c39ed6480d1349977c4d96c95a47266fcacc5a5aabf3cb6c481e22d72f41992827db47301851766c4fd77ac21a4f081d languageName: node linkType: hard -"abstract-level@npm:^1.0.0, abstract-level@npm:^1.0.2, abstract-level@npm:^1.0.3": - version: 1.0.3 - resolution: "abstract-level@npm:1.0.3" - dependencies: - buffer: ^6.0.3 - catering: ^2.1.0 - is-buffer: ^2.0.5 - level-supports: ^4.0.0 - level-transcoder: ^1.0.1 - module-error: ^1.0.1 - queue-microtask: ^1.2.3 - checksum: 70d61a3924526ebc257b138992052f9ff571a6cee5a7660836e37a1cc7081273c3acf465dd2f5e1897b38dc743a6fd9dba14a5d8a2a9d39e5787cd3da99f301d +"chownr@npm:^2.0.0": + version: 2.0.0 + resolution: "chownr@npm:2.0.0" + checksum: c57cf9dd0791e2f18a5ee9c1a299ae6e801ff58fee96dc8bfd0dcb4738a6ce58dd252a3605b1c93c6418fe4f9d5093b28ffbf4d66648cb2a9c67eaef9679be2f languageName: node linkType: hard -"accepts@npm:^1.3.5, accepts@npm:~1.3.4, accepts@npm:~1.3.5, accepts@npm:~1.3.8": - version: 1.3.8 - resolution: "accepts@npm:1.3.8" +"chrome-launcher@npm:^0.15.0": + version: 0.15.2 + resolution: "chrome-launcher@npm:0.15.2" dependencies: - mime-types: ~2.1.34 - negotiator: 0.6.3 - checksum: 50c43d32e7b50285ebe84b613ee4a3aa426715a7d131b65b786e2ead0fd76b6b60091b9916d3478a75f11f162628a2139991b6c03ab3f1d9ab7c86075dc8eab4 + "@types/node": "*" + escape-string-regexp: ^4.0.0 + is-wsl: ^2.2.0 + lighthouse-logger: ^1.0.0 + bin: + print-chrome-path: bin/print-chrome-path.js + checksum: e1f8131b9f7bd931248ea85f413c6cdb93a0d41440ff5bf0987f36afb081d2b2c7b60ba6062ee7ae2dd9b052143f6b275b38c9eb115d11b49c3ea8829bad7db0 languageName: node linkType: hard -"acorn-import-assertions@npm:^1.9.0": - version: 1.9.0 - resolution: "acorn-import-assertions@npm:1.9.0" - peerDependencies: - acorn: ^8 - checksum: 944fb2659d0845c467066bdcda2e20c05abe3aaf11972116df457ce2627628a81764d800dd55031ba19de513ee0d43bb771bc679cc0eda66dc8b4fade143bc0c +"chrome-trace-event@npm:^1.0.2": + version: 1.0.3 + resolution: "chrome-trace-event@npm:1.0.3" + checksum: cb8b1fc7e881aaef973bd0c4a43cd353c2ad8323fb471a041e64f7c2dd849cde4aad15f8b753331a32dda45c973f032c8a03b8177fc85d60eaa75e91e08bfb97 languageName: node linkType: hard -"acorn-jsx@npm:^5.3.2": - version: 5.3.2 - resolution: "acorn-jsx@npm:5.3.2" +"chromium-bidi@npm:0.4.16": + version: 0.4.16 + resolution: "chromium-bidi@npm:0.4.16" + dependencies: + mitt: 3.0.0 peerDependencies: - acorn: ^6.0.0 || ^7.0.0 || ^8.0.0 - checksum: c3d3b2a89c9a056b205b69530a37b972b404ee46ec8e5b341666f9513d3163e2a4f214a71f4dfc7370f5a9c07472d2fd1c11c91c3f03d093e37637d95da98950 + devtools-protocol: "*" + checksum: 9cbb362fdf589dbdfd1618499c5bbdac45a3aa1291c1d2faa2f1ea3768738677985175d1bb1511dfe3e188bc78e6ea2acb453564ece7e09f535bbcd2253ce06a languageName: node linkType: hard -"acorn-walk@npm:^8.0.0, acorn-walk@npm:^8.1.1, acorn-walk@npm:^8.2.0": - version: 8.2.0 - resolution: "acorn-walk@npm:8.2.0" - checksum: 
1715e76c01dd7b2d4ca472f9c58968516a4899378a63ad5b6c2d668bba8da21a71976c14ec5f5b75f887b6317c4ae0b897ab141c831d741dc76024d8745f1ad1 +"chromium-bidi@npm:0.4.7": + version: 0.4.7 + resolution: "chromium-bidi@npm:0.4.7" + dependencies: + mitt: 3.0.0 + peerDependencies: + devtools-protocol: "*" + checksum: eec7581e2eddd2c95014c6edc5aae0b036c79bbeadee05166436b16139b6932c902c5ce21d95ed919a592f58d3a47c5469dc5f3de2a300700b2748ab119ad65e languageName: node linkType: hard -"acorn@npm:^8.0.4, acorn@npm:^8.4.1, acorn@npm:^8.7.1, acorn@npm:^8.8.2, acorn@npm:^8.9.0": - version: 8.10.0 - resolution: "acorn@npm:8.10.0" - bin: - acorn: bin/acorn - checksum: 538ba38af0cc9e5ef983aee196c4b8b4d87c0c94532334fa7e065b2c8a1f85863467bb774231aae91613fcda5e68740c15d97b1967ae3394d20faddddd8af61d +"ci-info@npm:^2.0.0": + version: 2.0.0 + resolution: "ci-info@npm:2.0.0" + checksum: 3b374666a85ea3ca43fa49aa3a048d21c9b475c96eb13c133505d2324e7ae5efd6a454f41efe46a152269e9b6a00c9edbe63ec7fa1921957165aae16625acd67 languageName: node linkType: hard -"address@npm:^1.0.1, address@npm:^1.1.2": - version: 1.2.2 - resolution: "address@npm:1.2.2" - checksum: ace439960c1e3564d8f523aff23a841904bf33a2a7c2e064f7f60a064194075758b9690e65bd9785692a4ef698a998c57eb74d145881a1cecab8ba658ddb1607 +"ci-info@npm:^3.2.0": + version: 3.9.0 + resolution: "ci-info@npm:3.9.0" + checksum: 6b19dc9b2966d1f8c2041a838217299718f15d6c4b63ae36e4674edd2bee48f780e94761286a56aa59eb305a85fbea4ddffb7630ec063e7ec7e7e5ad42549a87 languageName: node linkType: hard -"adm-zip@npm:^0.4.16": - version: 0.4.16 - resolution: "adm-zip@npm:0.4.16" - checksum: 5ea46664d8b3b073fffeb7f934705fea288708745e708cffc1dd732ce3d2672cecd476b243f9d051892fd12952db2b6bd061975e1ff40057246f6d0cb6534a50 +"cipher-base@npm:^1.0.0, cipher-base@npm:^1.0.1, cipher-base@npm:^1.0.3": + version: 1.0.4 + resolution: "cipher-base@npm:1.0.4" + dependencies: + inherits: ^2.0.1 + safe-buffer: ^5.0.1 + checksum: 47d3568dbc17431a339bad1fe7dff83ac0891be8206911ace3d3b818fc695f376df809bea406e759cdea07fff4b454fa25f1013e648851bec790c1d75763032e languageName: node linkType: hard -"aes-js@npm:3.0.0": - version: 3.0.0 - resolution: "aes-js@npm:3.0.0" - checksum: 251e26d533cd1a915b44896b17d5ed68c24a02484cfdd2e74ec700a309267db96651ea4eb657bf20aac32a3baa61f6e34edf8e2fec2de440a655da9942d334b8 +"classic-level@npm:^1.2.0": + version: 1.3.0 + resolution: "classic-level@npm:1.3.0" + dependencies: + abstract-level: ^1.0.2 + catering: ^2.1.0 + module-error: ^1.0.1 + napi-macros: ^2.2.2 + node-gyp: latest + node-gyp-build: ^4.3.0 + checksum: 773da48aef52a041115d413fee8340b357a4da2eb505764f327183b155edd7cc9d24819eb4f707c83dbdae8588024f5dddeb322125567c59d5d1f6f16334cdb9 languageName: node linkType: hard -"aes-js@npm:4.0.0-beta.5": - version: 4.0.0-beta.5 - resolution: "aes-js@npm:4.0.0-beta.5" - checksum: cc2ea969d77df939c32057f7e361b6530aa6cb93cb10617a17a45cd164e6d761002f031ff6330af3e67e58b1f0a3a8fd0b63a720afd591a653b02f649470e15b +"clean-css@npm:^5.2.2, clean-css@npm:^5.3.0, clean-css@npm:^5.3.2, clean-css@npm:~5.3.2": + version: 5.3.3 + resolution: "clean-css@npm:5.3.3" + dependencies: + source-map: ~0.6.0 + checksum: 941987c14860dd7d346d5cf121a82fd2caf8344160b1565c5387f7ccca4bbcaf885bace961be37c4f4713ce2d8c488dd89483c1add47bb779790edbfdcc79cbc languageName: node linkType: hard -"agent-base@npm:6, agent-base@npm:^6.0.2": - version: 6.0.2 - resolution: "agent-base@npm:6.0.2" - dependencies: - debug: 4 - checksum: 
f52b6872cc96fd5f622071b71ef200e01c7c4c454ee68bc9accca90c98cfb39f2810e3e9aa330435835eedc8c23f4f8a15267f67c6e245d2b33757575bdac49d +"clean-stack@npm:^2.0.0": + version: 2.2.0 + resolution: "clean-stack@npm:2.2.0" + checksum: 2ac8cd2b2f5ec986a3c743935ec85b07bc174d5421a5efc8017e1f146a1cf5f781ae962618f416352103b32c9cd7e203276e8c28241bbe946160cab16149fb68 languageName: node linkType: hard -"agentkeepalive@npm:^4.2.1": - version: 4.5.0 - resolution: "agentkeepalive@npm:4.5.0" +"clear-module@npm:^4.1.2": + version: 4.1.2 + resolution: "clear-module@npm:4.1.2" dependencies: - humanize-ms: ^1.2.1 - checksum: 13278cd5b125e51eddd5079f04d6fe0914ac1b8b91c1f3db2c1822f99ac1a7457869068997784342fe455d59daaff22e14fb7b8c3da4e741896e7e31faf92481 + parent-module: ^2.0.0 + resolve-from: ^5.0.0 + checksum: 4931f0c461f5d7b9b79f62c2d1bc31c37f7f1d33b4e95eef7080a83955c0374f4c180f5a96cc4d63bbefc64a9aa5d12b155641109e8e489dfa50fd5820e5101f languageName: node linkType: hard -"aggregate-error@npm:^3.0.0": - version: 3.1.0 - resolution: "aggregate-error@npm:3.1.0" - dependencies: - clean-stack: ^2.0.0 - indent-string: ^4.0.0 - checksum: 1101a33f21baa27a2fa8e04b698271e64616b886795fd43c31068c07533c7b3facfcaf4e9e0cab3624bd88f729a592f1c901a1a229c9e490eafce411a8644b79 +"cli-boxes@npm:^2.2.1": + version: 2.2.1 + resolution: "cli-boxes@npm:2.2.1" + checksum: be79f8ec23a558b49e01311b39a1ea01243ecee30539c880cf14bf518a12e223ef40c57ead0cb44f509bffdffc5c129c746cd50d863ab879385370112af4f585 languageName: node linkType: hard -"aggregate-error@npm:^4.0.0": - version: 4.0.1 - resolution: "aggregate-error@npm:4.0.1" - dependencies: - clean-stack: ^4.0.0 - indent-string: ^5.0.0 - checksum: bb3ffdfd13447800fff237c2cba752c59868ee669104bb995dfbbe0b8320e967d679e683dabb640feb32e4882d60258165cde0baafc4cd467cc7d275a13ad6b5 +"cli-boxes@npm:^3.0.0": + version: 3.0.0 + resolution: "cli-boxes@npm:3.0.0" + checksum: 637d84419d293a9eac40a1c8c96a2859e7d98b24a1a317788e13c8f441be052fc899480c6acab3acc82eaf1bccda6b7542d7cdcf5c9c3cc39227175dc098d5b2 languageName: node linkType: hard -"ajv-formats@npm:^2.1.1": - version: 2.1.1 - resolution: "ajv-formats@npm:2.1.1" +"cli-cursor@npm:^3.1.0": + version: 3.1.0 + resolution: "cli-cursor@npm:3.1.0" dependencies: - ajv: ^8.0.0 - peerDependencies: - ajv: ^8.0.0 - peerDependenciesMeta: - ajv: - optional: true - checksum: 4a287d937f1ebaad4683249a4c40c0fa3beed30d9ddc0adba04859026a622da0d317851316ea64b3680dc60f5c3c708105ddd5d5db8fe595d9d0207fd19f90b7 + restore-cursor: ^3.1.0 + checksum: 2692784c6cd2fd85cfdbd11f53aea73a463a6d64a77c3e098b2b4697a20443f430c220629e1ca3b195ea5ac4a97a74c2ee411f3807abf6df2b66211fec0c0a29 languageName: node linkType: hard -"ajv-keywords@npm:^3.4.1, ajv-keywords@npm:^3.5.2": - version: 3.5.2 - resolution: "ajv-keywords@npm:3.5.2" - peerDependencies: - ajv: ^6.9.1 - checksum: 7dc5e5931677a680589050f79dcbe1fefbb8fea38a955af03724229139175b433c63c68f7ae5f86cf8f65d55eb7c25f75a046723e2e58296707617ca690feae9 +"cli-table3@npm:^0.6.2, cli-table3@npm:^0.6.3": + version: 0.6.3 + resolution: "cli-table3@npm:0.6.3" + dependencies: + "@colors/colors": 1.5.0 + string-width: ^4.2.0 + dependenciesMeta: + "@colors/colors": + optional: true + checksum: 09897f68467973f827c04e7eaadf13b55f8aec49ecd6647cc276386ea660059322e2dd8020a8b6b84d422dbdd619597046fa89cbbbdc95b2cea149a2df7c096c languageName: node linkType: hard -"ajv-keywords@npm:^5.1.0": - version: 5.1.0 - resolution: "ajv-keywords@npm:5.1.0" +"clipboardy@npm:3.0.0": + version: 3.0.0 + resolution: "clipboardy@npm:3.0.0" dependencies: - fast-deep-equal: ^3.1.3 - 
peerDependencies: - ajv: ^8.8.2 - checksum: c35193940b853119242c6757787f09ecf89a2c19bcd36d03ed1a615e710d19d450cb448bfda407b939aba54b002368c8bff30529cc50a0536a8e10bcce300421 + arch: ^2.2.0 + execa: ^5.1.1 + is-wsl: ^2.2.0 + checksum: 2c292acb59705494cbe07d7df7c8becff4f01651514d32ebd80f4aec2d20946d8f3824aac67ecdf2d09ef21fdf0eb24b6a7f033c137ccdceedc4661c54455c94 languageName: node linkType: hard -"ajv@npm:^6.12.2, ajv@npm:^6.12.4, ajv@npm:^6.12.5": - version: 6.12.6 - resolution: "ajv@npm:6.12.6" +"cliui@npm:^7.0.2": + version: 7.0.4 + resolution: "cliui@npm:7.0.4" dependencies: - fast-deep-equal: ^3.1.1 - fast-json-stable-stringify: ^2.0.0 - json-schema-traverse: ^0.4.1 - uri-js: ^4.2.2 - checksum: 874972efe5c4202ab0a68379481fbd3d1b5d0a7bd6d3cc21d40d3536ebff3352a2a1fabb632d4fd2cc7fe4cbdcd5ed6782084c9bbf7f32a1536d18f9da5007d4 + string-width: ^4.2.0 + strip-ansi: ^6.0.0 + wrap-ansi: ^7.0.0 + checksum: ce2e8f578a4813806788ac399b9e866297740eecd4ad1823c27fd344d78b22c5f8597d548adbcc46f0573e43e21e751f39446c5a5e804a12aace402b7a315d7f languageName: node linkType: hard -"ajv@npm:^8.0.0, ajv@npm:^8.9.0": - version: 8.12.0 - resolution: "ajv@npm:8.12.0" +"cliui@npm:^8.0.1": + version: 8.0.1 + resolution: "cliui@npm:8.0.1" dependencies: - fast-deep-equal: ^3.1.1 - json-schema-traverse: ^1.0.0 - require-from-string: ^2.0.2 - uri-js: ^4.2.2 - checksum: 4dc13714e316e67537c8b31bc063f99a1d9d9a497eb4bbd55191ac0dcd5e4985bbb71570352ad6f1e76684fb6d790928f96ba3b2d4fd6e10024be9612fe3f001 + string-width: ^4.2.0 + strip-ansi: ^6.0.1 + wrap-ansi: ^7.0.0 + checksum: 79648b3b0045f2e285b76fb2e24e207c6db44323581e421c3acbd0e86454cba1b37aea976ab50195a49e7384b871e6dfb2247ad7dec53c02454ac6497394cb56 languageName: node linkType: hard -"algoliasearch-helper@npm:^3.10.0": - version: 3.14.2 - resolution: "algoliasearch-helper@npm:3.14.2" +"clone-deep@npm:^4.0.1": + version: 4.0.1 + resolution: "clone-deep@npm:4.0.1" dependencies: - "@algolia/events": ^4.0.1 - peerDependencies: - algoliasearch: ">= 3.1 < 6" - checksum: d66444b25fe8ee64675bb660ff1980870751818cb4a29c57bda6ca410372f2bfa031a455dcd5981941736db89d8294187c5b3bc1ce2a2567c6e43657ccd208b8 + is-plain-object: ^2.0.4 + kind-of: ^6.0.2 + shallow-clone: ^3.0.0 + checksum: 770f912fe4e6f21873c8e8fbb1e99134db3b93da32df271d00589ea4a29dbe83a9808a322c93f3bcaf8584b8b4fa6fc269fc8032efbaa6728e0c9886c74467d2 languageName: node linkType: hard -"algoliasearch@npm:^4.13.1, algoliasearch@npm:^4.19.1": - version: 4.20.0 - resolution: "algoliasearch@npm:4.20.0" +"clone-response@npm:^1.0.2": + version: 1.0.3 + resolution: "clone-response@npm:1.0.3" dependencies: - "@algolia/cache-browser-local-storage": 4.20.0 - "@algolia/cache-common": 4.20.0 - "@algolia/cache-in-memory": 4.20.0 - "@algolia/client-account": 4.20.0 - "@algolia/client-analytics": 4.20.0 - "@algolia/client-common": 4.20.0 - "@algolia/client-personalization": 4.20.0 - "@algolia/client-search": 4.20.0 - "@algolia/logger-common": 4.20.0 - "@algolia/logger-console": 4.20.0 - "@algolia/requester-browser-xhr": 4.20.0 - "@algolia/requester-common": 4.20.0 - "@algolia/requester-node-http": 4.20.0 - "@algolia/transporter": 4.20.0 - checksum: 078954944452f57d2e3b47c6ed4905caf797814324a4d5068a8b6685d434a885977a3e607714c5fb6eb29c7c3e717b3ee9cb01c8b2320e2c7bd73bcd8d42e70f + mimic-response: ^1.0.0 + checksum: 4e671cac39b11c60aa8ba0a450657194a5d6504df51bca3fac5b3bd0145c4f8e8464898f87c8406b83232e3bc5cca555f51c1f9c8ac023969ebfbf7f6bdabb2e languageName: node linkType: hard -"ansi-align@npm:^3.0.0, ansi-align@npm:^3.0.1": - version: 3.0.1 - 
resolution: "ansi-align@npm:3.0.1" - dependencies: - string-width: ^4.1.0 - checksum: 6abfa08f2141d231c257162b15292467081fa49a208593e055c866aa0455b57f3a86b5a678c190c618faa79b4c59e254493099cb700dd9cf2293c6be2c8f5d8d +"clone@npm:^2.1.2": + version: 2.1.2 + resolution: "clone@npm:2.1.2" + checksum: aaf106e9bc025b21333e2f4c12da539b568db4925c0501a1bf4070836c9e848c892fa22c35548ce0d1132b08bbbfa17a00144fe58fccdab6fa900fec4250f67d languageName: node linkType: hard -"ansi-colors@npm:4.1.1": - version: 4.1.1 - resolution: "ansi-colors@npm:4.1.1" - checksum: 138d04a51076cb085da0a7e2d000c5c0bb09f6e772ed5c65c53cb118d37f6c5f1637506d7155fb5f330f0abcf6f12fa2e489ac3f8cdab9da393bf1bb4f9a32b0 +"clone@npm:~0.1.9": + version: 0.1.19 + resolution: "clone@npm:0.1.19" + checksum: 5e710e16da67abe30c0664c8fd69c280635be59a4fae0a5fe58ed324e701e99348b48ce67288716fa223edd42ba574e58a3783cb2fcfa381b8b49ce7e56ac3f4 languageName: node linkType: hard -"ansi-colors@npm:^4.1.1": - version: 4.1.3 - resolution: "ansi-colors@npm:4.1.3" - checksum: a9c2ec842038a1fabc7db9ece7d3177e2fe1c5dc6f0c51ecfbf5f39911427b89c00b5dc6b8bd95f82a26e9b16aaae2e83d45f060e98070ce4d1333038edceb0e +"clsx@npm:^1.1.1, clsx@npm:^1.2.1": + version: 1.2.1 + resolution: "clsx@npm:1.2.1" + checksum: 30befca8019b2eb7dbad38cff6266cf543091dae2825c856a62a8ccf2c3ab9c2907c4d12b288b73101196767f66812365400a227581484a05f968b0307cfaf12 languageName: node linkType: hard -"ansi-escapes@npm:^4.3.0": - version: 4.3.2 - resolution: "ansi-escapes@npm:4.3.2" - dependencies: - type-fest: ^0.21.3 - checksum: 93111c42189c0a6bed9cdb4d7f2829548e943827ee8479c74d6e0b22ee127b2a21d3f8b5ca57723b8ef78ce011fbfc2784350eb2bde3ccfccf2f575fa8489815 +"clsx@npm:^2.0.0": + version: 2.0.0 + resolution: "clsx@npm:2.0.0" + checksum: a2cfb2351b254611acf92faa0daf15220f4cd648bdf96ce369d729813b85336993871a4bf6978ddea2b81b5a130478339c20d9d0b5c6fc287e5147f0c059276e languageName: node linkType: hard -"ansi-html-community@npm:^0.0.8": - version: 0.0.8 - resolution: "ansi-html-community@npm:0.0.8" - bin: - ansi-html: bin/ansi-html - checksum: 04c568e8348a636963f915e48eaa3e01218322e1169acafdd79c384f22e5558c003f79bbc480c1563865497482817c7eed025f0653ebc17642fededa5cb42089 +"co-body@npm:^6.1.0": + version: 6.1.0 + resolution: "co-body@npm:6.1.0" + dependencies: + inflation: ^2.0.0 + qs: ^6.5.2 + raw-body: ^2.3.3 + type-is: ^1.6.16 + checksum: d0a78831a6651f2085fce16b0ecdc49f45fb5baf4f94148c2f499e7ec89d188205362548b9c500eae15a819360cfda208079e68a72c204cf66ca3ffa2fc0f57e languageName: node linkType: hard -"ansi-regex@npm:^5.0.1": - version: 5.0.1 - resolution: "ansi-regex@npm:5.0.1" - checksum: 2aa4bb54caf2d622f1afdad09441695af2a83aa3fe8b8afa581d205e57ed4261c183c4d3877cee25794443fde5876417d859c108078ab788d6af7e4fe52eb66b +"co@npm:^4.6.0": + version: 4.6.0 + resolution: "co@npm:4.6.0" + checksum: 5210d9223010eb95b29df06a91116f2cf7c8e0748a9013ed853b53f362ea0e822f1e5bb054fb3cefc645239a4cf966af1f6133a3b43f40d591f3b68ed6cf0510 languageName: node linkType: hard -"ansi-regex@npm:^6.0.1": - version: 6.0.1 - resolution: "ansi-regex@npm:6.0.1" - checksum: 1ff8b7667cded1de4fa2c9ae283e979fc87036864317da86a2e546725f96406746411d0d85e87a2d12fa5abd715d90006de7fa4fa0477c92321ad3b4c7d4e169 +"collapse-white-space@npm:^1.0.2": + version: 1.0.6 + resolution: "collapse-white-space@npm:1.0.6" + checksum: 9673fb797952c5c888341435596c69388b22cd5560c8cd3f40edb72734a9c820f56a7c9525166bcb7068b5d5805372e6fd0c4b9f2869782ad070cb5d3faf26e7 languageName: node linkType: hard -"ansi-sequence-parser@npm:^1.1.0": - version: 1.1.1 - resolution: 
"ansi-sequence-parser@npm:1.1.1" - checksum: ead5b15c596e8e85ca02951a844366c6776769dcc9fd1bd3a0db11bb21364554822c6a439877fb599e7e1ffa0b5f039f1e5501423950457f3dcb2f480c30b188 +"collapse-white-space@npm:^2.0.0": + version: 2.1.0 + resolution: "collapse-white-space@npm:2.1.0" + checksum: c8978b1f4e7d68bf846cfdba6c6689ce8910511df7d331eb6e6757e51ceffb52768d59a28db26186c91dcf9594955b59be9f8ccd473c485790f5d8b90dc6726f languageName: node linkType: hard -"ansi-styles@npm:^3.2.1": - version: 3.2.1 - resolution: "ansi-styles@npm:3.2.1" +"color-convert@npm:^1.9.0": + version: 1.9.3 + resolution: "color-convert@npm:1.9.3" dependencies: - color-convert: ^1.9.0 - checksum: d85ade01c10e5dd77b6c89f34ed7531da5830d2cb5882c645f330079975b716438cd7ebb81d0d6e6b4f9c577f19ae41ab55f07f19786b02f9dfd9e0377395665 + color-name: 1.1.3 + checksum: fd7a64a17cde98fb923b1dd05c5f2e6f7aefda1b60d67e8d449f9328b4e53b228a428fd38bfeaeb2db2ff6b6503a776a996150b80cdf224062af08a5c8a3a203 languageName: node linkType: hard -"ansi-styles@npm:^4.0.0, ansi-styles@npm:^4.1.0": - version: 4.3.0 - resolution: "ansi-styles@npm:4.3.0" +"color-convert@npm:^2.0.1": + version: 2.0.1 + resolution: "color-convert@npm:2.0.1" dependencies: - color-convert: ^2.0.1 - checksum: 513b44c3b2105dd14cc42a19271e80f386466c4be574bccf60b627432f9198571ebf4ab1e4c3ba17347658f4ee1711c163d574248c0c1cdc2d5917a0ad582ec4 + color-name: ~1.1.4 + checksum: 79e6bdb9fd479a205c71d89574fccfb22bd9053bd98c6c4d870d65c132e5e904e6034978e55b43d69fcaa7433af2016ee203ce76eeba9cfa554b373e7f7db336 languageName: node linkType: hard -"ansi-styles@npm:^6.0.0, ansi-styles@npm:^6.1.0, ansi-styles@npm:^6.2.1": - version: 6.2.1 - resolution: "ansi-styles@npm:6.2.1" - checksum: ef940f2f0ced1a6347398da88a91da7930c33ecac3c77b72c5905f8b8fe402c52e6fde304ff5347f616e27a742da3f1dc76de98f6866c69251ad0b07a66776d9 +"color-name@npm:1.1.3": + version: 1.1.3 + resolution: "color-name@npm:1.1.3" + checksum: 09c5d3e33d2105850153b14466501f2bfb30324a2f76568a408763a3b7433b0e50e5b4ab1947868e65cb101bb7cb75029553f2c333b6d4b8138a73fcc133d69d languageName: node linkType: hard -"anymatch@npm:~3.1.2": - version: 3.1.3 - resolution: "anymatch@npm:3.1.3" - dependencies: - normalize-path: ^3.0.0 - picomatch: ^2.0.4 - checksum: 3e044fd6d1d26545f235a9fe4d7a534e2029d8e59fa7fd9f2a6eb21230f6b5380ea1eaf55136e60cbf8e613544b3b766e7a6fa2102e2a3a117505466e3025dc2 +"color-name@npm:~1.1.4": + version: 1.1.4 + resolution: "color-name@npm:1.1.4" + checksum: b0445859521eb4021cd0fb0cc1a75cecf67fceecae89b63f62b201cca8d345baf8b952c966862a9d9a2632987d4f6581f0ec8d957dfacece86f0a7919316f610 languageName: node linkType: hard -"aproba@npm:^1.0.3 || ^2.0.0": - version: 2.0.0 - resolution: "aproba@npm:2.0.0" - checksum: 5615cadcfb45289eea63f8afd064ab656006361020e1735112e346593856f87435e02d8dcc7ff0d11928bc7d425f27bc7c2a84f6c0b35ab0ff659c814c138a24 +"colord@npm:^2.9.1": + version: 2.9.3 + resolution: "colord@npm:2.9.3" + checksum: 95d909bfbcfd8d5605cbb5af56f2d1ce2b323990258fd7c0d2eb0e6d3bb177254d7fb8213758db56bb4ede708964f78c6b992b326615f81a18a6aaf11d64c650 languageName: node linkType: hard -"are-we-there-yet@npm:^3.0.0": - version: 3.0.1 - resolution: "are-we-there-yet@npm:3.0.1" - dependencies: - delegates: ^1.0.0 - readable-stream: ^3.6.0 - checksum: 52590c24860fa7173bedeb69a4c05fb573473e860197f618b9a28432ee4379049336727ae3a1f9c4cb083114601c1140cee578376164d0e651217a9843f9fe83 +"colorette@npm:^2.0.10, colorette@npm:^2.0.14": + version: 2.0.20 + resolution: "colorette@npm:2.0.20" + checksum: 
0c016fea2b91b733eb9f4bcdb580018f52c0bc0979443dad930e5037a968237ac53d9beb98e218d2e9235834f8eebce7f8e080422d6194e957454255bde71d3d languageName: node linkType: hard -"arg@npm:^4.1.0": - version: 4.1.3 - resolution: "arg@npm:4.1.3" - checksum: 544af8dd3f60546d3e4aff084d451b96961d2267d668670199692f8d054f0415d86fc5497d0e641e91546f0aa920e7c29e5250e99fc89f5552a34b5d93b77f43 +"combine-promises@npm:^1.1.0": + version: 1.2.0 + resolution: "combine-promises@npm:1.2.0" + checksum: ddce91436e24da03d5dc360c59cd55abfc9da5e949a26255aa42761925c574797c43138f0aabfc364e184e738e5e218a94ac6e88ebc459045bcf048ac7fe5f07 languageName: node linkType: hard -"arg@npm:^5.0.0": - version: 5.0.2 - resolution: "arg@npm:5.0.2" - checksum: 6c69ada1a9943d332d9e5382393e897c500908d91d5cb735a01120d5f71daf1b339b7b8980cbeaba8fd1afc68e658a739746179e4315a26e8a28951ff9930078 +"combined-stream@npm:^1.0.8": + version: 1.0.8 + resolution: "combined-stream@npm:1.0.8" + dependencies: + delayed-stream: ~1.0.0 + checksum: 49fa4aeb4916567e33ea81d088f6584749fc90c7abec76fd516bf1c5aa5c79f3584b5ba3de6b86d26ddd64bae5329c4c7479343250cfe71c75bb366eae53bb7c languageName: node linkType: hard -"argparse@npm:^1.0.7": - version: 1.0.10 - resolution: "argparse@npm:1.0.10" - dependencies: - sprintf-js: ~1.0.2 - checksum: 7ca6e45583a28de7258e39e13d81e925cfa25d7d4aacbf806a382d3c02fcb13403a07fb8aeef949f10a7cfe4a62da0e2e807b348a5980554cc28ee573ef95945 +"comlink@npm:^4.4.1": + version: 4.4.1 + resolution: "comlink@npm:4.4.1" + checksum: 16d58a8f590087fc45432e31d6c138308dfd4b75b89aec0b7f7bb97ad33d810381bd2b1e608a1fb2cf05979af9cbfcdcaf1715996d5fcf77aeb013b6da3260af languageName: node linkType: hard -"argparse@npm:^2.0.1": - version: 2.0.1 - resolution: "argparse@npm:2.0.1" - checksum: 83644b56493e89a254bae05702abf3a1101b4fa4d0ca31df1c9985275a5a5bd47b3c27b7fa0b71098d41114d8ca000e6ed90cad764b306f8a503665e4d517ced +"comma-separated-tokens@npm:^1.0.0": + version: 1.0.8 + resolution: "comma-separated-tokens@npm:1.0.8" + checksum: 0adcb07174fa4d08cf0f5c8e3aec40a36b5ff0c2c720e5e23f50fe02e6789d1d00a67036c80e0c1e1539f41d3e7f0101b074039dd833b4e4a59031b659d6ca0d languageName: node linkType: hard -"array-back@npm:^3.0.1, array-back@npm:^3.1.0": - version: 3.1.0 - resolution: "array-back@npm:3.1.0" - checksum: 7205004fcd0f9edd926db921af901b083094608d5b265738d0290092f9822f73accb468e677db74c7c94ef432d39e5ed75a7b1786701e182efb25bbba9734209 +"comma-separated-tokens@npm:^2.0.0": + version: 2.0.3 + resolution: "comma-separated-tokens@npm:2.0.3" + checksum: e3bf9e0332a5c45f49b90e79bcdb4a7a85f28d6a6f0876a94f1bb9b2bfbdbbb9292aac50e1e742d8c0db1e62a0229a106f57917e2d067fca951d81737651700d languageName: node linkType: hard -"array-back@npm:^4.0.1, array-back@npm:^4.0.2": - version: 4.0.2 - resolution: "array-back@npm:4.0.2" - checksum: f30603270771eeb54e5aad5f54604c62b3577a18b6db212a7272b2b6c32049121b49431f656654790ed1469411e45f387e7627c0de8fd0515995cc40df9b9294 +"command-exists@npm:^1.2.7, command-exists@npm:^1.2.8": + version: 1.2.9 + resolution: "command-exists@npm:1.2.9" + checksum: 729ae3d88a2058c93c58840f30341b7f82688a573019535d198b57a4d8cb0135ced0ad7f52b591e5b28a90feb2c675080ce916e56254a0f7c15cb2395277cac3 languageName: node linkType: hard -"array-back@npm:^6.2.2": - version: 6.2.2 - resolution: "array-back@npm:6.2.2" - checksum: baae1e3a1687300a307d3bdf09715f6415e1099b5729d3d8e397309fb1e43d90b939d694602892172aaca7e0aeed38da89d04aa4951637d31c2a21350809e003 +"command-line-args@npm:^5.1.1, command-line-args@npm:^5.2.1": + version: 5.2.1 + resolution: "command-line-args@npm:5.2.1" + 
dependencies: + array-back: ^3.1.0 + find-replace: ^3.0.0 + lodash.camelcase: ^4.3.0 + typical: ^4.0.0 + checksum: e759519087be3cf2e86af8b9a97d3058b4910cd11ee852495be881a067b72891f6a32718fb685ee6d41531ab76b2b7bfb6602f79f882cd4b7587ff1e827982c7 languageName: node linkType: hard -"array-find-index@npm:^1.0.1": - version: 1.0.2 - resolution: "array-find-index@npm:1.0.2" - checksum: aac128bf369e1ac6c06ff0bb330788371c0e256f71279fb92d745e26fb4b9db8920e485b4ec25e841c93146bf71a34dcdbcefa115e7e0f96927a214d237b7081 +"command-line-usage@npm:^6.1.0": + version: 6.1.3 + resolution: "command-line-usage@npm:6.1.3" + dependencies: + array-back: ^4.0.2 + chalk: ^2.4.2 + table-layout: ^1.0.2 + typical: ^5.2.0 + checksum: 8261d4e5536eb0bcddee0ec5e89c05bb2abd18e5760785c8078ede5020bc1c612cbe28eb6586f5ed4a3660689748e5aaad4a72f21566f4ef39393694e2fa1a0b languageName: node linkType: hard -"array-flatten@npm:1.1.1": - version: 1.1.1 - resolution: "array-flatten@npm:1.1.1" - checksum: a9925bf3512d9dce202112965de90c222cd59a4fbfce68a0951d25d965cf44642931f40aac72309c41f12df19afa010ecadceb07cfff9ccc1621e99d89ab5f3b +"command-line-usage@npm:^7.0.0, command-line-usage@npm:^7.0.1": + version: 7.0.1 + resolution: "command-line-usage@npm:7.0.1" + dependencies: + array-back: ^6.2.2 + chalk-template: ^0.4.0 + table-layout: ^3.0.0 + typical: ^7.1.1 + checksum: ac78ad6b83b9622bb111ae8e82205bde1d2da74df237fdd0bd7d98eda3592c8933ec600818b0b028b2313ddca638b1b60f0780dd9457ad4a0384b17156641f79 languageName: node linkType: hard -"array-flatten@npm:^2.1.2": - version: 2.1.2 - resolution: "array-flatten@npm:2.1.2" - checksum: e8988aac1fbfcdaae343d08c9a06a6fddd2c6141721eeeea45c3cf523bf4431d29a46602929455ed548c7a3e0769928cdc630405427297e7081bd118fdec9262 +"commander@npm:3.0.2": + version: 3.0.2 + resolution: "commander@npm:3.0.2" + checksum: 6d14ad030d1904428139487ed31febcb04c1604db2b8d9fae711f60ee6718828dc0e11602249e91c8a97b0e721e9c6d53edbc166bad3cde1596851d59a8f824d languageName: node linkType: hard -"array-union@npm:^2.1.0": - version: 2.1.0 - resolution: "array-union@npm:2.1.0" - checksum: 5bee12395cba82da674931df6d0fea23c4aa4660cb3b338ced9f828782a65caa232573e6bf3968f23e0c5eb301764a382cef2f128b170a9dc59de0e36c39f98d +"commander@npm:^10.0.0, commander@npm:^10.0.1": + version: 10.0.1 + resolution: "commander@npm:10.0.1" + checksum: 436901d64a818295803c1996cd856621a74f30b9f9e28a588e726b2b1670665bccd7c1a77007ebf328729f0139838a88a19265858a0fa7a8728c4656796db948 languageName: node linkType: hard -"arrgv@npm:^1.0.2": - version: 1.0.2 - resolution: "arrgv@npm:1.0.2" - checksum: 470bbb406ea3b34810dd8b03c0b33282617a42d9fce0ab45d58596efefd042fc548eda49161fa8e3f607cbe9df90e7a67003a09043ab9081eff70f97c63dd0e2 +"commander@npm:^11.1.0": + version: 11.1.0 + resolution: "commander@npm:11.1.0" + checksum: fd1a8557c6b5b622c89ecdfde703242ab7db3b628ea5d1755784c79b8e7cb0d74d65b4a262289b533359cd58e1bfc0bf50245dfbcd2954682a6f367c828b79ef languageName: node linkType: hard -"arrify@npm:^3.0.0": - version: 3.0.0 - resolution: "arrify@npm:3.0.0" - checksum: d6c6f3dad9571234f320e130d57fddb2cc283c87f2ac7df6c7005dffc5161b7bb9376f4be655ed257050330336e84afc4f3020d77696ad231ff580a94ae5aba6 +"commander@npm:^2.20.0": + version: 2.20.3 + resolution: "commander@npm:2.20.3" + checksum: ab8c07884e42c3a8dbc5dd9592c606176c7eb5c1ca5ff274bcf907039b2c41de3626f684ea75ccf4d361ba004bbaff1f577d5384c155f3871e456bdf27becf9e languageName: node linkType: hard -"asap@npm:~2.0.3": - version: 2.0.6 - resolution: "asap@npm:2.0.6" - checksum: 
b296c92c4b969e973260e47523207cd5769abd27c245a68c26dc7a0fe8053c55bb04360237cb51cab1df52be939da77150ace99ad331fb7fb13b3423ed73ff3d +"commander@npm:^5.1.0": + version: 5.1.0 + resolution: "commander@npm:5.1.0" + checksum: 0b7fec1712fbcc6230fcb161d8d73b4730fa91a21dc089515489402ad78810547683f058e2a9835929c212fead1d6a6ade70db28bbb03edbc2829a9ab7d69447 languageName: node linkType: hard -"assertion-error@npm:^1.1.0": - version: 1.1.0 - resolution: "assertion-error@npm:1.1.0" - checksum: fd9429d3a3d4fd61782eb3962ae76b6d08aa7383123fca0596020013b3ebd6647891a85b05ce821c47d1471ed1271f00b0545cf6a4326cf2fc91efcc3b0fbecf +"commander@npm:^7.0.0, commander@npm:^7.2.0": + version: 7.2.0 + resolution: "commander@npm:7.2.0" + checksum: 53501cbeee61d5157546c0bef0fedb6cdfc763a882136284bed9a07225f09a14b82d2a84e7637edfd1a679fb35ed9502fd58ef1d091e6287f60d790147f68ddc languageName: node linkType: hard -"astral-regex@npm:^2.0.0": - version: 2.0.0 - resolution: "astral-regex@npm:2.0.0" - checksum: 876231688c66400473ba505731df37ea436e574dd524520294cc3bbc54ea40334865e01fa0d074d74d036ee874ee7e62f486ea38bc421ee8e6a871c06f011766 +"commander@npm:^8.3.0": + version: 8.3.0 + resolution: "commander@npm:8.3.0" + checksum: 0f82321821fc27b83bd409510bb9deeebcfa799ff0bf5d102128b500b7af22872c0c92cb6a0ebc5a4cf19c6b550fba9cedfa7329d18c6442a625f851377bacf0 languageName: node linkType: hard -"async@npm:^2.6.4": - version: 2.6.4 - resolution: "async@npm:2.6.4" +"comment-json@npm:^4.2.3": + version: 4.2.3 + resolution: "comment-json@npm:4.2.3" dependencies: - lodash: ^4.17.14 - checksum: a52083fb32e1ebe1d63e5c5624038bb30be68ff07a6c8d7dfe35e47c93fc144bd8652cbec869e0ac07d57dde387aa5f1386be3559cdee799cb1f789678d88e19 + array-timsort: ^1.0.3 + core-util-is: ^1.0.3 + esprima: ^4.0.1 + has-own-prop: ^2.0.0 + repeat-string: ^1.6.1 + checksum: 7f8d26266b0d49de9661f6365cbcc373fee4f4d0f422a203dfb17ad8f3d84c5be5ded444874935a197cd03cff297c53fe48910256cb4171cb2e52a3e6b9d317c languageName: node linkType: hard -"asynckit@npm:^0.4.0": - version: 0.4.0 - resolution: "asynckit@npm:0.4.0" - checksum: 7b78c451df768adba04e2d02e63e2d0bf3b07adcd6e42b4cf665cb7ce899bedd344c69a1dcbce355b5f972d597b25aaa1c1742b52cffd9caccb22f348114f6be +"common-path-prefix@npm:^3.0.0": + version: 3.0.0 + resolution: "common-path-prefix@npm:3.0.0" + checksum: fdb3c4f54e51e70d417ccd950c07f757582de800c0678ca388aedefefc84982039f346f9fd9a1252d08d2da9e9ef4019f580a1d1d3a10da031e4bb3c924c5818 languageName: node linkType: hard -"at-least-node@npm:^1.0.0": - version: 1.0.0 - resolution: "at-least-node@npm:1.0.0" - checksum: 463e2f8e43384f1afb54bc68485c436d7622acec08b6fad269b421cb1d29cebb5af751426793d0961ed243146fe4dc983402f6d5a51b720b277818dbf6f2e49e +"commondir@npm:^1.0.1": + version: 1.0.1 + resolution: "commondir@npm:1.0.1" + checksum: 59715f2fc456a73f68826285718503340b9f0dd89bfffc42749906c5cf3d4277ef11ef1cca0350d0e79204f00f1f6d83851ececc9095dc88512a697ac0b9bdcb languageName: node linkType: hard -"autoprefixer@npm:^10.4.12, autoprefixer@npm:^10.4.7": - version: 10.4.16 - resolution: "autoprefixer@npm:10.4.16" +"compressible@npm:~2.0.16": + version: 2.0.18 + resolution: "compressible@npm:2.0.18" dependencies: - browserslist: ^4.21.10 - caniuse-lite: ^1.0.30001538 - fraction.js: ^4.3.6 - normalize-range: ^0.1.2 - picocolors: ^1.0.0 - postcss-value-parser: ^4.2.0 - peerDependencies: - postcss: ^8.1.0 - bin: - autoprefixer: bin/autoprefixer - checksum: 45fad7086495048dacb14bb7b00313e70e135b5d8e8751dcc60548889400763705ab16fc2d99ea628b44c3472698fb0e39598f595ba28409c965ab159035afde + mime-db: ">= 
1.43.0 < 2" + checksum: 58321a85b375d39230405654721353f709d0c1442129e9a17081771b816302a012471a9b8f4864c7dbe02eef7f2aaac3c614795197092262e94b409c9be108f0 languageName: node linkType: hard -"ava@npm:^5.2.0": - version: 5.3.1 - resolution: "ava@npm:5.3.1" +"compression@npm:1.7.4, compression@npm:^1.7.4": + version: 1.7.4 + resolution: "compression@npm:1.7.4" dependencies: - acorn: ^8.8.2 - acorn-walk: ^8.2.0 - ansi-styles: ^6.2.1 - arrgv: ^1.0.2 - arrify: ^3.0.0 - callsites: ^4.0.0 - cbor: ^8.1.0 - chalk: ^5.2.0 - chokidar: ^3.5.3 - chunkd: ^2.0.1 - ci-info: ^3.8.0 - ci-parallel-vars: ^1.0.1 - clean-yaml-object: ^0.1.0 - cli-truncate: ^3.1.0 - code-excerpt: ^4.0.0 - common-path-prefix: ^3.0.0 - concordance: ^5.0.4 - currently-unhandled: ^0.4.1 - debug: ^4.3.4 - emittery: ^1.0.1 - figures: ^5.0.0 - globby: ^13.1.4 - ignore-by-default: ^2.1.0 - indent-string: ^5.0.0 - is-error: ^2.2.2 - is-plain-object: ^5.0.0 - is-promise: ^4.0.0 - matcher: ^5.0.0 - mem: ^9.0.2 - ms: ^2.1.3 - p-event: ^5.0.1 - p-map: ^5.5.0 - picomatch: ^2.3.1 - pkg-conf: ^4.0.0 - plur: ^5.1.0 - pretty-ms: ^8.0.0 - resolve-cwd: ^3.0.0 - stack-utils: ^2.0.6 - strip-ansi: ^7.0.1 - supertap: ^3.0.1 - temp-dir: ^3.0.0 - write-file-atomic: ^5.0.1 - yargs: ^17.7.2 - peerDependencies: - "@ava/typescript": "*" - peerDependenciesMeta: - "@ava/typescript": - optional: true - bin: - ava: entrypoints/cli.mjs - checksum: 126a5932baef74eccd8bec992bd522e25c05b6ee4985dde87c20cece76c2377f0bf9448f242f3f9cd2abbf7a5ac932fe4e4abde2a23792d6271a6088e5a1984e + accepts: ~1.3.5 + bytes: 3.0.0 + compressible: ~2.0.16 + debug: 2.6.9 + on-headers: ~1.0.2 + safe-buffer: 5.1.2 + vary: ~1.1.2 + checksum: 35c0f2eb1f28418978615dc1bc02075b34b1568f7f56c62d60f4214d4b7cc00d0f6d282b5f8a954f59872396bd770b6b15ffd8aa94c67d4bce9b8887b906999b languageName: node linkType: hard -"axios@npm:^0.25.0": - version: 0.25.0 - resolution: "axios@npm:0.25.0" - dependencies: - follow-redirects: ^1.14.7 - checksum: 2a8a3787c05f2a0c9c3878f49782357e2a9f38945b93018fb0c4fd788171c43dceefbb577988628e09fea53952744d1ecebde234b561f1e703aa43e0a598a3ad +"concat-map@npm:0.0.1": + version: 0.0.1 + resolution: "concat-map@npm:0.0.1" + checksum: 902a9f5d8967a3e2faf138d5cb784b9979bad2e6db5357c5b21c568df4ebe62bcb15108af1b2253744844eb964fc023fbd9afbbbb6ddd0bcc204c6fb5b7bf3af languageName: node linkType: hard -"axios@npm:^1.4.0": - version: 1.5.1 - resolution: "axios@npm:1.5.1" +"concat-stream@npm:^1.4.4": + version: 1.6.2 + resolution: "concat-stream@npm:1.6.2" dependencies: - follow-redirects: ^1.15.0 - form-data: ^4.0.0 - proxy-from-env: ^1.1.0 - checksum: 4444f06601f4ede154183767863d2b8e472b4a6bfc5253597ed6d21899887e1fd0ee2b3de792ac4f8459fe2e359d2aa07c216e45fd8b9e4e0688a6ebf48a5a8d + buffer-from: ^1.0.0 + inherits: ^2.0.3 + readable-stream: ^2.2.2 + typedarray: ^0.0.6 + checksum: 1ef77032cb4459dcd5187bd710d6fc962b067b64ec6a505810de3d2b8cc0605638551b42f8ec91edf6fcd26141b32ef19ad749239b58fae3aba99187adc32285 languageName: node linkType: hard -"babel-loader@npm:^8.2.5": - version: 8.3.0 - resolution: "babel-loader@npm:8.3.0" +"config-chain@npm:^1.1.11": + version: 1.1.13 + resolution: "config-chain@npm:1.1.13" dependencies: - find-cache-dir: ^3.3.1 - loader-utils: ^2.0.0 - make-dir: ^3.1.0 - schema-utils: ^2.6.5 - peerDependencies: - "@babel/core": ^7.0.0 - webpack: ">=2" - checksum: d48bcf9e030e598656ad3ff5fb85967db2eaaf38af5b4a4b99d25618a2057f9f100e6b231af2a46c1913206db506115ca7a8cbdf52c9c73d767070dae4352ab5 + ini: ^1.3.4 + proto-list: ~1.2.1 + checksum: 
828137a28e7c2fc4b7fb229bd0cd6c1397bcf83434de54347e608154008f411749041ee392cbe42fab6307e02de4c12480260bf769b7d44b778fdea3839eafab languageName: node linkType: hard -"babel-plugin-apply-mdx-type-prop@npm:1.6.22": - version: 1.6.22 - resolution: "babel-plugin-apply-mdx-type-prop@npm:1.6.22" +"configstore@npm:^5.0.1": + version: 5.0.1 + resolution: "configstore@npm:5.0.1" dependencies: - "@babel/helper-plugin-utils": 7.10.4 - "@mdx-js/util": 1.6.22 - peerDependencies: - "@babel/core": ^7.11.6 - checksum: 43e2100164a8f3e46fddd76afcbfb1f02cbebd5612cfe63f3d344a740b0afbdc4d2bf5659cffe9323dd2554c7b86b23ebedae9dadcec353b6594f4292a1a28e2 + dot-prop: ^5.2.0 + graceful-fs: ^4.1.2 + make-dir: ^3.0.0 + unique-string: ^2.0.0 + write-file-atomic: ^3.0.0 + xdg-basedir: ^4.0.0 + checksum: 60ef65d493b63f96e14b11ba7ec072fdbf3d40110a94fb7199d1c287761bdea5c5244e76b2596325f30c1b652213aa75de96ea20afd4a5f82065e61ea090988e languageName: node linkType: hard -"babel-plugin-dynamic-import-node@npm:^2.3.3": - version: 2.3.3 - resolution: "babel-plugin-dynamic-import-node@npm:2.3.3" +"configstore@npm:^6.0.0": + version: 6.0.0 + resolution: "configstore@npm:6.0.0" dependencies: - object.assign: ^4.1.0 - checksum: c9d24415bcc608d0db7d4c8540d8002ac2f94e2573d2eadced137a29d9eab7e25d2cbb4bc6b9db65cf6ee7430f7dd011d19c911a9a778f0533b4a05ce8292c9b + dot-prop: ^6.0.1 + graceful-fs: ^4.2.6 + unique-string: ^3.0.0 + write-file-atomic: ^3.0.3 + xdg-basedir: ^5.0.1 + checksum: 81995351c10bc04c58507f17748477aeac6f47465109d20e3534cebc881d22e927cfd29e73dd852c46c55f62c2b7be4cd1fe6eb3a93ba51f7f9813c218f9bae0 languageName: node linkType: hard -"babel-plugin-extract-import-names@npm:1.6.22": - version: 1.6.22 - resolution: "babel-plugin-extract-import-names@npm:1.6.22" - dependencies: - "@babel/helper-plugin-utils": 7.10.4 - checksum: 145ccf09c96d36411d340e78086555f8d4d5924ea39fcb0eca461c066cfa98bc4344982bb35eb85d054ef88f8d4dfc0205ba27370c1d8fcc78191b02908d044d +"connect-history-api-fallback@npm:^2.0.0": + version: 2.0.0 + resolution: "connect-history-api-fallback@npm:2.0.0" + checksum: dc5368690f4a5c413889792f8df70d5941ca9da44523cde3f87af0745faee5ee16afb8195434550f0504726642734f2683d6c07f8b460f828a12c45fbd4c9a68 languageName: node linkType: hard -"babel-plugin-polyfill-corejs2@npm:^0.4.6": - version: 0.4.6 - resolution: "babel-plugin-polyfill-corejs2@npm:0.4.6" - dependencies: - "@babel/compat-data": ^7.22.6 - "@babel/helper-define-polyfill-provider": ^0.4.3 - semver: ^6.3.1 - peerDependencies: - "@babel/core": ^7.4.0 || ^8.0.0-0 <8.0.0 - checksum: 08896811df31530be6a9bcdd630cb9fd4b5ae5181039d18db3796efbc54e38d57a42af460845c10a04434e1bc45c0d47743c7e6c860383cc6b141083cde22030 +"consola@npm:^2.15.3": + version: 2.15.3 + resolution: "consola@npm:2.15.3" + checksum: 8ef7a09b703ec67ac5c389a372a33b6dc97eda6c9876443a60d76a3076eea0259e7f67a4e54fd5a52f97df73690822d090cf8b7e102b5761348afef7c6d03e28 languageName: node linkType: hard -"babel-plugin-polyfill-corejs3@npm:^0.8.5": - version: 0.8.5 - resolution: "babel-plugin-polyfill-corejs3@npm:0.8.5" - dependencies: - "@babel/helper-define-polyfill-provider": ^0.4.3 - core-js-compat: ^3.32.2 - peerDependencies: - "@babel/core": ^7.4.0 || ^8.0.0-0 <8.0.0 - checksum: 54ff3956c4f88e483d38b27ceec6199b9e73fceac10ebf969469d215e6a62929384e4433f85335c9a6ba809329636e27f9bdae2f54075f833e7a745341c07d84 +"content-disposition@npm:0.5.2": + version: 0.5.2 + resolution: "content-disposition@npm:0.5.2" + checksum: 
298d7da63255a38f7858ee19c7b6aae32b167e911293174b4c1349955e97e78e1d0b0d06c10e229405987275b417cf36ff65cbd4821a98bc9df4e41e9372cde7 languageName: node linkType: hard -"babel-plugin-polyfill-regenerator@npm:^0.5.3": - version: 0.5.3 - resolution: "babel-plugin-polyfill-regenerator@npm:0.5.3" +"content-disposition@npm:0.5.4, content-disposition@npm:~0.5.2": + version: 0.5.4 + resolution: "content-disposition@npm:0.5.4" dependencies: - "@babel/helper-define-polyfill-provider": ^0.4.3 - peerDependencies: - "@babel/core": ^7.4.0 || ^8.0.0-0 <8.0.0 - checksum: 2bb546582cda1870d19e646a7183baeb2cccd56e0ef3e4eaeabd28e120daf17cb87399194a9ccdcf32506bcaa68d23e73440fc8ab990a7a0f8c5a77c12d5d4bc + safe-buffer: 5.2.1 + checksum: afb9d545e296a5171d7574fcad634b2fdf698875f4006a9dd04a3e1333880c5c0c98d47b560d01216fb6505a54a2ba6a843ee3a02ec86d7e911e8315255f56c3 languageName: node linkType: hard -"bail@npm:^1.0.0": +"content-type@npm:^1.0.4, content-type@npm:~1.0.4": version: 1.0.5 - resolution: "bail@npm:1.0.5" - checksum: 6c334940d7eaa4e656a12fb12407b6555649b6deb6df04270fa806e0da82684ebe4a4e47815b271c794b40f8d6fa286e0c248b14ddbabb324a917fab09b7301a + resolution: "content-type@npm:1.0.5" + checksum: 566271e0a251642254cde0f845f9dd4f9856e52d988f4eb0d0dcffbb7a1f8ec98de7a5215fc628f3bce30fe2fb6fd2bc064b562d721658c59b544e2d34ea2766 languageName: node linkType: hard -"balanced-match@npm:^1.0.0": - version: 1.0.2 - resolution: "balanced-match@npm:1.0.2" - checksum: 9706c088a283058a8a99e0bf91b0a2f75497f185980d9ffa8b304de1d9e58ebda7c72c07ebf01dadedaac5b2907b2c6f566f660d62bd336c3468e960403b9d65 +"convert-source-map@npm:^1.7.0": + version: 1.9.0 + resolution: "convert-source-map@npm:1.9.0" + checksum: dc55a1f28ddd0e9485ef13565f8f756b342f9a46c4ae18b843fe3c30c675d058d6a4823eff86d472f187b176f0adf51ea7b69ea38be34be4a63cbbf91b0593c8 languageName: node linkType: hard -"base-x@npm:^3.0.2": - version: 3.0.9 - resolution: "base-x@npm:3.0.9" - dependencies: - safe-buffer: ^5.0.1 - checksum: 957101d6fd09e1903e846fd8f69fd7e5e3e50254383e61ab667c725866bec54e5ece5ba49ce385128ae48f9ec93a26567d1d5ebb91f4d56ef4a9cc0d5a5481e8 +"convert-source-map@npm:^2.0.0": + version: 2.0.0 + resolution: "convert-source-map@npm:2.0.0" + checksum: 63ae9933be5a2b8d4509daca5124e20c14d023c820258e484e32dc324d34c2754e71297c94a05784064ad27615037ef677e3f0c00469fb55f409d2bb21261035 languageName: node linkType: hard -"base16@npm:^1.0.0": - version: 1.0.0 - resolution: "base16@npm:1.0.0" - checksum: 0cd449a2db0f0f957e4b6b57e33bc43c9e20d4f1dd744065db94b5da35e8e71fa4dc4bc7a901e59a84d5f8b6936e3c520e2471787f667fc155fb0f50d8540f5d +"cookie-signature@npm:1.0.6": + version: 1.0.6 + resolution: "cookie-signature@npm:1.0.6" + checksum: f4e1b0a98a27a0e6e66fd7ea4e4e9d8e038f624058371bf4499cfcd8f3980be9a121486995202ba3fca74fbed93a407d6d54d43a43f96fd28d0bd7a06761591a languageName: node linkType: hard -"base64-js@npm:^1.3.1": - version: 1.5.1 - resolution: "base64-js@npm:1.5.1" - checksum: 669632eb3745404c2f822a18fc3a0122d2f9a7a13f7fb8b5823ee19d1d2ff9ee5b52c53367176ea4ad093c332fd5ab4bd0ebae5a8e27917a4105a4cfc86b1005 +"cookie@npm:0.5.0": + version: 0.5.0 + resolution: "cookie@npm:0.5.0" + checksum: 1f4bd2ca5765f8c9689a7e8954183f5332139eb72b6ff783d8947032ec1fdf43109852c178e21a953a30c0dd42257828185be01b49d1eb1a67fd054ca588a180 languageName: node linkType: hard -"batch@npm:0.6.1": - version: 0.6.1 - resolution: "batch@npm:0.6.1" - checksum: 61f9934c7378a51dce61b915586191078ef7f1c3eca707fdd58b96ff2ff56d9e0af2bdab66b1462301a73c73374239e6542d9821c0af787f3209a23365d07e7f +"cookie@npm:^0.4.1": 
+ version: 0.4.2 + resolution: "cookie@npm:0.4.2" + checksum: a00833c998bedf8e787b4c342defe5fa419abd96b32f4464f718b91022586b8f1bafbddd499288e75c037642493c83083da426c6a9080d309e3bd90fd11baa9b languageName: node linkType: hard -"bech32@npm:1.1.4": - version: 1.1.4 - resolution: "bech32@npm:1.1.4" - checksum: 0e98db619191548390d6f09ff68b0253ba7ae6a55db93dfdbb070ba234c1fd3308c0606fbcc95fad50437227b10011e2698b89f0181f6e7f845c499bd14d0f4b +"cookies@npm:~0.8.0": + version: 0.8.0 + resolution: "cookies@npm:0.8.0" + dependencies: + depd: ~2.0.0 + keygrip: ~1.1.0 + checksum: 806055a44f128705265b1bc6a853058da18bf80dea3654ad99be20985b1fa1b14f86c1eef73644aab8071241f8a78acd57202b54c4c5c70769fc694fbb9c4edc languageName: node linkType: hard -"big-integer@npm:^1.6.44": - version: 1.6.51 - resolution: "big-integer@npm:1.6.51" - checksum: 3d444173d1b2e20747e2c175568bedeebd8315b0637ea95d75fd27830d3b8e8ba36c6af40374f36bdaea7b5de376dcada1b07587cb2a79a928fccdb6e6e3c518 +"copy-text-to-clipboard@npm:^3.2.0": + version: 3.2.0 + resolution: "copy-text-to-clipboard@npm:3.2.0" + checksum: df7115c197a166d51f59e4e20ab2a68a855ae8746d25ff149b5465c694d9a405c7e6684b73a9f87ba8d653070164e229c15dfdb9fd77c30be1ff0da569661060 languageName: node linkType: hard -"big.js@npm:^5.2.2": - version: 5.2.2 - resolution: "big.js@npm:5.2.2" - checksum: b89b6e8419b097a8fb4ed2399a1931a68c612bce3cfd5ca8c214b2d017531191070f990598de2fc6f3f993d91c0f08aa82697717f6b3b8732c9731866d233c9e +"copy-webpack-plugin@npm:^11.0.0": + version: 11.0.0 + resolution: "copy-webpack-plugin@npm:11.0.0" + dependencies: + fast-glob: ^3.2.11 + glob-parent: ^6.0.1 + globby: ^13.1.1 + normalize-path: ^3.0.0 + schema-utils: ^4.0.0 + serialize-javascript: ^6.0.0 + peerDependencies: + webpack: ^5.1.0 + checksum: df4f8743f003a29ee7dd3d9b1789998a3a99051c92afb2ba2203d3dacfa696f4e757b275560fafb8f206e520a0aa78af34b990324a0e36c2326cefdeef3ca82e languageName: node linkType: hard -"bigint-crypto-utils@npm:^3.0.23": - version: 3.3.0 - resolution: "bigint-crypto-utils@npm:3.3.0" - checksum: 9598ce57b23f776c8936d44114c9f051e62b5fa654915b664784cbcbacc5aa0485f4479571c51ff58008abb1210c0d6a234853742f07cf84bda890f2a1e01000 +"core-js-compat@npm:^3.31.0, core-js-compat@npm:^3.33.1": + version: 3.34.0 + resolution: "core-js-compat@npm:3.34.0" + dependencies: + browserslist: ^4.22.2 + checksum: 6281f7f57a72f254c06611ec088445e11cf84e0b4edfb5f43dece1a1ff8b0ed0e81ed0bc291024761cd90c39d0f007d8bc46548265139808081d311c7cbc9c81 languageName: node linkType: hard -"binary-extensions@npm:^2.0.0": - version: 2.2.0 - resolution: "binary-extensions@npm:2.2.0" - checksum: ccd267956c58d2315f5d3ea6757cf09863c5fc703e50fbeb13a7dc849b812ef76e3cf9ca8f35a0c48498776a7478d7b4a0418e1e2b8cb9cb9731f2922aaad7f8 +"core-js-pure@npm:^3.30.2": + version: 3.34.0 + resolution: "core-js-pure@npm:3.34.0" + checksum: 4c44ac4beff42e07f41eef3c9ecefc8ee3f9e91e1b9f278bf8520cc1fb37afb663cff77c182541dc42d58737f93ab0f30a33a5fe661fb161fdd8aa7fe78a5edf languageName: node linkType: hard -"bl@npm:^4.0.3": - version: 4.1.0 - resolution: "bl@npm:4.1.0" - dependencies: - buffer: ^5.5.0 - inherits: ^2.0.4 - readable-stream: ^3.4.0 - checksum: 9e8521fa7e83aa9427c6f8ccdcba6e8167ef30cc9a22df26effcc5ab682ef91d2cbc23a239f945d099289e4bbcfae7a192e9c28c84c6202e710a0dfec3722662 +"core-js@npm:^3.23.3, core-js@npm:^3.31.1": + version: 3.34.0 + resolution: "core-js@npm:3.34.0" + checksum: 26b0d103716b33fc660ee8737da7bc9475fbc655f93bbf1360ab692966449d18f2fc393805095937283db9f919ca2aa5c88d86d16f2846217983ad7da707e31e languageName: node linkType: hard 
-"blakejs@npm:^1.1.0": - version: 1.2.1 - resolution: "blakejs@npm:1.2.1" - checksum: d699ba116cfa21d0b01d12014a03e484dd76d483133e6dc9eb415aa70a119f08beb3bcefb8c71840106a00b542cba77383f8be60cd1f0d4589cb8afb922eefbe +"core-util-is@npm:^1.0.3, core-util-is@npm:~1.0.0": + version: 1.0.3 + resolution: "core-util-is@npm:1.0.3" + checksum: 9de8597363a8e9b9952491ebe18167e3b36e7707569eed0ebf14f8bba773611376466ae34575bca8cfe3c767890c859c74056084738f09d4e4a6f902b2ad7d99 languageName: node linkType: hard -"blueimp-md5@npm:^2.10.0": - version: 2.19.0 - resolution: "blueimp-md5@npm:2.19.0" - checksum: 28095dcbd2c67152a2938006e8d7c74c3406ba6556071298f872505432feb2c13241b0476644160ee0a5220383ba94cb8ccdac0053b51f68d168728f9c382530 +"cosmiconfig@npm:^6.0.0": + version: 6.0.0 + resolution: "cosmiconfig@npm:6.0.0" + dependencies: + "@types/parse-json": ^4.0.0 + import-fresh: ^3.1.0 + parse-json: ^5.0.0 + path-type: ^4.0.0 + yaml: ^1.7.2 + checksum: 8eed7c854b91643ecb820767d0deb038b50780ecc3d53b0b19e03ed8aabed4ae77271198d1ae3d49c3b110867edf679f5faad924820a8d1774144a87cb6f98fc languageName: node linkType: hard -"bn.js@npm:^4.11.0, bn.js@npm:^4.11.8, bn.js@npm:^4.11.9": - version: 4.12.0 - resolution: "bn.js@npm:4.12.0" - checksum: 39afb4f15f4ea537b55eaf1446c896af28ac948fdcf47171961475724d1bb65118cca49fa6e3d67706e4790955ec0e74de584e45c8f1ef89f46c812bee5b5a12 +"cosmiconfig@npm:^7.0.1": + version: 7.1.0 + resolution: "cosmiconfig@npm:7.1.0" + dependencies: + "@types/parse-json": ^4.0.0 + import-fresh: ^3.2.1 + parse-json: ^5.0.0 + path-type: ^4.0.0 + yaml: ^1.10.0 + checksum: c53bf7befc1591b2651a22414a5e786cd5f2eeaa87f3678a3d49d6069835a9d8d1aef223728e98aa8fec9a95bf831120d245096db12abe019fecb51f5696c96f languageName: node linkType: hard -"bn.js@npm:^5.2.0, bn.js@npm:^5.2.1": - version: 5.2.1 - resolution: "bn.js@npm:5.2.1" - checksum: 3dd8c8d38055fedfa95c1d5fc3c99f8dd547b36287b37768db0abab3c239711f88ff58d18d155dd8ad902b0b0cee973747b7ae20ea12a09473272b0201c9edd3 +"cosmiconfig@npm:^8.2.0": + version: 8.3.6 + resolution: "cosmiconfig@npm:8.3.6" + dependencies: + import-fresh: ^3.3.0 + js-yaml: ^4.1.0 + parse-json: ^5.2.0 + path-type: ^4.0.0 + peerDependencies: + typescript: ">=4.9.5" + peerDependenciesMeta: + typescript: + optional: true + checksum: dc339ebea427898c9e03bf01b56ba7afbac07fc7d2a2d5a15d6e9c14de98275a9565da949375aee1809591c152c0a3877bb86dbeaf74d5bd5aaa79955ad9e7a0 languageName: node linkType: hard -"body-parser@npm:1.20.1": - version: 1.20.1 - resolution: "body-parser@npm:1.20.1" +"crc-32@npm:^1.2.0": + version: 1.2.2 + resolution: "crc-32@npm:1.2.2" + bin: + crc32: bin/crc32.njs + checksum: ad2d0ad0cbd465b75dcaeeff0600f8195b686816ab5f3ba4c6e052a07f728c3e70df2e3ca9fd3d4484dc4ba70586e161ca5a2334ec8bf5a41bf022a6103ff243 + languageName: node + linkType: hard + +"create-hash@npm:^1.1.0, create-hash@npm:^1.1.2, create-hash@npm:^1.2.0": + version: 1.2.0 + resolution: "create-hash@npm:1.2.0" dependencies: - bytes: 3.1.2 - content-type: ~1.0.4 - debug: 2.6.9 - depd: 2.0.0 - destroy: 1.2.0 - http-errors: 2.0.0 - iconv-lite: 0.4.24 - on-finished: 2.4.1 - qs: 6.11.0 - raw-body: 2.5.1 - type-is: ~1.6.18 - unpipe: 1.0.0 - checksum: f1050dbac3bede6a78f0b87947a8d548ce43f91ccc718a50dd774f3c81f2d8b04693e52acf62659fad23101827dd318da1fb1363444ff9a8482b886a3e4a5266 + cipher-base: ^1.0.1 + inherits: ^2.0.1 + md5.js: ^1.3.4 + ripemd160: ^2.0.1 + sha.js: ^2.4.0 + checksum: 02a6ae3bb9cd4afee3fabd846c1d8426a0e6b495560a977ba46120c473cb283be6aa1cace76b5f927cf4e499c6146fb798253e48e83d522feba807d6b722eaa9 languageName: node linkType: 
hard -"bonjour-service@npm:^1.0.11": - version: 1.1.1 - resolution: "bonjour-service@npm:1.1.1" +"create-hmac@npm:^1.1.4, create-hmac@npm:^1.1.7": + version: 1.1.7 + resolution: "create-hmac@npm:1.1.7" dependencies: - array-flatten: ^2.1.2 - dns-equal: ^1.0.0 - fast-deep-equal: ^3.1.3 - multicast-dns: ^7.2.5 - checksum: 832d0cf78b91368fac8bb11fd7a714e46f4c4fb1bb14d7283bce614a6fb3aae2f3fe209aba5b4fa051811c1cab6921d073a83db8432fb23292f27dd4161fb0f1 + cipher-base: ^1.0.3 + create-hash: ^1.1.0 + inherits: ^2.0.1 + ripemd160: ^2.0.0 + safe-buffer: ^5.0.1 + sha.js: ^2.4.8 + checksum: ba12bb2257b585a0396108c72830e85f882ab659c3320c83584b1037f8ab72415095167ced80dc4ce8e446a8ecc4b2acf36d87befe0707d73b26cf9dc77440ed languageName: node linkType: hard -"boolbase@npm:^1.0.0": - version: 1.0.0 - resolution: "boolbase@npm:1.0.0" - checksum: 3e25c80ef626c3a3487c73dbfc70ac322ec830666c9ad915d11b701142fab25ec1e63eff2c450c74347acfd2de854ccde865cd79ef4db1683f7c7b046ea43bb0 +"create-require@npm:^1.1.0": + version: 1.1.1 + resolution: "create-require@npm:1.1.1" + checksum: a9a1503d4390d8b59ad86f4607de7870b39cad43d929813599a23714831e81c520bddf61bcdd1f8e30f05fd3a2b71ae8538e946eb2786dc65c2bbc520f692eff languageName: node linkType: hard -"boxen@npm:^5.0.0": - version: 5.1.2 - resolution: "boxen@npm:5.1.2" +"cross-fetch@npm:3.1.5": + version: 3.1.5 + resolution: "cross-fetch@npm:3.1.5" dependencies: - ansi-align: ^3.0.0 - camelcase: ^6.2.0 - chalk: ^4.1.0 - cli-boxes: ^2.2.1 - string-width: ^4.2.2 - type-fest: ^0.20.2 - widest-line: ^3.1.0 - wrap-ansi: ^7.0.0 - checksum: 82d03e42a72576ff235123f17b7c505372fe05c83f75f61e7d4fa4bcb393897ec95ce766fecb8f26b915f0f7a7227d66e5ec7cef43f5b2bd9d3aeed47ec55877 + node-fetch: 2.6.7 + checksum: f6b8c6ee3ef993ace6277fd789c71b6acf1b504fd5f5c7128df4ef2f125a429e29cd62dc8c127523f04a5f2fa4771ed80e3f3d9695617f441425045f505cf3bb languageName: node linkType: hard -"boxen@npm:^6.2.1": - version: 6.2.1 - resolution: "boxen@npm:6.2.1" +"cross-fetch@npm:4.0.0": + version: 4.0.0 + resolution: "cross-fetch@npm:4.0.0" dependencies: - ansi-align: ^3.0.1 - camelcase: ^6.2.0 - chalk: ^4.1.2 - cli-boxes: ^3.0.0 - string-width: ^5.0.1 - type-fest: ^2.5.0 - widest-line: ^4.0.1 - wrap-ansi: ^8.0.1 - checksum: 2b3226092f1ff8e149c02979098c976552afa15f9e0231c9ed2dfcaaf84604494d16a6f13b647f718439f64d3140a088e822d47c7db00d2266e9ffc8d7321774 + node-fetch: ^2.6.12 + checksum: ecca4f37ffa0e8283e7a8a590926b66713a7ef7892757aa36c2d20ffa27b0ac5c60dcf453119c809abe5923fc0bae3702a4d896bfb406ef1077b0d0018213e24 languageName: node linkType: hard -"bplist-parser@npm:^0.2.0": - version: 0.2.0 - resolution: "bplist-parser@npm:0.2.0" +"cross-spawn@npm:^7.0.0, cross-spawn@npm:^7.0.2, cross-spawn@npm:^7.0.3": + version: 7.0.3 + resolution: "cross-spawn@npm:7.0.3" dependencies: - big-integer: ^1.6.44 - checksum: d5339dd16afc51de6c88f88f58a45b72ed6a06aa31f5557d09877575f220b7c1d3fbe375da0b62e6a10d4b8ed80523567e351f24014f5bc886ad523758142cdd + path-key: ^3.1.0 + shebang-command: ^2.0.0 + which: ^2.0.1 + checksum: 671cc7c7288c3a8406f3c69a3ae2fc85555c04169e9d611def9a675635472614f1c0ed0ef80955d5b6d4e724f6ced67f0ad1bb006c2ea643488fcfef994d7f52 languageName: node linkType: hard -"brace-expansion@npm:^1.1.7": - version: 1.1.11 - resolution: "brace-expansion@npm:1.1.11" - dependencies: - balanced-match: ^1.0.0 - concat-map: 0.0.1 - checksum: faf34a7bb0c3fcf4b59c7808bc5d2a96a40988addf2e7e09dfbb67a2251800e0d14cd2bfc1aa79174f2f5095c54ff27f46fb1289fe2d77dac755b5eb3434cc07 +"crypto-random-string@npm:^2.0.0": + version: 2.0.0 + resolution: 
"crypto-random-string@npm:2.0.0" + checksum: 0283879f55e7c16fdceacc181f87a0a65c53bc16ffe1d58b9d19a6277adcd71900d02bb2c4843dd55e78c51e30e89b0fec618a7f170ebcc95b33182c28f05fd6 languageName: node linkType: hard -"brace-expansion@npm:^2.0.1": - version: 2.0.1 - resolution: "brace-expansion@npm:2.0.1" +"crypto-random-string@npm:^4.0.0": + version: 4.0.0 + resolution: "crypto-random-string@npm:4.0.0" dependencies: - balanced-match: ^1.0.0 - checksum: a61e7cd2e8a8505e9f0036b3b6108ba5e926b4b55089eeb5550cd04a471fe216c96d4fe7e4c7f995c728c554ae20ddfc4244cad10aef255e72b62930afd233d1 + type-fest: ^1.0.1 + checksum: 91f148f27bcc8582798f0fb3e75a09d9174557f39c3c40a89dd1bd70fb5a14a02548245aa26fa7d663c426ac5026f4729841231c84f9e30e8c8ece5e38656741 languageName: node linkType: hard -"braces@npm:^3.0.2, braces@npm:~3.0.2": - version: 3.0.2 - resolution: "braces@npm:3.0.2" +"cspell-config-lib@npm:8.3.2": + version: 8.3.2 + resolution: "cspell-config-lib@npm:8.3.2" dependencies: - fill-range: ^7.0.1 - checksum: e2a8e769a863f3d4ee887b5fe21f63193a891c68b612ddb4b68d82d1b5f3ff9073af066c343e9867a393fe4c2555dcb33e89b937195feb9c1613d259edfcd459 - languageName: node - linkType: hard - -"brorand@npm:^1.1.0": - version: 1.1.0 - resolution: "brorand@npm:1.1.0" - checksum: 8a05c9f3c4b46572dec6ef71012b1946db6cae8c7bb60ccd4b7dd5a84655db49fe043ecc6272e7ef1f69dc53d6730b9e2a3a03a8310509a3d797a618cbee52be + "@cspell/cspell-types": 8.3.2 + comment-json: ^4.2.3 + yaml: ^2.3.4 + checksum: 7d5563c2a49f13e9e2b965258f8d5539e282c75234f4375831db3f7ee3c209aa246f07f8bbbc7e7195c248edfdc3a58cb590c5119ad866446ee47df07a593481 languageName: node linkType: hard -"browser-level@npm:^1.0.1": - version: 1.0.1 - resolution: "browser-level@npm:1.0.1" +"cspell-dictionary@npm:8.3.2": + version: 8.3.2 + resolution: "cspell-dictionary@npm:8.3.2" dependencies: - abstract-level: ^1.0.2 - catering: ^2.1.1 - module-error: ^1.0.2 - run-parallel-limit: ^1.1.0 - checksum: 67fbc77ce832940bfa25073eccff279f512ad56f545deb996a5b23b02316f5e76f4a79d381acc27eda983f5c9a2566aaf9c97e4fdd0748288c4407307537a29b + "@cspell/cspell-pipe": 8.3.2 + "@cspell/cspell-types": 8.3.2 + cspell-trie-lib: 8.3.2 + fast-equals: ^5.0.1 + gensequence: ^6.0.0 + checksum: 374e8cc94b7d46230b8a3ddb066c14bee746963c01b13a60bf43cfb2e6da1a32dcf963c573016aee3fdf8ffbda6253850679d4c93257de2bf4ec45d4cbaf5888 languageName: node linkType: hard -"browser-stdout@npm:1.3.1": - version: 1.3.1 - resolution: "browser-stdout@npm:1.3.1" - checksum: b717b19b25952dd6af483e368f9bcd6b14b87740c3d226c2977a65e84666ffd67000bddea7d911f111a9b6ddc822b234de42d52ab6507bce4119a4cc003ef7b3 +"cspell-gitignore@npm:8.3.2": + version: 8.3.2 + resolution: "cspell-gitignore@npm:8.3.2" + dependencies: + cspell-glob: 8.3.2 + find-up-simple: ^1.0.0 + bin: + cspell-gitignore: bin.mjs + checksum: 0ae847391993078bca601214812aa9476311d501fcc3a20ceb6e4a22825335188354f71328b0ebc45d6ed35756c6d29062cb4e4b2c26a28a9fb57d3573113b01 languageName: node linkType: hard -"browserify-aes@npm:^1.2.0": - version: 1.2.0 - resolution: "browserify-aes@npm:1.2.0" +"cspell-glob@npm:8.3.2": + version: 8.3.2 + resolution: "cspell-glob@npm:8.3.2" dependencies: - buffer-xor: ^1.0.3 - cipher-base: ^1.0.0 - create-hash: ^1.1.0 - evp_bytestokey: ^1.0.3 - inherits: ^2.0.1 - safe-buffer: ^5.0.1 - checksum: 4a17c3eb55a2aa61c934c286f34921933086bf6d67f02d4adb09fcc6f2fc93977b47d9d884c25619144fccd47b3b3a399e1ad8b3ff5a346be47270114bcf7104 + micromatch: ^4.0.5 + checksum: 
c1abb55fdfde46014963ccd73451218bec7cd9bc0f3cb9308efa0cf066c7783eb69749400aa0d0c1507fbac3fe598ccc26eb5932985db01264ca933f7564a5b2 languageName: node linkType: hard -"browserslist@npm:^4.0.0, browserslist@npm:^4.14.5, browserslist@npm:^4.18.1, browserslist@npm:^4.21.10, browserslist@npm:^4.21.4, browserslist@npm:^4.21.9, browserslist@npm:^4.22.1": - version: 4.22.1 - resolution: "browserslist@npm:4.22.1" +"cspell-grammar@npm:8.3.2": + version: 8.3.2 + resolution: "cspell-grammar@npm:8.3.2" dependencies: - caniuse-lite: ^1.0.30001541 - electron-to-chromium: ^1.4.535 - node-releases: ^2.0.13 - update-browserslist-db: ^1.0.13 + "@cspell/cspell-pipe": 8.3.2 + "@cspell/cspell-types": 8.3.2 bin: - browserslist: cli.js - checksum: 7e6b10c53f7dd5d83fd2b95b00518889096382539fed6403829d447e05df4744088de46a571071afb447046abc3c66ad06fbc790e70234ec2517452e32ffd862 + cspell-grammar: bin.mjs + checksum: 3f48e267f6c26c98f7f24159460bad1b341e0560f00a2e62761ad49ba3fcc8e953af3e4a712e598b2525242cb3dddfb339d85e9917fd72aca33ab292cea71bc9 languageName: node linkType: hard -"bs58@npm:^4.0.0": - version: 4.0.1 - resolution: "bs58@npm:4.0.1" +"cspell-io@npm:8.3.2": + version: 8.3.2 + resolution: "cspell-io@npm:8.3.2" dependencies: - base-x: ^3.0.2 - checksum: b3c5365bb9e0c561e1a82f1a2d809a1a692059fae016be233a6127ad2f50a6b986467c3a50669ce4c18929dcccb297c5909314dd347a25a68c21b68eb3e95ac2 + "@cspell/cspell-service-bus": 8.3.2 + checksum: 609f544d6c23dd471548cbc531ad5d704db4ffaa0513611cdc5f731e9f4de24057dcbc4c1d11e94df3725129549fe2629c9e73750f7de4d3a61eae2cbef13ff1 languageName: node linkType: hard -"bs58check@npm:^2.1.2": - version: 2.1.2 - resolution: "bs58check@npm:2.1.2" - dependencies: - bs58: ^4.0.0 - create-hash: ^1.1.0 - safe-buffer: ^5.1.2 - checksum: 43bdf08a5dd04581b78f040bc4169480e17008da482ffe2a6507327bbc4fc5c28de0501f7faf22901cfe57fbca79cbb202ca529003fedb4cb8dccd265b38e54d +"cspell-lib@npm:8.3.2": + version: 8.3.2 + resolution: "cspell-lib@npm:8.3.2" + dependencies: + "@cspell/cspell-bundled-dicts": 8.3.2 + "@cspell/cspell-pipe": 8.3.2 + "@cspell/cspell-resolver": 8.3.2 + "@cspell/cspell-types": 8.3.2 + "@cspell/dynamic-import": 8.3.2 + "@cspell/strong-weak-map": 8.3.2 + clear-module: ^4.1.2 + comment-json: ^4.2.3 + configstore: ^6.0.0 + cspell-config-lib: 8.3.2 + cspell-dictionary: 8.3.2 + cspell-glob: 8.3.2 + cspell-grammar: 8.3.2 + cspell-io: 8.3.2 + cspell-trie-lib: 8.3.2 + fast-equals: ^5.0.1 + gensequence: ^6.0.0 + import-fresh: ^3.3.0 + resolve-from: ^5.0.0 + vscode-languageserver-textdocument: ^1.0.11 + vscode-uri: ^3.0.8 + checksum: 398f8b799aa4b7e60b9a0b2e751fbd0d3d81f8ccdc0b331668568ab5f4cced0e64ddc8325a7901ae950fd3838f8fb37bb4dd8f73938fb78b50bdd5f2f5668375 languageName: node linkType: hard -"buffer-crc32@npm:~0.2.3": - version: 0.2.13 - resolution: "buffer-crc32@npm:0.2.13" - checksum: 06252347ae6daca3453b94e4b2f1d3754a3b146a111d81c68924c22d91889a40623264e95e67955b1cb4a68cbedf317abeabb5140a9766ed248973096db5ce1c +"cspell-trie-lib@npm:8.3.2": + version: 8.3.2 + resolution: "cspell-trie-lib@npm:8.3.2" + dependencies: + "@cspell/cspell-pipe": 8.3.2 + "@cspell/cspell-types": 8.3.2 + gensequence: ^6.0.0 + checksum: 43d3bcf4c1bf1d8fce9601beec52a4f5bd2e27ccd1ea737aca2c32dbec721942cb8671b6add6b57d3a5953389a494a28878a53abccc49de7c59bbb7e78413c3e languageName: node linkType: hard -"buffer-from@npm:^1.0.0": - version: 1.1.2 - resolution: "buffer-from@npm:1.1.2" - checksum: 0448524a562b37d4d7ed9efd91685a5b77a50672c556ea254ac9a6d30e3403a517d8981f10e565db24e8339413b43c97ca2951f10e399c6125a0d8911f5679bb 
+"cspell@npm:^8.3.2": + version: 8.3.2 + resolution: "cspell@npm:8.3.2" + dependencies: + "@cspell/cspell-json-reporter": 8.3.2 + "@cspell/cspell-pipe": 8.3.2 + "@cspell/cspell-types": 8.3.2 + "@cspell/dynamic-import": 8.3.2 + chalk: ^5.3.0 + chalk-template: ^1.1.0 + commander: ^11.1.0 + cspell-gitignore: 8.3.2 + cspell-glob: 8.3.2 + cspell-io: 8.3.2 + cspell-lib: 8.3.2 + fast-glob: ^3.3.2 + fast-json-stable-stringify: ^2.1.0 + file-entry-cache: ^8.0.0 + get-stdin: ^9.0.0 + semver: ^7.5.4 + strip-ansi: ^7.1.0 + vscode-uri: ^3.0.8 + bin: + cspell: bin.mjs + cspell-esm: bin.mjs + checksum: c798482b411ba94d8dc475445082d56b70fc23d88752f3e6e8e18174f247d35a0c50ed1a1a9f578cdfb1294dc8d175613f1aa7828f92b1f841047580156ab986 languageName: node linkType: hard -"buffer-xor@npm:^1.0.3": - version: 1.0.3 - resolution: "buffer-xor@npm:1.0.3" - checksum: 10c520df29d62fa6e785e2800e586a20fc4f6dfad84bcdbd12e1e8a83856de1cb75c7ebd7abe6d036bbfab738a6cf18a3ae9c8e5a2e2eb3167ca7399ce65373a +"css-declaration-sorter@npm:^6.3.1": + version: 6.4.1 + resolution: "css-declaration-sorter@npm:6.4.1" + peerDependencies: + postcss: ^8.0.9 + checksum: cbdc9e0d481011b1a28fd5b60d4eb55fe204391d31a0b1b490b2cecf4baa85810f9b8c48adab4df644f4718104ed3ed72c64a9745e3216173767bf4aeca7f9b8 languageName: node linkType: hard -"buffer@npm:^5.2.1, buffer@npm:^5.5.0": - version: 5.7.1 - resolution: "buffer@npm:5.7.1" +"css-loader@npm:^6.7.1, css-loader@npm:^6.8.1": + version: 6.8.1 + resolution: "css-loader@npm:6.8.1" dependencies: - base64-js: ^1.3.1 - ieee754: ^1.1.13 - checksum: e2cf8429e1c4c7b8cbd30834ac09bd61da46ce35f5c22a78e6c2f04497d6d25541b16881e30a019c6fd3154150650ccee27a308eff3e26229d788bbdeb08ab84 + icss-utils: ^5.1.0 + postcss: ^8.4.21 + postcss-modules-extract-imports: ^3.0.0 + postcss-modules-local-by-default: ^4.0.3 + postcss-modules-scope: ^3.0.0 + postcss-modules-values: ^4.0.0 + postcss-value-parser: ^4.2.0 + semver: ^7.3.8 + peerDependencies: + webpack: ^5.0.0 + checksum: 7c1784247bdbe76dc5c55fb1ac84f1d4177a74c47259942c9cfdb7a8e6baef11967a0bc85ac285f26bd26d5059decb848af8154a03fdb4f4894f41212f45eef3 languageName: node linkType: hard -"buffer@npm:^6.0.3": - version: 6.0.3 - resolution: "buffer@npm:6.0.3" +"css-minimizer-webpack-plugin@npm:^4.0.0, css-minimizer-webpack-plugin@npm:^4.2.2": + version: 4.2.2 + resolution: "css-minimizer-webpack-plugin@npm:4.2.2" dependencies: - base64-js: ^1.3.1 - ieee754: ^1.2.1 - checksum: 5ad23293d9a731e4318e420025800b42bf0d264004c0286c8cc010af7a270c7a0f6522e84f54b9ad65cbd6db20b8badbfd8d2ebf4f80fa03dab093b89e68c3f9 + cssnano: ^5.1.8 + jest-worker: ^29.1.2 + postcss: ^8.4.17 + schema-utils: ^4.0.0 + serialize-javascript: ^6.0.0 + source-map: ^0.6.1 + peerDependencies: + webpack: ^5.0.0 + peerDependenciesMeta: + "@parcel/css": + optional: true + "@swc/css": + optional: true + clean-css: + optional: true + csso: + optional: true + esbuild: + optional: true + lightningcss: + optional: true + checksum: 5417e76a445f35832aa96807c835b8e92834a6cd285b1b788dfe3ca0fa90fec7eb2dd6efa9d3649f9d8244b99b7da2d065951603b94918e8f6a366a5049cacdd languageName: node linkType: hard -"builtin-modules@npm:^3.3.0": - version: 3.3.0 - resolution: "builtin-modules@npm:3.3.0" - checksum: db021755d7ed8be048f25668fe2117620861ef6703ea2c65ed2779c9e3636d5c3b82325bd912244293959ff3ae303afa3471f6a15bf5060c103e4cc3a839749d +"css-select@npm:^4.1.3": + version: 4.3.0 + resolution: "css-select@npm:4.3.0" + dependencies: + boolbase: ^1.0.0 + css-what: ^6.0.1 + domhandler: ^4.3.1 + domutils: ^2.8.0 + nth-check: ^2.0.1 + checksum: 
d6202736839194dd7f910320032e7cfc40372f025e4bf21ca5bf6eb0a33264f322f50ba9c0adc35dadd342d3d6fae5ca244779a4873afbfa76561e343f2058e0 languageName: node linkType: hard -"bundle-name@npm:^3.0.0": - version: 3.0.0 - resolution: "bundle-name@npm:3.0.0" +"css-select@npm:^5.1.0": + version: 5.1.0 + resolution: "css-select@npm:5.1.0" dependencies: - run-applescript: ^5.0.0 - checksum: edf2b1fbe6096ed32e7566947ace2ea937ee427391744d7510a2880c4b9a5b3543d3f6c551236a29e5c87d3195f8e2912516290e638c15bcbede7b37cc375615 + boolbase: ^1.0.0 + css-what: ^6.1.0 + domhandler: ^5.0.2 + domutils: ^3.0.1 + nth-check: ^2.0.1 + checksum: 2772c049b188d3b8a8159907192e926e11824aea525b8282981f72ba3f349cf9ecd523fdf7734875ee2cb772246c22117fc062da105b6d59afe8dcd5c99c9bda languageName: node linkType: hard -"bytes@npm:3.0.0": - version: 3.0.0 - resolution: "bytes@npm:3.0.0" - checksum: a2b386dd8188849a5325f58eef69c3b73c51801c08ffc6963eddc9be244089ba32d19347caf6d145c86f315ae1b1fc7061a32b0c1aa6379e6a719090287ed101 +"css-tree@npm:^1.1.2, css-tree@npm:^1.1.3": + version: 1.1.3 + resolution: "css-tree@npm:1.1.3" + dependencies: + mdn-data: 2.0.14 + source-map: ^0.6.1 + checksum: 79f9b81803991b6977b7fcb1588799270438274d89066ce08f117f5cdb5e20019b446d766c61506dd772c839df84caa16042d6076f20c97187f5abe3b50e7d1f languageName: node linkType: hard -"bytes@npm:3.1.2": - version: 3.1.2 - resolution: "bytes@npm:3.1.2" - checksum: e4bcd3948d289c5127591fbedf10c0b639ccbf00243504e4e127374a15c3bc8eed0d28d4aaab08ff6f1cf2abc0cce6ba3085ed32f4f90e82a5683ce0014e1b6e +"css-what@npm:^6.0.1, css-what@npm:^6.1.0": + version: 6.1.0 + resolution: "css-what@npm:6.1.0" + checksum: b975e547e1e90b79625918f84e67db5d33d896e6de846c9b584094e529f0c63e2ab85ee33b9daffd05bff3a146a1916bec664e18bb76dd5f66cbff9fc13b2bbe languageName: node linkType: hard -"cacache@npm:^17.0.0": - version: 17.1.4 - resolution: "cacache@npm:17.1.4" - dependencies: - "@npmcli/fs": ^3.1.0 - fs-minipass: ^3.0.0 - glob: ^10.2.2 - lru-cache: ^7.7.1 - minipass: ^7.0.3 - minipass-collect: ^1.0.2 - minipass-flush: ^1.0.5 - minipass-pipeline: ^1.2.4 - p-map: ^4.0.0 - ssri: ^10.0.0 - tar: ^6.1.11 - unique-filename: ^3.0.0 - checksum: b7751df756656954a51201335addced8f63fc53266fa56392c9f5ae83c8d27debffb4458ac2d168a744a4517ec3f2163af05c20097f93d17bdc2dc8a385e14a6 +"cssesc@npm:^3.0.0": + version: 3.0.0 + resolution: "cssesc@npm:3.0.0" + bin: + cssesc: bin/cssesc + checksum: f8c4ababffbc5e2ddf2fa9957dda1ee4af6048e22aeda1869d0d00843223c1b13ad3f5d88b51caa46c994225eacb636b764eb807a8883e2fb6f99b4f4e8c48b2 languageName: node linkType: hard -"cache-content-type@npm:^1.0.0": - version: 1.0.1 - resolution: "cache-content-type@npm:1.0.1" +"cssnano-preset-advanced@npm:^5.3.10, cssnano-preset-advanced@npm:^5.3.8": + version: 5.3.10 + resolution: "cssnano-preset-advanced@npm:5.3.10" dependencies: - mime-types: ^2.1.18 - ylru: ^1.2.0 - checksum: 18db4d59452669ccbfd7146a1510a37eb28e9eccf18ca7a4eb603dff2edc5cccdca7498fc3042a2978f76f11151fba486eb9eb69d9afa3fb124957870aef4fd3 + autoprefixer: ^10.4.12 + cssnano-preset-default: ^5.2.14 + postcss-discard-unused: ^5.1.0 + postcss-merge-idents: ^5.1.1 + postcss-reduce-idents: ^5.2.0 + postcss-zindex: ^5.1.0 + peerDependencies: + postcss: ^8.2.15 + checksum: d21cb382aea2f35c9eaa50686280bbd5158260edf73020731364b03bae0d887292da51ed0b20b369f51d2573ee8c02c695f604647b839a9ca746be8a44c3bb5b languageName: node linkType: hard -"cacheable-request@npm:^6.0.0": - version: 6.1.0 - resolution: "cacheable-request@npm:6.1.0" +"cssnano-preset-default@npm:^5.2.14": + version: 5.2.14 + resolution: 
"cssnano-preset-default@npm:5.2.14" dependencies: - clone-response: ^1.0.2 - get-stream: ^5.1.0 - http-cache-semantics: ^4.0.0 - keyv: ^3.0.0 - lowercase-keys: ^2.0.0 - normalize-url: ^4.1.0 - responselike: ^1.0.2 - checksum: b510b237b18d17e89942e9ee2d2a077cb38db03f12167fd100932dfa8fc963424bfae0bfa1598df4ae16c944a5484e43e03df8f32105b04395ee9495e9e4e9f1 + css-declaration-sorter: ^6.3.1 + cssnano-utils: ^3.1.0 + postcss-calc: ^8.2.3 + postcss-colormin: ^5.3.1 + postcss-convert-values: ^5.1.3 + postcss-discard-comments: ^5.1.2 + postcss-discard-duplicates: ^5.1.0 + postcss-discard-empty: ^5.1.1 + postcss-discard-overridden: ^5.1.0 + postcss-merge-longhand: ^5.1.7 + postcss-merge-rules: ^5.1.4 + postcss-minify-font-values: ^5.1.0 + postcss-minify-gradients: ^5.1.1 + postcss-minify-params: ^5.1.4 + postcss-minify-selectors: ^5.2.1 + postcss-normalize-charset: ^5.1.0 + postcss-normalize-display-values: ^5.1.0 + postcss-normalize-positions: ^5.1.1 + postcss-normalize-repeat-style: ^5.1.1 + postcss-normalize-string: ^5.1.0 + postcss-normalize-timing-functions: ^5.1.0 + postcss-normalize-unicode: ^5.1.1 + postcss-normalize-url: ^5.1.0 + postcss-normalize-whitespace: ^5.1.1 + postcss-ordered-values: ^5.1.3 + postcss-reduce-initial: ^5.1.2 + postcss-reduce-transforms: ^5.1.0 + postcss-svgo: ^5.1.0 + postcss-unique-selectors: ^5.1.1 + peerDependencies: + postcss: ^8.2.15 + checksum: d3bbbe3d50c6174afb28d0bdb65b511fdab33952ec84810aef58b87189f3891c34aaa8b6a6101acd5314f8acded839b43513e39a75f91a698ddc985a1b1d9e95 languageName: node linkType: hard -"call-bind@npm:^1.0.0, call-bind@npm:^1.0.2": - version: 1.0.2 - resolution: "call-bind@npm:1.0.2" +"cssnano-utils@npm:^3.1.0": + version: 3.1.0 + resolution: "cssnano-utils@npm:3.1.0" + peerDependencies: + postcss: ^8.2.15 + checksum: 975c84ce9174cf23bb1da1e9faed8421954607e9ea76440cd3bb0c1bea7e17e490d800fca5ae2812d1d9e9d5524eef23ede0a3f52497d7ccc628e5d7321536f2 + languageName: node + linkType: hard + +"cssnano@npm:^5.1.12, cssnano@npm:^5.1.15, cssnano@npm:^5.1.8": + version: 5.1.15 + resolution: "cssnano@npm:5.1.15" dependencies: - function-bind: ^1.1.1 - get-intrinsic: ^1.0.2 - checksum: f8e31de9d19988a4b80f3e704788c4a2d6b6f3d17cfec4f57dc29ced450c53a49270dc66bf0fbd693329ee948dd33e6c90a329519aef17474a4d961e8d6426b0 + cssnano-preset-default: ^5.2.14 + lilconfig: ^2.0.3 + yaml: ^1.10.2 + peerDependencies: + postcss: ^8.2.15 + checksum: ca9e1922178617c66c2f1548824b2c7af2ecf69cc3a187fc96bf8d29251c2e84d9e4966c69cf64a2a6a057a37dff7d6d057bc8a2a0957e6ea382e452ae9d0bbb languageName: node linkType: hard -"callsites@npm:^3.0.0": - version: 3.1.0 - resolution: "callsites@npm:3.1.0" - checksum: 072d17b6abb459c2ba96598918b55868af677154bec7e73d222ef95a8fdb9bbf7dae96a8421085cdad8cd190d86653b5b6dc55a4484f2e5b2e27d5e0c3fc15b3 +"csso@npm:^4.2.0": + version: 4.2.0 + resolution: "csso@npm:4.2.0" + dependencies: + css-tree: ^1.1.2 + checksum: 380ba9663da3bcea58dee358a0d8c4468bb6539be3c439dc266ac41c047217f52fd698fb7e4b6b6ccdfb8cf53ef4ceed8cc8ceccb8dfca2aa628319826b5b998 languageName: node linkType: hard -"callsites@npm:^4.0.0": - version: 4.1.0 - resolution: "callsites@npm:4.1.0" - checksum: 4ad31de7b7615fa25bdab9c2373865209d2d5190f895cdf2e2f518bd1dafa7ebcda2e6e9cc9640f2dfde6b3893d82fa4359a78ffc27baad2503227553c6882fa +"csstype@npm:^3.0.2": + version: 3.1.3 + resolution: "csstype@npm:3.1.3" + checksum: 8db785cc92d259102725b3c694ec0c823f5619a84741b5c7991b8ad135dfaa66093038a1cc63e03361a6cd28d122be48f2106ae72334e067dd619a51f49eddf7 languageName: node linkType: hard 
-"camel-case@npm:^4.1.2": - version: 4.1.2 - resolution: "camel-case@npm:4.1.2" - dependencies: - pascal-case: ^3.1.2 - tslib: ^2.0.3 - checksum: bcbd25cd253b3cbc69be3f535750137dbf2beb70f093bdc575f73f800acc8443d34fd52ab8f0a2413c34f1e8203139ffc88428d8863e4dfe530cfb257a379ad6 +"data-uri-to-buffer@npm:^4.0.0": + version: 4.0.1 + resolution: "data-uri-to-buffer@npm:4.0.1" + checksum: 0d0790b67ffec5302f204c2ccca4494f70b4e2d940fea3d36b09f0bb2b8539c2e86690429eb1f1dc4bcc9e4df0644193073e63d9ee48ac9fce79ec1506e4aa4c languageName: node linkType: hard -"camelcase-css@npm:2.0.1": - version: 2.0.1 - resolution: "camelcase-css@npm:2.0.1" - checksum: 1cec2b3b3dcb5026688a470b00299a8db7d904c4802845c353dbd12d9d248d3346949a814d83bfd988d4d2e5b9904c07efe76fecd195a1d4f05b543e7c0b56b1 +"data-uri-to-buffer@npm:^6.0.0": + version: 6.0.1 + resolution: "data-uri-to-buffer@npm:6.0.1" + checksum: 9140e68c585ae33d950f5943bd476751346c8b789ae80b01a578a33cb8f7f706d1ca7378aff2b1878b2a6d9a8c88c55cc286d88191c8b8ead8255c3c4d934530 languageName: node linkType: hard -"camelcase@npm:^6.0.0, camelcase@npm:^6.2.0": - version: 6.3.0 - resolution: "camelcase@npm:6.3.0" - checksum: 8c96818a9076434998511251dcb2761a94817ea17dbdc37f47ac080bd088fc62c7369429a19e2178b993497132c8cbcf5cc1f44ba963e76782ba469c0474938d +"debounce@npm:^1.2.0, debounce@npm:^1.2.1": + version: 1.2.1 + resolution: "debounce@npm:1.2.1" + checksum: 682a89506d9e54fb109526f4da255c5546102fbb8e3ae75eef3b04effaf5d4853756aee97475cd4650641869794e44f410eeb20ace2b18ea592287ab2038519e languageName: node linkType: hard -"caniuse-api@npm:^3.0.0": - version: 3.0.0 - resolution: "caniuse-api@npm:3.0.0" +"debug@npm:2.6.9, debug@npm:^2.6.0, debug@npm:^2.6.9": + version: 2.6.9 + resolution: "debug@npm:2.6.9" dependencies: - browserslist: ^4.0.0 - caniuse-lite: ^1.0.0 - lodash.memoize: ^4.1.2 - lodash.uniq: ^4.5.0 - checksum: db2a229383b20d0529b6b589dde99d7b6cb56ba371366f58cbbfa2929c9f42c01f873e2b6ef641d4eda9f0b4118de77dbb2805814670bdad4234bf08e720b0b4 + ms: 2.0.0 + checksum: d2f51589ca66df60bf36e1fa6e4386b318c3f1e06772280eea5b1ae9fd3d05e9c2b7fd8a7d862457d00853c75b00451aa2d7459b924629ee385287a650f58fe6 languageName: node linkType: hard -"caniuse-lite@npm:^1.0.0, caniuse-lite@npm:^1.0.30001538, caniuse-lite@npm:^1.0.30001541": - version: 1.0.30001549 - resolution: "caniuse-lite@npm:1.0.30001549" - checksum: 7f2abeedc8cf8b92cc0613855d71b995ce436068c0bcdd798c5af7d297ccf9f52496b00181beda42d82d25079dd4b6e389c67486156d40d8854e5707a25cb054 +"debug@npm:4, debug@npm:4.3.4, debug@npm:^4.0.0, debug@npm:^4.1.0, debug@npm:^4.1.1, debug@npm:^4.2.0, debug@npm:^4.3.2, debug@npm:^4.3.3, debug@npm:^4.3.4": + version: 4.3.4 + resolution: "debug@npm:4.3.4" + dependencies: + ms: 2.1.2 + peerDependenciesMeta: + supports-color: + optional: true + checksum: 3dbad3f94ea64f34431a9cbf0bafb61853eda57bff2880036153438f50fb5a84f27683ba0d8e5426bf41a8c6ff03879488120cf5b3a761e77953169c0600a708 languageName: node linkType: hard -"case@npm:^1.6.3": - version: 1.6.3 - resolution: "case@npm:1.6.3" - checksum: febe73278f910b0d28aab7efd6f51c235f9aa9e296148edb56dfb83fd58faa88308c30ce9a0122b6e53e0362c44f4407105bd5ef89c46860fc2b184e540fd68d +"debug@npm:^3.1.0, debug@npm:^3.2.7": + version: 3.2.7 + resolution: "debug@npm:3.2.7" + dependencies: + ms: ^2.1.1 + checksum: b3d8c5940799914d30314b7c3304a43305fd0715581a919dacb8b3176d024a782062368405b47491516d2091d6462d4d11f2f4974a405048094f8bfebfa3071c languageName: node linkType: hard -"catering@npm:^2.1.0, catering@npm:^2.1.1": - version: 2.1.1 - resolution: "catering@npm:2.1.1" - 
checksum: 205daefa69c935b0c19f3d8f2e0a520dd69aebe9bda55902958003f7c9cff8f967dfb90071b421bd6eb618576f657a89d2bc0986872c9bc04bbd66655e9d4bd6 +"decamelize@npm:^4.0.0": + version: 4.0.0 + resolution: "decamelize@npm:4.0.0" + checksum: b7d09b82652c39eead4d6678bb578e3bebd848add894b76d0f6b395bc45b2d692fb88d977e7cfb93c4ed6c119b05a1347cef261174916c2e75c0a8ca57da1809 languageName: node linkType: hard -"cbor@npm:^8.1.0": - version: 8.1.0 - resolution: "cbor@npm:8.1.0" +"decode-named-character-reference@npm:^1.0.0": + version: 1.0.2 + resolution: "decode-named-character-reference@npm:1.0.2" dependencies: - nofilter: ^3.1.0 - checksum: a90338435dc7b45cc01461af979e3bb6ddd4f2a08584c437586039cd5f2235014c06e49d664295debbfb3514d87b2f06728092ab6aa6175e2e85e9cd7dc0c1fd + character-entities: ^2.0.0 + checksum: f4c71d3b93105f20076052f9cb1523a22a9c796b8296cd35eef1ca54239c78d182c136a848b83ff8da2071e3ae2b1d300bf29d00650a6d6e675438cc31b11d78 languageName: node linkType: hard -"ccount@npm:^1.0.0": - version: 1.1.0 - resolution: "ccount@npm:1.1.0" - checksum: b335a79d0aa4308919cf7507babcfa04ac63d389ebed49dbf26990d4607c8a4713cde93cc83e707d84571ddfe1e7615dad248be9bc422ae4c188210f71b08b78 +"decompress-response@npm:^3.3.0": + version: 3.3.0 + resolution: "decompress-response@npm:3.3.0" + dependencies: + mimic-response: ^1.0.0 + checksum: 952552ac3bd7de2fc18015086b09468645c9638d98a551305e485230ada278c039c91116e946d07894b39ee53c0f0d5b6473f25a224029344354513b412d7380 languageName: node linkType: hard -"chai-as-promised@npm:^7.1.1": - version: 7.1.1 - resolution: "chai-as-promised@npm:7.1.1" +"decompress-response@npm:^6.0.0": + version: 6.0.0 + resolution: "decompress-response@npm:6.0.0" dependencies: - check-error: ^1.0.2 - peerDependencies: - chai: ">= 2.1.2 < 5" - checksum: 7262868a5b51a12af4e432838ddf97a893109266a505808e1868ba63a12de7ee1166e9d43b5c501a190c377c1b11ecb9ff8e093c89f097ad96c397e8ec0f8d6a + mimic-response: ^3.1.0 + checksum: d377cf47e02d805e283866c3f50d3d21578b779731e8c5072d6ce8c13cc31493db1c2f6784da9d1d5250822120cefa44f1deab112d5981015f2e17444b763812 languageName: node linkType: hard -"chai@npm:^4.3.7, chai@npm:^4.3.8": - version: 4.3.8 - resolution: "chai@npm:4.3.8" +"deep-eql@npm:^4.0.1, deep-eql@npm:^4.1.3": + version: 4.1.3 + resolution: "deep-eql@npm:4.1.3" dependencies: - assertion-error: ^1.1.0 - check-error: ^1.0.2 - deep-eql: ^4.1.2 - get-func-name: ^2.0.0 - loupe: ^2.3.1 - pathval: ^1.1.1 - type-detect: ^4.0.5 - checksum: 29e0984ed13308319cadc35437c8ef0a3e271544d226c991bf7e3b6d771bf89707321669e11d05e362bc0ad0bd26585079b989d1032f3c106e3bb95d7f079cce + type-detect: ^4.0.0 + checksum: 7f6d30cb41c713973dc07eaadded848b2ab0b835e518a88b91bea72f34e08c4c71d167a722a6f302d3a6108f05afd8e6d7650689a84d5d29ec7fe6220420397f languageName: node linkType: hard -"chalk-template@npm:^0.4.0": - version: 0.4.0 - resolution: "chalk-template@npm:0.4.0" - dependencies: - chalk: ^4.1.2 - checksum: 6c706802a79a7963cbce18f022b046fe86e438a67843151868852f80ea7346e975a6a9749991601e7e5d3b6a6c4852a04c53dc966a9a3d04031bd0e0ed53c819 +"deep-equal@npm:~1.0.1": + version: 1.0.1 + resolution: "deep-equal@npm:1.0.1" + checksum: 5af8cbfcebf190491878a498caccc7dc9592f8ebd1685b976eacc3825619d222b5e929923163b92c4f414494e2b884f7ebf00c022e8198e8292deb70dd9785f4 languageName: node linkType: hard -"chalk@npm:^2.4.2": - version: 2.4.2 - resolution: "chalk@npm:2.4.2" - dependencies: - ansi-styles: ^3.2.1 - escape-string-regexp: ^1.0.5 - supports-color: ^5.3.0 - checksum: 
ec3661d38fe77f681200f878edbd9448821924e0f93a9cefc0e26a33b145f1027a2084bf19967160d11e1f03bfe4eaffcabf5493b89098b2782c3fe0b03d80c2 +"deep-extend@npm:^0.6.0, deep-extend@npm:~0.6.0": + version: 0.6.0 + resolution: "deep-extend@npm:0.6.0" + checksum: 7be7e5a8d468d6b10e6a67c3de828f55001b6eb515d014f7aeb9066ce36bd5717161eb47d6a0f7bed8a9083935b465bc163ee2581c8b128d29bf61092fdf57a7 languageName: node linkType: hard -"chalk@npm:^4.0.0, chalk@npm:^4.1.0, chalk@npm:^4.1.2": - version: 4.1.2 - resolution: "chalk@npm:4.1.2" - dependencies: - ansi-styles: ^4.1.0 - supports-color: ^7.1.0 - checksum: fe75c9d5c76a7a98d45495b91b2172fa3b7a09e0cc9370e5c8feb1c567b85c4288e2b3fded7cfdd7359ac28d6b3844feb8b82b8686842e93d23c827c417e83fc +"deep-is@npm:^0.1.3": + version: 0.1.4 + resolution: "deep-is@npm:0.1.4" + checksum: edb65dd0d7d1b9c40b2f50219aef30e116cedd6fc79290e740972c132c09106d2e80aa0bc8826673dd5a00222d4179c84b36a790eef63a4c4bca75a37ef90804 languageName: node linkType: hard -"chalk@npm:^5.0.1, chalk@npm:^5.2.0": - version: 5.3.0 - resolution: "chalk@npm:5.3.0" - checksum: 623922e077b7d1e9dedaea6f8b9e9352921f8ae3afe739132e0e00c275971bdd331268183b2628cf4ab1727c45ea1f28d7e24ac23ce1db1eb653c414ca8a5a80 +"deepmerge@npm:^4.2.2": + version: 4.3.1 + resolution: "deepmerge@npm:4.3.1" + checksum: 2024c6a980a1b7128084170c4cf56b0fd58a63f2da1660dcfe977415f27b17dbe5888668b59d0b063753f3220719d5e400b7f113609489c90160bb9a5518d052 languageName: node linkType: hard -"character-entities-legacy@npm:^1.0.0": - version: 1.1.4 - resolution: "character-entities-legacy@npm:1.1.4" - checksum: fe03a82c154414da3a0c8ab3188e4237ec68006cbcd681cf23c7cfb9502a0e76cd30ab69a2e50857ca10d984d57de3b307680fff5328ccd427f400e559c3a811 +"default-browser-id@npm:^3.0.0": + version: 3.0.0 + resolution: "default-browser-id@npm:3.0.0" + dependencies: + bplist-parser: ^0.2.0 + untildify: ^4.0.0 + checksum: 279c7ad492542e5556336b6c254a4eaf31b2c63a5433265655ae6e47301197b6cfb15c595a6fdc6463b2ff8e1a1a1ed3cba56038a60e1527ba4ab1628c6b9941 languageName: node linkType: hard -"character-entities@npm:^1.0.0": - version: 1.2.4 - resolution: "character-entities@npm:1.2.4" - checksum: e1545716571ead57beac008433c1ff69517cd8ca5b336889321c5b8ff4a99c29b65589a701e9c086cda8a5e346a67295e2684f6c7ea96819fe85cbf49bf8686d +"default-browser@npm:^4.0.0": + version: 4.0.0 + resolution: "default-browser@npm:4.0.0" + dependencies: + bundle-name: ^3.0.0 + default-browser-id: ^3.0.0 + execa: ^7.1.1 + titleize: ^3.0.0 + checksum: 40c5af984799042b140300be5639c9742599bda76dc9eba5ac9ad5943c83dd36cebc4471eafcfddf8e0ec817166d5ba89d56f08e66a126c7c7908a179cead1a7 languageName: node linkType: hard -"character-reference-invalid@npm:^1.0.0": - version: 1.1.4 - resolution: "character-reference-invalid@npm:1.1.4" - checksum: 20274574c70e05e2f81135f3b93285536bc8ff70f37f0809b0d17791a832838f1e49938382899ed4cb444e5bbd4314ca1415231344ba29f4222ce2ccf24fea0b +"default-gateway@npm:^6.0.3": + version: 6.0.3 + resolution: "default-gateway@npm:6.0.3" + dependencies: + execa: ^5.0.0 + checksum: 126f8273ecac8ee9ff91ea778e8784f6cd732d77c3157e8c5bdd6ed03651b5291f71446d05bc02d04073b1e67583604db5394ea3cf992ede0088c70ea15b7378 languageName: node linkType: hard -"check-error@npm:^1.0.2": - version: 1.0.2 - resolution: "check-error@npm:1.0.2" - checksum: d9d106504404b8addd1ee3f63f8c0eaa7cd962a1a28eb9c519b1c4a1dc7098be38007fc0060f045ee00f075fbb7a2a4f42abcf61d68323677e11ab98dc16042e +"defer-to-connect@npm:^1.0.1": + version: 1.1.3 + resolution: "defer-to-connect@npm:1.1.3" + checksum: 
9491b301dcfa04956f989481ba7a43c2231044206269eb4ab64a52d6639ee15b1252262a789eb4239fb46ab63e44d4e408641bae8e0793d640aee55398cb3930 languageName: node linkType: hard -"cheerio-select@npm:^2.1.0": - version: 2.1.0 - resolution: "cheerio-select@npm:2.1.0" - dependencies: - boolbase: ^1.0.0 - css-select: ^5.1.0 - css-what: ^6.1.0 - domelementtype: ^2.3.0 - domhandler: ^5.0.3 - domutils: ^3.0.1 - checksum: 843d6d479922f28a6c5342c935aff1347491156814de63c585a6eb73baf7bb4185c1b4383a1195dca0f12e3946d737c7763bcef0b9544c515d905c5c44c5308b +"defer-to-connect@npm:^2.0.1": + version: 2.0.1 + resolution: "defer-to-connect@npm:2.0.1" + checksum: 8a9b50d2f25446c0bfefb55a48e90afd58f85b21bcf78e9207cd7b804354f6409032a1705c2491686e202e64fc05f147aa5aa45f9aa82627563f045937f5791b languageName: node linkType: hard -"cheerio@npm:^1.0.0-rc.12, cheerio@npm:^1.0.0-rc.3": - version: 1.0.0-rc.12 - resolution: "cheerio@npm:1.0.0-rc.12" +"deferred-leveldown@npm:~0.2.0": + version: 0.2.0 + resolution: "deferred-leveldown@npm:0.2.0" dependencies: - cheerio-select: ^2.1.0 - dom-serializer: ^2.0.0 - domhandler: ^5.0.3 - domutils: ^3.0.1 - htmlparser2: ^8.0.1 - parse5: ^7.0.0 - parse5-htmlparser2-tree-adapter: ^7.0.0 - checksum: 5d4c1b7a53cf22d3a2eddc0aff70cf23cbb30d01a4c79013e703a012475c02461aa1fcd99127e8d83a02216386ed6942b2c8103845fd0812300dd199e6e7e054 + abstract-leveldown: ~0.12.1 + checksum: f7690ec5b1e951e6f56998be26dd0a1331ef28cb7eaa9e090a282780d47dc006effd4b82a2a82b636cae801378047997aca10c0b44b09c8624633cdb96b07913 languageName: node linkType: hard -"chokidar@npm:3.5.3, chokidar@npm:^3.4.0, chokidar@npm:^3.4.2, chokidar@npm:^3.4.3, chokidar@npm:^3.5.3": - version: 3.5.3 - resolution: "chokidar@npm:3.5.3" +"define-data-property@npm:^1.0.1, define-data-property@npm:^1.1.1": + version: 1.1.1 + resolution: "define-data-property@npm:1.1.1" dependencies: - anymatch: ~3.1.2 - braces: ~3.0.2 - fsevents: ~2.3.2 - glob-parent: ~5.1.2 - is-binary-path: ~2.1.0 - is-glob: ~4.0.1 - normalize-path: ~3.0.0 - readdirp: ~3.6.0 - dependenciesMeta: - fsevents: - optional: true - checksum: b49fcde40176ba007ff361b198a2d35df60d9bb2a5aab228279eb810feae9294a6b4649ab15981304447afe1e6ffbf4788ad5db77235dc770ab777c6e771980c + get-intrinsic: ^1.2.1 + gopd: ^1.0.1 + has-property-descriptors: ^1.0.0 + checksum: a29855ad3f0630ea82e3c5012c812efa6ca3078d5c2aa8df06b5f597c1cde6f7254692df41945851d903e05a1668607b6d34e778f402b9ff9ffb38111f1a3f0d languageName: node linkType: hard -"chownr@npm:^1.1.1": - version: 1.1.4 - resolution: "chownr@npm:1.1.4" - checksum: 115648f8eb38bac5e41c3857f3e663f9c39ed6480d1349977c4d96c95a47266fcacc5a5aabf3cb6c481e22d72f41992827db47301851766c4fd77ac21a4f081d +"define-lazy-prop@npm:^2.0.0": + version: 2.0.0 + resolution: "define-lazy-prop@npm:2.0.0" + checksum: 0115fdb065e0490918ba271d7339c42453d209d4cb619dfe635870d906731eff3e1ade8028bb461ea27ce8264ec5e22c6980612d332895977e89c1bbc80fcee2 languageName: node linkType: hard -"chownr@npm:^2.0.0": - version: 2.0.0 - resolution: "chownr@npm:2.0.0" - checksum: c57cf9dd0791e2f18a5ee9c1a299ae6e801ff58fee96dc8bfd0dcb4738a6ce58dd252a3605b1c93c6418fe4f9d5093b28ffbf4d66648cb2a9c67eaef9679be2f +"define-lazy-prop@npm:^3.0.0": + version: 3.0.0 + resolution: "define-lazy-prop@npm:3.0.0" + checksum: 54884f94caac0791bf6395a3ec530ce901cf71c47b0196b8754f3fd17edb6c0e80149c1214429d851873bb0d689dbe08dcedbb2306dc45c8534a5934723851b6 languageName: node linkType: hard -"chrome-launcher@npm:^0.15.0": - version: 0.15.2 - resolution: "chrome-launcher@npm:0.15.2" +"define-properties@npm:^1.1.3, 
define-properties@npm:^1.2.1": + version: 1.2.1 + resolution: "define-properties@npm:1.2.1" dependencies: - "@types/node": "*" - escape-string-regexp: ^4.0.0 - is-wsl: ^2.2.0 - lighthouse-logger: ^1.0.0 - bin: - print-chrome-path: bin/print-chrome-path.js - checksum: e1f8131b9f7bd931248ea85f413c6cdb93a0d41440ff5bf0987f36afb081d2b2c7b60ba6062ee7ae2dd9b052143f6b275b38c9eb115d11b49c3ea8829bad7db0 + define-data-property: ^1.0.1 + has-property-descriptors: ^1.0.0 + object-keys: ^1.1.1 + checksum: b4ccd00597dd46cb2d4a379398f5b19fca84a16f3374e2249201992f36b30f6835949a9429669ee6b41b6e837205a163eadd745e472069e70dfc10f03e5fcc12 languageName: node linkType: hard -"chrome-trace-event@npm:^1.0.2": - version: 1.0.3 - resolution: "chrome-trace-event@npm:1.0.3" - checksum: cb8b1fc7e881aaef973bd0c4a43cd353c2ad8323fb471a041e64f7c2dd849cde4aad15f8b753331a32dda45c973f032c8a03b8177fc85d60eaa75e91e08bfb97 +"degenerator@npm:^5.0.0": + version: 5.0.1 + resolution: "degenerator@npm:5.0.1" + dependencies: + ast-types: ^0.13.4 + escodegen: ^2.1.0 + esprima: ^4.0.1 + checksum: a64fa39cdf6c2edd75188157d32338ee9de7193d7dbb2aeb4acb1eb30fa4a15ed80ba8dae9bd4d7b085472cf174a5baf81adb761aaa8e326771392c922084152 languageName: node linkType: hard -"chromium-bidi@npm:0.4.7": - version: 0.4.7 - resolution: "chromium-bidi@npm:0.4.7" +"del@npm:^6.1.1": + version: 6.1.1 + resolution: "del@npm:6.1.1" dependencies: - mitt: 3.0.0 - peerDependencies: - devtools-protocol: "*" - checksum: eec7581e2eddd2c95014c6edc5aae0b036c79bbeadee05166436b16139b6932c902c5ce21d95ed919a592f58d3a47c5469dc5f3de2a300700b2748ab119ad65e + globby: ^11.0.1 + graceful-fs: ^4.2.4 + is-glob: ^4.0.1 + is-path-cwd: ^2.2.0 + is-path-inside: ^3.0.2 + p-map: ^4.0.0 + rimraf: ^3.0.2 + slash: ^3.0.0 + checksum: 563288b73b8b19a7261c47fd21a330eeab6e2acd7c6208c49790dfd369127120dd7836cdf0c1eca216b77c94782a81507eac6b4734252d3bef2795cb366996b6 languageName: node linkType: hard -"chunkd@npm:^2.0.1": - version: 2.0.1 - resolution: "chunkd@npm:2.0.1" - checksum: bab8cc08c752a3648984385dc6f61d751e89dbeef648d22a3b661e1d470eaa0f5182f0b4303710f13ae83d2f85144f8eb2dde7a975861d9021b5c56b881f457b +"delayed-stream@npm:~1.0.0": + version: 1.0.0 + resolution: "delayed-stream@npm:1.0.0" + checksum: 46fe6e83e2cb1d85ba50bd52803c68be9bd953282fa7096f51fc29edd5d67ff84ff753c51966061e5ba7cb5e47ef6d36a91924eddb7f3f3483b1c560f77a0020 languageName: node linkType: hard -"ci-info@npm:^2.0.0": +"delegates@npm:^1.0.0": + version: 1.0.0 + resolution: "delegates@npm:1.0.0" + checksum: a51744d9b53c164ba9c0492471a1a2ffa0b6727451bdc89e31627fdf4adda9d51277cfcbfb20f0a6f08ccb3c436f341df3e92631a3440226d93a8971724771fd + languageName: node + linkType: hard + +"depd@npm:2.0.0, depd@npm:^2.0.0, depd@npm:~2.0.0": version: 2.0.0 - resolution: "ci-info@npm:2.0.0" - checksum: 3b374666a85ea3ca43fa49aa3a048d21c9b475c96eb13c133505d2324e7ae5efd6a454f41efe46a152269e9b6a00c9edbe63ec7fa1921957165aae16625acd67 + resolution: "depd@npm:2.0.0" + checksum: abbe19c768c97ee2eed6282d8ce3031126662252c58d711f646921c9623f9052e3e1906443066beec1095832f534e57c523b7333f8e7e0d93051ab6baef5ab3a languageName: node linkType: hard -"ci-info@npm:^3.2.0": - version: 3.9.0 - resolution: "ci-info@npm:3.9.0" - checksum: 6b19dc9b2966d1f8c2041a838217299718f15d6c4b63ae36e4674edd2bee48f780e94761286a56aa59eb305a85fbea4ddffb7630ec063e7ec7e7e5ad42549a87 +"depd@npm:~1.1.2": + version: 1.1.2 + resolution: "depd@npm:1.1.2" + checksum: 6b406620d269619852885ce15965272b829df6f409724415e0002c8632ab6a8c0a08ec1f0bd2add05dc7bd7507606f7e2cc034fa24224ab829580040b835ecd9 
languageName: node linkType: hard -"ci-info@npm:^3.8.0": - version: 3.8.0 - resolution: "ci-info@npm:3.8.0" - checksum: d0a4d3160497cae54294974a7246202244fff031b0a6ea20dd57b10ec510aa17399c41a1b0982142c105f3255aff2173e5c0dd7302ee1b2f28ba3debda375098 +"dependency-graph@npm:^0.11.0": + version: 0.11.0 + resolution: "dependency-graph@npm:0.11.0" + checksum: 477204beaa9be69e642bc31ffe7a8c383d0cf48fa27acbc91c5df01431ab913e65c154213d2ef83d034c98d77280743ec85e5da018a97a18dd43d3c0b78b28cd languageName: node linkType: hard -"ci-parallel-vars@npm:^1.0.1": - version: 1.0.1 - resolution: "ci-parallel-vars@npm:1.0.1" - checksum: ae859831f7e8e3585db731b8306c336616e37bd709dad1d7775ea4c0731aefd94741dabb48201edc6827d000008fd7fb72cb977967614ee2d99d6b499f0c35fe +"dequal@npm:^2.0.0": + version: 2.0.3 + resolution: "dequal@npm:2.0.3" + checksum: 8679b850e1a3d0ebbc46ee780d5df7b478c23f335887464023a631d1b9af051ad4a6595a44220f9ff8ff95a8ddccf019b5ad778a976fd7bbf77383d36f412f90 languageName: node linkType: hard -"cipher-base@npm:^1.0.0, cipher-base@npm:^1.0.1, cipher-base@npm:^1.0.3": - version: 1.0.4 - resolution: "cipher-base@npm:1.0.4" - dependencies: - inherits: ^2.0.1 - safe-buffer: ^5.0.1 - checksum: 47d3568dbc17431a339bad1fe7dff83ac0891be8206911ace3d3b818fc695f376df809bea406e759cdea07fff4b454fa25f1013e648851bec790c1d75763032e +"destroy@npm:1.2.0, destroy@npm:^1.0.4": + version: 1.2.0 + resolution: "destroy@npm:1.2.0" + checksum: 0acb300b7478a08b92d810ab229d5afe0d2f4399272045ab22affa0d99dbaf12637659411530a6fcd597a9bdac718fc94373a61a95b4651bbc7b83684a565e38 languageName: node linkType: hard -"classic-level@npm:^1.2.0": - version: 1.3.0 - resolution: "classic-level@npm:1.3.0" +"detab@npm:2.0.4": + version: 2.0.4 + resolution: "detab@npm:2.0.4" dependencies: - abstract-level: ^1.0.2 - catering: ^2.1.0 - module-error: ^1.0.1 - napi-macros: ^2.2.2 - node-gyp: latest - node-gyp-build: ^4.3.0 - checksum: 773da48aef52a041115d413fee8340b357a4da2eb505764f327183b155edd7cc9d24819eb4f707c83dbdae8588024f5dddeb322125567c59d5d1f6f16334cdb9 + repeat-string: ^1.5.4 + checksum: 34b077521ecd4c6357d32ff7923be644d34aa6f6b7d717d40ec4a9168243eefaea2b512a75a460a6f70c31b0bbc31ff90f820a891803b4ddaf99e9d04d0d389d languageName: node linkType: hard -"clean-css@npm:^5.2.2, clean-css@npm:^5.3.0": - version: 5.3.2 - resolution: "clean-css@npm:5.3.2" - dependencies: - source-map: ~0.6.0 - checksum: 8787b281acc9878f309b5f835d410085deedfd4e126472666773040a6a8a72f472a1d24185947d23b87b1c419bf2c5ed429395d5c5ff8279c98b05d8011e9758 +"detect-node@npm:^2.0.4": + version: 2.1.0 + resolution: "detect-node@npm:2.1.0" + checksum: 832184ec458353e41533ac9c622f16c19f7c02d8b10c303dfd3a756f56be93e903616c0bb2d4226183c9351c15fc0b3dba41a17a2308262afabcfa3776e6ae6e languageName: node linkType: hard -"clean-stack@npm:^2.0.0": - version: 2.2.0 - resolution: "clean-stack@npm:2.2.0" - checksum: 2ac8cd2b2f5ec986a3c743935ec85b07bc174d5421a5efc8017e1f146a1cf5f781ae962618f416352103b32c9cd7e203276e8c28241bbe946160cab16149fb68 +"detect-port-alt@npm:^1.1.6": + version: 1.1.6 + resolution: "detect-port-alt@npm:1.1.6" + dependencies: + address: ^1.0.1 + debug: ^2.6.0 + bin: + detect: ./bin/detect-port + detect-port: ./bin/detect-port + checksum: 9dc37b1fa4a9dd6d4889e1045849b8d841232b598d1ca888bf712f4035b07a17cf6d537465a0d7323250048d3a5a0540e3b7cf89457efc222f96f77e2c40d16a languageName: node linkType: hard -"clean-stack@npm:^4.0.0": - version: 4.2.0 - resolution: "clean-stack@npm:4.2.0" +"detect-port@npm:^1.3.0, detect-port@npm:^1.5.1": + version: 1.5.1 + resolution: 
"detect-port@npm:1.5.1" dependencies: - escape-string-regexp: 5.0.0 - checksum: 373f656a31face5c615c0839213b9b542a0a48057abfb1df66900eab4dc2a5c6097628e4a0b5aa559cdfc4e66f8a14ea47be9681773165a44470ef5fb8ccc172 + address: ^1.0.1 + debug: 4 + bin: + detect: bin/detect-port.js + detect-port: bin/detect-port.js + checksum: b48da9340481742547263d5d985e65d078592557863402ecf538511735e83575867e94f91fe74405ea19b61351feb99efccae7e55de9a151d5654e3417cea05b languageName: node linkType: hard -"clean-yaml-object@npm:^0.1.0": - version: 0.1.0 - resolution: "clean-yaml-object@npm:0.1.0" - checksum: 0374ad2f1fbd4984ecf56ebc62200092f6372b9ccf1b7971bb979c328fb12fe76e759fb1e8adc491c80b7b1861f9f00c7f19813dd2a0f49c88231422c70451f4 +"devlop@npm:^1.0.0, devlop@npm:^1.1.0": + version: 1.1.0 + resolution: "devlop@npm:1.1.0" + dependencies: + dequal: ^2.0.0 + checksum: d2ff650bac0bb6ef08c48f3ba98640bb5fec5cce81e9957eb620408d1bab1204d382a45b785c6b3314dc867bb0684936b84c6867820da6db97cbb5d3c15dd185 languageName: node linkType: hard -"cli-boxes@npm:^2.2.1": - version: 2.2.1 - resolution: "cli-boxes@npm:2.2.1" - checksum: be79f8ec23a558b49e01311b39a1ea01243ecee30539c880cf14bf518a12e223ef40c57ead0cb44f509bffdffc5c129c746cd50d863ab879385370112af4f585 +"devtools-protocol@npm:0.0.1107588": + version: 0.0.1107588 + resolution: "devtools-protocol@npm:0.0.1107588" + checksum: 9064fd643f39ae0adabb8f425b746899ff24371d89a5047d38752653259e6afcb6bcb2d9759ff727eb5885cfc0f9ba8eb384850a2af00694135622e88080e3e5 languageName: node linkType: hard -"cli-boxes@npm:^3.0.0": - version: 3.0.0 - resolution: "cli-boxes@npm:3.0.0" - checksum: 637d84419d293a9eac40a1c8c96a2859e7d98b24a1a317788e13c8f441be052fc899480c6acab3acc82eaf1bccda6b7542d7cdcf5c9c3cc39227175dc098d5b2 +"devtools-protocol@npm:0.0.1147663": + version: 0.0.1147663 + resolution: "devtools-protocol@npm:0.0.1147663" + checksum: 0631f2b6c6cd7f56e7d62a85bfc291f7e167f0f2de90969ef61fb24e2bd546b2e9530043d2bc3fe6c4d7a9e00473004272d2c2832a10a05e4b75c03a22f549fc languageName: node linkType: hard -"cli-cursor@npm:^3.1.0": - version: 3.1.0 - resolution: "cli-cursor@npm:3.1.0" - dependencies: - restore-cursor: ^3.1.0 - checksum: 2692784c6cd2fd85cfdbd11f53aea73a463a6d64a77c3e098b2b4697a20443f430c220629e1ca3b195ea5ac4a97a74c2ee411f3807abf6df2b66211fec0c0a29 +"diff@npm:5.0.0": + version: 5.0.0 + resolution: "diff@npm:5.0.0" + checksum: f19fe29284b633afdb2725c2a8bb7d25761ea54d321d8e67987ac851c5294be4afeab532bd84531e02583a3fe7f4014aa314a3eda84f5590e7a9e6b371ef3b46 languageName: node linkType: hard -"cli-table3@npm:^0.6.2": - version: 0.6.3 - resolution: "cli-table3@npm:0.6.3" - dependencies: - "@colors/colors": 1.5.0 - string-width: ^4.2.0 - dependenciesMeta: - "@colors/colors": - optional: true - checksum: 09897f68467973f827c04e7eaadf13b55f8aec49ecd6647cc276386ea660059322e2dd8020a8b6b84d422dbdd619597046fa89cbbbdc95b2cea149a2df7c096c +"diff@npm:^4.0.1": + version: 4.0.2 + resolution: "diff@npm:4.0.2" + checksum: f2c09b0ce4e6b301c221addd83bf3f454c0bc00caa3dd837cf6c127d6edf7223aa2bbe3b688feea110b7f262adbfc845b757c44c8a9f8c0c5b15d8fa9ce9d20d languageName: node linkType: hard -"cli-truncate@npm:^3.1.0": - version: 3.1.0 - resolution: "cli-truncate@npm:3.1.0" - dependencies: - slice-ansi: ^5.0.0 - string-width: ^5.0.0 - checksum: c3243e41974445691c63f8b405df1d5a24049dc33d324fe448dc572e561a7b772ae982692900b1a5960901cc4fc7def25a629b9c69a4208ee89d12ab3332617a +"diff@npm:^5.0.0, diff@npm:^5.1.0": + version: 5.1.0 + resolution: "diff@npm:5.1.0" + checksum: 
c7bf0df7c9bfbe1cf8a678fd1b2137c4fb11be117a67bc18a0e03ae75105e8533dbfb1cda6b46beb3586ef5aed22143ef9d70713977d5fb1f9114e21455fba90 languageName: node linkType: hard -"cliui@npm:^7.0.2": - version: 7.0.4 - resolution: "cliui@npm:7.0.4" +"dir-glob@npm:^3.0.1": + version: 3.0.1 + resolution: "dir-glob@npm:3.0.1" dependencies: - string-width: ^4.2.0 - strip-ansi: ^6.0.0 - wrap-ansi: ^7.0.0 - checksum: ce2e8f578a4813806788ac399b9e866297740eecd4ad1823c27fd344d78b22c5f8597d548adbcc46f0573e43e21e751f39446c5a5e804a12aace402b7a315d7f + path-type: ^4.0.0 + checksum: fa05e18324510d7283f55862f3161c6759a3f2f8dbce491a2fc14c8324c498286c54282c1f0e933cb930da8419b30679389499b919122952a4f8592362ef4615 languageName: node linkType: hard -"cliui@npm:^8.0.1": - version: 8.0.1 - resolution: "cliui@npm:8.0.1" - dependencies: - string-width: ^4.2.0 - strip-ansi: ^6.0.1 - wrap-ansi: ^7.0.0 - checksum: 79648b3b0045f2e285b76fb2e24e207c6db44323581e421c3acbd0e86454cba1b37aea976ab50195a49e7384b871e6dfb2247ad7dec53c02454ac6497394cb56 +"dns-equal@npm:^1.0.0": + version: 1.0.0 + resolution: "dns-equal@npm:1.0.0" + checksum: a8471ac849c7c13824f053babea1bc26e2f359394dd5a460f8340d8abd13434be01e3327a5c59d212f8c8997817450efd3f3ac77bec709b21979cf0235644524 languageName: node linkType: hard -"clone-deep@npm:^4.0.1": - version: 4.0.1 - resolution: "clone-deep@npm:4.0.1" +"dns-packet@npm:^5.2.2": + version: 5.6.1 + resolution: "dns-packet@npm:5.6.1" dependencies: - is-plain-object: ^2.0.4 - kind-of: ^6.0.2 - shallow-clone: ^3.0.0 - checksum: 770f912fe4e6f21873c8e8fbb1e99134db3b93da32df271d00589ea4a29dbe83a9808a322c93f3bcaf8584b8b4fa6fc269fc8032efbaa6728e0c9886c74467d2 + "@leichtgewicht/ip-codec": ^2.0.1 + checksum: 64c06457f0c6e143f7a0946e0aeb8de1c5f752217cfa143ef527467c00a6d78db1835cfdb6bb68333d9f9a4963cf23f410439b5262a8935cce1236f45e344b81 languageName: node linkType: hard -"clone-response@npm:^1.0.2": - version: 1.0.3 - resolution: "clone-response@npm:1.0.3" +"docs@workspace:docs": + version: 0.0.0-use.local + resolution: "docs@workspace:docs" dependencies: - mimic-response: ^1.0.0 - checksum: 4e671cac39b11c60aa8ba0a450657194a5d6504df51bca3fac5b3bd0145c4f8e8464898f87c8406b83232e3bc5cca555f51c1f9c8ac023969ebfbf7f6bdabb2e - languageName: node - linkType: hard + "@docusaurus/core": ^3.0.1 + "@docusaurus/module-type-aliases": ^3.0.1 + "@docusaurus/preset-classic": ^3.0.1 + "@docusaurus/tsconfig": ^3.0.1 + "@docusaurus/types": ^3.0.1 + "@easyops-cn/docusaurus-search-local": ^0.35.0 + "@mdx-js/react": ^3.0.0 + "@noir-lang/noir_js": "workspace:*" + "@noir-lang/noirc_abi": "workspace:*" + "@noir-lang/types": "workspace:*" + "@signorecello/noir_playground": ^0.6.0 + "@types/prettier": ^3 + axios: ^1.4.0 + clsx: ^1.2.1 + docusaurus-plugin-typedoc: 1.0.0-next.18 + eslint-plugin-prettier: ^5.0.0 + hast-util-is-element: ^1.1.0 + prettier: 3.0.3 + prism-react-renderer: ^2.1.0 + react: ^18.2.0 + react-dom: ^18.2.0 + react-spinners: ^0.13.8 + rehype-katex: ^7.0.0 + remark-math: ^6.0.0 + serve: ^14.2.1 + ts-node: ^10.9.1 + typedoc: ^0.25.0 + typedoc-plugin-frontmatter: ^0.0.2 + typedoc-plugin-markdown: 4.0.0-next.25 + typedoc-plugin-merge-modules: ^5.1.0 + typescript: ~5.2.2 + languageName: unknown + linkType: soft -"clone@npm:^2.1.2": - version: 2.1.2 - resolution: "clone@npm:2.1.2" - checksum: aaf106e9bc025b21333e2f4c12da539b568db4925c0501a1bf4070836c9e848c892fa22c35548ce0d1132b08bbbfa17a00144fe58fccdab6fa900fec4250f67d +"doctrine@npm:^3.0.0": + version: 3.0.0 + resolution: "doctrine@npm:3.0.0" + dependencies: + esutils: ^2.0.2 + checksum: 
fd7673ca77fe26cd5cba38d816bc72d641f500f1f9b25b83e8ce28827fe2da7ad583a8da26ab6af85f834138cf8dae9f69b0cd6ab925f52ddab1754db44d99ce languageName: node linkType: hard -"clsx@npm:^1.1.1, clsx@npm:^1.2.1": - version: 1.2.1 - resolution: "clsx@npm:1.2.1" - checksum: 30befca8019b2eb7dbad38cff6266cf543091dae2825c856a62a8ccf2c3ab9c2907c4d12b288b73101196767f66812365400a227581484a05f968b0307cfaf12 +"docusaurus-plugin-typedoc@npm:1.0.0-next.18": + version: 1.0.0-next.18 + resolution: "docusaurus-plugin-typedoc@npm:1.0.0-next.18" + dependencies: + "@docusaurus/types": ^2.4.1 + peerDependencies: + typedoc-plugin-markdown: ">=4.0.0-next.24" + checksum: a501e3bd1cc5b33d215a1b71b018a34d4aa5bd98f39580ab114a127d3f555078443d6b690119c0adb17fb8867ba4131382ad5d14dbaa7919715020cdfaf6b9c4 languageName: node linkType: hard -"co-body@npm:^6.1.0": - version: 6.1.0 - resolution: "co-body@npm:6.1.0" +"dom-converter@npm:^0.2.0": + version: 0.2.0 + resolution: "dom-converter@npm:0.2.0" dependencies: - inflation: ^2.0.0 - qs: ^6.5.2 - raw-body: ^2.3.3 - type-is: ^1.6.16 - checksum: d0a78831a6651f2085fce16b0ecdc49f45fb5baf4f94148c2f499e7ec89d188205362548b9c500eae15a819360cfda208079e68a72c204cf66ca3ffa2fc0f57e + utila: ~0.4 + checksum: ea52fe303f5392e48dea563abef0e6fb3a478b8dbe3c599e99bb5d53981c6c38fc4944e56bb92a8ead6bb989d10b7914722ae11febbd2fd0910e33b9fc4aaa77 languageName: node linkType: hard -"co@npm:^4.6.0": - version: 4.6.0 - resolution: "co@npm:4.6.0" - checksum: 5210d9223010eb95b29df06a91116f2cf7c8e0748a9013ed853b53f362ea0e822f1e5bb054fb3cefc645239a4cf966af1f6133a3b43f40d591f3b68ed6cf0510 +"dom-serializer@npm:^1.0.1": + version: 1.4.1 + resolution: "dom-serializer@npm:1.4.1" + dependencies: + domelementtype: ^2.0.1 + domhandler: ^4.2.0 + entities: ^2.0.0 + checksum: fbb0b01f87a8a2d18e6e5a388ad0f7ec4a5c05c06d219377da1abc7bb0f674d804f4a8a94e3f71ff15f6cb7dcfc75704a54b261db672b9b3ab03da6b758b0b22 languageName: node linkType: hard -"code-excerpt@npm:^4.0.0": - version: 4.0.0 - resolution: "code-excerpt@npm:4.0.0" +"dom-serializer@npm:^2.0.0": + version: 2.0.0 + resolution: "dom-serializer@npm:2.0.0" dependencies: - convert-to-spaces: ^2.0.1 - checksum: d57137d8f4825879283a828cc02a1115b56858dc54ed06c625c8f67d6685d1becd2fbaa7f0ab19ecca1f5cca03f8c97bbc1f013cab40261e4d3275032e65efe9 + domelementtype: ^2.3.0 + domhandler: ^5.0.2 + entities: ^4.2.0 + checksum: cd1810544fd8cdfbd51fa2c0c1128ec3a13ba92f14e61b7650b5de421b88205fd2e3f0cc6ace82f13334114addb90ed1c2f23074a51770a8e9c1273acbc7f3e6 languageName: node linkType: hard -"collapse-white-space@npm:^1.0.2": - version: 1.0.6 - resolution: "collapse-white-space@npm:1.0.6" - checksum: 9673fb797952c5c888341435596c69388b22cd5560c8cd3f40edb72734a9c820f56a7c9525166bcb7068b5d5805372e6fd0c4b9f2869782ad070cb5d3faf26e7 +"domelementtype@npm:^2.0.1, domelementtype@npm:^2.2.0, domelementtype@npm:^2.3.0": + version: 2.3.0 + resolution: "domelementtype@npm:2.3.0" + checksum: ee837a318ff702622f383409d1f5b25dd1024b692ef64d3096ff702e26339f8e345820f29a68bcdcea8cfee3531776b3382651232fbeae95612d6f0a75efb4f6 languageName: node linkType: hard -"color-convert@npm:^1.9.0": - version: 1.9.3 - resolution: "color-convert@npm:1.9.3" +"domhandler@npm:^4.0.0, domhandler@npm:^4.2.0, domhandler@npm:^4.3.1": + version: 4.3.1 + resolution: "domhandler@npm:4.3.1" dependencies: - color-name: 1.1.3 - checksum: fd7a64a17cde98fb923b1dd05c5f2e6f7aefda1b60d67e8d449f9328b4e53b228a428fd38bfeaeb2db2ff6b6503a776a996150b80cdf224062af08a5c8a3a203 + domelementtype: ^2.2.0 + checksum: 
4c665ceed016e1911bf7d1dadc09dc888090b64dee7851cccd2fcf5442747ec39c647bb1cb8c8919f8bbdd0f0c625a6bafeeed4b2d656bbecdbae893f43ffaaa languageName: node linkType: hard -"color-convert@npm:^2.0.1": - version: 2.0.1 - resolution: "color-convert@npm:2.0.1" +"domhandler@npm:^5.0.2, domhandler@npm:^5.0.3": + version: 5.0.3 + resolution: "domhandler@npm:5.0.3" dependencies: - color-name: ~1.1.4 - checksum: 79e6bdb9fd479a205c71d89574fccfb22bd9053bd98c6c4d870d65c132e5e904e6034978e55b43d69fcaa7433af2016ee203ce76eeba9cfa554b373e7f7db336 + domelementtype: ^2.3.0 + checksum: 0f58f4a6af63e6f3a4320aa446d28b5790a009018707bce2859dcb1d21144c7876482b5188395a188dfa974238c019e0a1e610d2fc269a12b2c192ea2b0b131c languageName: node linkType: hard -"color-name@npm:1.1.3": - version: 1.1.3 - resolution: "color-name@npm:1.1.3" - checksum: 09c5d3e33d2105850153b14466501f2bfb30324a2f76568a408763a3b7433b0e50e5b4ab1947868e65cb101bb7cb75029553f2c333b6d4b8138a73fcc133d69d +"domutils@npm:^2.5.2, domutils@npm:^2.8.0": + version: 2.8.0 + resolution: "domutils@npm:2.8.0" + dependencies: + dom-serializer: ^1.0.1 + domelementtype: ^2.2.0 + domhandler: ^4.2.0 + checksum: abf7434315283e9aadc2a24bac0e00eab07ae4313b40cc239f89d84d7315ebdfd2fb1b5bf750a96bc1b4403d7237c7b2ebf60459be394d625ead4ca89b934391 languageName: node linkType: hard -"color-name@npm:~1.1.4": - version: 1.1.4 - resolution: "color-name@npm:1.1.4" - checksum: b0445859521eb4021cd0fb0cc1a75cecf67fceecae89b63f62b201cca8d345baf8b952c966862a9d9a2632987d4f6581f0ec8d957dfacece86f0a7919316f610 +"domutils@npm:^3.0.1": + version: 3.1.0 + resolution: "domutils@npm:3.1.0" + dependencies: + dom-serializer: ^2.0.0 + domelementtype: ^2.3.0 + domhandler: ^5.0.3 + checksum: e5757456ddd173caa411cfc02c2bb64133c65546d2c4081381a3bafc8a57411a41eed70494551aa58030be9e58574fcc489828bebd673863d39924fb4878f416 languageName: node linkType: hard -"color-support@npm:^1.1.3": - version: 1.1.3 - resolution: "color-support@npm:1.1.3" - bin: - color-support: bin.js - checksum: 9b7356817670b9a13a26ca5af1c21615463b500783b739b7634a0c2047c16cef4b2865d7576875c31c3cddf9dd621fa19285e628f20198b233a5cfdda6d0793b +"dot-case@npm:^3.0.4": + version: 3.0.4 + resolution: "dot-case@npm:3.0.4" + dependencies: + no-case: ^3.0.4 + tslib: ^2.0.3 + checksum: a65e3519414856df0228b9f645332f974f2bf5433370f544a681122eab59e66038fc3349b4be1cdc47152779dac71a5864f1ccda2f745e767c46e9c6543b1169 languageName: node linkType: hard -"colord@npm:^2.9.1": - version: 2.9.3 - resolution: "colord@npm:2.9.3" - checksum: 95d909bfbcfd8d5605cbb5af56f2d1ce2b323990258fd7c0d2eb0e6d3bb177254d7fb8213758db56bb4ede708964f78c6b992b326615f81a18a6aaf11d64c650 +"dot-prop@npm:^5.2.0": + version: 5.3.0 + resolution: "dot-prop@npm:5.3.0" + dependencies: + is-obj: ^2.0.0 + checksum: d5775790093c234ef4bfd5fbe40884ff7e6c87573e5339432870616331189f7f5d86575c5b5af2dcf0f61172990f4f734d07844b1f23482fff09e3c4bead05ea languageName: node linkType: hard -"colorette@npm:^2.0.10": - version: 2.0.20 - resolution: "colorette@npm:2.0.20" - checksum: 0c016fea2b91b733eb9f4bcdb580018f52c0bc0979443dad930e5037a968237ac53d9beb98e218d2e9235834f8eebce7f8e080422d6194e957454255bde71d3d +"dot-prop@npm:^6.0.1": + version: 6.0.1 + resolution: "dot-prop@npm:6.0.1" + dependencies: + is-obj: ^2.0.0 + checksum: 0f47600a4b93e1dc37261da4e6909652c008832a5d3684b5bf9a9a0d3f4c67ea949a86dceed9b72f5733ed8e8e6383cc5958df3bbd0799ee317fd181f2ece700 languageName: node linkType: hard -"combine-promises@npm:^1.1.0": - version: 1.2.0 - resolution: "combine-promises@npm:1.2.0" - checksum: 
ddce91436e24da03d5dc360c59cd55abfc9da5e949a26255aa42761925c574797c43138f0aabfc364e184e738e5e218a94ac6e88ebc459045bcf048ac7fe5f07 +"duplexer3@npm:^0.1.4": + version: 0.1.5 + resolution: "duplexer3@npm:0.1.5" + checksum: e677cb4c48f031ca728601d6a20bf6aed4c629d69ef9643cb89c67583d673c4ec9317cc6427501f38bd8c368d3a18f173987cc02bd99d8cf8fe3d94259a22a20 languageName: node linkType: hard -"combined-stream@npm:^1.0.8": - version: 1.0.8 - resolution: "combined-stream@npm:1.0.8" - dependencies: - delayed-stream: ~1.0.0 - checksum: 49fa4aeb4916567e33ea81d088f6584749fc90c7abec76fd516bf1c5aa5c79f3584b5ba3de6b86d26ddd64bae5329c4c7479343250cfe71c75bb366eae53bb7c +"duplexer@npm:^0.1.2, duplexer@npm:~0.1.1": + version: 0.1.2 + resolution: "duplexer@npm:0.1.2" + checksum: 62ba61a830c56801db28ff6305c7d289b6dc9f859054e8c982abd8ee0b0a14d2e9a8e7d086ffee12e868d43e2bbe8a964be55ddbd8c8957714c87373c7a4f9b0 languageName: node linkType: hard -"comlink@npm:^4.4.1": - version: 4.4.1 - resolution: "comlink@npm:4.4.1" - checksum: 16d58a8f590087fc45432e31d6c138308dfd4b75b89aec0b7f7bb97ad33d810381bd2b1e608a1fb2cf05979af9cbfcdcaf1715996d5fcf77aeb013b6da3260af +"eastasianwidth@npm:^0.2.0": + version: 0.2.0 + resolution: "eastasianwidth@npm:0.2.0" + checksum: 7d00d7cd8e49b9afa762a813faac332dee781932d6f2c848dc348939c4253f1d4564341b7af1d041853bc3f32c2ef141b58e0a4d9862c17a7f08f68df1e0f1ed languageName: node linkType: hard -"comma-separated-tokens@npm:^1.0.0": - version: 1.0.8 - resolution: "comma-separated-tokens@npm:1.0.8" - checksum: 0adcb07174fa4d08cf0f5c8e3aec40a36b5ff0c2c720e5e23f50fe02e6789d1d00a67036c80e0c1e1539f41d3e7f0101b074039dd833b4e4a59031b659d6ca0d +"ee-first@npm:1.1.1": + version: 1.1.1 + resolution: "ee-first@npm:1.1.1" + checksum: 1b4cac778d64ce3b582a7e26b218afe07e207a0f9bfe13cc7395a6d307849cfe361e65033c3251e00c27dd060cab43014c2d6b2647676135e18b77d2d05b3f4f languageName: node linkType: hard -"command-exists@npm:^1.2.8": - version: 1.2.9 - resolution: "command-exists@npm:1.2.9" - checksum: 729ae3d88a2058c93c58840f30341b7f82688a573019535d198b57a4d8cb0135ced0ad7f52b591e5b28a90feb2c675080ce916e56254a0f7c15cb2395277cac3 +"electron-to-chromium@npm:^1.4.601": + version: 1.4.609 + resolution: "electron-to-chromium@npm:1.4.609" + checksum: 4ef3c32b11adc01ed8227d7bdbe0978b436b817e6b3bd09f19b42afbf9affdb6ddf99f4d20a3b28a494772a2985df12ec95abca849a38cccb217c4ee338561bb languageName: node linkType: hard -"command-line-args@npm:^5.1.1, command-line-args@npm:^5.2.1": - version: 5.2.1 - resolution: "command-line-args@npm:5.2.1" +"elliptic@npm:6.5.4, elliptic@npm:^6.5.2, elliptic@npm:^6.5.4": + version: 6.5.4 + resolution: "elliptic@npm:6.5.4" dependencies: - array-back: ^3.1.0 - find-replace: ^3.0.0 - lodash.camelcase: ^4.3.0 - typical: ^4.0.0 - checksum: e759519087be3cf2e86af8b9a97d3058b4910cd11ee852495be881a067b72891f6a32718fb685ee6d41531ab76b2b7bfb6602f79f882cd4b7587ff1e827982c7 + bn.js: ^4.11.9 + brorand: ^1.1.0 + hash.js: ^1.0.0 + hmac-drbg: ^1.0.1 + inherits: ^2.0.4 + minimalistic-assert: ^1.0.1 + minimalistic-crypto-utils: ^1.0.1 + checksum: d56d21fd04e97869f7ffcc92e18903b9f67f2d4637a23c860492fbbff5a3155fd9ca0184ce0c865dd6eb2487d234ce9551335c021c376cd2d3b7cb749c7d10f4 languageName: node linkType: hard -"command-line-usage@npm:^6.1.0": - version: 6.1.3 - resolution: "command-line-usage@npm:6.1.3" - dependencies: - array-back: ^4.0.2 - chalk: ^2.4.2 - table-layout: ^1.0.2 - typical: ^5.2.0 - checksum: 
8261d4e5536eb0bcddee0ec5e89c05bb2abd18e5760785c8078ede5020bc1c612cbe28eb6586f5ed4a3660689748e5aaad4a72f21566f4ef39393694e2fa1a0b +"emoji-regex@npm:^8.0.0": + version: 8.0.0 + resolution: "emoji-regex@npm:8.0.0" + checksum: d4c5c39d5a9868b5fa152f00cada8a936868fd3367f33f71be515ecee4c803132d11b31a6222b2571b1e5f7e13890156a94880345594d0ce7e3c9895f560f192 + languageName: node + linkType: hard + +"emoji-regex@npm:^9.2.2": + version: 9.2.2 + resolution: "emoji-regex@npm:9.2.2" + checksum: 8487182da74aabd810ac6d6f1994111dfc0e331b01271ae01ec1eb0ad7b5ecc2bbbbd2f053c05cb55a1ac30449527d819bbfbf0e3de1023db308cbcb47f86601 languageName: node linkType: hard -"command-line-usage@npm:^7.0.0, command-line-usage@npm:^7.0.1": - version: 7.0.1 - resolution: "command-line-usage@npm:7.0.1" - dependencies: - array-back: ^6.2.2 - chalk-template: ^0.4.0 - table-layout: ^3.0.0 - typical: ^7.1.1 - checksum: ac78ad6b83b9622bb111ae8e82205bde1d2da74df237fdd0bd7d98eda3592c8933ec600818b0b028b2313ddca638b1b60f0780dd9457ad4a0384b17156641f79 +"emojilib@npm:^2.4.0": + version: 2.4.0 + resolution: "emojilib@npm:2.4.0" + checksum: ea241c342abda5a86ffd3a15d8f4871a616d485f700e03daea38c6ce38205847cea9f6ff8d5e962c00516b004949cc96c6e37b05559ea71a0a496faba53b56da languageName: node linkType: hard -"commander@npm:3.0.2": - version: 3.0.2 - resolution: "commander@npm:3.0.2" - checksum: 6d14ad030d1904428139487ed31febcb04c1604db2b8d9fae711f60ee6718828dc0e11602249e91c8a97b0e721e9c6d53edbc166bad3cde1596851d59a8f824d +"emojis-list@npm:^3.0.0": + version: 3.0.0 + resolution: "emojis-list@npm:3.0.0" + checksum: ddaaa02542e1e9436c03970eeed445f4ed29a5337dfba0fe0c38dfdd2af5da2429c2a0821304e8a8d1cadf27fdd5b22ff793571fa803ae16852a6975c65e8e70 languageName: node linkType: hard -"commander@npm:^10.0.1": - version: 10.0.1 - resolution: "commander@npm:10.0.1" - checksum: 436901d64a818295803c1996cd856621a74f30b9f9e28a588e726b2b1670665bccd7c1a77007ebf328729f0139838a88a19265858a0fa7a8728c4656796db948 +"emoticon@npm:^3.2.0": + version: 3.2.0 + resolution: "emoticon@npm:3.2.0" + checksum: f30649d18b672ab3139e95cb04f77b2442feb95c99dc59372ff80fbfd639b2bf4018bc68ab0b549bd765aecf8230d7899c43f86cfcc7b6370c06c3232783e24f languageName: node linkType: hard -"commander@npm:^2.20.0": - version: 2.20.3 - resolution: "commander@npm:2.20.3" - checksum: ab8c07884e42c3a8dbc5dd9592c606176c7eb5c1ca5ff274bcf907039b2c41de3626f684ea75ccf4d361ba004bbaff1f577d5384c155f3871e456bdf27becf9e +"emoticon@npm:^4.0.1": + version: 4.0.1 + resolution: "emoticon@npm:4.0.1" + checksum: 991ab6421927601af4eb44036b60e3125759a4d81f32d2ad96b66e3491e2fdb6a026eeb6bffbfa66724592dca95235570785963607d16961ea73a62ecce715e2 languageName: node linkType: hard -"commander@npm:^5.1.0": - version: 5.1.0 - resolution: "commander@npm:5.1.0" - checksum: 0b7fec1712fbcc6230fcb161d8d73b4730fa91a21dc089515489402ad78810547683f058e2a9835929c212fead1d6a6ade70db28bbb03edbc2829a9ab7d69447 +"encodeurl@npm:^1.0.2, encodeurl@npm:~1.0.2": + version: 1.0.2 + resolution: "encodeurl@npm:1.0.2" + checksum: e50e3d508cdd9c4565ba72d2012e65038e5d71bdc9198cb125beb6237b5b1ade6c0d343998da9e170fb2eae52c1bed37d4d6d98a46ea423a0cddbed5ac3f780c languageName: node linkType: hard -"commander@npm:^7.2.0": - version: 7.2.0 - resolution: "commander@npm:7.2.0" - checksum: 53501cbeee61d5157546c0bef0fedb6cdfc763a882136284bed9a07225f09a14b82d2a84e7637edfd1a679fb35ed9502fd58ef1d091e6287f60d790147f68ddc +"encoding@npm:^0.1.13": + version: 0.1.13 + resolution: "encoding@npm:0.1.13" + dependencies: + iconv-lite: ^0.6.2 + checksum: 
bb98632f8ffa823996e508ce6a58ffcf5856330fde839ae42c9e1f436cc3b5cc651d4aeae72222916545428e54fd0f6aa8862fd8d25bdbcc4589f1e3f3715e7f languageName: node linkType: hard -"commander@npm:^8.0.0, commander@npm:^8.3.0": - version: 8.3.0 - resolution: "commander@npm:8.3.0" - checksum: 0f82321821fc27b83bd409510bb9deeebcfa799ff0bf5d102128b500b7af22872c0c92cb6a0ebc5a4cf19c6b550fba9cedfa7329d18c6442a625f851377bacf0 +"end-of-stream@npm:^1.1.0, end-of-stream@npm:^1.4.1": + version: 1.4.4 + resolution: "end-of-stream@npm:1.4.4" + dependencies: + once: ^1.4.0 + checksum: 530a5a5a1e517e962854a31693dbb5c0b2fc40b46dad2a56a2deec656ca040631124f4795823acc68238147805f8b021abbe221f4afed5ef3c8e8efc2024908b languageName: node linkType: hard -"common-path-prefix@npm:^3.0.0": - version: 3.0.0 - resolution: "common-path-prefix@npm:3.0.0" - checksum: fdb3c4f54e51e70d417ccd950c07f757582de800c0678ca388aedefefc84982039f346f9fd9a1252d08d2da9e9ef4019f580a1d1d3a10da031e4bb3c924c5818 +"enhanced-resolve@npm:^5.0.0, enhanced-resolve@npm:^5.15.0": + version: 5.15.0 + resolution: "enhanced-resolve@npm:5.15.0" + dependencies: + graceful-fs: ^4.2.4 + tapable: ^2.2.0 + checksum: fbd8cdc9263be71cc737aa8a7d6c57b43d6aa38f6cc75dde6fcd3598a130cc465f979d2f4d01bb3bf475acb43817749c79f8eef9be048683602ca91ab52e4f11 languageName: node linkType: hard -"commondir@npm:^1.0.1": - version: 1.0.1 - resolution: "commondir@npm:1.0.1" - checksum: 59715f2fc456a73f68826285718503340b9f0dd89bfffc42749906c5cf3d4277ef11ef1cca0350d0e79204f00f1f6d83851ececc9095dc88512a697ac0b9bdcb +"enquirer@npm:^2.3.0": + version: 2.4.1 + resolution: "enquirer@npm:2.4.1" + dependencies: + ansi-colors: ^4.1.1 + strip-ansi: ^6.0.1 + checksum: f080f11a74209647dbf347a7c6a83c8a47ae1ebf1e75073a808bc1088eb780aa54075bfecd1bcdb3e3c724520edb8e6ee05da031529436b421b71066fcc48cb5 languageName: node linkType: hard -"compressible@npm:~2.0.16": - version: 2.0.18 - resolution: "compressible@npm:2.0.18" - dependencies: - mime-db: ">= 1.43.0 < 2" - checksum: 58321a85b375d39230405654721353f709d0c1442129e9a17081771b816302a012471a9b8f4864c7dbe02eef7f2aaac3c614795197092262e94b409c9be108f0 +"entities@npm:^2.0.0": + version: 2.2.0 + resolution: "entities@npm:2.2.0" + checksum: 19010dacaf0912c895ea262b4f6128574f9ccf8d4b3b65c7e8334ad0079b3706376360e28d8843ff50a78aabcb8f08f0a32dbfacdc77e47ed77ca08b713669b3 languageName: node linkType: hard -"compression@npm:^1.7.4": - version: 1.7.4 - resolution: "compression@npm:1.7.4" - dependencies: - accepts: ~1.3.5 - bytes: 3.0.0 - compressible: ~2.0.16 - debug: 2.6.9 - on-headers: ~1.0.2 - safe-buffer: 5.1.2 - vary: ~1.1.2 - checksum: 35c0f2eb1f28418978615dc1bc02075b34b1568f7f56c62d60f4214d4b7cc00d0f6d282b5f8a954f59872396bd770b6b15ffd8aa94c67d4bce9b8887b906999b +"entities@npm:^4.2.0, entities@npm:^4.4.0": + version: 4.5.0 + resolution: "entities@npm:4.5.0" + checksum: 853f8ebd5b425d350bffa97dd6958143179a5938352ccae092c62d1267c4e392a039be1bae7d51b6e4ffad25f51f9617531fedf5237f15df302ccfb452cbf2d7 languageName: node linkType: hard -"concat-map@npm:0.0.1": - version: 0.0.1 - resolution: "concat-map@npm:0.0.1" - checksum: 902a9f5d8967a3e2faf138d5cb784b9979bad2e6db5357c5b21c568df4ebe62bcb15108af1b2253744844eb964fc023fbd9afbbbb6ddd0bcc204c6fb5b7bf3af +"env-paths@npm:^2.2.0": + version: 2.2.1 + resolution: "env-paths@npm:2.2.1" + checksum: 65b5df55a8bab92229ab2b40dad3b387fad24613263d103a97f91c9fe43ceb21965cd3392b1ccb5d77088021e525c4e0481adb309625d0cb94ade1d1fb8dc17e languageName: node linkType: hard -"concordance@npm:^5.0.4": - version: 5.0.4 - resolution: 
"concordance@npm:5.0.4" - dependencies: - date-time: ^3.1.0 - esutils: ^2.0.3 - fast-diff: ^1.2.0 - js-string-escape: ^1.0.1 - lodash: ^4.17.15 - md5-hex: ^3.0.1 - semver: ^7.3.2 - well-known-symbols: ^2.0.0 - checksum: 749153ba711492feb7c3d2f5bb04c107157440b3e39509bd5dd19ee7b3ac751d1e4cd75796d9f702e0a713312dbc661421c68aa4a2c34d5f6d91f47e3a1c64a6 +"envinfo@npm:^7.7.3": + version: 7.11.0 + resolution: "envinfo@npm:7.11.0" + bin: + envinfo: dist/cli.js + checksum: c45a7d20409d5f4cda72483b150d3816b15b434f2944d72c1495d8838bd7c4e7b2f32c12128ffb9b92b5f66f436237b8a525eb3a9a5da2d20013bc4effa28aef languageName: node linkType: hard -"configstore@npm:^5.0.1": - version: 5.0.1 - resolution: "configstore@npm:5.0.1" - dependencies: - dot-prop: ^5.2.0 - graceful-fs: ^4.1.2 - make-dir: ^3.0.0 - unique-string: ^2.0.0 - write-file-atomic: ^3.0.0 - xdg-basedir: ^4.0.0 - checksum: 60ef65d493b63f96e14b11ba7ec072fdbf3d40110a94fb7199d1c287761bdea5c5244e76b2596325f30c1b652213aa75de96ea20afd4a5f82065e61ea090988e +"err-code@npm:^2.0.2": + version: 2.0.3 + resolution: "err-code@npm:2.0.3" + checksum: 8b7b1be20d2de12d2255c0bc2ca638b7af5171142693299416e6a9339bd7d88fc8d7707d913d78e0993176005405a236b066b45666b27b797252c771156ace54 languageName: node linkType: hard -"connect-history-api-fallback@npm:^2.0.0": - version: 2.0.0 - resolution: "connect-history-api-fallback@npm:2.0.0" - checksum: dc5368690f4a5c413889792f8df70d5941ca9da44523cde3f87af0745faee5ee16afb8195434550f0504726642734f2683d6c07f8b460f828a12c45fbd4c9a68 +"errno@npm:^0.1.1, errno@npm:~0.1.1": + version: 0.1.8 + resolution: "errno@npm:0.1.8" + dependencies: + prr: ~1.0.1 + bin: + errno: cli.js + checksum: 1271f7b9fbb3bcbec76ffde932485d1e3561856d21d847ec613a9722ee924cdd4e523a62dc71a44174d91e898fe21fdc8d5b50823f4b5e0ce8c35c8271e6ef4a languageName: node linkType: hard -"consola@npm:^2.15.3": - version: 2.15.3 - resolution: "consola@npm:2.15.3" - checksum: 8ef7a09b703ec67ac5c389a372a33b6dc97eda6c9876443a60d76a3076eea0259e7f67a4e54fd5a52f97df73690822d090cf8b7e102b5761348afef7c6d03e28 +"error-ex@npm:^1.3.1": + version: 1.3.2 + resolution: "error-ex@npm:1.3.2" + dependencies: + is-arrayish: ^0.2.1 + checksum: c1c2b8b65f9c91b0f9d75f0debaa7ec5b35c266c2cac5de412c1a6de86d4cbae04ae44e510378cb14d032d0645a36925d0186f8bb7367bcc629db256b743a001 languageName: node linkType: hard -"console-control-strings@npm:^1.1.0": - version: 1.1.0 - resolution: "console-control-strings@npm:1.1.0" - checksum: 8755d76787f94e6cf79ce4666f0c5519906d7f5b02d4b884cf41e11dcd759ed69c57da0670afd9236d229a46e0f9cf519db0cd829c6dca820bb5a5c3def584ed +"errorstacks@npm:^2.2.0": + version: 2.4.1 + resolution: "errorstacks@npm:2.4.1" + checksum: 1b46bdd3c40d3e30dbb6945c0529ffbef6ccdf2260eeecff6cc1ee95b708ec732094597d6adaa53ffe18d045150b366e3f7472d8594946f430941bfa4ad54479 languageName: node linkType: hard -"content-disposition@npm:0.5.2": - version: 0.5.2 - resolution: "content-disposition@npm:0.5.2" - checksum: 298d7da63255a38f7858ee19c7b6aae32b167e911293174b4c1349955e97e78e1d0b0d06c10e229405987275b417cf36ff65cbd4821a98bc9df4e41e9372cde7 +"es-module-lexer@npm:^1.0.0, es-module-lexer@npm:^1.2.1": + version: 1.4.1 + resolution: "es-module-lexer@npm:1.4.1" + checksum: a11b5a256d4e8e9c7d94c2fd87415ccd1591617b6edd847e064503f8eaece2d25e2e9078a02c5ce3ed5e83bb748f5b4820efbe78072c8beb07ac619c2edec35d languageName: node linkType: hard -"content-disposition@npm:0.5.4, content-disposition@npm:~0.5.2": - version: 0.5.4 - resolution: "content-disposition@npm:0.5.4" +"esbuild@npm:^0.16 || ^0.17": + version: 0.17.19 + 
resolution: "esbuild@npm:0.17.19" dependencies: - safe-buffer: 5.2.1 - checksum: afb9d545e296a5171d7574fcad634b2fdf698875f4006a9dd04a3e1333880c5c0c98d47b560d01216fb6505a54a2ba6a843ee3a02ec86d7e911e8315255f56c3 + "@esbuild/android-arm": 0.17.19 + "@esbuild/android-arm64": 0.17.19 + "@esbuild/android-x64": 0.17.19 + "@esbuild/darwin-arm64": 0.17.19 + "@esbuild/darwin-x64": 0.17.19 + "@esbuild/freebsd-arm64": 0.17.19 + "@esbuild/freebsd-x64": 0.17.19 + "@esbuild/linux-arm": 0.17.19 + "@esbuild/linux-arm64": 0.17.19 + "@esbuild/linux-ia32": 0.17.19 + "@esbuild/linux-loong64": 0.17.19 + "@esbuild/linux-mips64el": 0.17.19 + "@esbuild/linux-ppc64": 0.17.19 + "@esbuild/linux-riscv64": 0.17.19 + "@esbuild/linux-s390x": 0.17.19 + "@esbuild/linux-x64": 0.17.19 + "@esbuild/netbsd-x64": 0.17.19 + "@esbuild/openbsd-x64": 0.17.19 + "@esbuild/sunos-x64": 0.17.19 + "@esbuild/win32-arm64": 0.17.19 + "@esbuild/win32-ia32": 0.17.19 + "@esbuild/win32-x64": 0.17.19 + dependenciesMeta: + "@esbuild/android-arm": + optional: true + "@esbuild/android-arm64": + optional: true + "@esbuild/android-x64": + optional: true + "@esbuild/darwin-arm64": + optional: true + "@esbuild/darwin-x64": + optional: true + "@esbuild/freebsd-arm64": + optional: true + "@esbuild/freebsd-x64": + optional: true + "@esbuild/linux-arm": + optional: true + "@esbuild/linux-arm64": + optional: true + "@esbuild/linux-ia32": + optional: true + "@esbuild/linux-loong64": + optional: true + "@esbuild/linux-mips64el": + optional: true + "@esbuild/linux-ppc64": + optional: true + "@esbuild/linux-riscv64": + optional: true + "@esbuild/linux-s390x": + optional: true + "@esbuild/linux-x64": + optional: true + "@esbuild/netbsd-x64": + optional: true + "@esbuild/openbsd-x64": + optional: true + "@esbuild/sunos-x64": + optional: true + "@esbuild/win32-arm64": + optional: true + "@esbuild/win32-ia32": + optional: true + "@esbuild/win32-x64": + optional: true + bin: + esbuild: bin/esbuild + checksum: ac11b1a5a6008e4e37ccffbd6c2c054746fc58d0ed4a2f9ee643bd030cfcea9a33a235087bc777def8420f2eaafb3486e76adb7bdb7241a9143b43a69a10afd8 + languageName: node + linkType: hard + +"esbuild@npm:~0.18.20": + version: 0.18.20 + resolution: "esbuild@npm:0.18.20" + dependencies: + "@esbuild/android-arm": 0.18.20 + "@esbuild/android-arm64": 0.18.20 + "@esbuild/android-x64": 0.18.20 + "@esbuild/darwin-arm64": 0.18.20 + "@esbuild/darwin-x64": 0.18.20 + "@esbuild/freebsd-arm64": 0.18.20 + "@esbuild/freebsd-x64": 0.18.20 + "@esbuild/linux-arm": 0.18.20 + "@esbuild/linux-arm64": 0.18.20 + "@esbuild/linux-ia32": 0.18.20 + "@esbuild/linux-loong64": 0.18.20 + "@esbuild/linux-mips64el": 0.18.20 + "@esbuild/linux-ppc64": 0.18.20 + "@esbuild/linux-riscv64": 0.18.20 + "@esbuild/linux-s390x": 0.18.20 + "@esbuild/linux-x64": 0.18.20 + "@esbuild/netbsd-x64": 0.18.20 + "@esbuild/openbsd-x64": 0.18.20 + "@esbuild/sunos-x64": 0.18.20 + "@esbuild/win32-arm64": 0.18.20 + "@esbuild/win32-ia32": 0.18.20 + "@esbuild/win32-x64": 0.18.20 + dependenciesMeta: + "@esbuild/android-arm": + optional: true + "@esbuild/android-arm64": + optional: true + "@esbuild/android-x64": + optional: true + "@esbuild/darwin-arm64": + optional: true + "@esbuild/darwin-x64": + optional: true + "@esbuild/freebsd-arm64": + optional: true + "@esbuild/freebsd-x64": + optional: true + "@esbuild/linux-arm": + optional: true + "@esbuild/linux-arm64": + optional: true + "@esbuild/linux-ia32": + optional: true + "@esbuild/linux-loong64": + optional: true + "@esbuild/linux-mips64el": + optional: true + "@esbuild/linux-ppc64": + 
optional: true + "@esbuild/linux-riscv64": + optional: true + "@esbuild/linux-s390x": + optional: true + "@esbuild/linux-x64": + optional: true + "@esbuild/netbsd-x64": + optional: true + "@esbuild/openbsd-x64": + optional: true + "@esbuild/sunos-x64": + optional: true + "@esbuild/win32-arm64": + optional: true + "@esbuild/win32-ia32": + optional: true + "@esbuild/win32-x64": + optional: true + bin: + esbuild: bin/esbuild + checksum: 5d253614e50cdb6ec22095afd0c414f15688e7278a7eb4f3720a6dd1306b0909cf431e7b9437a90d065a31b1c57be60130f63fe3e8d0083b588571f31ee6ec7b languageName: node linkType: hard -"content-type@npm:^1.0.4, content-type@npm:~1.0.4": - version: 1.0.5 - resolution: "content-type@npm:1.0.5" - checksum: 566271e0a251642254cde0f845f9dd4f9856e52d988f4eb0d0dcffbb7a1f8ec98de7a5215fc628f3bce30fe2fb6fd2bc064b562d721658c59b544e2d34ea2766 +"escalade@npm:^3.1.1": + version: 3.1.1 + resolution: "escalade@npm:3.1.1" + checksum: a3e2a99f07acb74b3ad4989c48ca0c3140f69f923e56d0cba0526240ee470b91010f9d39001f2a4a313841d237ede70a729e92125191ba5d21e74b106800b133 languageName: node linkType: hard -"convert-source-map@npm:^1.6.0, convert-source-map@npm:^1.7.0": - version: 1.9.0 - resolution: "convert-source-map@npm:1.9.0" - checksum: dc55a1f28ddd0e9485ef13565f8f756b342f9a46c4ae18b843fe3c30c675d058d6a4823eff86d472f187b176f0adf51ea7b69ea38be34be4a63cbbf91b0593c8 +"escape-goat@npm:^2.0.0": + version: 2.1.1 + resolution: "escape-goat@npm:2.1.1" + checksum: ce05c70c20dd7007b60d2d644b625da5412325fdb57acf671ba06cb2ab3cd6789e2087026921a05b665b0a03fadee2955e7fc0b9a67da15a6551a980b260eba7 languageName: node linkType: hard -"convert-source-map@npm:^2.0.0": - version: 2.0.0 - resolution: "convert-source-map@npm:2.0.0" - checksum: 63ae9933be5a2b8d4509daca5124e20c14d023c820258e484e32dc324d34c2754e71297c94a05784064ad27615037ef677e3f0c00469fb55f409d2bb21261035 +"escape-goat@npm:^4.0.0": + version: 4.0.0 + resolution: "escape-goat@npm:4.0.0" + checksum: 7034e0025eec7b751074b837f10312c5b768493265bdad046347c0aadbc1e652776f7e5df94766473fecb5d3681169cc188fe9ccc1e22be53318c18be1671cc0 languageName: node linkType: hard -"convert-to-spaces@npm:^2.0.1": - version: 2.0.1 - resolution: "convert-to-spaces@npm:2.0.1" - checksum: bbb324e5916fe9866f65c0ff5f9c1ea933764d0bdb09fccaf59542e40545ed483db6b2339c6d9eb56a11965a58f1a6038f3174f0e2fb7601343c7107ca5e2751 +"escape-html@npm:^1.0.3, escape-html@npm:~1.0.3": + version: 1.0.3 + resolution: "escape-html@npm:1.0.3" + checksum: 6213ca9ae00d0ab8bccb6d8d4e0a98e76237b2410302cf7df70aaa6591d509a2a37ce8998008cbecae8fc8ffaadf3fb0229535e6a145f3ce0b211d060decbb24 languageName: node linkType: hard -"cookie-signature@npm:1.0.6": - version: 1.0.6 - resolution: "cookie-signature@npm:1.0.6" - checksum: f4e1b0a98a27a0e6e66fd7ea4e4e9d8e038f624058371bf4499cfcd8f3980be9a121486995202ba3fca74fbed93a407d6d54d43a43f96fd28d0bd7a06761591a +"escape-string-regexp@npm:4.0.0, escape-string-regexp@npm:^4.0.0": + version: 4.0.0 + resolution: "escape-string-regexp@npm:4.0.0" + checksum: 98b48897d93060f2322108bf29db0feba7dd774be96cd069458d1453347b25ce8682ecc39859d4bca2203cc0ab19c237bcc71755eff49a0f8d90beadeeba5cc5 languageName: node linkType: hard -"cookie@npm:0.5.0": - version: 0.5.0 - resolution: "cookie@npm:0.5.0" - checksum: 1f4bd2ca5765f8c9689a7e8954183f5332139eb72b6ff783d8947032ec1fdf43109852c178e21a953a30c0dd42257828185be01b49d1eb1a67fd054ca588a180 +"escape-string-regexp@npm:^1.0.5": + version: 1.0.5 + resolution: "escape-string-regexp@npm:1.0.5" + checksum: 
6092fda75c63b110c706b6a9bfde8a612ad595b628f0bd2147eea1d3406723020810e591effc7db1da91d80a71a737a313567c5abb3813e8d9c71f4aa595b410 languageName: node linkType: hard -"cookie@npm:^0.4.1": - version: 0.4.2 - resolution: "cookie@npm:0.4.2" - checksum: a00833c998bedf8e787b4c342defe5fa419abd96b32f4464f718b91022586b8f1bafbddd499288e75c037642493c83083da426c6a9080d309e3bd90fd11baa9b +"escape-string-regexp@npm:^5.0.0": + version: 5.0.0 + resolution: "escape-string-regexp@npm:5.0.0" + checksum: 20daabe197f3cb198ec28546deebcf24b3dbb1a5a269184381b3116d12f0532e06007f4bc8da25669d6a7f8efb68db0758df4cd981f57bc5b57f521a3e12c59e languageName: node linkType: hard -"cookies@npm:~0.8.0": - version: 0.8.0 - resolution: "cookies@npm:0.8.0" +"escodegen@npm:^2.1.0": + version: 2.1.0 + resolution: "escodegen@npm:2.1.0" dependencies: - depd: ~2.0.0 - keygrip: ~1.1.0 - checksum: 806055a44f128705265b1bc6a853058da18bf80dea3654ad99be20985b1fa1b14f86c1eef73644aab8071241f8a78acd57202b54c4c5c70769fc694fbb9c4edc + esprima: ^4.0.1 + estraverse: ^5.2.0 + esutils: ^2.0.2 + source-map: ~0.6.1 + dependenciesMeta: + source-map: + optional: true + bin: + escodegen: bin/escodegen.js + esgenerate: bin/esgenerate.js + checksum: 096696407e161305cd05aebb95134ad176708bc5cb13d0dcc89a5fcbb959b8ed757e7f2591a5f8036f8f4952d4a724de0df14cd419e29212729fa6df5ce16bf6 languageName: node linkType: hard -"copy-text-to-clipboard@npm:^3.0.1": - version: 3.2.0 - resolution: "copy-text-to-clipboard@npm:3.2.0" - checksum: df7115c197a166d51f59e4e20ab2a68a855ae8746d25ff149b5465c694d9a405c7e6684b73a9f87ba8d653070164e229c15dfdb9fd77c30be1ff0da569661060 +"eslint-plugin-prettier@npm:^5.0.0": + version: 5.0.1 + resolution: "eslint-plugin-prettier@npm:5.0.1" + dependencies: + prettier-linter-helpers: ^1.0.0 + synckit: ^0.8.5 + peerDependencies: + "@types/eslint": ">=8.0.0" + eslint: ">=8.0.0" + prettier: ">=3.0.0" + peerDependenciesMeta: + "@types/eslint": + optional: true + eslint-config-prettier: + optional: true + checksum: c2261033b97bafe99ccb7cc47c2fac6fa85b8bbc8b128042e52631f906b69e12afed2cdd9d7e3021cc892ee8dd4204a3574e1f32a0b718b4bb3b440944b6983b languageName: node linkType: hard -"copy-webpack-plugin@npm:^11.0.0": - version: 11.0.0 - resolution: "copy-webpack-plugin@npm:11.0.0" +"eslint-scope@npm:5.1.1": + version: 5.1.1 + resolution: "eslint-scope@npm:5.1.1" dependencies: - fast-glob: ^3.2.11 - glob-parent: ^6.0.1 - globby: ^13.1.1 - normalize-path: ^3.0.0 - schema-utils: ^4.0.0 - serialize-javascript: ^6.0.0 - peerDependencies: - webpack: ^5.1.0 - checksum: df4f8743f003a29ee7dd3d9b1789998a3a99051c92afb2ba2203d3dacfa696f4e757b275560fafb8f206e520a0aa78af34b990324a0e36c2326cefdeef3ca82e + esrecurse: ^4.3.0 + estraverse: ^4.1.1 + checksum: 47e4b6a3f0cc29c7feedee6c67b225a2da7e155802c6ea13bbef4ac6b9e10c66cd2dcb987867ef176292bf4e64eccc680a49e35e9e9c669f4a02bac17e86abdb languageName: node linkType: hard -"core-js-compat@npm:^3.31.0, core-js-compat@npm:^3.32.2": - version: 3.33.0 - resolution: "core-js-compat@npm:3.33.0" +"eslint-scope@npm:^7.2.2": + version: 7.2.2 + resolution: "eslint-scope@npm:7.2.2" dependencies: - browserslist: ^4.22.1 - checksum: 83ae54008c09b8e0ae3c59457039866c342c7e28b0d30eebb638a5b51c01432e63fe97695c90645cbc6a8b073a4f9a8b0e75f0818bbf8b4b054e01f4c17d3181 + esrecurse: ^4.3.0 + estraverse: ^5.2.0 + checksum: ec97dbf5fb04b94e8f4c5a91a7f0a6dd3c55e46bfc7bbcd0e3138c3a76977570e02ed89a1810c778dcd72072ff0e9621ba1379b4babe53921d71e2e4486fda3e languageName: node linkType: hard -"core-js-pure@npm:^3.30.2": - version: 3.33.0 - resolution: 
"core-js-pure@npm:3.33.0" - checksum: d47084a4de9a0cef9779eccd3ac9f435cf9fd7aa71794150cd4c6b305036bcc392d94766d4a7b6456bdd08faba7752d55c2ec40185bda161c3563081c9fa1e17 +"eslint-visitor-keys@npm:^3.3.0, eslint-visitor-keys@npm:^3.4.1, eslint-visitor-keys@npm:^3.4.3": + version: 3.4.3 + resolution: "eslint-visitor-keys@npm:3.4.3" + checksum: 36e9ef87fca698b6fd7ca5ca35d7b2b6eeaaf106572e2f7fd31c12d3bfdaccdb587bba6d3621067e5aece31c8c3a348b93922ab8f7b2cbc6aaab5e1d89040c60 languageName: node linkType: hard -"core-js@npm:^3.23.3": - version: 3.33.0 - resolution: "core-js@npm:3.33.0" - checksum: dd62217935ac281faf6f833bb306fb891162919fcf9c1f0c975b1b91e82ac09a940f5deb5950bbb582739ceef716e8bd7e4f9eab8328932fb029d3bc2ecb2881 +"eslint@npm:^8.50.0": + version: 8.55.0 + resolution: "eslint@npm:8.55.0" + dependencies: + "@eslint-community/eslint-utils": ^4.2.0 + "@eslint-community/regexpp": ^4.6.1 + "@eslint/eslintrc": ^2.1.4 + "@eslint/js": 8.55.0 + "@humanwhocodes/config-array": ^0.11.13 + "@humanwhocodes/module-importer": ^1.0.1 + "@nodelib/fs.walk": ^1.2.8 + "@ungap/structured-clone": ^1.2.0 + ajv: ^6.12.4 + chalk: ^4.0.0 + cross-spawn: ^7.0.2 + debug: ^4.3.2 + doctrine: ^3.0.0 + escape-string-regexp: ^4.0.0 + eslint-scope: ^7.2.2 + eslint-visitor-keys: ^3.4.3 + espree: ^9.6.1 + esquery: ^1.4.2 + esutils: ^2.0.2 + fast-deep-equal: ^3.1.3 + file-entry-cache: ^6.0.1 + find-up: ^5.0.0 + glob-parent: ^6.0.2 + globals: ^13.19.0 + graphemer: ^1.4.0 + ignore: ^5.2.0 + imurmurhash: ^0.1.4 + is-glob: ^4.0.0 + is-path-inside: ^3.0.3 + js-yaml: ^4.1.0 + json-stable-stringify-without-jsonify: ^1.0.1 + levn: ^0.4.1 + lodash.merge: ^4.6.2 + minimatch: ^3.1.2 + natural-compare: ^1.4.0 + optionator: ^0.9.3 + strip-ansi: ^6.0.1 + text-table: ^0.2.0 + bin: + eslint: bin/eslint.js + checksum: 83f82a604559dc1faae79d28fdf3dfc9e592ca221052e2ea516e1b379b37e77e4597705a16880e2f5ece4f79087c1dd13fd7f6e9746f794a401175519db18b41 languageName: node linkType: hard -"core-util-is@npm:~1.0.0": - version: 1.0.3 - resolution: "core-util-is@npm:1.0.3" - checksum: 9de8597363a8e9b9952491ebe18167e3b36e7707569eed0ebf14f8bba773611376466ae34575bca8cfe3c767890c859c74056084738f09d4e4a6f902b2ad7d99 +"espree@npm:^9.6.0, espree@npm:^9.6.1": + version: 9.6.1 + resolution: "espree@npm:9.6.1" + dependencies: + acorn: ^8.9.0 + acorn-jsx: ^5.3.2 + eslint-visitor-keys: ^3.4.1 + checksum: eb8c149c7a2a77b3f33a5af80c10875c3abd65450f60b8af6db1bfcfa8f101e21c1e56a561c6dc13b848e18148d43469e7cd208506238554fb5395a9ea5a1ab9 languageName: node linkType: hard -"cosmiconfig@npm:^6.0.0": - version: 6.0.0 - resolution: "cosmiconfig@npm:6.0.0" - dependencies: - "@types/parse-json": ^4.0.0 - import-fresh: ^3.1.0 - parse-json: ^5.0.0 - path-type: ^4.0.0 - yaml: ^1.7.2 - checksum: 8eed7c854b91643ecb820767d0deb038b50780ecc3d53b0b19e03ed8aabed4ae77271198d1ae3d49c3b110867edf679f5faad924820a8d1774144a87cb6f98fc +"esprima@npm:^4.0.0, esprima@npm:^4.0.1": + version: 4.0.1 + resolution: "esprima@npm:4.0.1" + bin: + esparse: ./bin/esparse.js + esvalidate: ./bin/esvalidate.js + checksum: b45bc805a613dbea2835278c306b91aff6173c8d034223fa81498c77dcbce3b2931bf6006db816f62eacd9fd4ea975dfd85a5b7f3c6402cfd050d4ca3c13a628 languageName: node linkType: hard -"cosmiconfig@npm:^7.0.1": - version: 7.1.0 - resolution: "cosmiconfig@npm:7.1.0" +"esquery@npm:^1.4.2": + version: 1.5.0 + resolution: "esquery@npm:1.5.0" dependencies: - "@types/parse-json": ^4.0.0 - import-fresh: ^3.2.1 - parse-json: ^5.0.0 - path-type: ^4.0.0 - yaml: ^1.10.0 - checksum: 
c53bf7befc1591b2651a22414a5e786cd5f2eeaa87f3678a3d49d6069835a9d8d1aef223728e98aa8fec9a95bf831120d245096db12abe019fecb51f5696c96f + estraverse: ^5.1.0 + checksum: aefb0d2596c230118656cd4ec7532d447333a410a48834d80ea648b1e7b5c9bc9ed8b5e33a89cb04e487b60d622f44cf5713bf4abed7c97343edefdc84a35900 languageName: node linkType: hard -"cosmiconfig@npm:^8.2.0": - version: 8.3.6 - resolution: "cosmiconfig@npm:8.3.6" +"esrecurse@npm:^4.3.0": + version: 4.3.0 + resolution: "esrecurse@npm:4.3.0" dependencies: - import-fresh: ^3.3.0 - js-yaml: ^4.1.0 - parse-json: ^5.2.0 - path-type: ^4.0.0 - peerDependencies: - typescript: ">=4.9.5" - peerDependenciesMeta: - typescript: - optional: true - checksum: dc339ebea427898c9e03bf01b56ba7afbac07fc7d2a2d5a15d6e9c14de98275a9565da949375aee1809591c152c0a3877bb86dbeaf74d5bd5aaa79955ad9e7a0 + estraverse: ^5.2.0 + checksum: ebc17b1a33c51cef46fdc28b958994b1dc43cd2e86237515cbc3b4e5d2be6a811b2315d0a1a4d9d340b6d2308b15322f5c8291059521cc5f4802f65e7ec32837 languageName: node linkType: hard -"crc-32@npm:^1.2.0": - version: 1.2.2 - resolution: "crc-32@npm:1.2.2" - bin: - crc32: bin/crc32.njs - checksum: ad2d0ad0cbd465b75dcaeeff0600f8195b686816ab5f3ba4c6e052a07f728c3e70df2e3ca9fd3d4484dc4ba70586e161ca5a2334ec8bf5a41bf022a6103ff243 +"estraverse@npm:^4.1.1": + version: 4.3.0 + resolution: "estraverse@npm:4.3.0" + checksum: a6299491f9940bb246124a8d44b7b7a413a8336f5436f9837aaa9330209bd9ee8af7e91a654a3545aee9c54b3308e78ee360cef1d777d37cfef77d2fa33b5827 languageName: node linkType: hard -"create-hash@npm:^1.1.0, create-hash@npm:^1.1.2, create-hash@npm:^1.2.0": - version: 1.2.0 - resolution: "create-hash@npm:1.2.0" - dependencies: - cipher-base: ^1.0.1 - inherits: ^2.0.1 - md5.js: ^1.3.4 - ripemd160: ^2.0.1 - sha.js: ^2.4.0 - checksum: 02a6ae3bb9cd4afee3fabd846c1d8426a0e6b495560a977ba46120c473cb283be6aa1cace76b5f927cf4e499c6146fb798253e48e83d522feba807d6b722eaa9 +"estraverse@npm:^5.1.0, estraverse@npm:^5.2.0": + version: 5.3.0 + resolution: "estraverse@npm:5.3.0" + checksum: 072780882dc8416ad144f8fe199628d2b3e7bbc9989d9ed43795d2c90309a2047e6bc5979d7e2322a341163d22cfad9e21f4110597fe487519697389497e4e2b languageName: node linkType: hard -"create-hmac@npm:^1.1.4, create-hmac@npm:^1.1.7": - version: 1.1.7 - resolution: "create-hmac@npm:1.1.7" +"estree-util-attach-comments@npm:^3.0.0": + version: 3.0.0 + resolution: "estree-util-attach-comments@npm:3.0.0" dependencies: - cipher-base: ^1.0.3 - create-hash: ^1.1.0 - inherits: ^2.0.1 - ripemd160: ^2.0.0 - safe-buffer: ^5.0.1 - sha.js: ^2.4.8 - checksum: ba12bb2257b585a0396108c72830e85f882ab659c3320c83584b1037f8ab72415095167ced80dc4ce8e446a8ecc4b2acf36d87befe0707d73b26cf9dc77440ed + "@types/estree": ^1.0.0 + checksum: 56254eaef39659e6351919ebc2ae53a37a09290a14571c19e373e9d5fad343a3403d9ad0c23ae465d6e7d08c3e572fd56fb8c793efe6434a261bf1489932dbd5 languageName: node linkType: hard -"create-require@npm:^1.1.0": - version: 1.1.1 - resolution: "create-require@npm:1.1.1" - checksum: a9a1503d4390d8b59ad86f4607de7870b39cad43d929813599a23714831e81c520bddf61bcdd1f8e30f05fd3a2b71ae8538e946eb2786dc65c2bbc520f692eff +"estree-util-build-jsx@npm:^3.0.0": + version: 3.0.1 + resolution: "estree-util-build-jsx@npm:3.0.1" + dependencies: + "@types/estree-jsx": ^1.0.0 + devlop: ^1.0.0 + estree-util-is-identifier-name: ^3.0.0 + estree-walker: ^3.0.0 + checksum: 185eff060eda2ba32cecd15904db4f5ba0681159fbdf54f0f6586cd9411e77e733861a833d0aee3415e1d1fd4b17edf08bc9e9872cee98e6ec7b0800e1a85064 languageName: node linkType: hard -"cross-fetch@npm:3.1.5": - version: 3.1.5 - 
resolution: "cross-fetch@npm:3.1.5" - dependencies: - node-fetch: 2.6.7 - checksum: f6b8c6ee3ef993ace6277fd789c71b6acf1b504fd5f5c7128df4ef2f125a429e29cd62dc8c127523f04a5f2fa4771ed80e3f3d9695617f441425045f505cf3bb +"estree-util-is-identifier-name@npm:^3.0.0": + version: 3.0.0 + resolution: "estree-util-is-identifier-name@npm:3.0.0" + checksum: ea3909f0188ea164af0aadeca87c087e3e5da78d76da5ae9c7954ff1340ea3e4679c4653bbf4299ffb70caa9b322218cc1128db2541f3d2976eb9704f9857787 languageName: node linkType: hard -"cross-fetch@npm:^3.1.5": - version: 3.1.8 - resolution: "cross-fetch@npm:3.1.8" +"estree-util-to-js@npm:^2.0.0": + version: 2.0.0 + resolution: "estree-util-to-js@npm:2.0.0" dependencies: - node-fetch: ^2.6.12 - checksum: 78f993fa099eaaa041122ab037fe9503ecbbcb9daef234d1d2e0b9230a983f64d645d088c464e21a247b825a08dc444a6e7064adfa93536d3a9454b4745b3632 + "@types/estree-jsx": ^1.0.0 + astring: ^1.8.0 + source-map: ^0.7.0 + checksum: 833edc94ab9978e0918f90261e0a3361bf4564fec4901f326d2237a9235d3f5fc6482da3be5acc545e702c8c7cb8bc5de5c7c71ba3b080eb1975bcfdf3923d79 languageName: node linkType: hard -"cross-spawn@npm:^7.0.0, cross-spawn@npm:^7.0.2, cross-spawn@npm:^7.0.3": - version: 7.0.3 - resolution: "cross-spawn@npm:7.0.3" +"estree-util-value-to-estree@npm:^3.0.1": + version: 3.0.1 + resolution: "estree-util-value-to-estree@npm:3.0.1" dependencies: - path-key: ^3.1.0 - shebang-command: ^2.0.0 - which: ^2.0.1 - checksum: 671cc7c7288c3a8406f3c69a3ae2fc85555c04169e9d611def9a675635472614f1c0ed0ef80955d5b6d4e724f6ced67f0ad1bb006c2ea643488fcfef994d7f52 + "@types/estree": ^1.0.0 + is-plain-obj: ^4.0.0 + checksum: 7ab89084aa2c5677aeb0d7350ff21e71c9bbc424dc872a55bb4f25f63a7fd99fc7861626dd89b5544db3d3696212154bcf2b12b63ecd5a59dbfd07915c88aee4 languageName: node linkType: hard -"crypto-random-string@npm:^2.0.0": +"estree-util-visit@npm:^2.0.0": version: 2.0.0 - resolution: "crypto-random-string@npm:2.0.0" - checksum: 0283879f55e7c16fdceacc181f87a0a65c53bc16ffe1d58b9d19a6277adcd71900d02bb2c4843dd55e78c51e30e89b0fec618a7f170ebcc95b33182c28f05fd6 + resolution: "estree-util-visit@npm:2.0.0" + dependencies: + "@types/estree-jsx": ^1.0.0 + "@types/unist": ^3.0.0 + checksum: 6444b38f224322945a6d19ea81a8828a0eec64aefb2bf1ea791fe20df496f7b7c543408d637df899e6a8e318b638f66226f16378a33c4c2b192ba5c3f891121f languageName: node linkType: hard -"css-declaration-sorter@npm:^6.3.1": - version: 6.4.1 - resolution: "css-declaration-sorter@npm:6.4.1" - peerDependencies: - postcss: ^8.0.9 - checksum: cbdc9e0d481011b1a28fd5b60d4eb55fe204391d31a0b1b490b2cecf4baa85810f9b8c48adab4df644f4718104ed3ed72c64a9745e3216173767bf4aeca7f9b8 +"estree-walker@npm:^1.0.1": + version: 1.0.1 + resolution: "estree-walker@npm:1.0.1" + checksum: 7e70da539691f6db03a08e7ce94f394ce2eef4180e136d251af299d41f92fb2d28ebcd9a6e393e3728d7970aeb5358705ddf7209d52fbcb2dd4693f95dcf925f languageName: node linkType: hard -"css-loader@npm:^6.7.1": - version: 6.8.1 - resolution: "css-loader@npm:6.8.1" - dependencies: - icss-utils: ^5.1.0 - postcss: ^8.4.21 - postcss-modules-extract-imports: ^3.0.0 - postcss-modules-local-by-default: ^4.0.3 - postcss-modules-scope: ^3.0.0 - postcss-modules-values: ^4.0.0 - postcss-value-parser: ^4.2.0 - semver: ^7.3.8 - peerDependencies: - webpack: ^5.0.0 - checksum: 7c1784247bdbe76dc5c55fb1ac84f1d4177a74c47259942c9cfdb7a8e6baef11967a0bc85ac285f26bd26d5059decb848af8154a03fdb4f4894f41212f45eef3 +"estree-walker@npm:^2.0.2": + version: 2.0.2 + resolution: "estree-walker@npm:2.0.2" + checksum: 
6151e6f9828abe2259e57f5fd3761335bb0d2ebd76dc1a01048ccee22fabcfef3c0859300f6d83ff0d1927849368775ec5a6d265dde2f6de5a1be1721cd94efc languageName: node linkType: hard -"css-minimizer-webpack-plugin@npm:^4.0.0": - version: 4.2.2 - resolution: "css-minimizer-webpack-plugin@npm:4.2.2" +"estree-walker@npm:^3.0.0": + version: 3.0.3 + resolution: "estree-walker@npm:3.0.3" dependencies: - cssnano: ^5.1.8 - jest-worker: ^29.1.2 - postcss: ^8.4.17 - schema-utils: ^4.0.0 - serialize-javascript: ^6.0.0 - source-map: ^0.6.1 - peerDependencies: - webpack: ^5.0.0 - peerDependenciesMeta: - "@parcel/css": - optional: true - "@swc/css": - optional: true - clean-css: - optional: true - csso: - optional: true - esbuild: - optional: true - lightningcss: - optional: true - checksum: 5417e76a445f35832aa96807c835b8e92834a6cd285b1b788dfe3ca0fa90fec7eb2dd6efa9d3649f9d8244b99b7da2d065951603b94918e8f6a366a5049cacdd + "@types/estree": ^1.0.0 + checksum: a65728d5727b71de172c5df323385755a16c0fdab8234dc756c3854cfee343261ddfbb72a809a5660fac8c75d960bb3e21aa898c2d7e9b19bb298482ca58a3af languageName: node linkType: hard -"css-select@npm:^4.1.3": - version: 4.3.0 - resolution: "css-select@npm:4.3.0" - dependencies: - boolbase: ^1.0.0 - css-what: ^6.0.1 - domhandler: ^4.3.1 - domutils: ^2.8.0 - nth-check: ^2.0.1 - checksum: d6202736839194dd7f910320032e7cfc40372f025e4bf21ca5bf6eb0a33264f322f50ba9c0adc35dadd342d3d6fae5ca244779a4873afbfa76561e343f2058e0 +"esutils@npm:^2.0.2": + version: 2.0.3 + resolution: "esutils@npm:2.0.3" + checksum: 22b5b08f74737379a840b8ed2036a5fb35826c709ab000683b092d9054e5c2a82c27818f12604bfc2a9a76b90b6834ef081edbc1c7ae30d1627012e067c6ec87 languageName: node linkType: hard -"css-select@npm:^5.1.0": - version: 5.1.0 - resolution: "css-select@npm:5.1.0" - dependencies: - boolbase: ^1.0.0 - css-what: ^6.1.0 - domhandler: ^5.0.2 - domutils: ^3.0.1 - nth-check: ^2.0.1 - checksum: 2772c049b188d3b8a8159907192e926e11824aea525b8282981f72ba3f349cf9ecd523fdf7734875ee2cb772246c22117fc062da105b6d59afe8dcd5c99c9bda +"eta@npm:^2.0.0, eta@npm:^2.2.0": + version: 2.2.0 + resolution: "eta@npm:2.2.0" + checksum: 6a09631481d4f26a9662a1eb736a65cc1cbc48e24935e6ff5d83a83b0cb509ea56d588d66d7c087d590601dc59bdabdac2356936b1b789d020eb0cf2d8304d54 languageName: node linkType: hard -"css-tree@npm:^1.1.2, css-tree@npm:^1.1.3": - version: 1.1.3 - resolution: "css-tree@npm:1.1.3" - dependencies: - mdn-data: 2.0.14 - source-map: ^0.6.1 - checksum: 79f9b81803991b6977b7fcb1588799270438274d89066ce08f117f5cdb5e20019b446d766c61506dd772c839df84caa16042d6076f20c97187f5abe3b50e7d1f +"etag@npm:^1.8.1, etag@npm:~1.8.1": + version: 1.8.1 + resolution: "etag@npm:1.8.1" + checksum: 571aeb3dbe0f2bbd4e4fadbdb44f325fc75335cd5f6f6b6a091e6a06a9f25ed5392f0863c5442acb0646787446e816f13cbfc6edce5b07658541dff573cab1ff languageName: node linkType: hard -"css-what@npm:^6.0.1, css-what@npm:^6.1.0": - version: 6.1.0 - resolution: "css-what@npm:6.1.0" - checksum: b975e547e1e90b79625918f84e67db5d33d896e6de846c9b584094e529f0c63e2ab85ee33b9daffd05bff3a146a1916bec664e18bb76dd5f66cbff9fc13b2bbe +"ethereum-cryptography@npm:0.1.3, ethereum-cryptography@npm:^0.1.3": + version: 0.1.3 + resolution: "ethereum-cryptography@npm:0.1.3" + dependencies: + "@types/pbkdf2": ^3.0.0 + "@types/secp256k1": ^4.0.1 + blakejs: ^1.1.0 + browserify-aes: ^1.2.0 + bs58check: ^2.1.2 + create-hash: ^1.2.0 + create-hmac: ^1.1.7 + hash.js: ^1.1.7 + keccak: ^3.0.0 + pbkdf2: ^3.0.17 + randombytes: ^2.1.0 + safe-buffer: ^5.1.2 + scrypt-js: ^3.0.0 + secp256k1: ^4.0.1 + setimmediate: ^1.0.5 + checksum: 
54bae7a4a96bd81398cdc35c91cfcc74339f71a95ed1b5b694663782e69e8e3afd21357de3b8bac9ff4877fd6f043601e200a7ad9133d94be6fd7d898ee0a449 languageName: node linkType: hard -"cssesc@npm:^3.0.0": - version: 3.0.0 - resolution: "cssesc@npm:3.0.0" - bin: - cssesc: bin/cssesc - checksum: f8c4ababffbc5e2ddf2fa9957dda1ee4af6048e22aeda1869d0d00843223c1b13ad3f5d88b51caa46c994225eacb636b764eb807a8883e2fb6f99b4f4e8c48b2 +"ethereum-cryptography@npm:^1.0.3": + version: 1.2.0 + resolution: "ethereum-cryptography@npm:1.2.0" + dependencies: + "@noble/hashes": 1.2.0 + "@noble/secp256k1": 1.7.1 + "@scure/bip32": 1.1.5 + "@scure/bip39": 1.1.1 + checksum: 97e8e8253cb9f5a9271bd0201c37609c451c890eb85883b9c564f14743c3d7c673287406c93bf5604307593ee298ad9a03983388b85c11ca61461b9fc1a4f2c7 languageName: node linkType: hard -"cssnano-preset-advanced@npm:^5.3.8": - version: 5.3.10 - resolution: "cssnano-preset-advanced@npm:5.3.10" +"ethereumjs-abi@npm:^0.6.8": + version: 0.6.8 + resolution: "ethereumjs-abi@npm:0.6.8" dependencies: - autoprefixer: ^10.4.12 - cssnano-preset-default: ^5.2.14 - postcss-discard-unused: ^5.1.0 - postcss-merge-idents: ^5.1.1 - postcss-reduce-idents: ^5.2.0 - postcss-zindex: ^5.1.0 - peerDependencies: - postcss: ^8.2.15 - checksum: d21cb382aea2f35c9eaa50686280bbd5158260edf73020731364b03bae0d887292da51ed0b20b369f51d2573ee8c02c695f604647b839a9ca746be8a44c3bb5b + bn.js: ^4.11.8 + ethereumjs-util: ^6.0.0 + checksum: cede2a8ae7c7e04eeaec079c2f925601a25b2ef75cf9230e7c5da63b4ea27883b35447365a47e35c1e831af520973a2252af89022c292c18a09a4607821a366b languageName: node linkType: hard -"cssnano-preset-default@npm:^5.2.14": - version: 5.2.14 - resolution: "cssnano-preset-default@npm:5.2.14" +"ethereumjs-util@npm:^6.0.0, ethereumjs-util@npm:^6.2.1": + version: 6.2.1 + resolution: "ethereumjs-util@npm:6.2.1" dependencies: - css-declaration-sorter: ^6.3.1 - cssnano-utils: ^3.1.0 - postcss-calc: ^8.2.3 - postcss-colormin: ^5.3.1 - postcss-convert-values: ^5.1.3 - postcss-discard-comments: ^5.1.2 - postcss-discard-duplicates: ^5.1.0 - postcss-discard-empty: ^5.1.1 - postcss-discard-overridden: ^5.1.0 - postcss-merge-longhand: ^5.1.7 - postcss-merge-rules: ^5.1.4 - postcss-minify-font-values: ^5.1.0 - postcss-minify-gradients: ^5.1.1 - postcss-minify-params: ^5.1.4 - postcss-minify-selectors: ^5.2.1 - postcss-normalize-charset: ^5.1.0 - postcss-normalize-display-values: ^5.1.0 - postcss-normalize-positions: ^5.1.1 - postcss-normalize-repeat-style: ^5.1.1 - postcss-normalize-string: ^5.1.0 - postcss-normalize-timing-functions: ^5.1.0 - postcss-normalize-unicode: ^5.1.1 - postcss-normalize-url: ^5.1.0 - postcss-normalize-whitespace: ^5.1.1 - postcss-ordered-values: ^5.1.3 - postcss-reduce-initial: ^5.1.2 - postcss-reduce-transforms: ^5.1.0 - postcss-svgo: ^5.1.0 - postcss-unique-selectors: ^5.1.1 - peerDependencies: - postcss: ^8.2.15 - checksum: d3bbbe3d50c6174afb28d0bdb65b511fdab33952ec84810aef58b87189f3891c34aaa8b6a6101acd5314f8acded839b43513e39a75f91a698ddc985a1b1d9e95 + "@types/bn.js": ^4.11.3 + bn.js: ^4.11.0 + create-hash: ^1.1.2 + elliptic: ^6.5.2 + ethereum-cryptography: ^0.1.3 + ethjs-util: 0.1.6 + rlp: ^2.2.3 + checksum: e3cb4a2c034a2529281fdfc21a2126fe032fdc3038863f5720352daa65ddcc50fc8c67dbedf381a882dc3802e05d979287126d7ecf781504bde1fd8218693bde languageName: node linkType: hard -"cssnano-utils@npm:^3.1.0": - version: 3.1.0 - resolution: "cssnano-utils@npm:3.1.0" - peerDependencies: - postcss: ^8.2.15 - checksum: 
975c84ce9174cf23bb1da1e9faed8421954607e9ea76440cd3bb0c1bea7e17e490d800fca5ae2812d1d9e9d5524eef23ede0a3f52497d7ccc628e5d7321536f2 +"ethers@npm:^5.7.1": + version: 5.7.2 + resolution: "ethers@npm:5.7.2" + dependencies: + "@ethersproject/abi": 5.7.0 + "@ethersproject/abstract-provider": 5.7.0 + "@ethersproject/abstract-signer": 5.7.0 + "@ethersproject/address": 5.7.0 + "@ethersproject/base64": 5.7.0 + "@ethersproject/basex": 5.7.0 + "@ethersproject/bignumber": 5.7.0 + "@ethersproject/bytes": 5.7.0 + "@ethersproject/constants": 5.7.0 + "@ethersproject/contracts": 5.7.0 + "@ethersproject/hash": 5.7.0 + "@ethersproject/hdnode": 5.7.0 + "@ethersproject/json-wallets": 5.7.0 + "@ethersproject/keccak256": 5.7.0 + "@ethersproject/logger": 5.7.0 + "@ethersproject/networks": 5.7.1 + "@ethersproject/pbkdf2": 5.7.0 + "@ethersproject/properties": 5.7.0 + "@ethersproject/providers": 5.7.2 + "@ethersproject/random": 5.7.0 + "@ethersproject/rlp": 5.7.0 + "@ethersproject/sha2": 5.7.0 + "@ethersproject/signing-key": 5.7.0 + "@ethersproject/solidity": 5.7.0 + "@ethersproject/strings": 5.7.0 + "@ethersproject/transactions": 5.7.0 + "@ethersproject/units": 5.7.0 + "@ethersproject/wallet": 5.7.0 + "@ethersproject/web": 5.7.1 + "@ethersproject/wordlists": 5.7.0 + checksum: b7c08cf3e257185a7946117dbbf764433b7ba0e77c27298dec6088b3bc871aff711462b0621930c56880ff0a7ceb8b1d3a361ffa259f93377b48e34107f62553 languageName: node linkType: hard -"cssnano@npm:^5.1.12, cssnano@npm:^5.1.8": - version: 5.1.15 - resolution: "cssnano@npm:5.1.15" +"ethers@npm:^6.7.1": + version: 6.9.0 + resolution: "ethers@npm:6.9.0" dependencies: - cssnano-preset-default: ^5.2.14 - lilconfig: ^2.0.3 - yaml: ^1.10.2 - peerDependencies: - postcss: ^8.2.15 - checksum: ca9e1922178617c66c2f1548824b2c7af2ecf69cc3a187fc96bf8d29251c2e84d9e4966c69cf64a2a6a057a37dff7d6d057bc8a2a0957e6ea382e452ae9d0bbb + "@adraffy/ens-normalize": 1.10.0 + "@noble/curves": 1.2.0 + "@noble/hashes": 1.3.2 + "@types/node": 18.15.13 + aes-js: 4.0.0-beta.5 + tslib: 2.4.0 + ws: 8.5.0 + checksum: e7b3b912b92b818fe65a192b32f4dd87ce0b71f4ffc194b4b3764b4b17d3b6ed953b667293e4c9276f785fa8c8659934c513843350317c61178af8e6165afbdd languageName: node linkType: hard -"csso@npm:^4.2.0": - version: 4.2.0 - resolution: "csso@npm:4.2.0" +"ethjs-util@npm:0.1.6, ethjs-util@npm:^0.1.6": + version: 0.1.6 + resolution: "ethjs-util@npm:0.1.6" dependencies: - css-tree: ^1.1.2 - checksum: 380ba9663da3bcea58dee358a0d8c4468bb6539be3c439dc266ac41c047217f52fd698fb7e4b6b6ccdfb8cf53ef4ceed8cc8ceccb8dfca2aa628319826b5b998 + is-hex-prefixed: 1.0.0 + strip-hex-prefix: 1.0.0 + checksum: 1f42959e78ec6f49889c49c8a98639e06f52a15966387dd39faf2930db48663d026efb7db2702dcffe7f2a99c4a0144b7ce784efdbf733f4077aae95de76d65f languageName: node linkType: hard -"csstype@npm:^3.0.2": - version: 3.1.2 - resolution: "csstype@npm:3.1.2" - checksum: e1a52e6c25c1314d6beef5168da704ab29c5186b877c07d822bd0806717d9a265e8493a2e35ca7e68d0f5d472d43fac1cdce70fd79fd0853dff81f3028d857b5 +"eval@npm:^0.1.8": + version: 0.1.8 + resolution: "eval@npm:0.1.8" + dependencies: + "@types/node": "*" + require-like: ">= 0.1.1" + checksum: d005567f394cfbe60948e34982e4637d2665030f9aa7dcac581ea6f9ec6eceb87133ed3dc0ae21764aa362485c242a731dbb6371f1f1a86807c58676431e9d1a languageName: node linkType: hard -"currently-unhandled@npm:^0.4.1": - version: 0.4.1 - resolution: "currently-unhandled@npm:0.4.1" +"event-stream@npm:=3.3.4": + version: 3.3.4 + resolution: "event-stream@npm:3.3.4" dependencies: - array-find-index: ^1.0.1 - checksum: 
1f59fe10b5339b54b1a1eee110022f663f3495cf7cf2f480686e89edc7fa8bfe42dbab4b54f85034bc8b092a76cc7becbc2dad4f9adad332ab5831bec39ad540 + duplexer: ~0.1.1 + from: ~0 + map-stream: ~0.1.0 + pause-stream: 0.0.11 + split: 0.3 + stream-combiner: ~0.0.4 + through: ~2.3.1 + checksum: 80b467820b6daf824d9fb4345d2daf115a056e5c104463f2e98534e92d196a27f2df5ea2aa085624db26f4c45698905499e881d13bc7c01f7a13eac85be72a22 languageName: node linkType: hard -"data-uri-to-buffer@npm:^4.0.0": - version: 4.0.1 - resolution: "data-uri-to-buffer@npm:4.0.1" - checksum: 0d0790b67ffec5302f204c2ccca4494f70b4e2d940fea3d36b09f0bb2b8539c2e86690429eb1f1dc4bcc9e4df0644193073e63d9ee48ac9fce79ec1506e4aa4c +"event-target-shim@npm:^5.0.0": + version: 5.0.1 + resolution: "event-target-shim@npm:5.0.1" + checksum: 1ffe3bb22a6d51bdeb6bf6f7cf97d2ff4a74b017ad12284cc9e6a279e727dc30a5de6bb613e5596ff4dc3e517841339ad09a7eec44266eccb1aa201a30448166 languageName: node linkType: hard -"date-time@npm:^3.1.0": - version: 3.1.0 - resolution: "date-time@npm:3.1.0" - dependencies: - time-zone: ^1.0.0 - checksum: f9cfcd1b15dfeabab15c0b9d18eb9e4e2d9d4371713564178d46a8f91ad577a290b5178b80050718d02d9c0cf646f8a875011e12d1ed05871e9f72c72c8a8fe6 +"eventemitter3@npm:^4.0.0": + version: 4.0.7 + resolution: "eventemitter3@npm:4.0.7" + checksum: 1875311c42fcfe9c707b2712c32664a245629b42bb0a5a84439762dd0fd637fc54d078155ea83c2af9e0323c9ac13687e03cfba79b03af9f40c89b4960099374 languageName: node linkType: hard -"debounce@npm:^1.2.0": - version: 1.2.1 - resolution: "debounce@npm:1.2.1" - checksum: 682a89506d9e54fb109526f4da255c5546102fbb8e3ae75eef3b04effaf5d4853756aee97475cd4650641869794e44f410eeb20ace2b18ea592287ab2038519e +"events@npm:^3.2.0, events@npm:^3.3.0": + version: 3.3.0 + resolution: "events@npm:3.3.0" + checksum: f6f487ad2198aa41d878fa31452f1a3c00958f46e9019286ff4787c84aac329332ab45c9cdc8c445928fc6d7ded294b9e005a7fce9426488518017831b272780 languageName: node linkType: hard -"debug@npm:2.6.9, debug@npm:^2.6.0, debug@npm:^2.6.9": - version: 2.6.9 - resolution: "debug@npm:2.6.9" +"evp_bytestokey@npm:^1.0.3": + version: 1.0.3 + resolution: "evp_bytestokey@npm:1.0.3" dependencies: - ms: 2.0.0 - checksum: d2f51589ca66df60bf36e1fa6e4386b318c3f1e06772280eea5b1ae9fd3d05e9c2b7fd8a7d862457d00853c75b00451aa2d7459b924629ee385287a650f58fe6 + md5.js: ^1.3.4 + node-gyp: latest + safe-buffer: ^5.1.1 + checksum: ad4e1577f1a6b721c7800dcc7c733fe01f6c310732bb5bf2240245c2a5b45a38518b91d8be2c610611623160b9d1c0e91f1ce96d639f8b53e8894625cf20fa45 languageName: node linkType: hard -"debug@npm:4, debug@npm:4.3.4, debug@npm:^4.1.0, debug@npm:^4.1.1, debug@npm:^4.2.0, debug@npm:^4.3.2, debug@npm:^4.3.3, debug@npm:^4.3.4": - version: 4.3.4 - resolution: "debug@npm:4.3.4" +"execa@npm:^5.0.0, execa@npm:^5.1.1": + version: 5.1.1 + resolution: "execa@npm:5.1.1" dependencies: - ms: 2.1.2 - peerDependenciesMeta: - supports-color: - optional: true - checksum: 3dbad3f94ea64f34431a9cbf0bafb61853eda57bff2880036153438f50fb5a84f27683ba0d8e5426bf41a8c6ff03879488120cf5b3a761e77953169c0600a708 + cross-spawn: ^7.0.3 + get-stream: ^6.0.0 + human-signals: ^2.1.0 + is-stream: ^2.0.0 + merge-stream: ^2.0.0 + npm-run-path: ^4.0.1 + onetime: ^5.1.2 + signal-exit: ^3.0.3 + strip-final-newline: ^2.0.0 + checksum: fba9022c8c8c15ed862847e94c252b3d946036d7547af310e344a527e59021fd8b6bb0723883ea87044dc4f0201f949046993124a42ccb0855cae5bf8c786343 languageName: node linkType: hard -"debug@npm:^3.1.0, debug@npm:^3.2.7": - version: 3.2.7 - resolution: "debug@npm:3.2.7" +"execa@npm:^7.1.1": + version: 7.2.0 + resolution: 
"execa@npm:7.2.0" dependencies: - ms: ^2.1.1 - checksum: b3d8c5940799914d30314b7c3304a43305fd0715581a919dacb8b3176d024a782062368405b47491516d2091d6462d4d11f2f4974a405048094f8bfebfa3071c + cross-spawn: ^7.0.3 + get-stream: ^6.0.1 + human-signals: ^4.3.0 + is-stream: ^3.0.0 + merge-stream: ^2.0.0 + npm-run-path: ^5.1.0 + onetime: ^6.0.0 + signal-exit: ^3.0.7 + strip-final-newline: ^3.0.0 + checksum: 14fd17ba0ca8c87b277584d93b1d9fc24f2a65e5152b31d5eb159a3b814854283eaae5f51efa9525e304447e2f757c691877f7adff8fde5746aae67eb1edd1cc languageName: node linkType: hard -"decamelize@npm:^4.0.0": - version: 4.0.0 - resolution: "decamelize@npm:4.0.0" - checksum: b7d09b82652c39eead4d6678bb578e3bebd848add894b76d0f6b395bc45b2d692fb88d977e7cfb93c4ed6c119b05a1347cef261174916c2e75c0a8ca57da1809 +"exponential-backoff@npm:^3.1.1": + version: 3.1.1 + resolution: "exponential-backoff@npm:3.1.1" + checksum: 3d21519a4f8207c99f7457287291316306255a328770d320b401114ec8481986e4e467e854cb9914dd965e0a1ca810a23ccb559c642c88f4c7f55c55778a9b48 languageName: node linkType: hard -"decompress-response@npm:^3.3.0": - version: 3.3.0 - resolution: "decompress-response@npm:3.3.0" +"express@npm:^4.17.3": + version: 4.18.2 + resolution: "express@npm:4.18.2" dependencies: - mimic-response: ^1.0.0 - checksum: 952552ac3bd7de2fc18015086b09468645c9638d98a551305e485230ada278c039c91116e946d07894b39ee53c0f0d5b6473f25a224029344354513b412d7380 + accepts: ~1.3.8 + array-flatten: 1.1.1 + body-parser: 1.20.1 + content-disposition: 0.5.4 + content-type: ~1.0.4 + cookie: 0.5.0 + cookie-signature: 1.0.6 + debug: 2.6.9 + depd: 2.0.0 + encodeurl: ~1.0.2 + escape-html: ~1.0.3 + etag: ~1.8.1 + finalhandler: 1.2.0 + fresh: 0.5.2 + http-errors: 2.0.0 + merge-descriptors: 1.0.1 + methods: ~1.1.2 + on-finished: 2.4.1 + parseurl: ~1.3.3 + path-to-regexp: 0.1.7 + proxy-addr: ~2.0.7 + qs: 6.11.0 + range-parser: ~1.2.1 + safe-buffer: 5.2.1 + send: 0.18.0 + serve-static: 1.15.0 + setprototypeof: 1.2.0 + statuses: 2.0.1 + type-is: ~1.6.18 + utils-merge: 1.0.1 + vary: ~1.1.2 + checksum: 3c4b9b076879442f6b968fe53d85d9f1eeacbb4f4c41e5f16cc36d77ce39a2b0d81b3f250514982110d815b2f7173f5561367f9110fcc541f9371948e8c8b037 languageName: node linkType: hard -"deep-eql@npm:^4.0.1, deep-eql@npm:^4.1.2": - version: 4.1.3 - resolution: "deep-eql@npm:4.1.3" +"extend-shallow@npm:^2.0.1": + version: 2.0.1 + resolution: "extend-shallow@npm:2.0.1" dependencies: - type-detect: ^4.0.0 - checksum: 7f6d30cb41c713973dc07eaadded848b2ab0b835e518a88b91bea72f34e08c4c71d167a722a6f302d3a6108f05afd8e6d7650689a84d5d29ec7fe6220420397f - languageName: node - linkType: hard - -"deep-equal@npm:~1.0.1": - version: 1.0.1 - resolution: "deep-equal@npm:1.0.1" - checksum: 5af8cbfcebf190491878a498caccc7dc9592f8ebd1685b976eacc3825619d222b5e929923163b92c4f414494e2b884f7ebf00c022e8198e8292deb70dd9785f4 + is-extendable: ^0.1.0 + checksum: 8fb58d9d7a511f4baf78d383e637bd7d2e80843bd9cd0853649108ea835208fb614da502a553acc30208e1325240bb7cc4a68473021612496bb89725483656d8 languageName: node linkType: hard -"deep-extend@npm:^0.6.0, deep-extend@npm:~0.6.0": - version: 0.6.0 - resolution: "deep-extend@npm:0.6.0" - checksum: 7be7e5a8d468d6b10e6a67c3de828f55001b6eb515d014f7aeb9066ce36bd5717161eb47d6a0f7bed8a9083935b465bc163ee2581c8b128d29bf61092fdf57a7 +"extend@npm:^3.0.0": + version: 3.0.2 + resolution: "extend@npm:3.0.2" + checksum: a50a8309ca65ea5d426382ff09f33586527882cf532931cb08ca786ea3146c0553310bda688710ff61d7668eba9f96b923fe1420cdf56a2c3eaf30fcab87b515 languageName: node linkType: hard -"deep-is@npm:^0.1.3": - 
version: 0.1.4 - resolution: "deep-is@npm:0.1.4" - checksum: edb65dd0d7d1b9c40b2f50219aef30e116cedd6fc79290e740972c132c09106d2e80aa0bc8826673dd5a00222d4179c84b36a790eef63a4c4bca75a37ef90804 +"extract-zip@npm:2.0.1": + version: 2.0.1 + resolution: "extract-zip@npm:2.0.1" + dependencies: + "@types/yauzl": ^2.9.1 + debug: ^4.1.1 + get-stream: ^5.1.0 + yauzl: ^2.10.0 + dependenciesMeta: + "@types/yauzl": + optional: true + bin: + extract-zip: cli.js + checksum: 8cbda9debdd6d6980819cc69734d874ddd71051c9fe5bde1ef307ebcedfe949ba57b004894b585f758b7c9eeeea0e3d87f2dda89b7d25320459c2c9643ebb635 languageName: node linkType: hard -"deepmerge@npm:^4.2.2": - version: 4.3.1 - resolution: "deepmerge@npm:4.3.1" - checksum: 2024c6a980a1b7128084170c4cf56b0fd58a63f2da1660dcfe977415f27b17dbe5888668b59d0b063753f3220719d5e400b7f113609489c90160bb9a5518d052 +"fast-deep-equal@npm:^3.1.1, fast-deep-equal@npm:^3.1.3": + version: 3.1.3 + resolution: "fast-deep-equal@npm:3.1.3" + checksum: e21a9d8d84f53493b6aa15efc9cfd53dd5b714a1f23f67fb5dc8f574af80df889b3bce25dc081887c6d25457cce704e636395333abad896ccdec03abaf1f3f9d languageName: node linkType: hard -"default-browser-id@npm:^3.0.0": - version: 3.0.0 - resolution: "default-browser-id@npm:3.0.0" - dependencies: - bplist-parser: ^0.2.0 - untildify: ^4.0.0 - checksum: 279c7ad492542e5556336b6c254a4eaf31b2c63a5433265655ae6e47301197b6cfb15c595a6fdc6463b2ff8e1a1a1ed3cba56038a60e1527ba4ab1628c6b9941 +"fast-diff@npm:^1.1.2": + version: 1.3.0 + resolution: "fast-diff@npm:1.3.0" + checksum: d22d371b994fdc8cce9ff510d7b8dc4da70ac327bcba20df607dd5b9cae9f908f4d1028f5fe467650f058d1e7270235ae0b8230809a262b4df587a3b3aa216c3 languageName: node linkType: hard -"default-browser@npm:^4.0.0": - version: 4.0.0 - resolution: "default-browser@npm:4.0.0" - dependencies: - bundle-name: ^3.0.0 - default-browser-id: ^3.0.0 - execa: ^7.1.1 - titleize: ^3.0.0 - checksum: 40c5af984799042b140300be5639c9742599bda76dc9eba5ac9ad5943c83dd36cebc4471eafcfddf8e0ec817166d5ba89d56f08e66a126c7c7908a179cead1a7 +"fast-equals@npm:^5.0.1": + version: 5.0.1 + resolution: "fast-equals@npm:5.0.1" + checksum: fbb3b6a74f3a0fa930afac151ff7d01639159b4fddd2678b5d50708e0ba38e9ec14602222d10dadb8398187342692c04fbef5a62b1cfcc7942fe03e754e064bc languageName: node linkType: hard -"default-gateway@npm:^6.0.3": - version: 6.0.3 - resolution: "default-gateway@npm:6.0.3" - dependencies: - execa: ^5.0.0 - checksum: 126f8273ecac8ee9ff91ea778e8784f6cd732d77c3157e8c5bdd6ed03651b5291f71446d05bc02d04073b1e67583604db5394ea3cf992ede0088c70ea15b7378 +"fast-fifo@npm:^1.1.0, fast-fifo@npm:^1.2.0": + version: 1.3.2 + resolution: "fast-fifo@npm:1.3.2" + checksum: 6bfcba3e4df5af7be3332703b69a7898a8ed7020837ec4395bb341bd96cc3a6d86c3f6071dd98da289618cf2234c70d84b2a6f09a33dd6f988b1ff60d8e54275 languageName: node linkType: hard -"defer-to-connect@npm:^1.0.1": - version: 1.1.3 - resolution: "defer-to-connect@npm:1.1.3" - checksum: 9491b301dcfa04956f989481ba7a43c2231044206269eb4ab64a52d6639ee15b1252262a789eb4239fb46ab63e44d4e408641bae8e0793d640aee55398cb3930 +"fast-glob@npm:^3.2.11, fast-glob@npm:^3.2.12, fast-glob@npm:^3.2.9, fast-glob@npm:^3.3.0, fast-glob@npm:^3.3.2": + version: 3.3.2 + resolution: "fast-glob@npm:3.3.2" + dependencies: + "@nodelib/fs.stat": ^2.0.2 + "@nodelib/fs.walk": ^1.2.3 + glob-parent: ^5.1.2 + merge2: ^1.3.0 + micromatch: ^4.0.4 + checksum: 900e4979f4dbc3313840078419245621259f349950411ca2fa445a2f9a1a6d98c3b5e7e0660c5ccd563aa61abe133a21765c6c0dec8e57da1ba71d8000b05ec1 languageName: node linkType: hard 
-"define-data-property@npm:^1.0.1": - version: 1.1.1 - resolution: "define-data-property@npm:1.1.1" - dependencies: - get-intrinsic: ^1.2.1 - gopd: ^1.0.1 - has-property-descriptors: ^1.0.0 - checksum: a29855ad3f0630ea82e3c5012c812efa6ca3078d5c2aa8df06b5f597c1cde6f7254692df41945851d903e05a1668607b6d34e778f402b9ff9ffb38111f1a3f0d +"fast-json-stable-stringify@npm:^2.0.0, fast-json-stable-stringify@npm:^2.1.0": + version: 2.1.0 + resolution: "fast-json-stable-stringify@npm:2.1.0" + checksum: b191531e36c607977e5b1c47811158733c34ccb3bfde92c44798929e9b4154884378536d26ad90dfecd32e1ffc09c545d23535ad91b3161a27ddbb8ebe0cbecb languageName: node linkType: hard -"define-lazy-prop@npm:^2.0.0": - version: 2.0.0 - resolution: "define-lazy-prop@npm:2.0.0" - checksum: 0115fdb065e0490918ba271d7339c42453d209d4cb619dfe635870d906731eff3e1ade8028bb461ea27ce8264ec5e22c6980612d332895977e89c1bbc80fcee2 +"fast-levenshtein@npm:^2.0.6": + version: 2.0.6 + resolution: "fast-levenshtein@npm:2.0.6" + checksum: 92cfec0a8dfafd9c7a15fba8f2cc29cd0b62b85f056d99ce448bbcd9f708e18ab2764bda4dd5158364f4145a7c72788538994f0d1787b956ef0d1062b0f7c24c languageName: node linkType: hard -"define-lazy-prop@npm:^3.0.0": - version: 3.0.0 - resolution: "define-lazy-prop@npm:3.0.0" - checksum: 54884f94caac0791bf6395a3ec530ce901cf71c47b0196b8754f3fd17edb6c0e80149c1214429d851873bb0d689dbe08dcedbb2306dc45c8534a5934723851b6 +"fast-plist@npm:^0.1.2": + version: 0.1.3 + resolution: "fast-plist@npm:0.1.3" + checksum: e879f548db3a1fc89c654c476e9c9846f4335fdcd2283ec99e5f234c897f5616cee2a0a4201bf4b64ab6269e75c09daafc3933bd4a038c85af943fac0f113caa languageName: node linkType: hard -"define-properties@npm:^1.1.4": - version: 1.2.1 - resolution: "define-properties@npm:1.2.1" +"fast-url-parser@npm:1.1.3": + version: 1.1.3 + resolution: "fast-url-parser@npm:1.1.3" dependencies: - define-data-property: ^1.0.1 - has-property-descriptors: ^1.0.0 - object-keys: ^1.1.1 - checksum: b4ccd00597dd46cb2d4a379398f5b19fca84a16f3374e2249201992f36b30f6835949a9429669ee6b41b6e837205a163eadd745e472069e70dfc10f03e5fcc12 + punycode: ^1.3.2 + checksum: 5043d0c4a8d775ff58504d56c096563c11b113e4cb8a2668c6f824a1cd4fb3812e2fdf76537eb24a7ce4ae7def6bd9747da630c617cf2a4b6ce0c42514e4f21c languageName: node linkType: hard -"del@npm:^6.1.1": - version: 6.1.1 - resolution: "del@npm:6.1.1" - dependencies: - globby: ^11.0.1 - graceful-fs: ^4.2.4 - is-glob: ^4.0.1 - is-path-cwd: ^2.2.0 - is-path-inside: ^3.0.2 - p-map: ^4.0.0 - rimraf: ^3.0.2 - slash: ^3.0.0 - checksum: 563288b73b8b19a7261c47fd21a330eeab6e2acd7c6208c49790dfd369127120dd7836cdf0c1eca216b77c94782a81507eac6b4734252d3bef2795cb366996b6 +"fastest-levenshtein@npm:^1.0.12": + version: 1.0.16 + resolution: "fastest-levenshtein@npm:1.0.16" + checksum: a78d44285c9e2ae2c25f3ef0f8a73f332c1247b7ea7fb4a191e6bb51aa6ee1ef0dfb3ed113616dcdc7023e18e35a8db41f61c8d88988e877cf510df8edafbc71 languageName: node linkType: hard -"delayed-stream@npm:~1.0.0": - version: 1.0.0 - resolution: "delayed-stream@npm:1.0.0" - checksum: 46fe6e83e2cb1d85ba50bd52803c68be9bd953282fa7096f51fc29edd5d67ff84ff753c51966061e5ba7cb5e47ef6d36a91924eddb7f3f3483b1c560f77a0020 +"fastq@npm:^1.6.0": + version: 1.15.0 + resolution: "fastq@npm:1.15.0" + dependencies: + reusify: ^1.0.4 + checksum: 0170e6bfcd5d57a70412440b8ef600da6de3b2a6c5966aeaf0a852d542daff506a0ee92d6de7679d1de82e644bce69d7a574a6c93f0b03964b5337eed75ada1a languageName: node linkType: hard -"delegates@npm:^1.0.0": - version: 1.0.0 - resolution: "delegates@npm:1.0.0" - checksum: 
a51744d9b53c164ba9c0492471a1a2ffa0b6727451bdc89e31627fdf4adda9d51277cfcbfb20f0a6f08ccb3c436f341df3e92631a3440226d93a8971724771fd +"fault@npm:^2.0.0": + version: 2.0.1 + resolution: "fault@npm:2.0.1" + dependencies: + format: ^0.2.0 + checksum: c9b30f47d95769177130a9409976a899ed31eb598450fbad5b0d39f2f5f56d5f4a9ff9257e0bee8407cb0fc3ce37165657888c6aa6d78472e403893104329b72 languageName: node linkType: hard -"depd@npm:2.0.0, depd@npm:^2.0.0, depd@npm:~2.0.0": - version: 2.0.0 - resolution: "depd@npm:2.0.0" - checksum: abbe19c768c97ee2eed6282d8ce3031126662252c58d711f646921c9623f9052e3e1906443066beec1095832f534e57c523b7333f8e7e0d93051ab6baef5ab3a +"faye-websocket@npm:^0.11.3": + version: 0.11.4 + resolution: "faye-websocket@npm:0.11.4" + dependencies: + websocket-driver: ">=0.5.1" + checksum: d49a62caf027f871149fc2b3f3c7104dc6d62744277eb6f9f36e2d5714e847d846b9f7f0d0b7169b25a012e24a594cde11a93034b30732e4c683f20b8a5019fa languageName: node linkType: hard -"depd@npm:~1.1.2": - version: 1.1.2 - resolution: "depd@npm:1.1.2" - checksum: 6b406620d269619852885ce15965272b829df6f409724415e0002c8632ab6a8c0a08ec1f0bd2add05dc7bd7507606f7e2cc034fa24224ab829580040b835ecd9 +"fd-slicer@npm:~1.1.0": + version: 1.1.0 + resolution: "fd-slicer@npm:1.1.0" + dependencies: + pend: ~1.2.0 + checksum: c8585fd5713f4476eb8261150900d2cb7f6ff2d87f8feb306ccc8a1122efd152f1783bdb2b8dc891395744583436bfd8081d8e63ece0ec8687eeefea394d4ff2 languageName: node linkType: hard -"dependency-graph@npm:^0.11.0": - version: 0.11.0 - resolution: "dependency-graph@npm:0.11.0" - checksum: 477204beaa9be69e642bc31ffe7a8c383d0cf48fa27acbc91c5df01431ab913e65c154213d2ef83d034c98d77280743ec85e5da018a97a18dd43d3c0b78b28cd +"feed@npm:^4.2.2": + version: 4.2.2 + resolution: "feed@npm:4.2.2" + dependencies: + xml-js: ^1.6.11 + checksum: 2e6992a675a049511eef7bda8ca6c08cb9540cd10e8b275ec4c95d166228ec445a335fa8de990358759f248a92861e51decdcd32bf1c54737d5b7aed7c7ffe97 languageName: node linkType: hard -"dequal@npm:^2.0.0": - version: 2.0.3 - resolution: "dequal@npm:2.0.3" - checksum: 8679b850e1a3d0ebbc46ee780d5df7b478c23f335887464023a631d1b9af051ad4a6595a44220f9ff8ff95a8ddccf019b5ad778a976fd7bbf77383d36f412f90 +"fetch-blob@npm:^3.1.2, fetch-blob@npm:^3.1.4": + version: 3.2.0 + resolution: "fetch-blob@npm:3.2.0" + dependencies: + node-domexception: ^1.0.0 + web-streams-polyfill: ^3.0.3 + checksum: f19bc28a2a0b9626e69fd7cf3a05798706db7f6c7548da657cbf5026a570945f5eeaedff52007ea35c8bcd3d237c58a20bf1543bc568ab2422411d762dd3d5bf languageName: node linkType: hard -"destroy@npm:1.2.0, destroy@npm:^1.0.4": - version: 1.2.0 - resolution: "destroy@npm:1.2.0" - checksum: 0acb300b7478a08b92d810ab229d5afe0d2f4399272045ab22affa0d99dbaf12637659411530a6fcd597a9bdac718fc94373a61a95b4651bbc7b83684a565e38 +"fflate@npm:^0.8.0, fflate@npm:^0.8.1": + version: 0.8.1 + resolution: "fflate@npm:0.8.1" + checksum: 7207e2d333243724485d2488095256b776184bd4545aa9967b655feaee5dc18e9525ed9b6d75f94cfd71d98fb285336f4902641683472f1d0c19a99137084cec languageName: node linkType: hard -"detab@npm:2.0.4": - version: 2.0.4 - resolution: "detab@npm:2.0.4" +"file-entry-cache@npm:^6.0.1": + version: 6.0.1 + resolution: "file-entry-cache@npm:6.0.1" dependencies: - repeat-string: ^1.5.4 - checksum: 34b077521ecd4c6357d32ff7923be644d34aa6f6b7d717d40ec4a9168243eefaea2b512a75a460a6f70c31b0bbc31ff90f820a891803b4ddaf99e9d04d0d389d + flat-cache: ^3.0.4 + checksum: f49701feaa6314c8127c3c2f6173cfefff17612f5ed2daaafc6da13b5c91fd43e3b2a58fd0d63f9f94478a501b167615931e7200e31485e320f74a33885a9c74 languageName: node 
linkType: hard -"detect-node@npm:^2.0.4": - version: 2.1.0 - resolution: "detect-node@npm:2.1.0" - checksum: 832184ec458353e41533ac9c622f16c19f7c02d8b10c303dfd3a756f56be93e903616c0bb2d4226183c9351c15fc0b3dba41a17a2308262afabcfa3776e6ae6e +"file-entry-cache@npm:^8.0.0": + version: 8.0.0 + resolution: "file-entry-cache@npm:8.0.0" + dependencies: + flat-cache: ^4.0.0 + checksum: f67802d3334809048c69b3d458f672e1b6d26daefda701761c81f203b80149c35dea04d78ea4238969dd617678e530876722a0634c43031a0957f10cc3ed190f languageName: node linkType: hard -"detect-port-alt@npm:^1.1.6": - version: 1.1.6 - resolution: "detect-port-alt@npm:1.1.6" +"file-loader@npm:^6.2.0": + version: 6.2.0 + resolution: "file-loader@npm:6.2.0" dependencies: - address: ^1.0.1 - debug: ^2.6.0 - bin: - detect: ./bin/detect-port - detect-port: ./bin/detect-port - checksum: 9dc37b1fa4a9dd6d4889e1045849b8d841232b598d1ca888bf712f4035b07a17cf6d537465a0d7323250048d3a5a0540e3b7cf89457efc222f96f77e2c40d16a + loader-utils: ^2.0.0 + schema-utils: ^3.0.0 + peerDependencies: + webpack: ^4.0.0 || ^5.0.0 + checksum: faf43eecf233f4897b0150aaa874eeeac214e4f9de49738a9e0ef734a30b5260059e85b7edadf852b98e415f875bd5f12587768a93fd52aaf2e479ecf95fab20 languageName: node linkType: hard -"detect-port@npm:^1.3.0": - version: 1.5.1 - resolution: "detect-port@npm:1.5.1" - dependencies: - address: ^1.0.1 - debug: 4 - bin: - detect: bin/detect-port.js - detect-port: bin/detect-port.js - checksum: b48da9340481742547263d5d985e65d078592557863402ecf538511735e83575867e94f91fe74405ea19b61351feb99efccae7e55de9a151d5654e3417cea05b +"filesize@npm:^8.0.6": + version: 8.0.7 + resolution: "filesize@npm:8.0.7" + checksum: 8603d27c5287b984cb100733640645e078f5f5ad65c6d913173e01fb99e09b0747828498fd86647685ccecb69be31f3587b9739ab1e50732116b2374aff4cbf9 languageName: node linkType: hard -"devtools-protocol@npm:0.0.1107588": - version: 0.0.1107588 - resolution: "devtools-protocol@npm:0.0.1107588" - checksum: 9064fd643f39ae0adabb8f425b746899ff24371d89a5047d38752653259e6afcb6bcb2d9759ff727eb5885cfc0f9ba8eb384850a2af00694135622e88080e3e5 +"fill-range@npm:^7.0.1": + version: 7.0.1 + resolution: "fill-range@npm:7.0.1" + dependencies: + to-regex-range: ^5.0.1 + checksum: cc283f4e65b504259e64fd969bcf4def4eb08d85565e906b7d36516e87819db52029a76b6363d0f02d0d532f0033c9603b9e2d943d56ee3b0d4f7ad3328ff917 languageName: node linkType: hard -"diff@npm:5.0.0": - version: 5.0.0 - resolution: "diff@npm:5.0.0" - checksum: f19fe29284b633afdb2725c2a8bb7d25761ea54d321d8e67987ac851c5294be4afeab532bd84531e02583a3fe7f4014aa314a3eda84f5590e7a9e6b371ef3b46 +"finalhandler@npm:1.2.0": + version: 1.2.0 + resolution: "finalhandler@npm:1.2.0" + dependencies: + debug: 2.6.9 + encodeurl: ~1.0.2 + escape-html: ~1.0.3 + on-finished: 2.4.1 + parseurl: ~1.3.3 + statuses: 2.0.1 + unpipe: ~1.0.0 + checksum: 92effbfd32e22a7dff2994acedbd9bcc3aa646a3e919ea6a53238090e87097f8ef07cced90aa2cc421abdf993aefbdd5b00104d55c7c5479a8d00ed105b45716 languageName: node linkType: hard -"diff@npm:^4.0.1": - version: 4.0.2 - resolution: "diff@npm:4.0.2" - checksum: f2c09b0ce4e6b301c221addd83bf3f454c0bc00caa3dd837cf6c127d6edf7223aa2bbe3b688feea110b7f262adbfc845b757c44c8a9f8c0c5b15d8fa9ce9d20d +"find-cache-dir@npm:^3.3.1": + version: 3.3.2 + resolution: "find-cache-dir@npm:3.3.2" + dependencies: + commondir: ^1.0.1 + make-dir: ^3.0.2 + pkg-dir: ^4.1.0 + checksum: 1e61c2e64f5c0b1c535bd85939ae73b0e5773142713273818cc0b393ee3555fb0fd44e1a5b161b8b6c3e03e98c2fcc9c227d784850a13a90a8ab576869576817 languageName: node linkType: hard 
-"diff@npm:^5.0.0": - version: 5.1.0 - resolution: "diff@npm:5.1.0" - checksum: c7bf0df7c9bfbe1cf8a678fd1b2137c4fb11be117a67bc18a0e03ae75105e8533dbfb1cda6b46beb3586ef5aed22143ef9d70713977d5fb1f9114e21455fba90 +"find-cache-dir@npm:^4.0.0": + version: 4.0.0 + resolution: "find-cache-dir@npm:4.0.0" + dependencies: + common-path-prefix: ^3.0.0 + pkg-dir: ^7.0.0 + checksum: 52a456a80deeb27daa3af6e06059b63bdb9cc4af4d845fc6d6229887e505ba913cd56000349caa60bc3aa59dacdb5b4c37903d4ba34c75102d83cab330b70d2f languageName: node linkType: hard -"dir-glob@npm:^3.0.1": - version: 3.0.1 - resolution: "dir-glob@npm:3.0.1" +"find-replace@npm:^3.0.0": + version: 3.0.0 + resolution: "find-replace@npm:3.0.0" dependencies: - path-type: ^4.0.0 - checksum: fa05e18324510d7283f55862f3161c6759a3f2f8dbce491a2fc14c8324c498286c54282c1f0e933cb930da8419b30679389499b919122952a4f8592362ef4615 + array-back: ^3.0.1 + checksum: 6b04bcfd79027f5b84aa1dfe100e3295da989bdac4b4de6b277f4d063e78f5c9e92ebc8a1fec6dd3b448c924ba404ee051cc759e14a3ee3e825fa1361025df08 languageName: node linkType: hard -"dns-equal@npm:^1.0.0": +"find-up-simple@npm:^1.0.0": version: 1.0.0 - resolution: "dns-equal@npm:1.0.0" - checksum: a8471ac849c7c13824f053babea1bc26e2f359394dd5a460f8340d8abd13434be01e3327a5c59d212f8c8997817450efd3f3ac77bec709b21979cf0235644524 + resolution: "find-up-simple@npm:1.0.0" + checksum: 91c3d51c1111b5eb4e6e6d71d21438f6571a37a69dc288d4222b98996756e2f472fa5393a4dddb5e1a84929405d87e86f4bdce798ba84ee513b79854960ec140 languageName: node linkType: hard -"dns-packet@npm:^5.2.2": - version: 5.6.1 - resolution: "dns-packet@npm:5.6.1" +"find-up@npm:5.0.0, find-up@npm:^5.0.0": + version: 5.0.0 + resolution: "find-up@npm:5.0.0" dependencies: - "@leichtgewicht/ip-codec": ^2.0.1 - checksum: 64c06457f0c6e143f7a0946e0aeb8de1c5f752217cfa143ef527467c00a6d78db1835cfdb6bb68333d9f9a4963cf23f410439b5262a8935cce1236f45e344b81 + locate-path: ^6.0.0 + path-exists: ^4.0.0 + checksum: 07955e357348f34660bde7920783204ff5a26ac2cafcaa28bace494027158a97b9f56faaf2d89a6106211a8174db650dd9f503f9c0d526b1202d5554a00b9095 languageName: node linkType: hard -"docs@workspace:docs": - version: 0.0.0-use.local - resolution: "docs@workspace:docs" +"find-up@npm:^2.1.0": + version: 2.1.0 + resolution: "find-up@npm:2.1.0" dependencies: - "@docusaurus/core": ^2.4.0 - "@docusaurus/module-type-aliases": ^2.4.0 - "@docusaurus/plugin-google-gtag": ^2.4.0 - "@docusaurus/preset-classic": ^2.4.0 - "@easyops-cn/docusaurus-search-local": ^0.35.0 - "@mdx-js/react": ^1.6.22 - axios: ^1.4.0 - clsx: ^1.2.1 - docusaurus-plugin-typedoc: 1.0.0-next.18 - hast-util-is-element: ^1.1.0 - prism-react-renderer: ^1.3.5 - react: ^17.0.2 - react-dom: ^17.0.2 - rehype-katex: ^5.0.0 - remark-math: ^3.0.1 - typedoc: ^0.25.0 - typedoc-plugin-frontmatter: ^0.0.2 - typedoc-plugin-markdown: 4.0.0-next.25 - typedoc-plugin-merge-modules: ^5.1.0 - typescript: ^5.2.2 - languageName: unknown - linkType: soft + locate-path: ^2.0.0 + checksum: 43284fe4da09f89011f08e3c32cd38401e786b19226ea440b75386c1b12a4cb738c94969808d53a84f564ede22f732c8409e3cfc3f7fb5b5c32378ad0bbf28bd + languageName: node + linkType: hard -"doctrine@npm:^3.0.0": +"find-up@npm:^3.0.0": version: 3.0.0 - resolution: "doctrine@npm:3.0.0" + resolution: "find-up@npm:3.0.0" dependencies: - esutils: ^2.0.2 - checksum: fd7673ca77fe26cd5cba38d816bc72d641f500f1f9b25b83e8ce28827fe2da7ad583a8da26ab6af85f834138cf8dae9f69b0cd6ab925f52ddab1754db44d99ce + locate-path: ^3.0.0 + checksum: 
38eba3fe7a66e4bc7f0f5a1366dc25508b7cfc349f852640e3678d26ad9a6d7e2c43eff0a472287de4a9753ef58f066a0ea892a256fa3636ad51b3fe1e17fae9 languageName: node linkType: hard -"docusaurus-plugin-typedoc@npm:1.0.0-next.18": - version: 1.0.0-next.18 - resolution: "docusaurus-plugin-typedoc@npm:1.0.0-next.18" +"find-up@npm:^4.0.0": + version: 4.1.0 + resolution: "find-up@npm:4.1.0" dependencies: - "@docusaurus/types": ^2.4.1 - peerDependencies: - typedoc-plugin-markdown: ">=4.0.0-next.24" - checksum: a501e3bd1cc5b33d215a1b71b018a34d4aa5bd98f39580ab114a127d3f555078443d6b690119c0adb17fb8867ba4131382ad5d14dbaa7919715020cdfaf6b9c4 + locate-path: ^5.0.0 + path-exists: ^4.0.0 + checksum: 4c172680e8f8c1f78839486e14a43ef82e9decd0e74145f40707cc42e7420506d5ec92d9a11c22bd2c48fb0c384ea05dd30e10dd152fefeec6f2f75282a8b844 languageName: node linkType: hard -"dom-converter@npm:^0.2.0": - version: 0.2.0 - resolution: "dom-converter@npm:0.2.0" +"find-up@npm:^6.3.0": + version: 6.3.0 + resolution: "find-up@npm:6.3.0" dependencies: - utila: ~0.4 - checksum: ea52fe303f5392e48dea563abef0e6fb3a478b8dbe3c599e99bb5d53981c6c38fc4944e56bb92a8ead6bb989d10b7914722ae11febbd2fd0910e33b9fc4aaa77 + locate-path: ^7.1.0 + path-exists: ^5.0.0 + checksum: 9a21b7f9244a420e54c6df95b4f6fc3941efd3c3e5476f8274eb452f6a85706e7a6a90de71353ee4f091fcb4593271a6f92810a324ec542650398f928783c280 languageName: node linkType: hard -"dom-serializer@npm:^1.0.1": - version: 1.4.1 - resolution: "dom-serializer@npm:1.4.1" +"flat-cache@npm:^3.0.4": + version: 3.2.0 + resolution: "flat-cache@npm:3.2.0" dependencies: - domelementtype: ^2.0.1 - domhandler: ^4.2.0 - entities: ^2.0.0 - checksum: fbb0b01f87a8a2d18e6e5a388ad0f7ec4a5c05c06d219377da1abc7bb0f674d804f4a8a94e3f71ff15f6cb7dcfc75704a54b261db672b9b3ab03da6b758b0b22 + flatted: ^3.2.9 + keyv: ^4.5.3 + rimraf: ^3.0.2 + checksum: e7e0f59801e288b54bee5cb9681e9ee21ee28ef309f886b312c9d08415b79fc0f24ac842f84356ce80f47d6a53de62197ce0e6e148dc42d5db005992e2a756ec languageName: node linkType: hard -"dom-serializer@npm:^2.0.0": - version: 2.0.0 - resolution: "dom-serializer@npm:2.0.0" +"flat-cache@npm:^4.0.0": + version: 4.0.0 + resolution: "flat-cache@npm:4.0.0" dependencies: - domelementtype: ^2.3.0 - domhandler: ^5.0.2 - entities: ^4.2.0 - checksum: cd1810544fd8cdfbd51fa2c0c1128ec3a13ba92f14e61b7650b5de421b88205fd2e3f0cc6ace82f13334114addb90ed1c2f23074a51770a8e9c1273acbc7f3e6 + flatted: ^3.2.9 + keyv: ^4.5.4 + rimraf: ^5.0.5 + checksum: 744d5f111aeecdfb963faab7089230c737a90c325137251b4fe144fd76932e19738a861e356c5ee828bb310592b42a1da667912d74d0403f1f4ef75be8bfdbac languageName: node linkType: hard -"domelementtype@npm:^2.0.1, domelementtype@npm:^2.2.0, domelementtype@npm:^2.3.0": - version: 2.3.0 - resolution: "domelementtype@npm:2.3.0" - checksum: ee837a318ff702622f383409d1f5b25dd1024b692ef64d3096ff702e26339f8e345820f29a68bcdcea8cfee3531776b3382651232fbeae95612d6f0a75efb4f6 +"flat@npm:^5.0.2": + version: 5.0.2 + resolution: "flat@npm:5.0.2" + bin: + flat: cli.js + checksum: 12a1536ac746db74881316a181499a78ef953632ddd28050b7a3a43c62ef5462e3357c8c29d76072bb635f147f7a9a1f0c02efef6b4be28f8db62ceb3d5c7f5d languageName: node linkType: hard -"domhandler@npm:^4.0.0, domhandler@npm:^4.2.0, domhandler@npm:^4.3.1": - version: 4.3.1 - resolution: "domhandler@npm:4.3.1" - dependencies: - domelementtype: ^2.2.0 - checksum: 4c665ceed016e1911bf7d1dadc09dc888090b64dee7851cccd2fcf5442747ec39c647bb1cb8c8919f8bbdd0f0c625a6bafeeed4b2d656bbecdbae893f43ffaaa +"flatted@npm:^3.2.9": + version: 3.2.9 + resolution: "flatted@npm:3.2.9" + checksum: 
f14167fbe26a9d20f6fca8d998e8f1f41df72c8e81f9f2c9d61ed2bea058248f5e1cbd05e7f88c0e5087a6a0b822a1e5e2b446e879f3cfbe0b07ba2d7f80b026 languageName: node linkType: hard -"domhandler@npm:^5.0.2, domhandler@npm:^5.0.3": - version: 5.0.3 - resolution: "domhandler@npm:5.0.3" - dependencies: - domelementtype: ^2.3.0 - checksum: 0f58f4a6af63e6f3a4320aa446d28b5790a009018707bce2859dcb1d21144c7876482b5188395a188dfa974238c019e0a1e610d2fc269a12b2c192ea2b0b131c +"follow-redirects@npm:^1.0.0, follow-redirects@npm:^1.12.1, follow-redirects@npm:^1.14.7, follow-redirects@npm:^1.15.0": + version: 1.15.3 + resolution: "follow-redirects@npm:1.15.3" + peerDependenciesMeta: + debug: + optional: true + checksum: 584da22ec5420c837bd096559ebfb8fe69d82512d5585004e36a3b4a6ef6d5905780e0c74508c7b72f907d1fa2b7bd339e613859e9c304d0dc96af2027fd0231 languageName: node linkType: hard -"domutils@npm:^2.5.2, domutils@npm:^2.8.0": - version: 2.8.0 - resolution: "domutils@npm:2.8.0" +"for-each@npm:^0.3.3": + version: 0.3.3 + resolution: "for-each@npm:0.3.3" dependencies: - dom-serializer: ^1.0.1 - domelementtype: ^2.2.0 - domhandler: ^4.2.0 - checksum: abf7434315283e9aadc2a24bac0e00eab07ae4313b40cc239f89d84d7315ebdfd2fb1b5bf750a96bc1b4403d7237c7b2ebf60459be394d625ead4ca89b934391 + is-callable: ^1.1.3 + checksum: 6c48ff2bc63362319c65e2edca4a8e1e3483a2fabc72fbe7feaf8c73db94fc7861bd53bc02c8a66a0c1dd709da6b04eec42e0abdd6b40ce47305ae92a25e5d28 languageName: node linkType: hard -"domutils@npm:^3.0.1": - version: 3.1.0 - resolution: "domutils@npm:3.1.0" - dependencies: - dom-serializer: ^2.0.0 - domelementtype: ^2.3.0 - domhandler: ^5.0.3 - checksum: e5757456ddd173caa411cfc02c2bb64133c65546d2c4081381a3bafc8a57411a41eed70494551aa58030be9e58574fcc489828bebd673863d39924fb4878f416 +"foreach@npm:~2.0.1": + version: 2.0.6 + resolution: "foreach@npm:2.0.6" + checksum: f7b68494545ee41cbd0b0425ebf5386c265dc38ef2a9b0d5cd91a1b82172e939b4cf9387f8e0ebf6db4e368fc79ed323f2198424d5c774515ac3ed9b08901c0e languageName: node linkType: hard -"dot-case@npm:^3.0.4": - version: 3.0.4 - resolution: "dot-case@npm:3.0.4" +"foreground-child@npm:^3.1.0": + version: 3.1.1 + resolution: "foreground-child@npm:3.1.1" dependencies: - no-case: ^3.0.4 - tslib: ^2.0.3 - checksum: a65e3519414856df0228b9f645332f974f2bf5433370f544a681122eab59e66038fc3349b4be1cdc47152779dac71a5864f1ccda2f745e767c46e9c6543b1169 + cross-spawn: ^7.0.0 + signal-exit: ^4.0.1 + checksum: 139d270bc82dc9e6f8bc045fe2aae4001dc2472157044fdfad376d0a3457f77857fa883c1c8b21b491c6caade9a926a4bed3d3d2e8d3c9202b151a4cbbd0bcd5 languageName: node linkType: hard -"dot-prop@npm:^5.2.0": - version: 5.3.0 - resolution: "dot-prop@npm:5.3.0" +"fork-ts-checker-webpack-plugin@npm:^6.5.0": + version: 6.5.3 + resolution: "fork-ts-checker-webpack-plugin@npm:6.5.3" dependencies: - is-obj: ^2.0.0 - checksum: d5775790093c234ef4bfd5fbe40884ff7e6c87573e5339432870616331189f7f5d86575c5b5af2dcf0f61172990f4f734d07844b1f23482fff09e3c4bead05ea + "@babel/code-frame": ^7.8.3 + "@types/json-schema": ^7.0.5 + chalk: ^4.1.0 + chokidar: ^3.4.2 + cosmiconfig: ^6.0.0 + deepmerge: ^4.2.2 + fs-extra: ^9.0.0 + glob: ^7.1.6 + memfs: ^3.1.2 + minimatch: ^3.0.4 + schema-utils: 2.7.0 + semver: ^7.3.2 + tapable: ^1.0.0 + peerDependencies: + eslint: ">= 6" + typescript: ">= 2.7" + vue-template-compiler: "*" + webpack: ">= 4" + peerDependenciesMeta: + eslint: + optional: true + vue-template-compiler: + optional: true + checksum: 
9732a49bfeed8fc23e6e8a59795fa7c238edeba91040a9b520db54b4d316dda27f9f1893d360e296fd0ad8930627d364417d28a8c7007fba60cc730ebfce4956 languageName: node linkType: hard -"duplexer3@npm:^0.1.4": - version: 0.1.5 - resolution: "duplexer3@npm:0.1.5" - checksum: e677cb4c48f031ca728601d6a20bf6aed4c629d69ef9643cb89c67583d673c4ec9317cc6427501f38bd8c368d3a18f173987cc02bd99d8cf8fe3d94259a22a20 +"form-data-encoder@npm:^2.1.2": + version: 2.1.4 + resolution: "form-data-encoder@npm:2.1.4" + checksum: e0b3e5950fb69b3f32c273944620f9861f1933df9d3e42066e038e26dfb343d0f4465de9f27e0ead1a09d9df20bc2eed06a63c2ca2f8f00949e7202bae9e29dd languageName: node linkType: hard -"duplexer@npm:^0.1.2, duplexer@npm:~0.1.1": - version: 0.1.2 - resolution: "duplexer@npm:0.1.2" - checksum: 62ba61a830c56801db28ff6305c7d289b6dc9f859054e8c982abd8ee0b0a14d2e9a8e7d086ffee12e868d43e2bbe8a964be55ddbd8c8957714c87373c7a4f9b0 +"form-data@npm:^4.0.0": + version: 4.0.0 + resolution: "form-data@npm:4.0.0" + dependencies: + asynckit: ^0.4.0 + combined-stream: ^1.0.8 + mime-types: ^2.1.12 + checksum: 01135bf8675f9d5c61ff18e2e2932f719ca4de964e3be90ef4c36aacfc7b9cb2fceb5eca0b7e0190e3383fe51c5b37f4cb80b62ca06a99aaabfcfd6ac7c9328c languageName: node linkType: hard -"eastasianwidth@npm:^0.2.0": - version: 0.2.0 - resolution: "eastasianwidth@npm:0.2.0" - checksum: 7d00d7cd8e49b9afa762a813faac332dee781932d6f2c848dc348939c4253f1d4564341b7af1d041853bc3f32c2ef141b58e0a4d9862c17a7f08f68df1e0f1ed +"format@npm:^0.2.0": + version: 0.2.2 + resolution: "format@npm:0.2.2" + checksum: 646a60e1336250d802509cf24fb801e43bd4a70a07510c816fa133aa42cdbc9c21e66e9cc0801bb183c5b031c9d68be62e7fbb6877756e52357850f92aa28799 languageName: node linkType: hard -"ee-first@npm:1.1.1": - version: 1.1.1 - resolution: "ee-first@npm:1.1.1" - checksum: 1b4cac778d64ce3b582a7e26b218afe07e207a0f9bfe13cc7395a6d307849cfe361e65033c3251e00c27dd060cab43014c2d6b2647676135e18b77d2d05b3f4f +"formdata-polyfill@npm:^4.0.10": + version: 4.0.10 + resolution: "formdata-polyfill@npm:4.0.10" + dependencies: + fetch-blob: ^3.1.2 + checksum: 82a34df292afadd82b43d4a740ce387bc08541e0a534358425193017bf9fb3567875dc5f69564984b1da979979b70703aa73dee715a17b6c229752ae736dd9db languageName: node linkType: hard -"electron-to-chromium@npm:^1.4.535": - version: 1.4.554 - resolution: "electron-to-chromium@npm:1.4.554" - checksum: cbac43c50b43777327f4a7bf149ee3088c1da8b91bbcd80f78d2cc77bc52763f6d0941574499239d9caefd3430d3093f865e5f1093371418f7d6b70301eeae9b +"forwarded@npm:0.2.0": + version: 0.2.0 + resolution: "forwarded@npm:0.2.0" + checksum: fd27e2394d8887ebd16a66ffc889dc983fbbd797d5d3f01087c020283c0f019a7d05ee85669383d8e0d216b116d720fc0cef2f6e9b7eb9f4c90c6e0bc7fd28e6 languageName: node linkType: hard -"elliptic@npm:6.5.4, elliptic@npm:^6.5.2, elliptic@npm:^6.5.4": - version: 6.5.4 - resolution: "elliptic@npm:6.5.4" - dependencies: - bn.js: ^4.11.9 - brorand: ^1.1.0 - hash.js: ^1.0.0 - hmac-drbg: ^1.0.1 - inherits: ^2.0.4 - minimalistic-assert: ^1.0.1 - minimalistic-crypto-utils: ^1.0.1 - checksum: d56d21fd04e97869f7ffcc92e18903b9f67f2d4637a23c860492fbbff5a3155fd9ca0184ce0c865dd6eb2487d234ce9551335c021c376cd2d3b7cb749c7d10f4 +"fp-ts@npm:1.19.3": + version: 1.19.3 + resolution: "fp-ts@npm:1.19.3" + checksum: eb0d4766ad561e9c5c01bfdd3d0ae589af135556921c733d26cf5289aad9f400110defdd93e6ac1d71f626697bb44d9d95ed2879c53dfd868f7cac3cf5c5553c languageName: node linkType: hard -"emittery@npm:^1.0.1": - version: 1.0.1 - resolution: "emittery@npm:1.0.1" - checksum: 
d95faee6ffb2e023cadaa6804265fea5298c53d079f170112af8dfae3e141761363ea4510966128259346418e3ec7639310fd75059ecce2423bf8afd07004226 +"fp-ts@npm:^1.0.0": + version: 1.19.5 + resolution: "fp-ts@npm:1.19.5" + checksum: 67d2d9c3855d211ca2592b1ef805f98b618157e7681791a776d9d0f7f3e52fcca2122ebf5bc215908c9099fad69756d40e37210cf46cb4075dae1b61efe69e40 languageName: node linkType: hard -"emoji-regex@npm:^8.0.0": - version: 8.0.0 - resolution: "emoji-regex@npm:8.0.0" - checksum: d4c5c39d5a9868b5fa152f00cada8a936868fd3367f33f71be515ecee4c803132d11b31a6222b2571b1e5f7e13890156a94880345594d0ce7e3c9895f560f192 +"fraction.js@npm:^4.3.6": + version: 4.3.7 + resolution: "fraction.js@npm:4.3.7" + checksum: e1553ae3f08e3ba0e8c06e43a3ab20b319966dfb7ddb96fd9b5d0ee11a66571af7f993229c88ebbb0d4a816eb813a24ed48207b140d442a8f76f33763b8d1f3f languageName: node linkType: hard -"emoji-regex@npm:^9.2.2": - version: 9.2.2 - resolution: "emoji-regex@npm:9.2.2" - checksum: 8487182da74aabd810ac6d6f1994111dfc0e331b01271ae01ec1eb0ad7b5ecc2bbbbd2f053c05cb55a1ac30449527d819bbfbf0e3de1023db308cbcb47f86601 +"fresh@npm:0.5.2, fresh@npm:~0.5.2": + version: 0.5.2 + resolution: "fresh@npm:0.5.2" + checksum: 13ea8b08f91e669a64e3ba3a20eb79d7ca5379a81f1ff7f4310d54e2320645503cc0c78daedc93dfb6191287295f6479544a649c64d8e41a1c0fb0c221552346 languageName: node linkType: hard -"emojis-list@npm:^3.0.0": - version: 3.0.0 - resolution: "emojis-list@npm:3.0.0" - checksum: ddaaa02542e1e9436c03970eeed445f4ed29a5337dfba0fe0c38dfdd2af5da2429c2a0821304e8a8d1cadf27fdd5b22ff793571fa803ae16852a6975c65e8e70 +"from@npm:~0": + version: 0.1.7 + resolution: "from@npm:0.1.7" + checksum: b85125b7890489656eb2e4f208f7654a93ec26e3aefaf3bbbcc0d496fc1941e4405834fcc9fe7333192aa2187905510ace70417bbf9ac6f6f4784a731d986939 languageName: node linkType: hard -"emoticon@npm:^3.2.0": - version: 3.2.0 - resolution: "emoticon@npm:3.2.0" - checksum: f30649d18b672ab3139e95cb04f77b2442feb95c99dc59372ff80fbfd639b2bf4018bc68ab0b549bd765aecf8230d7899c43f86cfcc7b6370c06c3232783e24f +"fs-constants@npm:^1.0.0": + version: 1.0.0 + resolution: "fs-constants@npm:1.0.0" + checksum: 18f5b718371816155849475ac36c7d0b24d39a11d91348cfcb308b4494824413e03572c403c86d3a260e049465518c4f0d5bd00f0371cdfcad6d4f30a85b350d languageName: node linkType: hard -"encodeurl@npm:^1.0.2, encodeurl@npm:~1.0.2": - version: 1.0.2 - resolution: "encodeurl@npm:1.0.2" - checksum: e50e3d508cdd9c4565ba72d2012e65038e5d71bdc9198cb125beb6237b5b1ade6c0d343998da9e170fb2eae52c1bed37d4d6d98a46ea423a0cddbed5ac3f780c +"fs-extra@npm:^0.30.0": + version: 0.30.0 + resolution: "fs-extra@npm:0.30.0" + dependencies: + graceful-fs: ^4.1.2 + jsonfile: ^2.1.0 + klaw: ^1.0.0 + path-is-absolute: ^1.0.0 + rimraf: ^2.2.8 + checksum: 6edfd65fc813baa27f1603778c0f5ec11f8c5006a20b920437813ee2023eba18aeec8bef1c89b2e6c84f9fc90fdc7c916f4a700466c8c69d22a35d018f2570f0 languageName: node linkType: hard -"encoding@npm:^0.1.13": - version: 0.1.13 - resolution: "encoding@npm:0.1.13" +"fs-extra@npm:^10.0.0, fs-extra@npm:^10.1.0": + version: 10.1.0 + resolution: "fs-extra@npm:10.1.0" + dependencies: + graceful-fs: ^4.2.0 + jsonfile: ^6.0.1 + universalify: ^2.0.0 + checksum: dc94ab37096f813cc3ca12f0f1b5ad6744dfed9ed21e953d72530d103cea193c2f81584a39e9dee1bea36de5ee66805678c0dddc048e8af1427ac19c00fffc50 + languageName: node + linkType: hard + +"fs-extra@npm:^11.1.1": + version: 11.2.0 + resolution: "fs-extra@npm:11.2.0" dependencies: - iconv-lite: ^0.6.2 - checksum: 
bb98632f8ffa823996e508ce6a58ffcf5856330fde839ae42c9e1f436cc3b5cc651d4aeae72222916545428e54fd0f6aa8862fd8d25bdbcc4589f1e3f3715e7f + graceful-fs: ^4.2.0 + jsonfile: ^6.0.1 + universalify: ^2.0.0 + checksum: b12e42fa40ba47104202f57b8480dd098aa931c2724565e5e70779ab87605665594e76ee5fb00545f772ab9ace167fe06d2ab009c416dc8c842c5ae6df7aa7e8 languageName: node linkType: hard -"end-of-stream@npm:^1.1.0, end-of-stream@npm:^1.4.1": - version: 1.4.4 - resolution: "end-of-stream@npm:1.4.4" +"fs-extra@npm:^7.0.1": + version: 7.0.1 + resolution: "fs-extra@npm:7.0.1" dependencies: - once: ^1.4.0 - checksum: 530a5a5a1e517e962854a31693dbb5c0b2fc40b46dad2a56a2deec656ca040631124f4795823acc68238147805f8b021abbe221f4afed5ef3c8e8efc2024908b + graceful-fs: ^4.1.2 + jsonfile: ^4.0.0 + universalify: ^0.1.0 + checksum: 141b9dccb23b66a66cefdd81f4cda959ff89282b1d721b98cea19ba08db3dcbe6f862f28841f3cf24bb299e0b7e6c42303908f65093cb7e201708e86ea5a8dcf languageName: node linkType: hard -"enhanced-resolve@npm:^5.15.0": - version: 5.15.0 - resolution: "enhanced-resolve@npm:5.15.0" +"fs-extra@npm:^8.1.0": + version: 8.1.0 + resolution: "fs-extra@npm:8.1.0" dependencies: - graceful-fs: ^4.2.4 - tapable: ^2.2.0 - checksum: fbd8cdc9263be71cc737aa8a7d6c57b43d6aa38f6cc75dde6fcd3598a130cc465f979d2f4d01bb3bf475acb43817749c79f8eef9be048683602ca91ab52e4f11 + graceful-fs: ^4.2.0 + jsonfile: ^4.0.0 + universalify: ^0.1.0 + checksum: bf44f0e6cea59d5ce071bba4c43ca76d216f89e402dc6285c128abc0902e9b8525135aa808adad72c9d5d218e9f4bcc63962815529ff2f684ad532172a284880 languageName: node linkType: hard -"enquirer@npm:^2.3.0": - version: 2.4.1 - resolution: "enquirer@npm:2.4.1" +"fs-extra@npm:^9.0.0": + version: 9.1.0 + resolution: "fs-extra@npm:9.1.0" dependencies: - ansi-colors: ^4.1.1 - strip-ansi: ^6.0.1 - checksum: f080f11a74209647dbf347a7c6a83c8a47ae1ebf1e75073a808bc1088eb780aa54075bfecd1bcdb3e3c724520edb8e6ee05da031529436b421b71066fcc48cb5 + at-least-node: ^1.0.0 + graceful-fs: ^4.2.0 + jsonfile: ^6.0.1 + universalify: ^2.0.0 + checksum: ba71ba32e0faa74ab931b7a0031d1523c66a73e225de7426e275e238e312d07313d2da2d33e34a52aa406c8763ade5712eb3ec9ba4d9edce652bcacdc29e6b20 languageName: node linkType: hard -"entities@npm:^2.0.0": - version: 2.2.0 - resolution: "entities@npm:2.2.0" - checksum: 19010dacaf0912c895ea262b4f6128574f9ccf8d4b3b65c7e8334ad0079b3706376360e28d8843ff50a78aabcb8f08f0a32dbfacdc77e47ed77ca08b713669b3 +"fs-minipass@npm:^2.0.0": + version: 2.1.0 + resolution: "fs-minipass@npm:2.1.0" + dependencies: + minipass: ^3.0.0 + checksum: 1b8d128dae2ac6cc94230cc5ead341ba3e0efaef82dab46a33d171c044caaa6ca001364178d42069b2809c35a1c3c35079a32107c770e9ffab3901b59af8c8b1 languageName: node linkType: hard -"entities@npm:^4.2.0, entities@npm:^4.4.0": - version: 4.5.0 - resolution: "entities@npm:4.5.0" - checksum: 853f8ebd5b425d350bffa97dd6958143179a5938352ccae092c62d1267c4e392a039be1bae7d51b6e4ffad25f51f9617531fedf5237f15df302ccfb452cbf2d7 +"fs-minipass@npm:^3.0.0": + version: 3.0.3 + resolution: "fs-minipass@npm:3.0.3" + dependencies: + minipass: ^7.0.3 + checksum: 8722a41109130851d979222d3ec88aabaceeaaf8f57b2a8f744ef8bd2d1ce95453b04a61daa0078822bc5cd21e008814f06fe6586f56fef511e71b8d2394d802 languageName: node linkType: hard -"env-paths@npm:^2.2.0": - version: 2.2.1 - resolution: "env-paths@npm:2.2.1" - checksum: 65b5df55a8bab92229ab2b40dad3b387fad24613263d103a97f91c9fe43ceb21965cd3392b1ccb5d77088021e525c4e0481adb309625d0cb94ade1d1fb8dc17e +"fs-monkey@npm:^1.0.4": + version: 1.0.5 + resolution: "fs-monkey@npm:1.0.5" + checksum: 
424b67f65b37fe66117ae2bb061f790fe6d4b609e1d160487c74b3d69fbf42f262c665ccfba32e8b5f113f96f92e9a58fcdebe42d5f6649bdfc72918093a3119 languageName: node linkType: hard -"err-code@npm:^2.0.2": - version: 2.0.3 - resolution: "err-code@npm:2.0.3" - checksum: 8b7b1be20d2de12d2255c0bc2ca638b7af5171142693299416e6a9339bd7d88fc8d7707d913d78e0993176005405a236b066b45666b27b797252c771156ace54 +"fs.realpath@npm:^1.0.0": + version: 1.0.0 + resolution: "fs.realpath@npm:1.0.0" + checksum: 99ddea01a7e75aa276c250a04eedeffe5662bce66c65c07164ad6264f9de18fb21be9433ead460e54cff20e31721c811f4fb5d70591799df5f85dce6d6746fd0 languageName: node linkType: hard -"error-ex@npm:^1.3.1": - version: 1.3.2 - resolution: "error-ex@npm:1.3.2" +"fsevents@npm:2.3.2": + version: 2.3.2 + resolution: "fsevents@npm:2.3.2" dependencies: - is-arrayish: ^0.2.1 - checksum: c1c2b8b65f9c91b0f9d75f0debaa7ec5b35c266c2cac5de412c1a6de86d4cbae04ae44e510378cb14d032d0645a36925d0186f8bb7367bcc629db256b743a001 + node-gyp: latest + checksum: 97ade64e75091afee5265e6956cb72ba34db7819b4c3e94c431d4be2b19b8bb7a2d4116da417950c3425f17c8fe693d25e20212cac583ac1521ad066b77ae31f + conditions: os=darwin languageName: node linkType: hard -"errorstacks@npm:^2.2.0": - version: 2.4.0 - resolution: "errorstacks@npm:2.4.0" - checksum: 59186ccd26d8b782682a17aa8c96a71c5b977e7e073ec1648a5b59d11acb02348000921751c28a1a8a5bea37481369b2dab27e6330c5b8ddf3ae9b610dfc02bf +"fsevents@npm:~2.3.2, fsevents@npm:~2.3.3": + version: 2.3.3 + resolution: "fsevents@npm:2.3.3" + dependencies: + node-gyp: latest + checksum: 11e6ea6fea15e42461fc55b4b0e4a0a3c654faa567f1877dbd353f39156f69def97a69936d1746619d656c4b93de2238bf731f6085a03a50cabf287c9d024317 + conditions: os=darwin languageName: node linkType: hard -"es-module-lexer@npm:^1.0.0, es-module-lexer@npm:^1.2.1": - version: 1.3.1 - resolution: "es-module-lexer@npm:1.3.1" - checksum: 3beafa7e171eb1e8cc45695edf8d51638488dddf65294d7911f8d6a96249da6a9838c87529262cc6ea53988d8272cec0f4bff93f476ed031a54ba3afb51a0ed3 +"fsevents@patch:fsevents@2.3.2#~builtin": + version: 2.3.2 + resolution: "fsevents@patch:fsevents@npm%3A2.3.2#~builtin::version=2.3.2&hash=df0bf1" + dependencies: + node-gyp: latest + conditions: os=darwin languageName: node linkType: hard -"esbuild@npm:^0.16 || ^0.17": - version: 0.17.19 - resolution: "esbuild@npm:0.17.19" +"fsevents@patch:fsevents@~2.3.2#~builtin, fsevents@patch:fsevents@~2.3.3#~builtin": + version: 2.3.3 + resolution: "fsevents@patch:fsevents@npm%3A2.3.3#~builtin::version=2.3.3&hash=df0bf1" dependencies: - "@esbuild/android-arm": 0.17.19 - "@esbuild/android-arm64": 0.17.19 - "@esbuild/android-x64": 0.17.19 - "@esbuild/darwin-arm64": 0.17.19 - "@esbuild/darwin-x64": 0.17.19 - "@esbuild/freebsd-arm64": 0.17.19 - "@esbuild/freebsd-x64": 0.17.19 - "@esbuild/linux-arm": 0.17.19 - "@esbuild/linux-arm64": 0.17.19 - "@esbuild/linux-ia32": 0.17.19 - "@esbuild/linux-loong64": 0.17.19 - "@esbuild/linux-mips64el": 0.17.19 - "@esbuild/linux-ppc64": 0.17.19 - "@esbuild/linux-riscv64": 0.17.19 - "@esbuild/linux-s390x": 0.17.19 - "@esbuild/linux-x64": 0.17.19 - "@esbuild/netbsd-x64": 0.17.19 - "@esbuild/openbsd-x64": 0.17.19 - "@esbuild/sunos-x64": 0.17.19 - "@esbuild/win32-arm64": 0.17.19 - "@esbuild/win32-ia32": 0.17.19 - "@esbuild/win32-x64": 0.17.19 - dependenciesMeta: - "@esbuild/android-arm": - optional: true - "@esbuild/android-arm64": - optional: true - "@esbuild/android-x64": - optional: true - "@esbuild/darwin-arm64": - optional: true - "@esbuild/darwin-x64": - optional: true - "@esbuild/freebsd-arm64": - optional: 
true - "@esbuild/freebsd-x64": - optional: true - "@esbuild/linux-arm": - optional: true - "@esbuild/linux-arm64": - optional: true - "@esbuild/linux-ia32": - optional: true - "@esbuild/linux-loong64": - optional: true - "@esbuild/linux-mips64el": - optional: true - "@esbuild/linux-ppc64": - optional: true - "@esbuild/linux-riscv64": - optional: true - "@esbuild/linux-s390x": - optional: true - "@esbuild/linux-x64": - optional: true - "@esbuild/netbsd-x64": - optional: true - "@esbuild/openbsd-x64": - optional: true - "@esbuild/sunos-x64": - optional: true - "@esbuild/win32-arm64": - optional: true - "@esbuild/win32-ia32": - optional: true - "@esbuild/win32-x64": - optional: true - bin: - esbuild: bin/esbuild - checksum: ac11b1a5a6008e4e37ccffbd6c2c054746fc58d0ed4a2f9ee643bd030cfcea9a33a235087bc777def8420f2eaafb3486e76adb7bdb7241a9143b43a69a10afd8 + node-gyp: latest + conditions: os=darwin languageName: node linkType: hard -"escalade@npm:^3.1.1": - version: 3.1.1 - resolution: "escalade@npm:3.1.1" - checksum: a3e2a99f07acb74b3ad4989c48ca0c3140f69f923e56d0cba0526240ee470b91010f9d39001f2a4a313841d237ede70a729e92125191ba5d21e74b106800b133 +"function-bind@npm:^1.1.2": + version: 1.1.2 + resolution: "function-bind@npm:1.1.2" + checksum: 2b0ff4ce708d99715ad14a6d1f894e2a83242e4a52ccfcefaee5e40050562e5f6dafc1adbb4ce2d4ab47279a45dc736ab91ea5042d843c3c092820dfe032efb1 languageName: node linkType: hard -"escape-goat@npm:^2.0.0": - version: 2.1.1 - resolution: "escape-goat@npm:2.1.1" - checksum: ce05c70c20dd7007b60d2d644b625da5412325fdb57acf671ba06cb2ab3cd6789e2087026921a05b665b0a03fadee2955e7fc0b9a67da15a6551a980b260eba7 +"functional-red-black-tree@npm:^1.0.1": + version: 1.0.1 + resolution: "functional-red-black-tree@npm:1.0.1" + checksum: ca6c170f37640e2d94297da8bb4bf27a1d12bea3e00e6a3e007fd7aa32e37e000f5772acf941b4e4f3cf1c95c3752033d0c509af157ad8f526e7f00723b9eb9f languageName: node linkType: hard -"escape-html@npm:^1.0.3, escape-html@npm:~1.0.3": - version: 1.0.3 - resolution: "escape-html@npm:1.0.3" - checksum: 6213ca9ae00d0ab8bccb6d8d4e0a98e76237b2410302cf7df70aaa6591d509a2a37ce8998008cbecae8fc8ffaadf3fb0229535e6a145f3ce0b211d060decbb24 +"fwd-stream@npm:^1.0.4": + version: 1.0.4 + resolution: "fwd-stream@npm:1.0.4" + dependencies: + readable-stream: ~1.0.26-4 + checksum: db4dcf68f214b3fabd6cd9658630dfd1d7ed8d43f7f45408027a90220cd75276e782d1e958821775d7a3a4a83034778e75a097bdc7002c758e8896f76213c65d languageName: node linkType: hard -"escape-string-regexp@npm:4.0.0, escape-string-regexp@npm:^4.0.0": - version: 4.0.0 - resolution: "escape-string-regexp@npm:4.0.0" - checksum: 98b48897d93060f2322108bf29db0feba7dd774be96cd069458d1453347b25ce8682ecc39859d4bca2203cc0ab19c237bcc71755eff49a0f8d90beadeeba5cc5 +"gensequence@npm:^6.0.0": + version: 6.0.0 + resolution: "gensequence@npm:6.0.0" + checksum: 7c23404b227647cb033a2c6cf5a5266442409e9dcc6bc140565d28de64adfa03dc474119ae14d776dc177fe63d605d2f4b228fa3bf926145dddd68c8df95d029 languageName: node linkType: hard -"escape-string-regexp@npm:5.0.0, escape-string-regexp@npm:^5.0.0": - version: 5.0.0 - resolution: "escape-string-regexp@npm:5.0.0" - checksum: 20daabe197f3cb198ec28546deebcf24b3dbb1a5a269184381b3116d12f0532e06007f4bc8da25669d6a7f8efb68db0758df4cd981f57bc5b57f521a3e12c59e +"gensync@npm:^1.0.0-beta.1, gensync@npm:^1.0.0-beta.2": + version: 1.0.0-beta.2 + resolution: "gensync@npm:1.0.0-beta.2" + checksum: a7437e58c6be12aa6c90f7730eac7fa9833dc78872b4ad2963d2031b00a3367a93f98aec75f9aaac7220848e4026d67a8655e870b24f20a543d103c0d65952ec languageName: node 
linkType: hard -"escape-string-regexp@npm:^1.0.5": - version: 1.0.5 - resolution: "escape-string-regexp@npm:1.0.5" - checksum: 6092fda75c63b110c706b6a9bfde8a612ad595b628f0bd2147eea1d3406723020810e591effc7db1da91d80a71a737a313567c5abb3813e8d9c71f4aa595b410 +"get-caller-file@npm:^2.0.5": + version: 2.0.5 + resolution: "get-caller-file@npm:2.0.5" + checksum: b9769a836d2a98c3ee734a88ba712e62703f1df31b94b784762c433c27a386dd6029ff55c2a920c392e33657d80191edbf18c61487e198844844516f843496b9 languageName: node linkType: hard -"escape-string-regexp@npm:^2.0.0": - version: 2.0.0 - resolution: "escape-string-regexp@npm:2.0.0" - checksum: 9f8a2d5743677c16e85c810e3024d54f0c8dea6424fad3c79ef6666e81dd0846f7437f5e729dfcdac8981bc9e5294c39b4580814d114076b8d36318f46ae4395 +"get-func-name@npm:^2.0.1, get-func-name@npm:^2.0.2": + version: 2.0.2 + resolution: "get-func-name@npm:2.0.2" + checksum: 3f62f4c23647de9d46e6f76d2b3eafe58933a9b3830c60669e4180d6c601ce1b4aa310ba8366143f55e52b139f992087a9f0647274e8745621fa2af7e0acf13b languageName: node linkType: hard -"eslint-plugin-prettier@npm:^5.0.0": - version: 5.0.0 - resolution: "eslint-plugin-prettier@npm:5.0.0" +"get-intrinsic@npm:^1.0.2, get-intrinsic@npm:^1.1.3, get-intrinsic@npm:^1.2.1, get-intrinsic@npm:^1.2.2": + version: 1.2.2 + resolution: "get-intrinsic@npm:1.2.2" dependencies: - prettier-linter-helpers: ^1.0.0 - synckit: ^0.8.5 - peerDependencies: - "@types/eslint": ">=8.0.0" - eslint: ">=8.0.0" - prettier: ">=3.0.0" - peerDependenciesMeta: - "@types/eslint": - optional: true - eslint-config-prettier: - optional: true - checksum: 84e88744b9050f2d5ef31b94e85294dda16f3a53c2449f9d33eac8ae6264889b459bf35a68e438fb6b329c2a1d6491aac4bfa00d86317e7009de3dad0311bec6 + function-bind: ^1.1.2 + has-proto: ^1.0.1 + has-symbols: ^1.0.3 + hasown: ^2.0.0 + checksum: 447ff0724df26829908dc033b62732359596fcf66027bc131ab37984afb33842d9cd458fd6cecadfe7eac22fd8a54b349799ed334cf2726025c921c7250e7417 languageName: node linkType: hard -"eslint-scope@npm:5.1.1": - version: 5.1.1 - resolution: "eslint-scope@npm:5.1.1" - dependencies: - esrecurse: ^4.3.0 - estraverse: ^4.1.1 - checksum: 47e4b6a3f0cc29c7feedee6c67b225a2da7e155802c6ea13bbef4ac6b9e10c66cd2dcb987867ef176292bf4e64eccc680a49e35e9e9c669f4a02bac17e86abdb +"get-own-enumerable-property-symbols@npm:^3.0.0": + version: 3.0.2 + resolution: "get-own-enumerable-property-symbols@npm:3.0.2" + checksum: 8f0331f14159f939830884799f937343c8c0a2c330506094bc12cbee3665d88337fe97a4ea35c002cc2bdba0f5d9975ad7ec3abb925015cdf2a93e76d4759ede languageName: node linkType: hard -"eslint-scope@npm:^7.2.2": - version: 7.2.2 - resolution: "eslint-scope@npm:7.2.2" - dependencies: - esrecurse: ^4.3.0 - estraverse: ^5.2.0 - checksum: ec97dbf5fb04b94e8f4c5a91a7f0a6dd3c55e46bfc7bbcd0e3138c3a76977570e02ed89a1810c778dcd72072ff0e9621ba1379b4babe53921d71e2e4486fda3e +"get-stdin@npm:^8.0.0": + version: 8.0.0 + resolution: "get-stdin@npm:8.0.0" + checksum: 40128b6cd25781ddbd233344f1a1e4006d4284906191ed0a7d55ec2c1a3e44d650f280b2c9eeab79c03ac3037da80257476c0e4e5af38ddfb902d6ff06282d77 languageName: node linkType: hard -"eslint-visitor-keys@npm:^3.3.0, eslint-visitor-keys@npm:^3.4.1, eslint-visitor-keys@npm:^3.4.3": - version: 3.4.3 - resolution: "eslint-visitor-keys@npm:3.4.3" - checksum: 36e9ef87fca698b6fd7ca5ca35d7b2b6eeaaf106572e2f7fd31c12d3bfdaccdb587bba6d3621067e5aece31c8c3a348b93922ab8f7b2cbc6aaab5e1d89040c60 +"get-stdin@npm:^9.0.0": + version: 9.0.0 + resolution: "get-stdin@npm:9.0.0" + checksum: 
5972bc34d05932b45512c8e2d67b040f1c1ca8afb95c56cbc480985f2d761b7e37fe90dc8abd22527f062cc5639a6930ff346e9952ae4c11a2d4275869459594 languageName: node linkType: hard -"eslint@npm:^8.50.0": - version: 8.50.0 - resolution: "eslint@npm:8.50.0" +"get-stream@npm:^4.1.0": + version: 4.1.0 + resolution: "get-stream@npm:4.1.0" dependencies: - "@eslint-community/eslint-utils": ^4.2.0 - "@eslint-community/regexpp": ^4.6.1 - "@eslint/eslintrc": ^2.1.2 - "@eslint/js": 8.50.0 - "@humanwhocodes/config-array": ^0.11.11 - "@humanwhocodes/module-importer": ^1.0.1 - "@nodelib/fs.walk": ^1.2.8 - ajv: ^6.12.4 - chalk: ^4.0.0 - cross-spawn: ^7.0.2 - debug: ^4.3.2 - doctrine: ^3.0.0 - escape-string-regexp: ^4.0.0 - eslint-scope: ^7.2.2 - eslint-visitor-keys: ^3.4.3 - espree: ^9.6.1 - esquery: ^1.4.2 - esutils: ^2.0.2 - fast-deep-equal: ^3.1.3 - file-entry-cache: ^6.0.1 - find-up: ^5.0.0 - glob-parent: ^6.0.2 - globals: ^13.19.0 - graphemer: ^1.4.0 - ignore: ^5.2.0 - imurmurhash: ^0.1.4 - is-glob: ^4.0.0 - is-path-inside: ^3.0.3 - js-yaml: ^4.1.0 - json-stable-stringify-without-jsonify: ^1.0.1 - levn: ^0.4.1 - lodash.merge: ^4.6.2 - minimatch: ^3.1.2 - natural-compare: ^1.4.0 - optionator: ^0.9.3 - strip-ansi: ^6.0.1 - text-table: ^0.2.0 - bin: - eslint: bin/eslint.js - checksum: 9ebfe5615dc84700000d218e32ddfdcfc227ca600f65f18e5541ec34f8902a00356a9a8804d9468fd6c8637a5ef6a3897291dad91ba6579d5b32ffeae5e31768 + pump: ^3.0.0 + checksum: 443e1914170c15bd52ff8ea6eff6dfc6d712b031303e36302d2778e3de2506af9ee964d6124010f7818736dcfde05c04ba7ca6cc26883106e084357a17ae7d73 languageName: node linkType: hard -"espree@npm:^9.6.0, espree@npm:^9.6.1": - version: 9.6.1 - resolution: "espree@npm:9.6.1" +"get-stream@npm:^5.1.0": + version: 5.2.0 + resolution: "get-stream@npm:5.2.0" dependencies: - acorn: ^8.9.0 - acorn-jsx: ^5.3.2 - eslint-visitor-keys: ^3.4.1 - checksum: eb8c149c7a2a77b3f33a5af80c10875c3abd65450f60b8af6db1bfcfa8f101e21c1e56a561c6dc13b848e18148d43469e7cd208506238554fb5395a9ea5a1ab9 + pump: ^3.0.0 + checksum: 8bc1a23174a06b2b4ce600df38d6c98d2ef6d84e020c1ddad632ad75bac4e092eeb40e4c09e0761c35fc2dbc5e7fff5dab5e763a383582c4a167dd69a905bd12 languageName: node linkType: hard -"esprima@npm:^4.0.0": - version: 4.0.1 - resolution: "esprima@npm:4.0.1" - bin: - esparse: ./bin/esparse.js - esvalidate: ./bin/esvalidate.js - checksum: b45bc805a613dbea2835278c306b91aff6173c8d034223fa81498c77dcbce3b2931bf6006db816f62eacd9fd4ea975dfd85a5b7f3c6402cfd050d4ca3c13a628 +"get-stream@npm:^6.0.0, get-stream@npm:^6.0.1": + version: 6.0.1 + resolution: "get-stream@npm:6.0.1" + checksum: e04ecece32c92eebf5b8c940f51468cd53554dcbb0ea725b2748be583c9523d00128137966afce410b9b051eb2ef16d657cd2b120ca8edafcf5a65e81af63cad languageName: node linkType: hard -"esquery@npm:^1.4.2": - version: 1.5.0 - resolution: "esquery@npm:1.5.0" +"get-tsconfig@npm:^4.7.2": + version: 4.7.2 + resolution: "get-tsconfig@npm:4.7.2" dependencies: - estraverse: ^5.1.0 - checksum: aefb0d2596c230118656cd4ec7532d447333a410a48834d80ea648b1e7b5c9bc9ed8b5e33a89cb04e487b60d622f44cf5713bf4abed7c97343edefdc84a35900 + resolve-pkg-maps: ^1.0.0 + checksum: 172358903250eff0103943f816e8a4e51d29b8e5449058bdf7266714a908a48239f6884308bd3a6ff28b09f692b9533dbebfd183ab63e4e14f073cda91f1bca9 languageName: node linkType: hard -"esrecurse@npm:^4.3.0": - version: 4.3.0 - resolution: "esrecurse@npm:4.3.0" +"get-uri@npm:^6.0.1": + version: 6.0.2 + resolution: "get-uri@npm:6.0.2" dependencies: - estraverse: ^5.2.0 - checksum: 
ebc17b1a33c51cef46fdc28b958994b1dc43cd2e86237515cbc3b4e5d2be6a811b2315d0a1a4d9d340b6d2308b15322f5c8291059521cc5f4802f65e7ec32837 + basic-ftp: ^5.0.2 + data-uri-to-buffer: ^6.0.0 + debug: ^4.3.4 + fs-extra: ^8.1.0 + checksum: 762de3b0e3d4e7afc966e4ce93be587d70c270590da9b4c8fbff888362656c055838d926903d1774cbfeed4d392b4d6def4b2c06d48c050580070426a3a8629b languageName: node linkType: hard -"estraverse@npm:^4.1.1": - version: 4.3.0 - resolution: "estraverse@npm:4.3.0" - checksum: a6299491f9940bb246124a8d44b7b7a413a8336f5436f9837aaa9330209bd9ee8af7e91a654a3545aee9c54b3308e78ee360cef1d777d37cfef77d2fa33b5827 +"github-slugger@npm:^1.4.0, github-slugger@npm:^1.5.0": + version: 1.5.0 + resolution: "github-slugger@npm:1.5.0" + checksum: c70988224578b3bdaa25df65973ffc8c24594a77a28550c3636e495e49d17aef5cdb04c04fa3f1744babef98c61eecc6a43299a13ea7f3cc33d680bf9053ffbe languageName: node linkType: hard -"estraverse@npm:^5.1.0, estraverse@npm:^5.2.0": - version: 5.3.0 - resolution: "estraverse@npm:5.3.0" - checksum: 072780882dc8416ad144f8fe199628d2b3e7bbc9989d9ed43795d2c90309a2047e6bc5979d7e2322a341163d22cfad9e21f4110597fe487519697389497e4e2b +"glob-parent@npm:^5.1.2, glob-parent@npm:~5.1.2": + version: 5.1.2 + resolution: "glob-parent@npm:5.1.2" + dependencies: + is-glob: ^4.0.1 + checksum: f4f2bfe2425296e8a47e36864e4f42be38a996db40420fe434565e4480e3322f18eb37589617a98640c5dc8fdec1a387007ee18dbb1f3f5553409c34d17f425e languageName: node linkType: hard -"estree-walker@npm:^1.0.1": - version: 1.0.1 - resolution: "estree-walker@npm:1.0.1" - checksum: 7e70da539691f6db03a08e7ce94f394ce2eef4180e136d251af299d41f92fb2d28ebcd9a6e393e3728d7970aeb5358705ddf7209d52fbcb2dd4693f95dcf925f +"glob-parent@npm:^6.0.1, glob-parent@npm:^6.0.2": + version: 6.0.2 + resolution: "glob-parent@npm:6.0.2" + dependencies: + is-glob: ^4.0.3 + checksum: c13ee97978bef4f55106b71e66428eb1512e71a7466ba49025fc2aec59a5bfb0954d5abd58fc5ee6c9b076eef4e1f6d3375c2e964b88466ca390da4419a786a8 languageName: node linkType: hard -"esutils@npm:^2.0.2, esutils@npm:^2.0.3": - version: 2.0.3 - resolution: "esutils@npm:2.0.3" - checksum: 22b5b08f74737379a840b8ed2036a5fb35826c709ab000683b092d9054e5c2a82c27818f12604bfc2a9a76b90b6834ef081edbc1c7ae30d1627012e067c6ec87 +"glob-to-regexp@npm:^0.4.1": + version: 0.4.1 + resolution: "glob-to-regexp@npm:0.4.1" + checksum: e795f4e8f06d2a15e86f76e4d92751cf8bbfcf0157cea5c2f0f35678a8195a750b34096b1256e436f0cebc1883b5ff0888c47348443e69546a5a87f9e1eb1167 languageName: node linkType: hard -"eta@npm:^2.0.0": - version: 2.2.0 - resolution: "eta@npm:2.2.0" - checksum: 6a09631481d4f26a9662a1eb736a65cc1cbc48e24935e6ff5d83a83b0cb509ea56d588d66d7c087d590601dc59bdabdac2356936b1b789d020eb0cf2d8304d54 +"glob@npm:7.2.0": + version: 7.2.0 + resolution: "glob@npm:7.2.0" + dependencies: + fs.realpath: ^1.0.0 + inflight: ^1.0.4 + inherits: 2 + minimatch: ^3.0.4 + once: ^1.3.0 + path-is-absolute: ^1.0.0 + checksum: 78a8ea942331f08ed2e055cb5b9e40fe6f46f579d7fd3d694f3412fe5db23223d29b7fee1575440202e9a7ff9a72ab106a39fee39934c7bedafe5e5f8ae20134 languageName: node linkType: hard -"etag@npm:^1.8.1, etag@npm:~1.8.1": - version: 1.8.1 - resolution: "etag@npm:1.8.1" - checksum: 571aeb3dbe0f2bbd4e4fadbdb44f325fc75335cd5f6f6b6a091e6a06a9f25ed5392f0863c5442acb0646787446e816f13cbfc6edce5b07658541dff573cab1ff +"glob@npm:^10.2.2, glob@npm:^10.3.10, glob@npm:^10.3.7": + version: 10.3.10 + resolution: "glob@npm:10.3.10" + dependencies: + foreground-child: ^3.1.0 + jackspeak: ^2.3.5 + minimatch: ^9.0.1 + minipass: ^5.0.0 || ^6.0.2 || ^7.0.0 + path-scurry: 
^1.10.1 + bin: + glob: dist/esm/bin.mjs + checksum: 4f2fe2511e157b5a3f525a54092169a5f92405f24d2aed3142f4411df328baca13059f4182f1db1bf933e2c69c0bd89e57ae87edd8950cba8c7ccbe84f721cf3 languageName: node linkType: hard -"ethereum-cryptography@npm:0.1.3, ethereum-cryptography@npm:^0.1.3": - version: 0.1.3 - resolution: "ethereum-cryptography@npm:0.1.3" +"glob@npm:^7.0.0, glob@npm:^7.1.3, glob@npm:^7.1.6": + version: 7.2.3 + resolution: "glob@npm:7.2.3" dependencies: - "@types/pbkdf2": ^3.0.0 - "@types/secp256k1": ^4.0.1 - blakejs: ^1.1.0 - browserify-aes: ^1.2.0 - bs58check: ^2.1.2 - create-hash: ^1.2.0 - create-hmac: ^1.1.7 - hash.js: ^1.1.7 - keccak: ^3.0.0 - pbkdf2: ^3.0.17 - randombytes: ^2.1.0 - safe-buffer: ^5.1.2 - scrypt-js: ^3.0.0 - secp256k1: ^4.0.1 - setimmediate: ^1.0.5 - checksum: 54bae7a4a96bd81398cdc35c91cfcc74339f71a95ed1b5b694663782e69e8e3afd21357de3b8bac9ff4877fd6f043601e200a7ad9133d94be6fd7d898ee0a449 + fs.realpath: ^1.0.0 + inflight: ^1.0.4 + inherits: 2 + minimatch: ^3.1.1 + once: ^1.3.0 + path-is-absolute: ^1.0.0 + checksum: 29452e97b38fa704dabb1d1045350fb2467cf0277e155aa9ff7077e90ad81d1ea9d53d3ee63bd37c05b09a065e90f16aec4a65f5b8de401d1dac40bc5605d133 languageName: node linkType: hard -"ethereum-cryptography@npm:^1.0.3": - version: 1.2.0 - resolution: "ethereum-cryptography@npm:1.2.0" +"global-directory@npm:^4.0.1": + version: 4.0.1 + resolution: "global-directory@npm:4.0.1" dependencies: - "@noble/hashes": 1.2.0 - "@noble/secp256k1": 1.7.1 - "@scure/bip32": 1.1.5 - "@scure/bip39": 1.1.1 - checksum: 97e8e8253cb9f5a9271bd0201c37609c451c890eb85883b9c564f14743c3d7c673287406c93bf5604307593ee298ad9a03983388b85c11ca61461b9fc1a4f2c7 + ini: 4.1.1 + checksum: 5b4df24438a4e5f21e43fbdd9e54f5e12bb48dce01a0a83b415d8052ce91be2d3a97e0c8f98a535e69649b2190036155e9f0f7d3c62f9318f31bdc3fd4f235f5 languageName: node linkType: hard -"ethereumjs-abi@npm:^0.6.8": - version: 0.6.8 - resolution: "ethereumjs-abi@npm:0.6.8" +"global-dirs@npm:^3.0.0": + version: 3.0.1 + resolution: "global-dirs@npm:3.0.1" dependencies: - bn.js: ^4.11.8 - ethereumjs-util: ^6.0.0 - checksum: cede2a8ae7c7e04eeaec079c2f925601a25b2ef75cf9230e7c5da63b4ea27883b35447365a47e35c1e831af520973a2252af89022c292c18a09a4607821a366b + ini: 2.0.0 + checksum: 70147b80261601fd40ac02a104581432325c1c47329706acd773f3a6ce99bb36d1d996038c85ccacd482ad22258ec233c586b6a91535b1a116b89663d49d6438 languageName: node linkType: hard -"ethereumjs-util@npm:^6.0.0, ethereumjs-util@npm:^6.2.1": - version: 6.2.1 - resolution: "ethereumjs-util@npm:6.2.1" +"global-modules@npm:^2.0.0": + version: 2.0.0 + resolution: "global-modules@npm:2.0.0" dependencies: - "@types/bn.js": ^4.11.3 - bn.js: ^4.11.0 - create-hash: ^1.1.2 - elliptic: ^6.5.2 - ethereum-cryptography: ^0.1.3 - ethjs-util: 0.1.6 - rlp: ^2.2.3 - checksum: e3cb4a2c034a2529281fdfc21a2126fe032fdc3038863f5720352daa65ddcc50fc8c67dbedf381a882dc3802e05d979287126d7ecf781504bde1fd8218693bde + global-prefix: ^3.0.0 + checksum: d6197f25856c878c2fb5f038899f2dca7cbb2f7b7cf8999660c0104972d5cfa5c68b5a0a77fa8206bb536c3903a4615665acb9709b4d80846e1bb47eaef65430 languageName: node linkType: hard -"ethers@npm:^5.7.1": - version: 5.7.2 - resolution: "ethers@npm:5.7.2" +"global-prefix@npm:^3.0.0": + version: 3.0.0 + resolution: "global-prefix@npm:3.0.0" dependencies: - "@ethersproject/abi": 5.7.0 - "@ethersproject/abstract-provider": 5.7.0 - "@ethersproject/abstract-signer": 5.7.0 - "@ethersproject/address": 5.7.0 - "@ethersproject/base64": 5.7.0 - "@ethersproject/basex": 5.7.0 - "@ethersproject/bignumber": 5.7.0 - 
"@ethersproject/bytes": 5.7.0 - "@ethersproject/constants": 5.7.0 - "@ethersproject/contracts": 5.7.0 - "@ethersproject/hash": 5.7.0 - "@ethersproject/hdnode": 5.7.0 - "@ethersproject/json-wallets": 5.7.0 - "@ethersproject/keccak256": 5.7.0 - "@ethersproject/logger": 5.7.0 - "@ethersproject/networks": 5.7.1 - "@ethersproject/pbkdf2": 5.7.0 - "@ethersproject/properties": 5.7.0 - "@ethersproject/providers": 5.7.2 - "@ethersproject/random": 5.7.0 - "@ethersproject/rlp": 5.7.0 - "@ethersproject/sha2": 5.7.0 - "@ethersproject/signing-key": 5.7.0 - "@ethersproject/solidity": 5.7.0 - "@ethersproject/strings": 5.7.0 - "@ethersproject/transactions": 5.7.0 - "@ethersproject/units": 5.7.0 - "@ethersproject/wallet": 5.7.0 - "@ethersproject/web": 5.7.1 - "@ethersproject/wordlists": 5.7.0 - checksum: b7c08cf3e257185a7946117dbbf764433b7ba0e77c27298dec6088b3bc871aff711462b0621930c56880ff0a7ceb8b1d3a361ffa259f93377b48e34107f62553 + ini: ^1.3.5 + kind-of: ^6.0.2 + which: ^1.3.1 + checksum: 8a82fc1d6f22c45484a4e34656cc91bf021a03e03213b0035098d605bfc612d7141f1e14a21097e8a0413b4884afd5b260df0b6a25605ce9d722e11f1df2881d + languageName: node + linkType: hard + +"globals@npm:^11.1.0": + version: 11.12.0 + resolution: "globals@npm:11.12.0" + checksum: 67051a45eca3db904aee189dfc7cd53c20c7d881679c93f6146ddd4c9f4ab2268e68a919df740d39c71f4445d2b38ee360fc234428baea1dbdfe68bbcb46979e languageName: node linkType: hard -"ethers@npm:^6.7.1": - version: 6.7.1 - resolution: "ethers@npm:6.7.1" +"globals@npm:^13.19.0": + version: 13.24.0 + resolution: "globals@npm:13.24.0" dependencies: - "@adraffy/ens-normalize": 1.9.2 - "@noble/hashes": 1.1.2 - "@noble/secp256k1": 1.7.1 - "@types/node": 18.15.13 - aes-js: 4.0.0-beta.5 - tslib: 2.4.0 - ws: 8.5.0 - checksum: 07833692e3f53b18e28c4cba9f53f3d5ebff8360de02ad57b2584c00c52b88f5b790373f9b9f6b4f6b52ffa2074530a6101192b30c3260f7cdeff929d34bb88b + type-fest: ^0.20.2 + checksum: 56066ef058f6867c04ff203b8a44c15b038346a62efbc3060052a1016be9f56f4cf0b2cd45b74b22b81e521a889fc7786c73691b0549c2f3a6e825b3d394f43c languageName: node linkType: hard -"ethjs-util@npm:0.1.6, ethjs-util@npm:^0.1.6": - version: 0.1.6 - resolution: "ethjs-util@npm:0.1.6" +"globby@npm:^11.0.1, globby@npm:^11.0.4, globby@npm:^11.1.0": + version: 11.1.0 + resolution: "globby@npm:11.1.0" dependencies: - is-hex-prefixed: 1.0.0 - strip-hex-prefix: 1.0.0 - checksum: 1f42959e78ec6f49889c49c8a98639e06f52a15966387dd39faf2930db48663d026efb7db2702dcffe7f2a99c4a0144b7ce784efdbf733f4077aae95de76d65f + array-union: ^2.1.0 + dir-glob: ^3.0.1 + fast-glob: ^3.2.9 + ignore: ^5.2.0 + merge2: ^1.4.1 + slash: ^3.0.0 + checksum: b4be8885e0cfa018fc783792942d53926c35c50b3aefd3fdcfb9d22c627639dc26bd2327a40a0b74b074100ce95bb7187bfeae2f236856aa3de183af7a02aea6 languageName: node linkType: hard -"eval@npm:^0.1.8": - version: 0.1.8 - resolution: "eval@npm:0.1.8" +"globby@npm:^13.1.1, globby@npm:^13.1.2": + version: 13.2.2 + resolution: "globby@npm:13.2.2" dependencies: - "@types/node": "*" - require-like: ">= 0.1.1" - checksum: d005567f394cfbe60948e34982e4637d2665030f9aa7dcac581ea6f9ec6eceb87133ed3dc0ae21764aa362485c242a731dbb6371f1f1a86807c58676431e9d1a + dir-glob: ^3.0.1 + fast-glob: ^3.3.0 + ignore: ^5.2.4 + merge2: ^1.4.1 + slash: ^4.0.0 + checksum: f3d84ced58a901b4fcc29c846983108c426631fe47e94872868b65565495f7bee7b3defd68923bd480582771fd4bbe819217803a164a618ad76f1d22f666f41e languageName: node linkType: hard -"event-stream@npm:=3.3.4": - version: 3.3.4 - resolution: "event-stream@npm:3.3.4" +"gopd@npm:^1.0.1": + version: 1.0.1 + resolution: 
"gopd@npm:1.0.1" dependencies: - duplexer: ~0.1.1 - from: ~0 - map-stream: ~0.1.0 - pause-stream: 0.0.11 - split: 0.3 - stream-combiner: ~0.0.4 - through: ~2.3.1 - checksum: 80b467820b6daf824d9fb4345d2daf115a056e5c104463f2e98534e92d196a27f2df5ea2aa085624db26f4c45698905499e881d13bc7c01f7a13eac85be72a22 - languageName: node - linkType: hard - -"eventemitter3@npm:^4.0.0": - version: 4.0.7 - resolution: "eventemitter3@npm:4.0.7" - checksum: 1875311c42fcfe9c707b2712c32664a245629b42bb0a5a84439762dd0fd637fc54d078155ea83c2af9e0323c9ac13687e03cfba79b03af9f40c89b4960099374 + get-intrinsic: ^1.1.3 + checksum: a5ccfb8806e0917a94e0b3de2af2ea4979c1da920bc381667c260e00e7cafdbe844e2cb9c5bcfef4e5412e8bf73bab837285bc35c7ba73aaaf0134d4583393a6 languageName: node linkType: hard -"events@npm:^3.2.0": - version: 3.3.0 - resolution: "events@npm:3.3.0" - checksum: f6f487ad2198aa41d878fa31452f1a3c00958f46e9019286ff4787c84aac329332ab45c9cdc8c445928fc6d7ded294b9e005a7fce9426488518017831b272780 +"got@npm:^12.1.0": + version: 12.6.1 + resolution: "got@npm:12.6.1" + dependencies: + "@sindresorhus/is": ^5.2.0 + "@szmarczak/http-timer": ^5.0.1 + cacheable-lookup: ^7.0.0 + cacheable-request: ^10.2.8 + decompress-response: ^6.0.0 + form-data-encoder: ^2.1.2 + get-stream: ^6.0.1 + http2-wrapper: ^2.1.10 + lowercase-keys: ^3.0.0 + p-cancelable: ^3.0.0 + responselike: ^3.0.0 + checksum: 3c37f5d858aca2859f9932e7609d35881d07e7f2d44c039d189396f0656896af6c77c22f2c51c563f8918be483f60ff41e219de742ab4642d4b106711baccbd5 languageName: node linkType: hard -"evp_bytestokey@npm:^1.0.3": - version: 1.0.3 - resolution: "evp_bytestokey@npm:1.0.3" +"got@npm:^9.6.0": + version: 9.6.0 + resolution: "got@npm:9.6.0" dependencies: - md5.js: ^1.3.4 - node-gyp: latest - safe-buffer: ^5.1.1 - checksum: ad4e1577f1a6b721c7800dcc7c733fe01f6c310732bb5bf2240245c2a5b45a38518b91d8be2c610611623160b9d1c0e91f1ce96d639f8b53e8894625cf20fa45 + "@sindresorhus/is": ^0.14.0 + "@szmarczak/http-timer": ^1.1.2 + cacheable-request: ^6.0.0 + decompress-response: ^3.3.0 + duplexer3: ^0.1.4 + get-stream: ^4.1.0 + lowercase-keys: ^1.0.1 + mimic-response: ^1.0.1 + p-cancelable: ^1.0.0 + to-readable-stream: ^1.0.0 + url-parse-lax: ^3.0.0 + checksum: 941807bd9704bacf5eb401f0cc1212ffa1f67c6642f2d028fd75900471c221b1da2b8527f4553d2558f3faeda62ea1cf31665f8b002c6137f5de8732f07370b0 languageName: node linkType: hard -"execa@npm:^5.0.0": - version: 5.1.1 - resolution: "execa@npm:5.1.1" - dependencies: - cross-spawn: ^7.0.3 - get-stream: ^6.0.0 - human-signals: ^2.1.0 - is-stream: ^2.0.0 - merge-stream: ^2.0.0 - npm-run-path: ^4.0.1 - onetime: ^5.1.2 - signal-exit: ^3.0.3 - strip-final-newline: ^2.0.0 - checksum: fba9022c8c8c15ed862847e94c252b3d946036d7547af310e344a527e59021fd8b6bb0723883ea87044dc4f0201f949046993124a42ccb0855cae5bf8c786343 +"graceful-fs@npm:4.2.10": + version: 4.2.10 + resolution: "graceful-fs@npm:4.2.10" + checksum: 3f109d70ae123951905d85032ebeae3c2a5a7a997430df00ea30df0e3a6c60cf6689b109654d6fdacd28810a053348c4d14642da1d075049e6be1ba5216218da languageName: node linkType: hard -"execa@npm:^7.1.1": - version: 7.2.0 - resolution: "execa@npm:7.2.0" - dependencies: - cross-spawn: ^7.0.3 - get-stream: ^6.0.1 - human-signals: ^4.3.0 - is-stream: ^3.0.0 - merge-stream: ^2.0.0 - npm-run-path: ^5.1.0 - onetime: ^6.0.0 - signal-exit: ^3.0.7 - strip-final-newline: ^3.0.0 - checksum: 14fd17ba0ca8c87b277584d93b1d9fc24f2a65e5152b31d5eb159a3b814854283eaae5f51efa9525e304447e2f757c691877f7adff8fde5746aae67eb1edd1cc +"graceful-fs@npm:^4.1.11, graceful-fs@npm:^4.1.2, 
graceful-fs@npm:^4.1.6, graceful-fs@npm:^4.1.9, graceful-fs@npm:^4.2.0, graceful-fs@npm:^4.2.4, graceful-fs@npm:^4.2.6, graceful-fs@npm:^4.2.9": + version: 4.2.11 + resolution: "graceful-fs@npm:4.2.11" + checksum: ac85f94da92d8eb6b7f5a8b20ce65e43d66761c55ce85ac96df6865308390da45a8d3f0296dd3a663de65d30ba497bd46c696cc1e248c72b13d6d567138a4fc7 languageName: node linkType: hard -"exponential-backoff@npm:^3.1.1": - version: 3.1.1 - resolution: "exponential-backoff@npm:3.1.1" - checksum: 3d21519a4f8207c99f7457287291316306255a328770d320b401114ec8481986e4e467e854cb9914dd965e0a1ca810a23ccb559c642c88f4c7f55c55778a9b48 +"graphemer@npm:^1.4.0": + version: 1.4.0 + resolution: "graphemer@npm:1.4.0" + checksum: bab8f0be9b568857c7bec9fda95a89f87b783546d02951c40c33f84d05bb7da3fd10f863a9beb901463669b6583173a8c8cc6d6b306ea2b9b9d5d3d943c3a673 languageName: node linkType: hard -"express@npm:^4.17.3": - version: 4.18.2 - resolution: "express@npm:4.18.2" +"gray-matter@npm:^4.0.3": + version: 4.0.3 + resolution: "gray-matter@npm:4.0.3" dependencies: - accepts: ~1.3.8 - array-flatten: 1.1.1 - body-parser: 1.20.1 - content-disposition: 0.5.4 - content-type: ~1.0.4 - cookie: 0.5.0 - cookie-signature: 1.0.6 - debug: 2.6.9 - depd: 2.0.0 - encodeurl: ~1.0.2 - escape-html: ~1.0.3 - etag: ~1.8.1 - finalhandler: 1.2.0 - fresh: 0.5.2 - http-errors: 2.0.0 - merge-descriptors: 1.0.1 - methods: ~1.1.2 - on-finished: 2.4.1 - parseurl: ~1.3.3 - path-to-regexp: 0.1.7 - proxy-addr: ~2.0.7 - qs: 6.11.0 - range-parser: ~1.2.1 - safe-buffer: 5.2.1 - send: 0.18.0 - serve-static: 1.15.0 - setprototypeof: 1.2.0 - statuses: 2.0.1 - type-is: ~1.6.18 - utils-merge: 1.0.1 - vary: ~1.1.2 - checksum: 3c4b9b076879442f6b968fe53d85d9f1eeacbb4f4c41e5f16cc36d77ce39a2b0d81b3f250514982110d815b2f7173f5561367f9110fcc541f9371948e8c8b037 + js-yaml: ^3.13.1 + kind-of: ^6.0.2 + section-matter: ^1.0.0 + strip-bom-string: ^1.0.0 + checksum: 37717bd424344487d655392251ce8d8878a1275ee087003e61208fba3bfd59cbb73a85b2159abf742ae95e23db04964813fdc33ae18b074208428b2528205222 languageName: node linkType: hard -"extend-shallow@npm:^2.0.1": - version: 2.0.1 - resolution: "extend-shallow@npm:2.0.1" +"gzip-size@npm:^6.0.0": + version: 6.0.0 + resolution: "gzip-size@npm:6.0.0" dependencies: - is-extendable: ^0.1.0 - checksum: 8fb58d9d7a511f4baf78d383e637bd7d2e80843bd9cd0853649108ea835208fb614da502a553acc30208e1325240bb7cc4a68473021612496bb89725483656d8 + duplexer: ^0.1.2 + checksum: 2df97f359696ad154fc171dcb55bc883fe6e833bca7a65e457b9358f3cb6312405ed70a8da24a77c1baac0639906cd52358dc0ce2ec1a937eaa631b934c94194 languageName: node linkType: hard -"extend@npm:^3.0.0": - version: 3.0.2 - resolution: "extend@npm:3.0.2" - checksum: a50a8309ca65ea5d426382ff09f33586527882cf532931cb08ca786ea3146c0553310bda688710ff61d7668eba9f96b923fe1420cdf56a2c3eaf30fcab87b515 +"handle-thing@npm:^2.0.0": + version: 2.0.1 + resolution: "handle-thing@npm:2.0.1" + checksum: 68071f313062315cd9dce55710e9496873945f1dd425107007058fc1629f93002a7649fcc3e464281ce02c7e809a35f5925504ab8105d972cf649f1f47cb7d6c languageName: node linkType: hard -"extract-zip@npm:2.0.1": - version: 2.0.1 - resolution: "extract-zip@npm:2.0.1" +"hardhat@npm:^2.17.4": + version: 2.19.2 + resolution: "hardhat@npm:2.19.2" dependencies: - "@types/yauzl": ^2.9.1 + "@ethersproject/abi": ^5.1.2 + "@metamask/eth-sig-util": ^4.0.0 + "@nomicfoundation/ethereumjs-block": 5.0.2 + "@nomicfoundation/ethereumjs-blockchain": 7.0.2 + "@nomicfoundation/ethereumjs-common": 4.0.2 + "@nomicfoundation/ethereumjs-evm": 2.0.2 + 
"@nomicfoundation/ethereumjs-rlp": 5.0.2 + "@nomicfoundation/ethereumjs-statemanager": 2.0.2 + "@nomicfoundation/ethereumjs-trie": 6.0.2 + "@nomicfoundation/ethereumjs-tx": 5.0.2 + "@nomicfoundation/ethereumjs-util": 9.0.2 + "@nomicfoundation/ethereumjs-vm": 7.0.2 + "@nomicfoundation/solidity-analyzer": ^0.1.0 + "@sentry/node": ^5.18.1 + "@types/bn.js": ^5.1.0 + "@types/lru-cache": ^5.1.0 + adm-zip: ^0.4.16 + aggregate-error: ^3.0.0 + ansi-escapes: ^4.3.0 + chalk: ^2.4.2 + chokidar: ^3.4.0 + ci-info: ^2.0.0 debug: ^4.1.1 - get-stream: ^5.1.0 - yauzl: ^2.10.0 - dependenciesMeta: - "@types/yauzl": + enquirer: ^2.3.0 + env-paths: ^2.2.0 + ethereum-cryptography: ^1.0.3 + ethereumjs-abi: ^0.6.8 + find-up: ^2.1.0 + fp-ts: 1.19.3 + fs-extra: ^7.0.1 + glob: 7.2.0 + immutable: ^4.0.0-rc.12 + io-ts: 1.10.4 + keccak: ^3.0.2 + lodash: ^4.17.11 + mnemonist: ^0.38.0 + mocha: ^10.0.0 + p-map: ^4.0.0 + raw-body: ^2.4.1 + resolve: 1.17.0 + semver: ^6.3.0 + solc: 0.7.3 + source-map-support: ^0.5.13 + stacktrace-parser: ^0.1.10 + tsort: 0.0.1 + undici: ^5.14.0 + uuid: ^8.3.2 + ws: ^7.4.6 + peerDependencies: + ts-node: "*" + typescript: "*" + peerDependenciesMeta: + ts-node: + optional: true + typescript: optional: true bin: - extract-zip: cli.js - checksum: 8cbda9debdd6d6980819cc69734d874ddd71051c9fe5bde1ef307ebcedfe949ba57b004894b585f758b7c9eeeea0e3d87f2dda89b7d25320459c2c9643ebb635 + hardhat: internal/cli/bootstrap.js + checksum: 0b5499890e46750ca8c51bbe1205599b1424a2e5293b40c9f7cb56320d56b9935fbd4e276de370e07664ae81fa57dc7ab227bf2b2363f5732ef9f06df1a9a6d9 languageName: node linkType: hard -"fast-deep-equal@npm:^3.1.1, fast-deep-equal@npm:^3.1.3": - version: 3.1.3 - resolution: "fast-deep-equal@npm:3.1.3" - checksum: e21a9d8d84f53493b6aa15efc9cfd53dd5b714a1f23f67fb5dc8f574af80df889b3bce25dc081887c6d25457cce704e636395333abad896ccdec03abaf1f3f9d +"has-flag@npm:^3.0.0": + version: 3.0.0 + resolution: "has-flag@npm:3.0.0" + checksum: 4a15638b454bf086c8148979aae044dd6e39d63904cd452d970374fa6a87623423da485dfb814e7be882e05c096a7ccf1ebd48e7e7501d0208d8384ff4dea73b languageName: node linkType: hard -"fast-diff@npm:^1.1.2, fast-diff@npm:^1.2.0": - version: 1.3.0 - resolution: "fast-diff@npm:1.3.0" - checksum: d22d371b994fdc8cce9ff510d7b8dc4da70ac327bcba20df607dd5b9cae9f908f4d1028f5fe467650f058d1e7270235ae0b8230809a262b4df587a3b3aa216c3 +"has-flag@npm:^4.0.0": + version: 4.0.0 + resolution: "has-flag@npm:4.0.0" + checksum: 261a1357037ead75e338156b1f9452c016a37dcd3283a972a30d9e4a87441ba372c8b81f818cd0fbcd9c0354b4ae7e18b9e1afa1971164aef6d18c2b6095a8ad languageName: node linkType: hard -"fast-glob@npm:^3.2.11, fast-glob@npm:^3.2.12, fast-glob@npm:^3.2.9, fast-glob@npm:^3.3.0": - version: 3.3.1 - resolution: "fast-glob@npm:3.3.1" - dependencies: - "@nodelib/fs.stat": ^2.0.2 - "@nodelib/fs.walk": ^1.2.3 - glob-parent: ^5.1.2 - merge2: ^1.3.0 - micromatch: ^4.0.4 - checksum: b6f3add6403e02cf3a798bfbb1183d0f6da2afd368f27456010c0bc1f9640aea308243d4cb2c0ab142f618276e65ecb8be1661d7c62a7b4e5ba774b9ce5432e5 +"has-own-prop@npm:^2.0.0": + version: 2.0.0 + resolution: "has-own-prop@npm:2.0.0" + checksum: ca6336e85ead2295c9603880cbc199e2d3ff7eaea0e9035d68fbc79892e9cf681abc62c0909520f112c671dad9961be2173b21dff951358cc98425c560e789e0 languageName: node linkType: hard -"fast-json-stable-stringify@npm:^2.0.0": - version: 2.1.0 - resolution: "fast-json-stable-stringify@npm:2.1.0" - checksum: b191531e36c607977e5b1c47811158733c34ccb3bfde92c44798929e9b4154884378536d26ad90dfecd32e1ffc09c545d23535ad91b3161a27ddbb8ebe0cbecb 
+"has-property-descriptors@npm:^1.0.0": + version: 1.0.1 + resolution: "has-property-descriptors@npm:1.0.1" + dependencies: + get-intrinsic: ^1.2.2 + checksum: 2bcc6bf6ec6af375add4e4b4ef586e43674850a91ad4d46666d0b28ba8e1fd69e424c7677d24d60f69470ad0afaa2f3197f508b20b0bb7dd99a8ab77ffc4b7c4 languageName: node linkType: hard -"fast-levenshtein@npm:^2.0.6": - version: 2.0.6 - resolution: "fast-levenshtein@npm:2.0.6" - checksum: 92cfec0a8dfafd9c7a15fba8f2cc29cd0b62b85f056d99ce448bbcd9f708e18ab2764bda4dd5158364f4145a7c72788538994f0d1787b956ef0d1062b0f7c24c +"has-proto@npm:^1.0.1": + version: 1.0.1 + resolution: "has-proto@npm:1.0.1" + checksum: febc5b5b531de8022806ad7407935e2135f1cc9e64636c3916c6842bd7995994ca3b29871ecd7954bd35f9e2986c17b3b227880484d22259e2f8e6ce63fd383e languageName: node linkType: hard -"fast-url-parser@npm:1.1.3": - version: 1.1.3 - resolution: "fast-url-parser@npm:1.1.3" - dependencies: - punycode: ^1.3.2 - checksum: 5043d0c4a8d775ff58504d56c096563c11b113e4cb8a2668c6f824a1cd4fb3812e2fdf76537eb24a7ce4ae7def6bd9747da630c617cf2a4b6ce0c42514e4f21c +"has-symbols@npm:^1.0.2, has-symbols@npm:^1.0.3": + version: 1.0.3 + resolution: "has-symbols@npm:1.0.3" + checksum: a054c40c631c0d5741a8285010a0777ea0c068f99ed43e5d6eb12972da223f8af553a455132fdb0801bdcfa0e0f443c0c03a68d8555aa529b3144b446c3f2410 languageName: node linkType: hard -"fastq@npm:^1.6.0": - version: 1.15.0 - resolution: "fastq@npm:1.15.0" +"has-tostringtag@npm:^1.0.0": + version: 1.0.0 + resolution: "has-tostringtag@npm:1.0.0" dependencies: - reusify: ^1.0.4 - checksum: 0170e6bfcd5d57a70412440b8ef600da6de3b2a6c5966aeaf0a852d542daff506a0ee92d6de7679d1de82e644bce69d7a574a6c93f0b03964b5337eed75ada1a + has-symbols: ^1.0.2 + checksum: cc12eb28cb6ae22369ebaad3a8ab0799ed61270991be88f208d508076a1e99abe4198c965935ce85ea90b60c94ddda73693b0920b58e7ead048b4a391b502c1c languageName: node linkType: hard -"faye-websocket@npm:^0.11.3": - version: 0.11.4 - resolution: "faye-websocket@npm:0.11.4" - dependencies: - websocket-driver: ">=0.5.1" - checksum: d49a62caf027f871149fc2b3f3c7104dc6d62744277eb6f9f36e2d5714e847d846b9f7f0d0b7169b25a012e24a594cde11a93034b30732e4c683f20b8a5019fa +"has-yarn@npm:^2.1.0": + version: 2.1.0 + resolution: "has-yarn@npm:2.1.0" + checksum: 5eb1d0bb8518103d7da24532bdbc7124ffc6d367b5d3c10840b508116f2f1bcbcf10fd3ba843ff6e2e991bdf9969fd862d42b2ed58aade88343326c950b7e7f7 languageName: node linkType: hard -"fbemitter@npm:^3.0.0": +"has-yarn@npm:^3.0.0": version: 3.0.0 - resolution: "fbemitter@npm:3.0.0" - dependencies: - fbjs: ^3.0.0 - checksum: 069690b8cdff3521ade3c9beb92ba0a38d818a86ef36dff8690e66749aef58809db4ac0d6938eb1cacea2dbef5f2a508952d455669590264cdc146bbe839f605 + resolution: "has-yarn@npm:3.0.0" + checksum: b9e14e78e0a37bc070550c862b201534287bc10e62a86ec9c1f455ffb082db42817ce9aed914bd73f1d589bbf268520e194629ff2f62ff6b98a482c4bd2dcbfb languageName: node linkType: hard -"fbjs-css-vars@npm:^1.0.0": - version: 1.0.2 - resolution: "fbjs-css-vars@npm:1.0.2" - checksum: 72baf6d22c45b75109118b4daecb6c8016d4c83c8c0f23f683f22e9d7c21f32fff6201d288df46eb561e3c7d4bb4489b8ad140b7f56444c453ba407e8bd28511 +"hash-base@npm:^3.0.0": + version: 3.1.0 + resolution: "hash-base@npm:3.1.0" + dependencies: + inherits: ^2.0.4 + readable-stream: ^3.6.0 + safe-buffer: ^5.2.0 + checksum: 26b7e97ac3de13cb23fc3145e7e3450b0530274a9562144fc2bf5c1e2983afd0e09ed7cc3b20974ba66039fad316db463da80eb452e7373e780cbee9a0d2f2dc languageName: node linkType: hard -"fbjs@npm:^3.0.0, fbjs@npm:^3.0.1": - version: 3.0.5 - resolution: "fbjs@npm:3.0.5" 
+"hash.js@npm:1.1.7, hash.js@npm:^1.0.0, hash.js@npm:^1.0.3, hash.js@npm:^1.1.7": + version: 1.1.7 + resolution: "hash.js@npm:1.1.7" dependencies: - cross-fetch: ^3.1.5 - fbjs-css-vars: ^1.0.0 - loose-envify: ^1.0.0 - object-assign: ^4.1.0 - promise: ^7.1.1 - setimmediate: ^1.0.5 - ua-parser-js: ^1.0.35 - checksum: e609b5b64686bc96495a5c67728ed9b2710b9b3d695c5759c5f5e47c9483d1c323543ac777a86459e3694efc5712c6ce7212e944feb19752867d699568bb0e54 + inherits: ^2.0.3 + minimalistic-assert: ^1.0.1 + checksum: e350096e659c62422b85fa508e4b3669017311aa4c49b74f19f8e1bc7f3a54a584fdfd45326d4964d6011f2b2d882e38bea775a96046f2a61b7779a979629d8f languageName: node linkType: hard -"fd-slicer@npm:~1.1.0": - version: 1.1.0 - resolution: "fd-slicer@npm:1.1.0" +"hasown@npm:^2.0.0": + version: 2.0.0 + resolution: "hasown@npm:2.0.0" dependencies: - pend: ~1.2.0 - checksum: c8585fd5713f4476eb8261150900d2cb7f6ff2d87f8feb306ccc8a1122efd152f1783bdb2b8dc891395744583436bfd8081d8e63ece0ec8687eeefea394d4ff2 + function-bind: ^1.1.2 + checksum: 6151c75ca12554565098641c98a40f4cc86b85b0fd5b6fe92360967e4605a4f9610f7757260b4e8098dd1c2ce7f4b095f2006fe72a570e3b6d2d28de0298c176 languageName: node linkType: hard -"feed@npm:^4.2.2": - version: 4.2.2 - resolution: "feed@npm:4.2.2" +"hast-to-hyperscript@npm:^9.0.0": + version: 9.0.1 + resolution: "hast-to-hyperscript@npm:9.0.1" dependencies: - xml-js: ^1.6.11 - checksum: 2e6992a675a049511eef7bda8ca6c08cb9540cd10e8b275ec4c95d166228ec445a335fa8de990358759f248a92861e51decdcd32bf1c54737d5b7aed7c7ffe97 + "@types/unist": ^2.0.3 + comma-separated-tokens: ^1.0.0 + property-information: ^5.3.0 + space-separated-tokens: ^1.0.0 + style-to-object: ^0.3.0 + unist-util-is: ^4.0.0 + web-namespaces: ^1.0.0 + checksum: de570d789853018fff2fd38fc096549b9814e366b298f60c90c159a57018230eefc44d46a246027b0e2426ed9e99f2e270050bc183d5bdfe4c9487c320b392cd languageName: node linkType: hard -"fetch-blob@npm:^3.1.2, fetch-blob@npm:^3.1.4": - version: 3.2.0 - resolution: "fetch-blob@npm:3.2.0" +"hast-util-from-dom@npm:^5.0.0": + version: 5.0.0 + resolution: "hast-util-from-dom@npm:5.0.0" dependencies: - node-domexception: ^1.0.0 - web-streams-polyfill: ^3.0.3 - checksum: f19bc28a2a0b9626e69fd7cf3a05798706db7f6c7548da657cbf5026a570945f5eeaedff52007ea35c8bcd3d237c58a20bf1543bc568ab2422411d762dd3d5bf + "@types/hast": ^3.0.0 + hastscript: ^8.0.0 + web-namespaces: ^2.0.0 + checksum: bf8f96c480a598b42156227be2210bbb7a08da519ae4d57814385c8560b01e2b6b5fbde2afce808ce7ba7c5cd172822d4285b8f5edde2d13089bc9c3177c0d09 languageName: node linkType: hard -"fflate@npm:^0.8.0": - version: 0.8.1 - resolution: "fflate@npm:0.8.1" - checksum: 7207e2d333243724485d2488095256b776184bd4545aa9967b655feaee5dc18e9525ed9b6d75f94cfd71d98fb285336f4902641683472f1d0c19a99137084cec +"hast-util-from-html-isomorphic@npm:^2.0.0": + version: 2.0.0 + resolution: "hast-util-from-html-isomorphic@npm:2.0.0" + dependencies: + "@types/hast": ^3.0.0 + hast-util-from-dom: ^5.0.0 + hast-util-from-html: ^2.0.0 + unist-util-remove-position: ^5.0.0 + checksum: a98d02890bd1b5a804a1b2aaacd0332a6563f2a8df620450e38ab8962728cda0485cd29435824840621d1e653943776864e912d78d24cce6a7f484011ee7cef0 languageName: node linkType: hard -"figures@npm:^5.0.0": - version: 5.0.0 - resolution: "figures@npm:5.0.0" +"hast-util-from-html@npm:^2.0.0": + version: 2.0.1 + resolution: "hast-util-from-html@npm:2.0.1" dependencies: - escape-string-regexp: ^5.0.0 - is-unicode-supported: ^1.2.0 - checksum: 
e6e8b6d1df2f554d4effae4a5ceff5d796f9449f6d4e912d74dab7d5f25916ecda6c305b9084833157d56485a0c78b37164430ddc5675bcee1330e346710669e + "@types/hast": ^3.0.0 + devlop: ^1.1.0 + hast-util-from-parse5: ^8.0.0 + parse5: ^7.0.0 + vfile: ^6.0.0 + vfile-message: ^4.0.0 + checksum: 8decdec1f2750d3d8d4933a4d06d78846a9fb3c97cded07395d160adae22bacfc69eaf113fd95a6ad696d1e5877580f2ac83a4161fa9f3becb0fafe2cec8b0ea languageName: node linkType: hard -"file-entry-cache@npm:^6.0.1": +"hast-util-from-parse5@npm:^6.0.0": version: 6.0.1 - resolution: "file-entry-cache@npm:6.0.1" + resolution: "hast-util-from-parse5@npm:6.0.1" dependencies: - flat-cache: ^3.0.4 - checksum: f49701feaa6314c8127c3c2f6173cfefff17612f5ed2daaafc6da13b5c91fd43e3b2a58fd0d63f9f94478a501b167615931e7200e31485e320f74a33885a9c74 + "@types/parse5": ^5.0.0 + hastscript: ^6.0.0 + property-information: ^5.0.0 + vfile: ^4.0.0 + vfile-location: ^3.2.0 + web-namespaces: ^1.0.0 + checksum: 4daa78201468af7779161e7caa2513c329830778e0528481ab16b3e1bcef4b831f6285b526aacdddbee802f3bd9d64df55f80f010591ea1916da535e3a923b83 languageName: node linkType: hard -"file-loader@npm:^6.2.0": - version: 6.2.0 - resolution: "file-loader@npm:6.2.0" +"hast-util-from-parse5@npm:^8.0.0": + version: 8.0.1 + resolution: "hast-util-from-parse5@npm:8.0.1" dependencies: - loader-utils: ^2.0.0 - schema-utils: ^3.0.0 - peerDependencies: - webpack: ^4.0.0 || ^5.0.0 - checksum: faf43eecf233f4897b0150aaa874eeeac214e4f9de49738a9e0ef734a30b5260059e85b7edadf852b98e415f875bd5f12587768a93fd52aaf2e479ecf95fab20 + "@types/hast": ^3.0.0 + "@types/unist": ^3.0.0 + devlop: ^1.0.0 + hastscript: ^8.0.0 + property-information: ^6.0.0 + vfile: ^6.0.0 + vfile-location: ^5.0.0 + web-namespaces: ^2.0.0 + checksum: fdd1ab8b03af13778ecb94ef9a58b1e3528410cdfceb3d6bb7600508967d0d836b451bc7bc3baf66efb7c730d3d395eea4bb1b30352b0162823d9f0de976774b languageName: node linkType: hard -"filesize@npm:^8.0.6": - version: 8.0.7 - resolution: "filesize@npm:8.0.7" - checksum: 8603d27c5287b984cb100733640645e078f5f5ad65c6d913173e01fb99e09b0747828498fd86647685ccecb69be31f3587b9739ab1e50732116b2374aff4cbf9 +"hast-util-is-element@npm:^1.1.0": + version: 1.1.0 + resolution: "hast-util-is-element@npm:1.1.0" + checksum: 30fad3f65e7ab2f0efd5db9e7344d0820b70971988dfe79f62d8447598b2a1ce8a59cd4bfc05ae0d9a1c451b9b53cbe1023743d7eac764d64720b6b73475f62f languageName: node linkType: hard -"fill-range@npm:^7.0.1": - version: 7.0.1 - resolution: "fill-range@npm:7.0.1" +"hast-util-is-element@npm:^3.0.0": + version: 3.0.0 + resolution: "hast-util-is-element@npm:3.0.0" dependencies: - to-regex-range: ^5.0.1 - checksum: cc283f4e65b504259e64fd969bcf4def4eb08d85565e906b7d36516e87819db52029a76b6363d0f02d0d532f0033c9603b9e2d943d56ee3b0d4f7ad3328ff917 + "@types/hast": ^3.0.0 + checksum: 82569a420eda5877c52fdbbdbe26675f012c02d70813dfd19acffdee328e42e4bd0b7ae34454cfcbcb932b2bedbd7ddc119f943a0cfb234120f9456d6c0c4331 languageName: node linkType: hard -"finalhandler@npm:1.2.0": - version: 1.2.0 - resolution: "finalhandler@npm:1.2.0" - dependencies: - debug: 2.6.9 - encodeurl: ~1.0.2 - escape-html: ~1.0.3 - on-finished: 2.4.1 - parseurl: ~1.3.3 - statuses: 2.0.1 - unpipe: ~1.0.0 - checksum: 92effbfd32e22a7dff2994acedbd9bcc3aa646a3e919ea6a53238090e87097f8ef07cced90aa2cc421abdf993aefbdd5b00104d55c7c5479a8d00ed105b45716 +"hast-util-parse-selector@npm:^2.0.0": + version: 2.2.5 + resolution: "hast-util-parse-selector@npm:2.2.5" + checksum: 
22ee4afbd11754562144cb3c4f3ec52524dafba4d90ee52512902d17cf11066d83b38f7bdf6ca571bbc2541f07ba30db0d234657b6ecb8ca4631587466459605 languageName: node linkType: hard -"find-cache-dir@npm:^3.3.1": - version: 3.3.2 - resolution: "find-cache-dir@npm:3.3.2" +"hast-util-parse-selector@npm:^4.0.0": + version: 4.0.0 + resolution: "hast-util-parse-selector@npm:4.0.0" dependencies: - commondir: ^1.0.1 - make-dir: ^3.0.2 - pkg-dir: ^4.1.0 - checksum: 1e61c2e64f5c0b1c535bd85939ae73b0e5773142713273818cc0b393ee3555fb0fd44e1a5b161b8b6c3e03e98c2fcc9c227d784850a13a90a8ab576869576817 + "@types/hast": ^3.0.0 + checksum: 76087670d3b0b50b23a6cb70bca53a6176d6608307ccdbb3ed18b650b82e7c3513bfc40348f1389dc0c5ae872b9a768851f4335f44654abd7deafd6974c52402 languageName: node linkType: hard -"find-replace@npm:^3.0.0": - version: 3.0.0 - resolution: "find-replace@npm:3.0.0" +"hast-util-raw@npm:6.0.1": + version: 6.0.1 + resolution: "hast-util-raw@npm:6.0.1" dependencies: - array-back: ^3.0.1 - checksum: 6b04bcfd79027f5b84aa1dfe100e3295da989bdac4b4de6b277f4d063e78f5c9e92ebc8a1fec6dd3b448c924ba404ee051cc759e14a3ee3e825fa1361025df08 + "@types/hast": ^2.0.0 + hast-util-from-parse5: ^6.0.0 + hast-util-to-parse5: ^6.0.0 + html-void-elements: ^1.0.0 + parse5: ^6.0.0 + unist-util-position: ^3.0.0 + vfile: ^4.0.0 + web-namespaces: ^1.0.0 + xtend: ^4.0.0 + zwitch: ^1.0.0 + checksum: f6d960644f9fbbe0b92d0227b20a24d659cce021d5f9fd218e077154931b4524ee920217b7fd5a45ec2736ec1dee53de9209fe449f6f89454c01d225ff0e7851 languageName: node linkType: hard -"find-up@npm:5.0.0, find-up@npm:^5.0.0": - version: 5.0.0 - resolution: "find-up@npm:5.0.0" +"hast-util-raw@npm:^9.0.0": + version: 9.0.1 + resolution: "hast-util-raw@npm:9.0.1" + dependencies: + "@types/hast": ^3.0.0 + "@types/unist": ^3.0.0 + "@ungap/structured-clone": ^1.0.0 + hast-util-from-parse5: ^8.0.0 + hast-util-to-parse5: ^8.0.0 + html-void-elements: ^3.0.0 + mdast-util-to-hast: ^13.0.0 + parse5: ^7.0.0 + unist-util-position: ^5.0.0 + unist-util-visit: ^5.0.0 + vfile: ^6.0.0 + web-namespaces: ^2.0.0 + zwitch: ^2.0.0 + checksum: 4b486eb4782eafb471ae639d45c14ac8797676518cf5da16adc973f52d7b8e1075a1451558c023b390820bd9fd213213e6248a2dae71b68ac5040b277509b8d9 + languageName: node + linkType: hard + +"hast-util-to-estree@npm:^3.0.0": + version: 3.1.0 + resolution: "hast-util-to-estree@npm:3.1.0" dependencies: - locate-path: ^6.0.0 - path-exists: ^4.0.0 - checksum: 07955e357348f34660bde7920783204ff5a26ac2cafcaa28bace494027158a97b9f56faaf2d89a6106211a8174db650dd9f503f9c0d526b1202d5554a00b9095 + "@types/estree": ^1.0.0 + "@types/estree-jsx": ^1.0.0 + "@types/hast": ^3.0.0 + comma-separated-tokens: ^2.0.0 + devlop: ^1.0.0 + estree-util-attach-comments: ^3.0.0 + estree-util-is-identifier-name: ^3.0.0 + hast-util-whitespace: ^3.0.0 + mdast-util-mdx-expression: ^2.0.0 + mdast-util-mdx-jsx: ^3.0.0 + mdast-util-mdxjs-esm: ^2.0.0 + property-information: ^6.0.0 + space-separated-tokens: ^2.0.0 + style-to-object: ^0.4.0 + unist-util-position: ^5.0.0 + zwitch: ^2.0.0 + checksum: 61272f7c18c9d2a5e34df7cfd2c97cbf12f6e9d05114d60e4dedd64e5576565eb1e35c78b9213c909bb8f984f0f8e9c49b568f04bdb444b83d0bca9159e14f3c + languageName: node + linkType: hard + +"hast-util-to-jsx-runtime@npm:^2.0.0": + version: 2.3.0 + resolution: "hast-util-to-jsx-runtime@npm:2.3.0" + dependencies: + "@types/estree": ^1.0.0 + "@types/hast": ^3.0.0 + "@types/unist": ^3.0.0 + comma-separated-tokens: ^2.0.0 + devlop: ^1.0.0 + estree-util-is-identifier-name: ^3.0.0 + hast-util-whitespace: ^3.0.0 + mdast-util-mdx-expression: ^2.0.0 + 
mdast-util-mdx-jsx: ^3.0.0 + mdast-util-mdxjs-esm: ^2.0.0 + property-information: ^6.0.0 + space-separated-tokens: ^2.0.0 + style-to-object: ^1.0.0 + unist-util-position: ^5.0.0 + vfile-message: ^4.0.0 + checksum: 599a97c6ec61c1430776813d7fb42e6f96032bf4a04dfcbb8eceef3bc8d1845ecf242387a4426b9d3f52320dbbfa26450643b81124b3d6a0b9bbb0fff4d0ba83 languageName: node linkType: hard -"find-up@npm:^2.1.0": - version: 2.1.0 - resolution: "find-up@npm:2.1.0" +"hast-util-to-parse5@npm:^6.0.0": + version: 6.0.0 + resolution: "hast-util-to-parse5@npm:6.0.0" dependencies: - locate-path: ^2.0.0 - checksum: 43284fe4da09f89011f08e3c32cd38401e786b19226ea440b75386c1b12a4cb738c94969808d53a84f564ede22f732c8409e3cfc3f7fb5b5c32378ad0bbf28bd + hast-to-hyperscript: ^9.0.0 + property-information: ^5.0.0 + web-namespaces: ^1.0.0 + xtend: ^4.0.0 + zwitch: ^1.0.0 + checksum: 91a36244e37df1d63c8b7e865ab0c0a25bb7396155602be005cf71d95c348e709568f80e0f891681a3711d733ad896e70642dc41a05b574eddf2e07d285408a8 languageName: node linkType: hard -"find-up@npm:^3.0.0": - version: 3.0.0 - resolution: "find-up@npm:3.0.0" +"hast-util-to-parse5@npm:^8.0.0": + version: 8.0.0 + resolution: "hast-util-to-parse5@npm:8.0.0" dependencies: - locate-path: ^3.0.0 - checksum: 38eba3fe7a66e4bc7f0f5a1366dc25508b7cfc349f852640e3678d26ad9a6d7e2c43eff0a472287de4a9753ef58f066a0ea892a256fa3636ad51b3fe1e17fae9 + "@types/hast": ^3.0.0 + comma-separated-tokens: ^2.0.0 + devlop: ^1.0.0 + property-information: ^6.0.0 + space-separated-tokens: ^2.0.0 + web-namespaces: ^2.0.0 + zwitch: ^2.0.0 + checksum: 137469209cb2b32b57387928878dc85310fbd5afa4807a8da69529199bb1d19044bfc95b50c3dc68d4fb2b6cb8bf99b899285597ab6ab318f50422eefd5599dd languageName: node linkType: hard -"find-up@npm:^4.0.0": - version: 4.1.0 - resolution: "find-up@npm:4.1.0" +"hast-util-to-text@npm:^4.0.0": + version: 4.0.0 + resolution: "hast-util-to-text@npm:4.0.0" dependencies: - locate-path: ^5.0.0 - path-exists: ^4.0.0 - checksum: 4c172680e8f8c1f78839486e14a43ef82e9decd0e74145f40707cc42e7420506d5ec92d9a11c22bd2c48fb0c384ea05dd30e10dd152fefeec6f2f75282a8b844 + "@types/hast": ^3.0.0 + "@types/unist": ^3.0.0 + hast-util-is-element: ^3.0.0 + unist-util-find-after: ^5.0.0 + checksum: 3beedbdf20e2ecc9352cdd0963848b741294c1f3872d43400125bc0ca2d7938d5358081b0f9c252ec15c22368f30a4e7e6ca0ce5a57d4d2bcdd02adbf884e2af languageName: node linkType: hard -"find-up@npm:^6.0.0": - version: 6.3.0 - resolution: "find-up@npm:6.3.0" +"hast-util-whitespace@npm:^3.0.0": + version: 3.0.0 + resolution: "hast-util-whitespace@npm:3.0.0" dependencies: - locate-path: ^7.1.0 - path-exists: ^5.0.0 - checksum: 9a21b7f9244a420e54c6df95b4f6fc3941efd3c3e5476f8274eb452f6a85706e7a6a90de71353ee4f091fcb4593271a6f92810a324ec542650398f928783c280 + "@types/hast": ^3.0.0 + checksum: 41d93ccce218ba935dc3c12acdf586193c35069489c8c8f50c2aa824c00dec94a3c78b03d1db40fa75381942a189161922e4b7bca700b3a2cc779634c351a1e4 + languageName: node + linkType: hard + +"hastscript@npm:^6.0.0": + version: 6.0.0 + resolution: "hastscript@npm:6.0.0" + dependencies: + "@types/hast": ^2.0.0 + comma-separated-tokens: ^1.0.0 + hast-util-parse-selector: ^2.0.0 + property-information: ^5.0.0 + space-separated-tokens: ^1.0.0 + checksum: 5e50b85af0d2cb7c17979cb1ddca75d6b96b53019dd999b39e7833192c9004201c3cee6445065620ea05d0087d9ae147a4844e582d64868be5bc6b0232dfe52d languageName: node linkType: hard -"flat-cache@npm:^3.0.4": - version: 3.1.0 - resolution: "flat-cache@npm:3.1.0" +"hastscript@npm:^8.0.0": + version: 8.0.0 + resolution: "hastscript@npm:8.0.0" 
dependencies: - flatted: ^3.2.7 - keyv: ^4.5.3 - rimraf: ^3.0.2 - checksum: 99312601d5b90f44aef403f17f056dc09be7e437703740b166cdc9386d99e681f74e6b6e8bd7d010bda66904ea643c9527276b1b80308a2119741d94108a4d8f + "@types/hast": ^3.0.0 + comma-separated-tokens: ^2.0.0 + hast-util-parse-selector: ^4.0.0 + property-information: ^6.0.0 + space-separated-tokens: ^2.0.0 + checksum: ae3c20223e7b847320c0f98b6fb3c763ebe1bf3913c5805fbc176cf84553a9db1117ca34cf842a5235890b4b9ae0e94501bfdc9a9b870a5dbf5fc52426db1097 languageName: node linkType: hard -"flat@npm:^5.0.2": - version: 5.0.2 - resolution: "flat@npm:5.0.2" +"he@npm:1.2.0, he@npm:^1.2.0": + version: 1.2.0 + resolution: "he@npm:1.2.0" bin: - flat: cli.js - checksum: 12a1536ac746db74881316a181499a78ef953632ddd28050b7a3a43c62ef5462e3357c8c29d76072bb635f147f7a9a1f0c02efef6b4be28f8db62ceb3d5c7f5d + he: bin/he + checksum: 3d4d6babccccd79c5c5a3f929a68af33360d6445587d628087f39a965079d84f18ce9c3d3f917ee1e3978916fc833bb8b29377c3b403f919426f91bc6965e7a7 languageName: node linkType: hard -"flatted@npm:^3.2.7": - version: 3.2.9 - resolution: "flatted@npm:3.2.9" - checksum: f14167fbe26a9d20f6fca8d998e8f1f41df72c8e81f9f2c9d61ed2bea058248f5e1cbd05e7f88c0e5087a6a0b822a1e5e2b446e879f3cfbe0b07ba2d7f80b026 +"history@npm:^4.9.0": + version: 4.10.1 + resolution: "history@npm:4.10.1" + dependencies: + "@babel/runtime": ^7.1.2 + loose-envify: ^1.2.0 + resolve-pathname: ^3.0.0 + tiny-invariant: ^1.0.2 + tiny-warning: ^1.0.0 + value-equal: ^1.0.1 + checksum: addd84bc4683929bae4400419b5af132ff4e4e9b311a0d4e224579ea8e184a6b80d7f72c55927e4fa117f69076a9e47ce082d8d0b422f1a9ddac7991490ca1d0 languageName: node linkType: hard -"flux@npm:^4.0.1": - version: 4.0.4 - resolution: "flux@npm:4.0.4" +"hmac-drbg@npm:^1.0.1": + version: 1.0.1 + resolution: "hmac-drbg@npm:1.0.1" dependencies: - fbemitter: ^3.0.0 - fbjs: ^3.0.1 - peerDependencies: - react: ^15.0.2 || ^16.0.0 || ^17.0.0 - checksum: 8fa5c2f9322258de3e331f67c6f1078a7f91c4dec9dbe8a54c4b8a80eed19a4f91889028b768668af4a796e8f2ee75e461e1571b8615432a3920ae95cc4ff794 + hash.js: ^1.0.3 + minimalistic-assert: ^1.0.0 + minimalistic-crypto-utils: ^1.0.1 + checksum: bd30b6a68d7f22d63f10e1888aee497d7c2c5c0bb469e66bbdac99f143904d1dfe95f8131f95b3e86c86dd239963c9d972fcbe147e7cffa00e55d18585c43fe0 languageName: node linkType: hard -"follow-redirects@npm:^1.0.0, follow-redirects@npm:^1.12.1, follow-redirects@npm:^1.14.7, follow-redirects@npm:^1.15.0": - version: 1.15.3 - resolution: "follow-redirects@npm:1.15.3" - peerDependenciesMeta: - debug: - optional: true - checksum: 584da22ec5420c837bd096559ebfb8fe69d82512d5585004e36a3b4a6ef6d5905780e0c74508c7b72f907d1fa2b7bd339e613859e9c304d0dc96af2027fd0231 +"hoist-non-react-statics@npm:^3.1.0": + version: 3.3.2 + resolution: "hoist-non-react-statics@npm:3.3.2" + dependencies: + react-is: ^16.7.0 + checksum: b1538270429b13901ee586aa44f4cc3ecd8831c061d06cb8322e50ea17b3f5ce4d0e2e66394761e6c8e152cd8c34fb3b4b690116c6ce2bd45b18c746516cb9e8 languageName: node linkType: hard -"foreground-child@npm:^3.1.0": - version: 3.1.1 - resolution: "foreground-child@npm:3.1.1" +"hpack.js@npm:^2.1.6": + version: 2.1.6 + resolution: "hpack.js@npm:2.1.6" dependencies: - cross-spawn: ^7.0.0 - signal-exit: ^4.0.1 - checksum: 139d270bc82dc9e6f8bc045fe2aae4001dc2472157044fdfad376d0a3457f77857fa883c1c8b21b491c6caade9a926a4bed3d3d2e8d3c9202b151a4cbbd0bcd5 + inherits: ^2.0.1 + obuf: ^1.0.0 + readable-stream: ^2.0.1 + wbuf: ^1.1.0 + checksum: 
2de144115197967ad6eeee33faf41096c6ba87078703c5cb011632dcfbffeb45784569e0cf02c317bd79c48375597c8ec88c30fff5bb0b023e8f654fb6e9c06e languageName: node linkType: hard -"fork-ts-checker-webpack-plugin@npm:^6.5.0": - version: 6.5.3 - resolution: "fork-ts-checker-webpack-plugin@npm:6.5.3" - dependencies: - "@babel/code-frame": ^7.8.3 - "@types/json-schema": ^7.0.5 - chalk: ^4.1.0 - chokidar: ^3.4.2 - cosmiconfig: ^6.0.0 - deepmerge: ^4.2.2 - fs-extra: ^9.0.0 - glob: ^7.1.6 - memfs: ^3.1.2 - minimatch: ^3.0.4 - schema-utils: 2.7.0 - semver: ^7.3.2 - tapable: ^1.0.0 - peerDependencies: - eslint: ">= 6" - typescript: ">= 2.7" - vue-template-compiler: "*" - webpack: ">= 4" - peerDependenciesMeta: - eslint: - optional: true - vue-template-compiler: - optional: true - checksum: 9732a49bfeed8fc23e6e8a59795fa7c238edeba91040a9b520db54b4d316dda27f9f1893d360e296fd0ad8930627d364417d28a8c7007fba60cc730ebfce4956 +"html-entities@npm:^2.3.2": + version: 2.4.0 + resolution: "html-entities@npm:2.4.0" + checksum: 25bea32642ce9ebd0eedc4d24381883ecb0335ccb8ac26379a0958b9b16652fdbaa725d70207ce54a51db24103436a698a8e454397d3ba8ad81460224751f1dc languageName: node linkType: hard -"form-data@npm:^4.0.0": - version: 4.0.0 - resolution: "form-data@npm:4.0.0" - dependencies: - asynckit: ^0.4.0 - combined-stream: ^1.0.8 - mime-types: ^2.1.12 - checksum: 01135bf8675f9d5c61ff18e2e2932f719ca4de964e3be90ef4c36aacfc7b9cb2fceb5eca0b7e0190e3383fe51c5b37f4cb80b62ca06a99aaabfcfd6ac7c9328c +"html-escaper@npm:^2.0.0, html-escaper@npm:^2.0.2": + version: 2.0.2 + resolution: "html-escaper@npm:2.0.2" + checksum: d2df2da3ad40ca9ee3a39c5cc6475ef67c8f83c234475f24d8e9ce0dc80a2c82df8e1d6fa78ddd1e9022a586ea1bd247a615e80a5cd9273d90111ddda7d9e974 languageName: node linkType: hard -"formdata-polyfill@npm:^4.0.10": - version: 4.0.10 - resolution: "formdata-polyfill@npm:4.0.10" +"html-minifier-terser@npm:^6.0.2, html-minifier-terser@npm:^6.1.0": + version: 6.1.0 + resolution: "html-minifier-terser@npm:6.1.0" dependencies: - fetch-blob: ^3.1.2 - checksum: 82a34df292afadd82b43d4a740ce387bc08541e0a534358425193017bf9fb3567875dc5f69564984b1da979979b70703aa73dee715a17b6c229752ae736dd9db + camel-case: ^4.1.2 + clean-css: ^5.2.2 + commander: ^8.3.0 + he: ^1.2.0 + param-case: ^3.0.4 + relateurl: ^0.2.7 + terser: ^5.10.0 + bin: + html-minifier-terser: cli.js + checksum: ac52c14006476f773204c198b64838477859dc2879490040efab8979c0207424da55d59df7348153f412efa45a0840a1ca3c757bf14767d23a15e3e389d37a93 languageName: node linkType: hard -"forwarded@npm:0.2.0": - version: 0.2.0 - resolution: "forwarded@npm:0.2.0" - checksum: fd27e2394d8887ebd16a66ffc889dc983fbbd797d5d3f01087c020283c0f019a7d05ee85669383d8e0d216b116d720fc0cef2f6e9b7eb9f4c90c6e0bc7fd28e6 +"html-minifier-terser@npm:^7.2.0": + version: 7.2.0 + resolution: "html-minifier-terser@npm:7.2.0" + dependencies: + camel-case: ^4.1.2 + clean-css: ~5.3.2 + commander: ^10.0.0 + entities: ^4.4.0 + param-case: ^3.0.4 + relateurl: ^0.2.7 + terser: ^5.15.1 + bin: + html-minifier-terser: cli.js + checksum: 39feed354b5a8aafc8e910977d68cfd961d6db330a8e1a5b16a528c86b8ee7745d8945134822cf00acf7bf0d0135bf1abad650bf308bee4ea73adb003f5b8656 languageName: node linkType: hard -"fp-ts@npm:1.19.3": - version: 1.19.3 - resolution: "fp-ts@npm:1.19.3" - checksum: eb0d4766ad561e9c5c01bfdd3d0ae589af135556921c733d26cf5289aad9f400110defdd93e6ac1d71f626697bb44d9d95ed2879c53dfd868f7cac3cf5c5553c +"html-tags@npm:^3.2.0, html-tags@npm:^3.3.1": + version: 3.3.1 + resolution: "html-tags@npm:3.3.1" + checksum: 
b4ef1d5a76b678e43cce46e3783d563607b1d550cab30b4f511211564574770aa8c658a400b100e588bc60b8234e59b35ff72c7851cc28f3b5403b13a2c6cbce languageName: node linkType: hard -"fp-ts@npm:^1.0.0": - version: 1.19.5 - resolution: "fp-ts@npm:1.19.5" - checksum: 67d2d9c3855d211ca2592b1ef805f98b618157e7681791a776d9d0f7f3e52fcca2122ebf5bc215908c9099fad69756d40e37210cf46cb4075dae1b61efe69e40 +"html-void-elements@npm:^1.0.0": + version: 1.0.5 + resolution: "html-void-elements@npm:1.0.5" + checksum: 1a56f4f6cfbeb994c21701ff72b4b7f556fe784a70e5e554d1566ff775af83b91ea93f10664f039a67802d9f7b40d4a7f1ed20312bab47bd88d89bd792ea84ca languageName: node linkType: hard -"fraction.js@npm:^4.3.6": - version: 4.3.7 - resolution: "fraction.js@npm:4.3.7" - checksum: e1553ae3f08e3ba0e8c06e43a3ab20b319966dfb7ddb96fd9b5d0ee11a66571af7f993229c88ebbb0d4a816eb813a24ed48207b140d442a8f76f33763b8d1f3f +"html-void-elements@npm:^3.0.0": + version: 3.0.0 + resolution: "html-void-elements@npm:3.0.0" + checksum: 59be397525465a7489028afa064c55763d9cccd1d7d9f630cca47137317f0e897a9ca26cef7e745e7cff1abc44260cfa407742b243a54261dfacd42230e94fce languageName: node linkType: hard -"fresh@npm:0.5.2, fresh@npm:~0.5.2": - version: 0.5.2 - resolution: "fresh@npm:0.5.2" - checksum: 13ea8b08f91e669a64e3ba3a20eb79d7ca5379a81f1ff7f4310d54e2320645503cc0c78daedc93dfb6191287295f6479544a649c64d8e41a1c0fb0c221552346 +"html-webpack-plugin@npm:^5.5.0, html-webpack-plugin@npm:^5.5.3, html-webpack-plugin@npm:^5.5.4": + version: 5.5.4 + resolution: "html-webpack-plugin@npm:5.5.4" + dependencies: + "@types/html-minifier-terser": ^6.0.0 + html-minifier-terser: ^6.0.2 + lodash: ^4.17.21 + pretty-error: ^4.0.0 + tapable: ^2.0.0 + peerDependencies: + webpack: ^5.20.0 + checksum: b49befb73d67a3716fd0e6f7776b108d2b0b7050fb8221f05cd114cbae13c03150a13b7cdf5e76170be040ce7936a1cf76f7a4bfd9ebe1552b72d7889a74c374 languageName: node linkType: hard -"from@npm:~0": - version: 0.1.7 - resolution: "from@npm:0.1.7" - checksum: b85125b7890489656eb2e4f208f7654a93ec26e3aefaf3bbbcc0d496fc1941e4405834fcc9fe7333192aa2187905510ace70417bbf9ac6f6f4784a731d986939 +"htmlparser2@npm:^6.1.0": + version: 6.1.0 + resolution: "htmlparser2@npm:6.1.0" + dependencies: + domelementtype: ^2.0.1 + domhandler: ^4.0.0 + domutils: ^2.5.2 + entities: ^2.0.0 + checksum: 81a7b3d9c3bb9acb568a02fc9b1b81ffbfa55eae7f1c41ae0bf840006d1dbf54cb3aa245b2553e2c94db674840a9f0fdad7027c9a9d01a062065314039058c4e languageName: node linkType: hard -"fs-constants@npm:^1.0.0": - version: 1.0.0 - resolution: "fs-constants@npm:1.0.0" - checksum: 18f5b718371816155849475ac36c7d0b24d39a11d91348cfcb308b4494824413e03572c403c86d3a260e049465518c4f0d5bd00f0371cdfcad6d4f30a85b350d +"htmlparser2@npm:^8.0.1": + version: 8.0.2 + resolution: "htmlparser2@npm:8.0.2" + dependencies: + domelementtype: ^2.3.0 + domhandler: ^5.0.3 + domutils: ^3.0.1 + entities: ^4.4.0 + checksum: 29167a0f9282f181da8a6d0311b76820c8a59bc9e3c87009e21968264c2987d2723d6fde5a964d4b7b6cba663fca96ffb373c06d8223a85f52a6089ced942700 languageName: node linkType: hard -"fs-extra@npm:^0.30.0": - version: 0.30.0 - resolution: "fs-extra@npm:0.30.0" +"http-assert@npm:^1.3.0": + version: 1.5.0 + resolution: "http-assert@npm:1.5.0" dependencies: - graceful-fs: ^4.1.2 - jsonfile: ^2.1.0 - klaw: ^1.0.0 - path-is-absolute: ^1.0.0 - rimraf: ^2.2.8 - checksum: 6edfd65fc813baa27f1603778c0f5ec11f8c5006a20b920437813ee2023eba18aeec8bef1c89b2e6c84f9fc90fdc7c916f4a700466c8c69d22a35d018f2570f0 + deep-equal: ~1.0.1 + http-errors: ~1.8.0 + checksum: 
69c9b3c14cf8b2822916360a365089ce936c883c49068f91c365eccba5c141a9964d19fdda589150a480013bf503bf37d8936c732e9635819339e730ab0e7527 languageName: node linkType: hard -"fs-extra@npm:^10.0.0, fs-extra@npm:^10.1.0": - version: 10.1.0 - resolution: "fs-extra@npm:10.1.0" - dependencies: - graceful-fs: ^4.2.0 - jsonfile: ^6.0.1 - universalify: ^2.0.0 - checksum: dc94ab37096f813cc3ca12f0f1b5ad6744dfed9ed21e953d72530d103cea193c2f81584a39e9dee1bea36de5ee66805678c0dddc048e8af1427ac19c00fffc50 +"http-cache-semantics@npm:^4.0.0, http-cache-semantics@npm:^4.1.1": + version: 4.1.1 + resolution: "http-cache-semantics@npm:4.1.1" + checksum: 83ac0bc60b17a3a36f9953e7be55e5c8f41acc61b22583060e8dedc9dd5e3607c823a88d0926f9150e571f90946835c7fe150732801010845c72cd8bbff1a236 languageName: node linkType: hard -"fs-extra@npm:^7.0.1": - version: 7.0.1 - resolution: "fs-extra@npm:7.0.1" - dependencies: - graceful-fs: ^4.1.2 - jsonfile: ^4.0.0 - universalify: ^0.1.0 - checksum: 141b9dccb23b66a66cefdd81f4cda959ff89282b1d721b98cea19ba08db3dcbe6f862f28841f3cf24bb299e0b7e6c42303908f65093cb7e201708e86ea5a8dcf +"http-deceiver@npm:^1.2.7": + version: 1.2.7 + resolution: "http-deceiver@npm:1.2.7" + checksum: 64d7d1ae3a6933eb0e9a94e6f27be4af45a53a96c3c34e84ff57113787105a89fff9d1c3df263ef63add823df019b0e8f52f7121e32393bb5ce9a713bf100b41 languageName: node linkType: hard -"fs-extra@npm:^9.0.0": - version: 9.1.0 - resolution: "fs-extra@npm:9.1.0" +"http-errors@npm:2.0.0": + version: 2.0.0 + resolution: "http-errors@npm:2.0.0" dependencies: - at-least-node: ^1.0.0 - graceful-fs: ^4.2.0 - jsonfile: ^6.0.1 - universalify: ^2.0.0 - checksum: ba71ba32e0faa74ab931b7a0031d1523c66a73e225de7426e275e238e312d07313d2da2d33e34a52aa406c8763ade5712eb3ec9ba4d9edce652bcacdc29e6b20 + depd: 2.0.0 + inherits: 2.0.4 + setprototypeof: 1.2.0 + statuses: 2.0.1 + toidentifier: 1.0.1 + checksum: 9b0a3782665c52ce9dc658a0d1560bcb0214ba5699e4ea15aefb2a496e2ca83db03ebc42e1cce4ac1f413e4e0d2d736a3fd755772c556a9a06853ba2a0b7d920 languageName: node linkType: hard -"fs-minipass@npm:^2.0.0": - version: 2.1.0 - resolution: "fs-minipass@npm:2.1.0" +"http-errors@npm:^1.6.3, http-errors@npm:^1.7.3, http-errors@npm:~1.8.0": + version: 1.8.1 + resolution: "http-errors@npm:1.8.1" dependencies: - minipass: ^3.0.0 - checksum: 1b8d128dae2ac6cc94230cc5ead341ba3e0efaef82dab46a33d171c044caaa6ca001364178d42069b2809c35a1c3c35079a32107c770e9ffab3901b59af8c8b1 + depd: ~1.1.2 + inherits: 2.0.4 + setprototypeof: 1.2.0 + statuses: ">= 1.5.0 < 2" + toidentifier: 1.0.1 + checksum: d3c7e7e776fd51c0a812baff570bdf06fe49a5dc448b700ab6171b1250e4cf7db8b8f4c0b133e4bfe2451022a5790c1ca6c2cae4094dedd6ac8304a1267f91d2 languageName: node linkType: hard -"fs-minipass@npm:^3.0.0": - version: 3.0.3 - resolution: "fs-minipass@npm:3.0.3" +"http-errors@npm:~1.6.2": + version: 1.6.3 + resolution: "http-errors@npm:1.6.3" dependencies: - minipass: ^7.0.3 - checksum: 8722a41109130851d979222d3ec88aabaceeaaf8f57b2a8f744ef8bd2d1ce95453b04a61daa0078822bc5cd21e008814f06fe6586f56fef511e71b8d2394d802 + depd: ~1.1.2 + inherits: 2.0.3 + setprototypeof: 1.1.0 + statuses: ">= 1.4.0 < 2" + checksum: a9654ee027e3d5de305a56db1d1461f25709ac23267c6dc28cdab8323e3f96caa58a9a6a5e93ac15d7285cee0c2f019378c3ada9026e7fe19c872d695f27de7c languageName: node linkType: hard -"fs-monkey@npm:^1.0.4": - version: 1.0.5 - resolution: "fs-monkey@npm:1.0.5" - checksum: 424b67f65b37fe66117ae2bb061f790fe6d4b609e1d160487c74b3d69fbf42f262c665ccfba32e8b5f113f96f92e9a58fcdebe42d5f6649bdfc72918093a3119 +"http-parser-js@npm:>=0.5.1": + version: 0.5.8 + 
resolution: "http-parser-js@npm:0.5.8" + checksum: 6bbdf2429858e8cf13c62375b0bfb6dc3955ca0f32e58237488bc86cd2378f31d31785fd3ac4ce93f1c74e0189cf8823c91f5cb061696214fd368d2452dc871d languageName: node linkType: hard -"fs.realpath@npm:^1.0.0": - version: 1.0.0 - resolution: "fs.realpath@npm:1.0.0" - checksum: 99ddea01a7e75aa276c250a04eedeffe5662bce66c65c07164ad6264f9de18fb21be9433ead460e54cff20e31721c811f4fb5d70591799df5f85dce6d6746fd0 +"http-proxy-agent@npm:^7.0.0": + version: 7.0.0 + resolution: "http-proxy-agent@npm:7.0.0" + dependencies: + agent-base: ^7.1.0 + debug: ^4.3.4 + checksum: 48d4fac997917e15f45094852b63b62a46d0c8a4f0b9c6c23ca26d27b8df8d178bed88389e604745e748bd9a01f5023e25093722777f0593c3f052009ff438b6 languageName: node linkType: hard -"fsevents@npm:2.3.2": - version: 2.3.2 - resolution: "fsevents@npm:2.3.2" +"http-proxy-middleware@npm:^2.0.3": + version: 2.0.6 + resolution: "http-proxy-middleware@npm:2.0.6" dependencies: - node-gyp: latest - checksum: 97ade64e75091afee5265e6956cb72ba34db7819b4c3e94c431d4be2b19b8bb7a2d4116da417950c3425f17c8fe693d25e20212cac583ac1521ad066b77ae31f - conditions: os=darwin + "@types/http-proxy": ^1.17.8 + http-proxy: ^1.18.1 + is-glob: ^4.0.1 + is-plain-obj: ^3.0.0 + micromatch: ^4.0.2 + peerDependencies: + "@types/express": ^4.17.13 + peerDependenciesMeta: + "@types/express": + optional: true + checksum: 2ee85bc878afa6cbf34491e972ece0f5be0a3e5c98a60850cf40d2a9a5356e1fc57aab6cff33c1fc37691b0121c3a42602d2b1956c52577e87a5b77b62ae1c3a languageName: node linkType: hard -"fsevents@npm:~2.3.2": - version: 2.3.3 - resolution: "fsevents@npm:2.3.3" +"http-proxy@npm:^1.18.1": + version: 1.18.1 + resolution: "http-proxy@npm:1.18.1" dependencies: - node-gyp: latest - checksum: 11e6ea6fea15e42461fc55b4b0e4a0a3c654faa567f1877dbd353f39156f69def97a69936d1746619d656c4b93de2238bf731f6085a03a50cabf287c9d024317 - conditions: os=darwin + eventemitter3: ^4.0.0 + follow-redirects: ^1.0.0 + requires-port: ^1.0.0 + checksum: f5bd96bf83e0b1e4226633dbb51f8b056c3e6321917df402deacec31dd7fe433914fc7a2c1831cf7ae21e69c90b3a669b8f434723e9e8b71fd68afe30737b6a5 languageName: node linkType: hard -"fsevents@patch:fsevents@2.3.2#~builtin": - version: 2.3.2 - resolution: "fsevents@patch:fsevents@npm%3A2.3.2#~builtin::version=2.3.2&hash=df0bf1" +"http2-wrapper@npm:^2.1.10": + version: 2.2.1 + resolution: "http2-wrapper@npm:2.2.1" dependencies: - node-gyp: latest - conditions: os=darwin + quick-lru: ^5.1.1 + resolve-alpn: ^1.2.0 + checksum: e95e55e22c6fd61182ce81fecb9b7da3af680d479febe8ad870d05f7ebbc9f076e455193766f4e7934e50913bf1d8da3ba121fb5cd2928892390b58cf9d5c509 languageName: node linkType: hard -"fsevents@patch:fsevents@~2.3.2#~builtin": - version: 2.3.3 - resolution: "fsevents@patch:fsevents@npm%3A2.3.3#~builtin::version=2.3.3&hash=df0bf1" +"https-proxy-agent@npm:5.0.1, https-proxy-agent@npm:^5.0.0": + version: 5.0.1 + resolution: "https-proxy-agent@npm:5.0.1" dependencies: - node-gyp: latest - conditions: os=darwin + agent-base: 6 + debug: 4 + checksum: 571fccdf38184f05943e12d37d6ce38197becdd69e58d03f43637f7fa1269cf303a7d228aa27e5b27bbd3af8f09fd938e1c91dcfefff2df7ba77c20ed8dfc765 languageName: node linkType: hard -"function-bind@npm:^1.1.1": - version: 1.1.1 - resolution: "function-bind@npm:1.1.1" - checksum: b32fbaebb3f8ec4969f033073b43f5c8befbb58f1a79e12f1d7490358150359ebd92f49e72ff0144f65f2c48ea2a605bff2d07965f548f6474fd8efd95bf361a +"https-proxy-agent@npm:^7.0.0, https-proxy-agent@npm:^7.0.1, https-proxy-agent@npm:^7.0.2": + version: 7.0.2 + resolution: 
"https-proxy-agent@npm:7.0.2" + dependencies: + agent-base: ^7.0.2 + debug: 4 + checksum: 088969a0dd476ea7a0ed0a2cf1283013682b08f874c3bc6696c83fa061d2c157d29ef0ad3eb70a2046010bb7665573b2388d10fdcb3e410a66995e5248444292 languageName: node linkType: hard -"functional-red-black-tree@npm:^1.0.1": - version: 1.0.1 - resolution: "functional-red-black-tree@npm:1.0.1" - checksum: ca6c170f37640e2d94297da8bb4bf27a1d12bea3e00e6a3e007fd7aa32e37e000f5772acf941b4e4f3cf1c95c3752033d0c509af157ad8f526e7f00723b9eb9f +"human-signals@npm:^2.1.0": + version: 2.1.0 + resolution: "human-signals@npm:2.1.0" + checksum: b87fd89fce72391625271454e70f67fe405277415b48bcc0117ca73d31fa23a4241787afdc8d67f5a116cf37258c052f59ea82daffa72364d61351423848e3b8 languageName: node linkType: hard -"gauge@npm:^4.0.3": - version: 4.0.4 - resolution: "gauge@npm:4.0.4" - dependencies: - aproba: ^1.0.3 || ^2.0.0 - color-support: ^1.1.3 - console-control-strings: ^1.1.0 - has-unicode: ^2.0.1 - signal-exit: ^3.0.7 - string-width: ^4.2.3 - strip-ansi: ^6.0.1 - wide-align: ^1.1.5 - checksum: 788b6bfe52f1dd8e263cda800c26ac0ca2ff6de0b6eee2fe0d9e3abf15e149b651bd27bf5226be10e6e3edb5c4e5d5985a5a1a98137e7a892f75eff76467ad2d +"human-signals@npm:^4.3.0": + version: 4.3.1 + resolution: "human-signals@npm:4.3.1" + checksum: 6f12958df3f21b6fdaf02d90896c271df00636a31e2bbea05bddf817a35c66b38a6fdac5863e2df85bd52f34958997f1f50350ff97249e1dff8452865d5235d1 languageName: node linkType: hard -"gensync@npm:^1.0.0-beta.1, gensync@npm:^1.0.0-beta.2": - version: 1.0.0-beta.2 - resolution: "gensync@npm:1.0.0-beta.2" - checksum: a7437e58c6be12aa6c90f7730eac7fa9833dc78872b4ad2963d2031b00a3367a93f98aec75f9aaac7220848e4026d67a8655e870b24f20a543d103c0d65952ec +"hyperdyperid@npm:^1.2.0": + version: 1.2.0 + resolution: "hyperdyperid@npm:1.2.0" + checksum: 210029d1c86926f09109f6317d143f8b056fc38e8dd11b0c3e3205fc6c6ff8429fb55b4b9c2bce065462719ed9d34366eced387aaa0035d93eb76b306a8547ef languageName: node linkType: hard -"get-caller-file@npm:^2.0.5": - version: 2.0.5 - resolution: "get-caller-file@npm:2.0.5" - checksum: b9769a836d2a98c3ee734a88ba712e62703f1df31b94b784762c433c27a386dd6029ff55c2a920c392e33657d80191edbf18c61487e198844844516f843496b9 +"iconv-lite@npm:0.4.24": + version: 0.4.24 + resolution: "iconv-lite@npm:0.4.24" + dependencies: + safer-buffer: ">= 2.1.2 < 3" + checksum: bd9f120f5a5b306f0bc0b9ae1edeb1577161503f5f8252a20f1a9e56ef8775c9959fd01c55f2d3a39d9a8abaf3e30c1abeb1895f367dcbbe0a8fd1c9ca01c4f6 languageName: node linkType: hard -"get-func-name@npm:^2.0.0": - version: 2.0.0 - resolution: "get-func-name@npm:2.0.0" - checksum: 8d82e69f3e7fab9e27c547945dfe5cc0c57fc0adf08ce135dddb01081d75684a03e7a0487466f478872b341d52ac763ae49e660d01ab83741f74932085f693c3 +"iconv-lite@npm:^0.6.2": + version: 0.6.3 + resolution: "iconv-lite@npm:0.6.3" + dependencies: + safer-buffer: ">= 2.1.2 < 3.0.0" + checksum: 3f60d47a5c8fc3313317edfd29a00a692cc87a19cac0159e2ce711d0ebc9019064108323b5e493625e25594f11c6236647d8e256fbe7a58f4a3b33b89e6d30bf languageName: node linkType: hard -"get-intrinsic@npm:^1.0.2, get-intrinsic@npm:^1.1.1, get-intrinsic@npm:^1.1.3, get-intrinsic@npm:^1.2.1": - version: 1.2.1 - resolution: "get-intrinsic@npm:1.2.1" - dependencies: - function-bind: ^1.1.1 - has: ^1.0.3 - has-proto: ^1.0.1 - has-symbols: ^1.0.3 - checksum: 5b61d88552c24b0cf6fa2d1b3bc5459d7306f699de060d76442cce49a4721f52b8c560a33ab392cf5575b7810277d54ded9d4d39a1ea61855619ebc005aa7e5f +"icss-utils@npm:^5.0.0, icss-utils@npm:^5.1.0": + version: 5.1.0 + resolution: "icss-utils@npm:5.1.0" + 
peerDependencies: + postcss: ^8.1.0 + checksum: 5c324d283552b1269cfc13a503aaaa172a280f914e5b81544f3803bc6f06a3b585fb79f66f7c771a2c052db7982c18bf92d001e3b47282e3abbbb4c4cc488d68 languageName: node linkType: hard -"get-own-enumerable-property-symbols@npm:^3.0.0": - version: 3.0.2 - resolution: "get-own-enumerable-property-symbols@npm:3.0.2" - checksum: 8f0331f14159f939830884799f937343c8c0a2c330506094bc12cbee3665d88337fe97a4ea35c002cc2bdba0f5d9975ad7ec3abb925015cdf2a93e76d4759ede +"idb-wrapper@npm:^1.5.0": + version: 1.7.2 + resolution: "idb-wrapper@npm:1.7.2" + checksum: a5fa3a771166205e2d5d2b93c66bd31571dada3526b59bc0f8583efb091b6b327125f1a964a25a281b85ef1c44af10a3c511652632ad3adf8229a161132d66ae languageName: node linkType: hard -"get-stdin@npm:^8.0.0": - version: 8.0.0 - resolution: "get-stdin@npm:8.0.0" - checksum: 40128b6cd25781ddbd233344f1a1e4006d4284906191ed0a7d55ec2c1a3e44d650f280b2c9eeab79c03ac3037da80257476c0e4e5af38ddfb902d6ff06282d77 +"ieee754@npm:^1.1.13, ieee754@npm:^1.2.1": + version: 1.2.1 + resolution: "ieee754@npm:1.2.1" + checksum: 5144c0c9815e54ada181d80a0b810221a253562422e7c6c3a60b1901154184f49326ec239d618c416c1c5945a2e197107aee8d986a3dd836b53dffefd99b5e7e languageName: node linkType: hard -"get-stream@npm:^4.1.0": - version: 4.1.0 - resolution: "get-stream@npm:4.1.0" - dependencies: - pump: ^3.0.0 - checksum: 443e1914170c15bd52ff8ea6eff6dfc6d712b031303e36302d2778e3de2506af9ee964d6124010f7818736dcfde05c04ba7ca6cc26883106e084357a17ae7d73 +"ignore@npm:^5.2.0, ignore@npm:^5.2.4": + version: 5.3.0 + resolution: "ignore@npm:5.3.0" + checksum: 2736da6621f14ced652785cb05d86301a66d70248597537176612bd0c8630893564bd5f6421f8806b09e8472e75c591ef01672ab8059c07c6eb2c09cefe04bf9 languageName: node linkType: hard -"get-stream@npm:^5.1.0": - version: 5.2.0 - resolution: "get-stream@npm:5.2.0" +"image-size@npm:^1.0.1, image-size@npm:^1.0.2": + version: 1.0.2 + resolution: "image-size@npm:1.0.2" dependencies: - pump: ^3.0.0 - checksum: 8bc1a23174a06b2b4ce600df38d6c98d2ef6d84e020c1ddad632ad75bac4e092eeb40e4c09e0761c35fc2dbc5e7fff5dab5e763a383582c4a167dd69a905bd12 + queue: 6.0.2 + bin: + image-size: bin/image-size.js + checksum: 01745fdb47f87cecf538e69c63f9adc5bfab30a345345c2de91105f3afbd1bfcfba1256af02bf3323077b33b0004469a837e077bf0cbb9c907e9c1e9e7547585 languageName: node linkType: hard -"get-stream@npm:^6.0.0, get-stream@npm:^6.0.1": - version: 6.0.1 - resolution: "get-stream@npm:6.0.1" - checksum: e04ecece32c92eebf5b8c940f51468cd53554dcbb0ea725b2748be583c9523d00128137966afce410b9b051eb2ef16d657cd2b120ca8edafcf5a65e81af63cad +"immediate@npm:^3.2.3": + version: 3.3.0 + resolution: "immediate@npm:3.3.0" + checksum: 634b4305101e2452eba6c07d485bf3e415995e533c94b9c3ffbc37026fa1be34def6e4f2276b0dc2162a3f91628564a4bfb26280278b89d3ee54624e854d2f5f languageName: node linkType: hard -"github-slugger@npm:^1.4.0": - version: 1.5.0 - resolution: "github-slugger@npm:1.5.0" - checksum: c70988224578b3bdaa25df65973ffc8c24594a77a28550c3636e495e49d17aef5cdb04c04fa3f1744babef98c61eecc6a43299a13ea7f3cc33d680bf9053ffbe +"immer@npm:^9.0.7": + version: 9.0.21 + resolution: "immer@npm:9.0.21" + checksum: 70e3c274165995352f6936695f0ef4723c52c92c92dd0e9afdfe008175af39fa28e76aafb3a2ca9d57d1fb8f796efc4dd1e1cc36f18d33fa5b74f3dfb0375432 languageName: node linkType: hard -"glob-parent@npm:^5.1.2, glob-parent@npm:~5.1.2": - version: 5.1.2 - resolution: "glob-parent@npm:5.1.2" - dependencies: - is-glob: ^4.0.1 - checksum: 
f4f2bfe2425296e8a47e36864e4f42be38a996db40420fe434565e4480e3322f18eb37589617a98640c5dc8fdec1a387007ee18dbb1f3f5553409c34d17f425e +"immutable@npm:^4.0.0-rc.12": + version: 4.3.4 + resolution: "immutable@npm:4.3.4" + checksum: de3edd964c394bab83432429d3fb0b4816b42f56050f2ca913ba520bd3068ec3e504230d0800332d3abc478616e8f55d3787424a90d0952e6aba864524f1afc3 languageName: node linkType: hard -"glob-parent@npm:^6.0.1, glob-parent@npm:^6.0.2": - version: 6.0.2 - resolution: "glob-parent@npm:6.0.2" +"import-fresh@npm:^3.1.0, import-fresh@npm:^3.2.1, import-fresh@npm:^3.3.0": + version: 3.3.0 + resolution: "import-fresh@npm:3.3.0" dependencies: - is-glob: ^4.0.3 - checksum: c13ee97978bef4f55106b71e66428eb1512e71a7466ba49025fc2aec59a5bfb0954d5abd58fc5ee6c9b076eef4e1f6d3375c2e964b88466ca390da4419a786a8 - languageName: node - linkType: hard - -"glob-to-regexp@npm:^0.4.1": - version: 0.4.1 - resolution: "glob-to-regexp@npm:0.4.1" - checksum: e795f4e8f06d2a15e86f76e4d92751cf8bbfcf0157cea5c2f0f35678a8195a750b34096b1256e436f0cebc1883b5ff0888c47348443e69546a5a87f9e1eb1167 + parent-module: ^1.0.0 + resolve-from: ^4.0.0 + checksum: 2cacfad06e652b1edc50be650f7ec3be08c5e5a6f6d12d035c440a42a8cc028e60a5b99ca08a77ab4d6b1346da7d971915828f33cdab730d3d42f08242d09baa languageName: node linkType: hard -"glob@npm:7.2.0": - version: 7.2.0 - resolution: "glob@npm:7.2.0" - dependencies: - fs.realpath: ^1.0.0 - inflight: ^1.0.4 - inherits: 2 - minimatch: ^3.0.4 - once: ^1.3.0 - path-is-absolute: ^1.0.0 - checksum: 78a8ea942331f08ed2e055cb5b9e40fe6f46f579d7fd3d694f3412fe5db23223d29b7fee1575440202e9a7ff9a72ab106a39fee39934c7bedafe5e5f8ae20134 +"import-lazy@npm:^2.1.0": + version: 2.1.0 + resolution: "import-lazy@npm:2.1.0" + checksum: 05294f3b9dd4971d3a996f0d2f176410fb6745d491d6e73376429189f5c1c3d290548116b2960a7cf3e89c20cdf11431739d1d2d8c54b84061980795010e803a languageName: node linkType: hard -"glob@npm:^10.2.2": - version: 10.3.5 - resolution: "glob@npm:10.3.5" - dependencies: - foreground-child: ^3.1.0 - jackspeak: ^2.0.3 - minimatch: ^9.0.1 - minipass: ^5.0.0 || ^6.0.2 || ^7.0.0 - path-scurry: ^1.10.1 - bin: - glob: dist/cjs/src/bin.js - checksum: 564f4799cae48c0bcc841c88a20b539b5701c27ed5596f8623f588b3c523262d3fc20eb1ea89cab9c75b0912faf40ca5501fc835f982225d0d0599282b09e97a +"import-lazy@npm:^4.0.0": + version: 4.0.0 + resolution: "import-lazy@npm:4.0.0" + checksum: 22f5e51702134aef78890156738454f620e5fe7044b204ebc057c614888a1dd6fdf2ede0fdcca44d5c173fd64f65c985f19a51775b06967ef58cc3d26898df07 languageName: node linkType: hard -"glob@npm:^10.3.10": - version: 10.3.10 - resolution: "glob@npm:10.3.10" +"import-local@npm:^3.0.2": + version: 3.1.0 + resolution: "import-local@npm:3.1.0" dependencies: - foreground-child: ^3.1.0 - jackspeak: ^2.3.5 - minimatch: ^9.0.1 - minipass: ^5.0.0 || ^6.0.2 || ^7.0.0 - path-scurry: ^1.10.1 - bin: - glob: dist/esm/bin.mjs - checksum: 4f2fe2511e157b5a3f525a54092169a5f92405f24d2aed3142f4411df328baca13059f4182f1db1bf933e2c69c0bd89e57ae87edd8950cba8c7ccbe84f721cf3 + pkg-dir: ^4.2.0 + resolve-cwd: ^3.0.0 + bin: + import-local-fixture: fixtures/cli.js + checksum: bfcdb63b5e3c0e245e347f3107564035b128a414c4da1172a20dc67db2504e05ede4ac2eee1252359f78b0bfd7b19ef180aec427c2fce6493ae782d73a04cddd languageName: node linkType: hard -"glob@npm:^7.0.0, glob@npm:^7.1.3, glob@npm:^7.1.4, glob@npm:^7.1.6": - version: 7.2.3 - resolution: "glob@npm:7.2.3" - dependencies: - fs.realpath: ^1.0.0 - inflight: ^1.0.4 - inherits: 2 - minimatch: ^3.1.1 - once: ^1.3.0 - path-is-absolute: ^1.0.0 - checksum: 
29452e97b38fa704dabb1d1045350fb2467cf0277e155aa9ff7077e90ad81d1ea9d53d3ee63bd37c05b09a065e90f16aec4a65f5b8de401d1dac40bc5605d133 +"import-meta-resolve@npm:^4.0.0": + version: 4.0.0 + resolution: "import-meta-resolve@npm:4.0.0" + checksum: 51c50115fd38e9ba21736f8d7543a58446b92d2cb5f38c9b5ec72426afeb2fb790f82051560a0f16323f44dd73d8d37c07eab5f8dc4635bcdb401daa36727b1a languageName: node linkType: hard -"global-dirs@npm:^3.0.0": - version: 3.0.1 - resolution: "global-dirs@npm:3.0.1" - dependencies: - ini: 2.0.0 - checksum: 70147b80261601fd40ac02a104581432325c1c47329706acd773f3a6ce99bb36d1d996038c85ccacd482ad22258ec233c586b6a91535b1a116b89663d49d6438 +"imurmurhash@npm:^0.1.4": + version: 0.1.4 + resolution: "imurmurhash@npm:0.1.4" + checksum: 7cae75c8cd9a50f57dadd77482359f659eaebac0319dd9368bcd1714f55e65badd6929ca58569da2b6494ef13fdd5598cd700b1eba23f8b79c5f19d195a3ecf7 languageName: node linkType: hard -"global-modules@npm:^2.0.0": - version: 2.0.0 - resolution: "global-modules@npm:2.0.0" - dependencies: - global-prefix: ^3.0.0 - checksum: d6197f25856c878c2fb5f038899f2dca7cbb2f7b7cf8999660c0104972d5cfa5c68b5a0a77fa8206bb536c3903a4615665acb9709b4d80846e1bb47eaef65430 +"indent-string@npm:^4.0.0": + version: 4.0.0 + resolution: "indent-string@npm:4.0.0" + checksum: 824cfb9929d031dabf059bebfe08cf3137365e112019086ed3dcff6a0a7b698cb80cf67ccccde0e25b9e2d7527aa6cc1fed1ac490c752162496caba3e6699612 languageName: node linkType: hard -"global-prefix@npm:^3.0.0": - version: 3.0.0 - resolution: "global-prefix@npm:3.0.0" - dependencies: - ini: ^1.3.5 - kind-of: ^6.0.2 - which: ^1.3.1 - checksum: 8a82fc1d6f22c45484a4e34656cc91bf021a03e03213b0035098d605bfc612d7141f1e14a21097e8a0413b4884afd5b260df0b6a25605ce9d722e11f1df2881d +"indexof@npm:~0.0.1": + version: 0.0.1 + resolution: "indexof@npm:0.0.1" + checksum: 0fb04e8b147b8585d981a6df1564f25bb3678d6fa74e33e5cecc1464b10f78e15e8ef6bb688f135fe5c2844a128fac8a7831cbe5adc81fdcf12681b093dfcc25 languageName: node linkType: hard -"globals@npm:^11.1.0": - version: 11.12.0 - resolution: "globals@npm:11.12.0" - checksum: 67051a45eca3db904aee189dfc7cd53c20c7d881679c93f6146ddd4c9f4ab2268e68a919df740d39c71f4445d2b38ee360fc234428baea1dbdfe68bbcb46979e +"infima@npm:0.2.0-alpha.43": + version: 0.2.0-alpha.43 + resolution: "infima@npm:0.2.0-alpha.43" + checksum: fc5f79240e940eddd750439511767092ccb4051e5e91d253ec7630a9e7ce691812da3aa0f05e46b4c0a95dbfadeae5714fd0073f8d2df12e5aaff0697a1d6aa2 languageName: node linkType: hard -"globals@npm:^13.19.0": - version: 13.22.0 - resolution: "globals@npm:13.22.0" - dependencies: - type-fest: ^0.20.2 - checksum: 64af5a09565341432770444085f7aa98b54331c3b69732e0de411003921fa2dd060222ae7b50bec0b98f29c4d00b4f49bf434049ba9f7c36ca4ee1773f60458c +"inflation@npm:^2.0.0": + version: 2.1.0 + resolution: "inflation@npm:2.1.0" + checksum: 80c1b5d9ec408105a85f0623c824d668ddf0cadafd8d9716c0737990e5a712ae5f7d6bb0ff216b6648eccb9c6ac69fe06c0d8c58456d168db5bf550c89dd74ed languageName: node linkType: hard -"globby@npm:^11.0.1, globby@npm:^11.0.4, globby@npm:^11.1.0": - version: 11.1.0 - resolution: "globby@npm:11.1.0" +"inflight@npm:^1.0.4": + version: 1.0.6 + resolution: "inflight@npm:1.0.6" dependencies: - array-union: ^2.1.0 - dir-glob: ^3.0.1 - fast-glob: ^3.2.9 - ignore: ^5.2.0 - merge2: ^1.4.1 - slash: ^3.0.0 - checksum: b4be8885e0cfa018fc783792942d53926c35c50b3aefd3fdcfb9d22c627639dc26bd2327a40a0b74b074100ce95bb7187bfeae2f236856aa3de183af7a02aea6 + once: ^1.3.0 + wrappy: 1 + checksum: 
f4f76aa072ce19fae87ce1ef7d221e709afb59d445e05d47fba710e85470923a75de35bfae47da6de1b18afc3ce83d70facf44cfb0aff89f0a3f45c0a0244dfd languageName: node linkType: hard -"globby@npm:^13.1.1, globby@npm:^13.1.2, globby@npm:^13.1.4": - version: 13.2.2 - resolution: "globby@npm:13.2.2" - dependencies: - dir-glob: ^3.0.1 - fast-glob: ^3.3.0 - ignore: ^5.2.4 - merge2: ^1.4.1 - slash: ^4.0.0 - checksum: f3d84ced58a901b4fcc29c846983108c426631fe47e94872868b65565495f7bee7b3defd68923bd480582771fd4bbe819217803a164a618ad76f1d22f666f41e +"inherits@npm:2, inherits@npm:2.0.4, inherits@npm:^2.0.0, inherits@npm:^2.0.1, inherits@npm:^2.0.3, inherits@npm:^2.0.4, inherits@npm:~2.0.1, inherits@npm:~2.0.3": + version: 2.0.4 + resolution: "inherits@npm:2.0.4" + checksum: 4a48a733847879d6cf6691860a6b1e3f0f4754176e4d71494c41f3475553768b10f84b5ce1d40fbd0e34e6bfbb864ee35858ad4dd2cf31e02fc4a154b724d7f1 languageName: node linkType: hard -"gopd@npm:^1.0.1": - version: 1.0.1 - resolution: "gopd@npm:1.0.1" - dependencies: - get-intrinsic: ^1.1.3 - checksum: a5ccfb8806e0917a94e0b3de2af2ea4979c1da920bc381667c260e00e7cafdbe844e2cb9c5bcfef4e5412e8bf73bab837285bc35c7ba73aaaf0134d4583393a6 +"inherits@npm:2.0.3": + version: 2.0.3 + resolution: "inherits@npm:2.0.3" + checksum: 78cb8d7d850d20a5e9a7f3620db31483aa00ad5f722ce03a55b110e5a723539b3716a3b463e2b96ce3fe286f33afc7c131fa2f91407528ba80cea98a7545d4c0 languageName: node linkType: hard -"got@npm:^9.6.0": - version: 9.6.0 - resolution: "got@npm:9.6.0" - dependencies: - "@sindresorhus/is": ^0.14.0 - "@szmarczak/http-timer": ^1.1.2 - cacheable-request: ^6.0.0 - decompress-response: ^3.3.0 - duplexer3: ^0.1.4 - get-stream: ^4.1.0 - lowercase-keys: ^1.0.1 - mimic-response: ^1.0.1 - p-cancelable: ^1.0.0 - to-readable-stream: ^1.0.0 - url-parse-lax: ^3.0.0 - checksum: 941807bd9704bacf5eb401f0cc1212ffa1f67c6642f2d028fd75900471c221b1da2b8527f4553d2558f3faeda62ea1cf31665f8b002c6137f5de8732f07370b0 +"ini@npm:2.0.0": + version: 2.0.0 + resolution: "ini@npm:2.0.0" + checksum: e7aadc5fb2e4aefc666d74ee2160c073995a4061556b1b5b4241ecb19ad609243b9cceafe91bae49c219519394bbd31512516cb22a3b1ca6e66d869e0447e84e languageName: node linkType: hard -"graceful-fs@npm:^4.1.11, graceful-fs@npm:^4.1.2, graceful-fs@npm:^4.1.6, graceful-fs@npm:^4.1.9, graceful-fs@npm:^4.2.0, graceful-fs@npm:^4.2.4, graceful-fs@npm:^4.2.6, graceful-fs@npm:^4.2.9": - version: 4.2.11 - resolution: "graceful-fs@npm:4.2.11" - checksum: ac85f94da92d8eb6b7f5a8b20ce65e43d66761c55ce85ac96df6865308390da45a8d3f0296dd3a663de65d30ba497bd46c696cc1e248c72b13d6d567138a4fc7 +"ini@npm:4.1.1": + version: 4.1.1 + resolution: "ini@npm:4.1.1" + checksum: 0e5909554074fbc31824fa5415b0f604de4a665514c96a897a77bf77353a7ad4743927321270e9d0610a9d510ccd1f3cd77422f7cc80d8f4542dbce75476fb6d languageName: node linkType: hard -"graphemer@npm:^1.4.0": - version: 1.4.0 - resolution: "graphemer@npm:1.4.0" - checksum: bab8f0be9b568857c7bec9fda95a89f87b783546d02951c40c33f84d05bb7da3fd10f863a9beb901463669b6583173a8c8cc6d6b306ea2b9b9d5d3d943c3a673 +"ini@npm:^1.3.4, ini@npm:^1.3.5, ini@npm:~1.3.0": + version: 1.3.8 + resolution: "ini@npm:1.3.8" + checksum: dfd98b0ca3a4fc1e323e38a6c8eb8936e31a97a918d3b377649ea15bdb15d481207a0dda1021efbd86b464cae29a0d33c1d7dcaf6c5672bee17fa849bc50a1b3 languageName: node linkType: hard -"gray-matter@npm:^4.0.3": - version: 4.0.3 - resolution: "gray-matter@npm:4.0.3" - dependencies: - js-yaml: ^3.13.1 - kind-of: ^6.0.2 - section-matter: ^1.0.0 - strip-bom-string: ^1.0.0 - checksum: 
37717bd424344487d655392251ce8d8878a1275ee087003e61208fba3bfd59cbb73a85b2159abf742ae95e23db04964813fdc33ae18b074208428b2528205222 +"inline-style-parser@npm:0.1.1": + version: 0.1.1 + resolution: "inline-style-parser@npm:0.1.1" + checksum: 5d545056a3e1f2bf864c928a886a0e1656a3517127d36917b973de581bd54adc91b4bf1febcb0da054f204b4934763f1a4e09308b4d55002327cf1d48ac5d966 languageName: node linkType: hard -"gzip-size@npm:^6.0.0": - version: 6.0.0 - resolution: "gzip-size@npm:6.0.0" +"inline-style-parser@npm:0.2.2": + version: 0.2.2 + resolution: "inline-style-parser@npm:0.2.2" + checksum: 698893d6542d4e7c0377936a1c7daec34a197765bd77c5599384756a95ce8804e6b79347b783aa591d5e9c6f3d33dae74c6d4cad3a94647eb05f3a785e927a3f + languageName: node + linkType: hard + +"integration-tests@workspace:compiler/integration-tests": + version: 0.0.0-use.local + resolution: "integration-tests@workspace:compiler/integration-tests" dependencies: - duplexer: ^0.1.2 - checksum: 2df97f359696ad154fc171dcb55bc883fe6e833bca7a65e457b9358f3cb6312405ed70a8da24a77c1baac0639906cd52358dc0ce2ec1a937eaa631b934c94194 + "@noir-lang/backend_barretenberg": "workspace:*" + "@noir-lang/noir_js": "workspace:*" + "@noir-lang/noir_wasm": "workspace:*" + "@nomicfoundation/hardhat-chai-matchers": ^2.0.0 + "@nomicfoundation/hardhat-ethers": ^3.0.0 + "@web/dev-server-esbuild": ^0.3.6 + "@web/test-runner": ^0.15.3 + "@web/test-runner-playwright": ^0.10.0 + eslint: ^8.50.0 + eslint-plugin-prettier: ^5.0.0 + ethers: ^6.7.1 + hardhat: ^2.17.4 + prettier: 3.0.3 + smol-toml: ^1.1.2 + toml: ^3.0.0 + tslog: ^4.9.2 + languageName: unknown + linkType: soft + +"interpret@npm:^1.0.0": + version: 1.4.0 + resolution: "interpret@npm:1.4.0" + checksum: 2e5f51268b5941e4a17e4ef0575bc91ed0ab5f8515e3cf77486f7c14d13f3010df9c0959f37063dcc96e78d12dc6b0bb1b9e111cdfe69771f4656d2993d36155 languageName: node linkType: hard -"handle-thing@npm:^2.0.0": - version: 2.0.1 - resolution: "handle-thing@npm:2.0.1" - checksum: 68071f313062315cd9dce55710e9496873945f1dd425107007058fc1629f93002a7649fcc3e464281ce02c7e809a35f5925504ab8105d972cf649f1f47cb7d6c +"interpret@npm:^2.2.0": + version: 2.2.0 + resolution: "interpret@npm:2.2.0" + checksum: f51efef7cb8d02da16408ffa3504cd6053014c5aeb7bb8c223727e053e4235bf565e45d67028b0c8740d917c603807aa3c27d7bd2f21bf20b6417e2bb3e5fd6e languageName: node linkType: hard -"hardhat@npm:^2.17.4": - version: 2.17.4 - resolution: "hardhat@npm:2.17.4" +"invariant@npm:^2.2.4": + version: 2.2.4 + resolution: "invariant@npm:2.2.4" dependencies: - "@ethersproject/abi": ^5.1.2 - "@metamask/eth-sig-util": ^4.0.0 - "@nomicfoundation/ethereumjs-block": 5.0.2 - "@nomicfoundation/ethereumjs-blockchain": 7.0.2 - "@nomicfoundation/ethereumjs-common": 4.0.2 - "@nomicfoundation/ethereumjs-evm": 2.0.2 - "@nomicfoundation/ethereumjs-rlp": 5.0.2 - "@nomicfoundation/ethereumjs-statemanager": 2.0.2 - "@nomicfoundation/ethereumjs-trie": 6.0.2 - "@nomicfoundation/ethereumjs-tx": 5.0.2 - "@nomicfoundation/ethereumjs-util": 9.0.2 - "@nomicfoundation/ethereumjs-vm": 7.0.2 - "@nomicfoundation/solidity-analyzer": ^0.1.0 - "@sentry/node": ^5.18.1 - "@types/bn.js": ^5.1.0 - "@types/lru-cache": ^5.1.0 - adm-zip: ^0.4.16 - aggregate-error: ^3.0.0 - ansi-escapes: ^4.3.0 - chalk: ^2.4.2 - chokidar: ^3.4.0 - ci-info: ^2.0.0 - debug: ^4.1.1 - enquirer: ^2.3.0 - env-paths: ^2.2.0 - ethereum-cryptography: ^1.0.3 - ethereumjs-abi: ^0.6.8 - find-up: ^2.1.0 - fp-ts: 1.19.3 - fs-extra: ^7.0.1 - glob: 7.2.0 - immutable: ^4.0.0-rc.12 - io-ts: 1.10.4 - keccak: ^3.0.2 - lodash: ^4.17.11 - mnemonist: 
^0.38.0 - mocha: ^10.0.0 - p-map: ^4.0.0 - raw-body: ^2.4.1 - resolve: 1.17.0 - semver: ^6.3.0 - solc: 0.7.3 - source-map-support: ^0.5.13 - stacktrace-parser: ^0.1.10 - tsort: 0.0.1 - undici: ^5.14.0 - uuid: ^8.3.2 - ws: ^7.4.6 - peerDependencies: - ts-node: "*" - typescript: "*" - peerDependenciesMeta: - ts-node: - optional: true - typescript: - optional: true - bin: - hardhat: internal/cli/bootstrap.js - checksum: da8762f29ac08b6178edaedf1bf4641e8bcf44759c944906af644519fa9d0d3fb6c30e87add3904d8899f9cebfbde47a8422e219ad0bb24226c6636f2d3ef7c2 + loose-envify: ^1.0.0 + checksum: cc3182d793aad82a8d1f0af697b462939cb46066ec48bbf1707c150ad5fad6406137e91a262022c269702e01621f35ef60269f6c0d7fd178487959809acdfb14 languageName: node linkType: hard -"has-flag@npm:^3.0.0": - version: 3.0.0 - resolution: "has-flag@npm:3.0.0" - checksum: 4a15638b454bf086c8148979aae044dd6e39d63904cd452d970374fa6a87623423da485dfb814e7be882e05c096a7ccf1ebd48e7e7501d0208d8384ff4dea73b +"io-ts@npm:1.10.4": + version: 1.10.4 + resolution: "io-ts@npm:1.10.4" + dependencies: + fp-ts: ^1.0.0 + checksum: 619134006778f7ca42693716ade7fc1a383079e7848bbeabc67a0e4ac9139cda6b2a88a052d539ab7d554033ee2ffe4dab5cb96b958c83fee2dff73d23f03e88 languageName: node linkType: hard -"has-flag@npm:^4.0.0": - version: 4.0.0 - resolution: "has-flag@npm:4.0.0" - checksum: 261a1357037ead75e338156b1f9452c016a37dcd3283a972a30d9e4a87441ba372c8b81f818cd0fbcd9c0354b4ae7e18b9e1afa1971164aef6d18c2b6095a8ad +"ip@npm:^1.1.5, ip@npm:^1.1.8": + version: 1.1.8 + resolution: "ip@npm:1.1.8" + checksum: a2ade53eb339fb0cbe9e69a44caab10d6e3784662285eb5d2677117ee4facc33a64679051c35e0dfdb1a3983a51ce2f5d2cb36446d52e10d01881789b76e28fb languageName: node linkType: hard -"has-property-descriptors@npm:^1.0.0": - version: 1.0.0 - resolution: "has-property-descriptors@npm:1.0.0" - dependencies: - get-intrinsic: ^1.1.1 - checksum: a6d3f0a266d0294d972e354782e872e2fe1b6495b321e6ef678c9b7a06a40408a6891817350c62e752adced73a94ac903c54734fee05bf65b1905ee1368194bb +"ip@npm:^2.0.0": + version: 2.0.0 + resolution: "ip@npm:2.0.0" + checksum: cfcfac6b873b701996d71ec82a7dd27ba92450afdb421e356f44044ed688df04567344c36cbacea7d01b1c39a4c732dc012570ebe9bebfb06f27314bca625349 languageName: node linkType: hard -"has-proto@npm:^1.0.1": - version: 1.0.1 - resolution: "has-proto@npm:1.0.1" - checksum: febc5b5b531de8022806ad7407935e2135f1cc9e64636c3916c6842bd7995994ca3b29871ecd7954bd35f9e2986c17b3b227880484d22259e2f8e6ce63fd383e +"ipaddr.js@npm:1.9.1": + version: 1.9.1 + resolution: "ipaddr.js@npm:1.9.1" + checksum: f88d3825981486f5a1942414c8d77dd6674dd71c065adcfa46f578d677edcb99fda25af42675cb59db492fdf427b34a5abfcde3982da11a8fd83a500b41cfe77 languageName: node linkType: hard -"has-symbols@npm:^1.0.2, has-symbols@npm:^1.0.3": - version: 1.0.3 - resolution: "has-symbols@npm:1.0.3" - checksum: a054c40c631c0d5741a8285010a0777ea0c068f99ed43e5d6eb12972da223f8af553a455132fdb0801bdcfa0e0f443c0c03a68d8555aa529b3144b446c3f2410 +"ipaddr.js@npm:^2.0.1": + version: 2.1.0 + resolution: "ipaddr.js@npm:2.1.0" + checksum: 807a054f2bd720c4d97ee479d6c9e865c233bea21f139fb8dabd5a35c4226d2621c42e07b4ad94ff3f82add926a607d8d9d37c625ad0319f0e08f9f2bd1968e2 languageName: node linkType: hard -"has-tostringtag@npm:^1.0.0": - version: 1.0.0 - resolution: "has-tostringtag@npm:1.0.0" - dependencies: - has-symbols: ^1.0.2 - checksum: cc12eb28cb6ae22369ebaad3a8ab0799ed61270991be88f208d508076a1e99abe4198c965935ce85ea90b60c94ddda73693b0920b58e7ead048b4a391b502c1c +"is-alphabetical@npm:1.0.4, is-alphabetical@npm:^1.0.0": + version: 
1.0.4 + resolution: "is-alphabetical@npm:1.0.4" + checksum: 6508cce44fd348f06705d377b260974f4ce68c74000e7da4045f0d919e568226dc3ce9685c5a2af272195384df6930f748ce9213fc9f399b5d31b362c66312cb languageName: node linkType: hard -"has-unicode@npm:^2.0.1": +"is-alphabetical@npm:^2.0.0": version: 2.0.1 - resolution: "has-unicode@npm:2.0.1" - checksum: 1eab07a7436512db0be40a710b29b5dc21fa04880b7f63c9980b706683127e3c1b57cb80ea96d47991bdae2dfe479604f6a1ba410106ee1046a41d1bd0814400 - languageName: node - linkType: hard - -"has-yarn@npm:^2.1.0": - version: 2.1.0 - resolution: "has-yarn@npm:2.1.0" - checksum: 5eb1d0bb8518103d7da24532bdbc7124ffc6d367b5d3c10840b508116f2f1bcbcf10fd3ba843ff6e2e991bdf9969fd862d42b2ed58aade88343326c950b7e7f7 + resolution: "is-alphabetical@npm:2.0.1" + checksum: 56207db8d9de0850f0cd30f4966bf731eb82cedfe496cbc2e97e7c3bacaf66fc54a972d2d08c0d93bb679cb84976a05d24c5ad63de56fabbfc60aadae312edaa languageName: node linkType: hard -"has@npm:^1.0.3": - version: 1.0.3 - resolution: "has@npm:1.0.3" +"is-alphanumerical@npm:^1.0.0": + version: 1.0.4 + resolution: "is-alphanumerical@npm:1.0.4" dependencies: - function-bind: ^1.1.1 - checksum: b9ad53d53be4af90ce5d1c38331e712522417d017d5ef1ebd0507e07c2fbad8686fffb8e12ddecd4c39ca9b9b47431afbb975b8abf7f3c3b82c98e9aad052792 + is-alphabetical: ^1.0.0 + is-decimal: ^1.0.0 + checksum: e2e491acc16fcf5b363f7c726f666a9538dba0a043665740feb45bba1652457a73441e7c5179c6768a638ed396db3437e9905f403644ec7c468fb41f4813d03f languageName: node linkType: hard -"hash-base@npm:^3.0.0": - version: 3.1.0 - resolution: "hash-base@npm:3.1.0" +"is-alphanumerical@npm:^2.0.0": + version: 2.0.1 + resolution: "is-alphanumerical@npm:2.0.1" dependencies: - inherits: ^2.0.4 - readable-stream: ^3.6.0 - safe-buffer: ^5.2.0 - checksum: 26b7e97ac3de13cb23fc3145e7e3450b0530274a9562144fc2bf5c1e2983afd0e09ed7cc3b20974ba66039fad316db463da80eb452e7373e780cbee9a0d2f2dc + is-alphabetical: ^2.0.0 + is-decimal: ^2.0.0 + checksum: 87acc068008d4c9c4e9f5bd5e251041d42e7a50995c77b1499cf6ed248f971aadeddb11f239cabf09f7975ee58cac7a48ffc170b7890076d8d227b24a68663c9 languageName: node linkType: hard -"hash.js@npm:1.1.7, hash.js@npm:^1.0.0, hash.js@npm:^1.0.3, hash.js@npm:^1.1.7": - version: 1.1.7 - resolution: "hash.js@npm:1.1.7" +"is-arguments@npm:^1.0.4": + version: 1.1.1 + resolution: "is-arguments@npm:1.1.1" dependencies: - inherits: ^2.0.3 - minimalistic-assert: ^1.0.1 - checksum: e350096e659c62422b85fa508e4b3669017311aa4c49b74f19f8e1bc7f3a54a584fdfd45326d4964d6011f2b2d882e38bea775a96046f2a61b7779a979629d8f + call-bind: ^1.0.2 + has-tostringtag: ^1.0.0 + checksum: 7f02700ec2171b691ef3e4d0e3e6c0ba408e8434368504bb593d0d7c891c0dbfda6d19d30808b904a6cb1929bca648c061ba438c39f296c2a8ca083229c49f27 languageName: node linkType: hard -"hast-to-hyperscript@npm:^9.0.0": - version: 9.0.1 - resolution: "hast-to-hyperscript@npm:9.0.1" - dependencies: - "@types/unist": ^2.0.3 - comma-separated-tokens: ^1.0.0 - property-information: ^5.3.0 - space-separated-tokens: ^1.0.0 - style-to-object: ^0.3.0 - unist-util-is: ^4.0.0 - web-namespaces: ^1.0.0 - checksum: de570d789853018fff2fd38fc096549b9814e366b298f60c90c159a57018230eefc44d46a246027b0e2426ed9e99f2e270050bc183d5bdfe4c9487c320b392cd +"is-arrayish@npm:^0.2.1": + version: 0.2.1 + resolution: "is-arrayish@npm:0.2.1" + checksum: eef4417e3c10e60e2c810b6084942b3ead455af16c4509959a27e490e7aee87cfb3f38e01bbde92220b528a0ee1a18d52b787e1458ee86174d8c7f0e58cd488f languageName: node linkType: hard -"hast-util-from-parse5@npm:^6.0.0": - version: 6.0.1 - resolution: 
"hast-util-from-parse5@npm:6.0.1" +"is-binary-path@npm:~2.1.0": + version: 2.1.0 + resolution: "is-binary-path@npm:2.1.0" dependencies: - "@types/parse5": ^5.0.0 - hastscript: ^6.0.0 - property-information: ^5.0.0 - vfile: ^4.0.0 - vfile-location: ^3.2.0 - web-namespaces: ^1.0.0 - checksum: 4daa78201468af7779161e7caa2513c329830778e0528481ab16b3e1bcef4b831f6285b526aacdddbee802f3bd9d64df55f80f010591ea1916da535e3a923b83 + binary-extensions: ^2.0.0 + checksum: 84192eb88cff70d320426f35ecd63c3d6d495da9d805b19bc65b518984b7c0760280e57dbf119b7e9be6b161784a5a673ab2c6abe83abb5198a432232ad5b35c languageName: node linkType: hard -"hast-util-is-element@npm:^1.0.0, hast-util-is-element@npm:^1.1.0": - version: 1.1.0 - resolution: "hast-util-is-element@npm:1.1.0" - checksum: 30fad3f65e7ab2f0efd5db9e7344d0820b70971988dfe79f62d8447598b2a1ce8a59cd4bfc05ae0d9a1c451b9b53cbe1023743d7eac764d64720b6b73475f62f +"is-buffer@npm:^2.0.0, is-buffer@npm:^2.0.5": + version: 2.0.5 + resolution: "is-buffer@npm:2.0.5" + checksum: 764c9ad8b523a9f5a32af29bdf772b08eb48c04d2ad0a7240916ac2688c983bf5f8504bf25b35e66240edeb9d9085461f9b5dae1f3d2861c6b06a65fe983de42 languageName: node linkType: hard -"hast-util-parse-selector@npm:^2.0.0": - version: 2.2.5 - resolution: "hast-util-parse-selector@npm:2.2.5" - checksum: 22ee4afbd11754562144cb3c4f3ec52524dafba4d90ee52512902d17cf11066d83b38f7bdf6ca571bbc2541f07ba30db0d234657b6ecb8ca4631587466459605 +"is-builtin-module@npm:^3.1.0, is-builtin-module@npm:^3.2.1": + version: 3.2.1 + resolution: "is-builtin-module@npm:3.2.1" + dependencies: + builtin-modules: ^3.3.0 + checksum: e8f0ffc19a98240bda9c7ada84d846486365af88d14616e737d280d378695c8c448a621dcafc8332dbf0fcd0a17b0763b845400709963fa9151ddffece90ae88 languageName: node linkType: hard -"hast-util-raw@npm:6.0.1": - version: 6.0.1 - resolution: "hast-util-raw@npm:6.0.1" - dependencies: - "@types/hast": ^2.0.0 - hast-util-from-parse5: ^6.0.0 - hast-util-to-parse5: ^6.0.0 - html-void-elements: ^1.0.0 - parse5: ^6.0.0 - unist-util-position: ^3.0.0 - vfile: ^4.0.0 - web-namespaces: ^1.0.0 - xtend: ^4.0.0 - zwitch: ^1.0.0 - checksum: f6d960644f9fbbe0b92d0227b20a24d659cce021d5f9fd218e077154931b4524ee920217b7fd5a45ec2736ec1dee53de9209fe449f6f89454c01d225ff0e7851 +"is-callable@npm:^1.1.3": + version: 1.2.7 + resolution: "is-callable@npm:1.2.7" + checksum: 61fd57d03b0d984e2ed3720fb1c7a897827ea174bd44402878e059542ea8c4aeedee0ea0985998aa5cc2736b2fa6e271c08587addb5b3959ac52cf665173d1ac languageName: node linkType: hard -"hast-util-to-parse5@npm:^6.0.0": - version: 6.0.0 - resolution: "hast-util-to-parse5@npm:6.0.0" +"is-ci@npm:^2.0.0": + version: 2.0.0 + resolution: "is-ci@npm:2.0.0" dependencies: - hast-to-hyperscript: ^9.0.0 - property-information: ^5.0.0 - web-namespaces: ^1.0.0 - xtend: ^4.0.0 - zwitch: ^1.0.0 - checksum: 91a36244e37df1d63c8b7e865ab0c0a25bb7396155602be005cf71d95c348e709568f80e0f891681a3711d733ad896e70642dc41a05b574eddf2e07d285408a8 + ci-info: ^2.0.0 + bin: + is-ci: bin.js + checksum: 77b869057510f3efa439bbb36e9be429d53b3f51abd4776eeea79ab3b221337fe1753d1e50058a9e2c650d38246108beffb15ccfd443929d77748d8c0cc90144 languageName: node linkType: hard -"hast-util-to-text@npm:^2.0.0": - version: 2.0.1 - resolution: "hast-util-to-text@npm:2.0.1" +"is-ci@npm:^3.0.1": + version: 3.0.1 + resolution: "is-ci@npm:3.0.1" dependencies: - hast-util-is-element: ^1.0.0 - repeat-string: ^1.0.0 - unist-util-find-after: ^3.0.0 - checksum: 
4e7960b414b7a6b2f0180e4af416cd8ae3c7ba1531d7eaec7e6dc9509daf88308784bbf5b94885384dccc42abcb74cc6cc26755c76914d646f32aa6bc32ea34b + ci-info: ^3.2.0 + bin: + is-ci: bin.js + checksum: 192c66dc7826d58f803ecae624860dccf1899fc1f3ac5505284c0a5cf5f889046ffeb958fa651e5725d5705c5bcb14f055b79150ea5fcad7456a9569de60260e languageName: node linkType: hard -"hastscript@npm:^6.0.0": - version: 6.0.0 - resolution: "hastscript@npm:6.0.0" +"is-core-module@npm:^2.13.0": + version: 2.13.1 + resolution: "is-core-module@npm:2.13.1" dependencies: - "@types/hast": ^2.0.0 - comma-separated-tokens: ^1.0.0 - hast-util-parse-selector: ^2.0.0 - property-information: ^5.0.0 - space-separated-tokens: ^1.0.0 - checksum: 5e50b85af0d2cb7c17979cb1ddca75d6b96b53019dd999b39e7833192c9004201c3cee6445065620ea05d0087d9ae147a4844e582d64868be5bc6b0232dfe52d + hasown: ^2.0.0 + checksum: 256559ee8a9488af90e4bad16f5583c6d59e92f0742e9e8bb4331e758521ee86b810b93bae44f390766ffbc518a0488b18d9dab7da9a5ff997d499efc9403f7c languageName: node linkType: hard -"he@npm:1.2.0, he@npm:^1.2.0": - version: 1.2.0 - resolution: "he@npm:1.2.0" - bin: - he: bin/he - checksum: 3d4d6babccccd79c5c5a3f929a68af33360d6445587d628087f39a965079d84f18ce9c3d3f917ee1e3978916fc833bb8b29377c3b403f919426f91bc6965e7a7 +"is-decimal@npm:^1.0.0": + version: 1.0.4 + resolution: "is-decimal@npm:1.0.4" + checksum: ed483a387517856dc395c68403a10201fddcc1b63dc56513fbe2fe86ab38766120090ecdbfed89223d84ca8b1cd28b0641b93cb6597b6e8f4c097a7c24e3fb96 languageName: node linkType: hard -"history@npm:^4.9.0": - version: 4.10.1 - resolution: "history@npm:4.10.1" - dependencies: - "@babel/runtime": ^7.1.2 - loose-envify: ^1.2.0 - resolve-pathname: ^3.0.0 - tiny-invariant: ^1.0.2 - tiny-warning: ^1.0.0 - value-equal: ^1.0.1 - checksum: addd84bc4683929bae4400419b5af132ff4e4e9b311a0d4e224579ea8e184a6b80d7f72c55927e4fa117f69076a9e47ce082d8d0b422f1a9ddac7991490ca1d0 +"is-decimal@npm:^2.0.0": + version: 2.0.1 + resolution: "is-decimal@npm:2.0.1" + checksum: 97132de7acdce77caa7b797632970a2ecd649a88e715db0e4dbc00ab0708b5e7574ba5903962c860cd4894a14fd12b100c0c4ac8aed445cf6f55c6cf747a4158 languageName: node linkType: hard -"hmac-drbg@npm:^1.0.1": - version: 1.0.1 - resolution: "hmac-drbg@npm:1.0.1" - dependencies: - hash.js: ^1.0.3 - minimalistic-assert: ^1.0.0 - minimalistic-crypto-utils: ^1.0.1 - checksum: bd30b6a68d7f22d63f10e1888aee497d7c2c5c0bb469e66bbdac99f143904d1dfe95f8131f95b3e86c86dd239963c9d972fcbe147e7cffa00e55d18585c43fe0 +"is-docker@npm:^2.0.0, is-docker@npm:^2.1.1": + version: 2.2.1 + resolution: "is-docker@npm:2.2.1" + bin: + is-docker: cli.js + checksum: 3fef7ddbf0be25958e8991ad941901bf5922ab2753c46980b60b05c1bf9c9c2402d35e6dc32e4380b980ef5e1970a5d9d5e5aa2e02d77727c3b6b5e918474c56 languageName: node linkType: hard -"hoist-non-react-statics@npm:^3.1.0": - version: 3.3.2 - resolution: "hoist-non-react-statics@npm:3.3.2" - dependencies: - react-is: ^16.7.0 - checksum: b1538270429b13901ee586aa44f4cc3ecd8831c061d06cb8322e50ea17b3f5ce4d0e2e66394761e6c8e152cd8c34fb3b4b690116c6ce2bd45b18c746516cb9e8 +"is-docker@npm:^3.0.0": + version: 3.0.0 + resolution: "is-docker@npm:3.0.0" + bin: + is-docker: cli.js + checksum: b698118f04feb7eaf3338922bd79cba064ea54a1c3db6ec8c0c8d8ee7613e7e5854d802d3ef646812a8a3ace81182a085dfa0a71cc68b06f3fa794b9783b3c90 languageName: node linkType: hard -"hpack.js@npm:^2.1.6": - version: 2.1.6 - resolution: "hpack.js@npm:2.1.6" - dependencies: - inherits: ^2.0.1 - obuf: ^1.0.0 - readable-stream: ^2.0.1 - wbuf: ^1.1.0 - checksum: 
2de144115197967ad6eeee33faf41096c6ba87078703c5cb011632dcfbffeb45784569e0cf02c317bd79c48375597c8ec88c30fff5bb0b023e8f654fb6e9c06e +"is-extendable@npm:^0.1.0": + version: 0.1.1 + resolution: "is-extendable@npm:0.1.1" + checksum: 3875571d20a7563772ecc7a5f36cb03167e9be31ad259041b4a8f73f33f885441f778cee1f1fe0085eb4bc71679b9d8c923690003a36a6a5fdf8023e6e3f0672 languageName: node linkType: hard -"html-entities@npm:^2.3.2": - version: 2.4.0 - resolution: "html-entities@npm:2.4.0" - checksum: 25bea32642ce9ebd0eedc4d24381883ecb0335ccb8ac26379a0958b9b16652fdbaa725d70207ce54a51db24103436a698a8e454397d3ba8ad81460224751f1dc +"is-extglob@npm:^2.1.1": + version: 2.1.1 + resolution: "is-extglob@npm:2.1.1" + checksum: df033653d06d0eb567461e58a7a8c9f940bd8c22274b94bf7671ab36df5719791aae15eef6d83bbb5e23283967f2f984b8914559d4449efda578c775c4be6f85 languageName: node linkType: hard -"html-escaper@npm:^2.0.0": - version: 2.0.2 - resolution: "html-escaper@npm:2.0.2" - checksum: d2df2da3ad40ca9ee3a39c5cc6475ef67c8f83c234475f24d8e9ce0dc80a2c82df8e1d6fa78ddd1e9022a586ea1bd247a615e80a5cd9273d90111ddda7d9e974 +"is-fullwidth-code-point@npm:^3.0.0": + version: 3.0.0 + resolution: "is-fullwidth-code-point@npm:3.0.0" + checksum: 44a30c29457c7fb8f00297bce733f0a64cd22eca270f83e58c105e0d015e45c019491a4ab2faef91ab51d4738c670daff901c799f6a700e27f7314029e99e348 languageName: node linkType: hard -"html-minifier-terser@npm:^6.0.2, html-minifier-terser@npm:^6.1.0": - version: 6.1.0 - resolution: "html-minifier-terser@npm:6.1.0" +"is-generator-function@npm:^1.0.7": + version: 1.0.10 + resolution: "is-generator-function@npm:1.0.10" dependencies: - camel-case: ^4.1.2 - clean-css: ^5.2.2 - commander: ^8.3.0 - he: ^1.2.0 - param-case: ^3.0.4 - relateurl: ^0.2.7 - terser: ^5.10.0 - bin: - html-minifier-terser: cli.js - checksum: ac52c14006476f773204c198b64838477859dc2879490040efab8979c0207424da55d59df7348153f412efa45a0840a1ca3c757bf14767d23a15e3e389d37a93 + has-tostringtag: ^1.0.0 + checksum: d54644e7dbaccef15ceb1e5d91d680eb5068c9ee9f9eb0a9e04173eb5542c9b51b5ab52c5537f5703e48d5fddfd376817c1ca07a84a407b7115b769d4bdde72b languageName: node linkType: hard -"html-tags@npm:^3.2.0": - version: 3.3.1 - resolution: "html-tags@npm:3.3.1" - checksum: b4ef1d5a76b678e43cce46e3783d563607b1d550cab30b4f511211564574770aa8c658a400b100e588bc60b8234e59b35ff72c7851cc28f3b5403b13a2c6cbce +"is-glob@npm:^4.0.0, is-glob@npm:^4.0.1, is-glob@npm:^4.0.3, is-glob@npm:~4.0.1": + version: 4.0.3 + resolution: "is-glob@npm:4.0.3" + dependencies: + is-extglob: ^2.1.1 + checksum: d381c1319fcb69d341cc6e6c7cd588e17cd94722d9a32dbd60660b993c4fb7d0f19438674e68dfec686d09b7c73139c9166b47597f846af387450224a8101ab4 languageName: node linkType: hard -"html-void-elements@npm:^1.0.0": - version: 1.0.5 - resolution: "html-void-elements@npm:1.0.5" - checksum: 1a56f4f6cfbeb994c21701ff72b4b7f556fe784a70e5e554d1566ff775af83b91ea93f10664f039a67802d9f7b40d4a7f1ed20312bab47bd88d89bd792ea84ca +"is-hex-prefixed@npm:1.0.0": + version: 1.0.0 + resolution: "is-hex-prefixed@npm:1.0.0" + checksum: 5ac58e6e528fb029cc43140f6eeb380fad23d0041cc23154b87f7c9a1b728bcf05909974e47248fd0b7fcc11ba33cf7e58d64804883056fabd23e2b898be41de languageName: node linkType: hard -"html-webpack-plugin@npm:^5.5.0": - version: 5.5.3 - resolution: "html-webpack-plugin@npm:5.5.3" - dependencies: - "@types/html-minifier-terser": ^6.0.0 - html-minifier-terser: ^6.0.2 - lodash: ^4.17.21 - pretty-error: ^4.0.0 - tapable: ^2.0.0 - peerDependencies: - webpack: ^5.20.0 - checksum: 
ccf685195739c372ad641bbd0c9100a847904f34eedc7aff3ece7856cd6c78fd3746d2d615af1bb71e5727993fe711b89e9b744f033ed3fde646540bf5d5e954 +"is-hexadecimal@npm:^1.0.0": + version: 1.0.4 + resolution: "is-hexadecimal@npm:1.0.4" + checksum: a452e047587b6069332d83130f54d30da4faf2f2ebaa2ce6d073c27b5703d030d58ed9e0b729c8e4e5b52c6f1dab26781bb77b7bc6c7805f14f320e328ff8cd5 languageName: node linkType: hard -"htmlparser2@npm:^6.1.0": - version: 6.1.0 - resolution: "htmlparser2@npm:6.1.0" - dependencies: - domelementtype: ^2.0.1 - domhandler: ^4.0.0 - domutils: ^2.5.2 - entities: ^2.0.0 - checksum: 81a7b3d9c3bb9acb568a02fc9b1b81ffbfa55eae7f1c41ae0bf840006d1dbf54cb3aa245b2553e2c94db674840a9f0fdad7027c9a9d01a062065314039058c4e +"is-hexadecimal@npm:^2.0.0": + version: 2.0.1 + resolution: "is-hexadecimal@npm:2.0.1" + checksum: 66a2ea85994c622858f063f23eda506db29d92b52580709eb6f4c19550552d4dcf3fb81952e52f7cf972097237959e00adc7bb8c9400cd12886e15bf06145321 languageName: node linkType: hard -"htmlparser2@npm:^8.0.1": - version: 8.0.2 - resolution: "htmlparser2@npm:8.0.2" +"is-inside-container@npm:^1.0.0": + version: 1.0.0 + resolution: "is-inside-container@npm:1.0.0" dependencies: - domelementtype: ^2.3.0 - domhandler: ^5.0.3 - domutils: ^3.0.1 - entities: ^4.4.0 - checksum: 29167a0f9282f181da8a6d0311b76820c8a59bc9e3c87009e21968264c2987d2723d6fde5a964d4b7b6cba663fca96ffb373c06d8223a85f52a6089ced942700 + is-docker: ^3.0.0 + bin: + is-inside-container: cli.js + checksum: c50b75a2ab66ab3e8b92b3bc534e1ea72ca25766832c0623ac22d134116a98bcf012197d1caabe1d1c4bd5f84363d4aa5c36bb4b585fbcaf57be172cd10a1a03 languageName: node linkType: hard -"http-assert@npm:^1.3.0": - version: 1.5.0 - resolution: "http-assert@npm:1.5.0" +"is-installed-globally@npm:^0.4.0": + version: 0.4.0 + resolution: "is-installed-globally@npm:0.4.0" dependencies: - deep-equal: ~1.0.1 - http-errors: ~1.8.0 - checksum: 69c9b3c14cf8b2822916360a365089ce936c883c49068f91c365eccba5c141a9964d19fdda589150a480013bf503bf37d8936c732e9635819339e730ab0e7527 + global-dirs: ^3.0.0 + is-path-inside: ^3.0.2 + checksum: 3359840d5982d22e9b350034237b2cda2a12bac1b48a721912e1ab8e0631dd07d45a2797a120b7b87552759a65ba03e819f1bd63f2d7ab8657ec0b44ee0bf399 languageName: node linkType: hard -"http-cache-semantics@npm:^4.0.0, http-cache-semantics@npm:^4.1.1": - version: 4.1.1 - resolution: "http-cache-semantics@npm:4.1.1" - checksum: 83ac0bc60b17a3a36f9953e7be55e5c8f41acc61b22583060e8dedc9dd5e3607c823a88d0926f9150e571f90946835c7fe150732801010845c72cd8bbff1a236 +"is-lambda@npm:^1.0.1": + version: 1.0.1 + resolution: "is-lambda@npm:1.0.1" + checksum: 93a32f01940220532e5948538699ad610d5924ac86093fcee83022252b363eb0cc99ba53ab084a04e4fb62bf7b5731f55496257a4c38adf87af9c4d352c71c35 languageName: node linkType: hard -"http-deceiver@npm:^1.2.7": - version: 1.2.7 - resolution: "http-deceiver@npm:1.2.7" - checksum: 64d7d1ae3a6933eb0e9a94e6f27be4af45a53a96c3c34e84ff57113787105a89fff9d1c3df263ef63add823df019b0e8f52f7121e32393bb5ce9a713bf100b41 +"is-module@npm:^1.0.0": + version: 1.0.0 + resolution: "is-module@npm:1.0.0" + checksum: 8cd5390730c7976fb4e8546dd0b38865ee6f7bacfa08dfbb2cc07219606755f0b01709d9361e01f13009bbbd8099fa2927a8ed665118a6105d66e40f1b838c3f languageName: node linkType: hard -"http-errors@npm:2.0.0": - version: 2.0.0 - resolution: "http-errors@npm:2.0.0" +"is-nan@npm:^1.3.2": + version: 1.3.2 + resolution: "is-nan@npm:1.3.2" dependencies: - depd: 2.0.0 - inherits: 2.0.4 - setprototypeof: 1.2.0 - statuses: 2.0.1 - toidentifier: 1.0.1 - checksum: 
9b0a3782665c52ce9dc658a0d1560bcb0214ba5699e4ea15aefb2a496e2ca83db03ebc42e1cce4ac1f413e4e0d2d736a3fd755772c556a9a06853ba2a0b7d920 + call-bind: ^1.0.0 + define-properties: ^1.1.3 + checksum: 5dfadcef6ad12d3029d43643d9800adbba21cf3ce2ec849f734b0e14ee8da4070d82b15fdb35138716d02587c6578225b9a22779cab34888a139cc43e4e3610a languageName: node linkType: hard -"http-errors@npm:^1.6.3, http-errors@npm:^1.7.3, http-errors@npm:~1.8.0": - version: 1.8.1 - resolution: "http-errors@npm:1.8.1" - dependencies: - depd: ~1.1.2 - inherits: 2.0.4 - setprototypeof: 1.2.0 - statuses: ">= 1.5.0 < 2" - toidentifier: 1.0.1 - checksum: d3c7e7e776fd51c0a812baff570bdf06fe49a5dc448b700ab6171b1250e4cf7db8b8f4c0b133e4bfe2451022a5790c1ca6c2cae4094dedd6ac8304a1267f91d2 +"is-npm@npm:^5.0.0": + version: 5.0.0 + resolution: "is-npm@npm:5.0.0" + checksum: 9baff02b0c69a3d3c79b162cb2f9e67fb40ef6d172c16601b2e2471c21e9a4fa1fc9885a308d7bc6f3a3cd2a324c27fa0bf284c133c3349bb22571ab70d041cc languageName: node linkType: hard -"http-errors@npm:~1.6.2": - version: 1.6.3 - resolution: "http-errors@npm:1.6.3" - dependencies: - depd: ~1.1.2 - inherits: 2.0.3 - setprototypeof: 1.1.0 - statuses: ">= 1.4.0 < 2" - checksum: a9654ee027e3d5de305a56db1d1461f25709ac23267c6dc28cdab8323e3f96caa58a9a6a5e93ac15d7285cee0c2f019378c3ada9026e7fe19c872d695f27de7c +"is-npm@npm:^6.0.0": + version: 6.0.0 + resolution: "is-npm@npm:6.0.0" + checksum: fafe1ddc772345f5460514891bb8014376904ccdbddd59eee7525c9adcc08d426933f28b087bef3e17524da7ebf35c03ef484ff3b6ba9d5fecd8c6e6a7d4bf11 languageName: node linkType: hard -"http-parser-js@npm:>=0.5.1": - version: 0.5.8 - resolution: "http-parser-js@npm:0.5.8" - checksum: 6bbdf2429858e8cf13c62375b0bfb6dc3955ca0f32e58237488bc86cd2378f31d31785fd3ac4ce93f1c74e0189cf8823c91f5cb061696214fd368d2452dc871d +"is-number@npm:^7.0.0": + version: 7.0.0 + resolution: "is-number@npm:7.0.0" + checksum: 456ac6f8e0f3111ed34668a624e45315201dff921e5ac181f8ec24923b99e9f32ca1a194912dc79d539c97d33dba17dc635202ff0b2cf98326f608323276d27a languageName: node linkType: hard -"http-proxy-agent@npm:^5.0.0": - version: 5.0.0 - resolution: "http-proxy-agent@npm:5.0.0" - dependencies: - "@tootallnate/once": 2 - agent-base: 6 - debug: 4 - checksum: e2ee1ff1656a131953839b2a19cd1f3a52d97c25ba87bd2559af6ae87114abf60971e498021f9b73f9fd78aea8876d1fb0d4656aac8a03c6caa9fc175f22b786 +"is-obj@npm:^1.0.1": + version: 1.0.1 + resolution: "is-obj@npm:1.0.1" + checksum: 3ccf0efdea12951e0b9c784e2b00e77e87b2f8bd30b42a498548a8afcc11b3287342a2030c308e473e93a7a19c9ea7854c99a8832a476591c727df2a9c79796c languageName: node linkType: hard -"http-proxy-middleware@npm:^2.0.3": - version: 2.0.6 - resolution: "http-proxy-middleware@npm:2.0.6" - dependencies: - "@types/http-proxy": ^1.17.8 - http-proxy: ^1.18.1 - is-glob: ^4.0.1 - is-plain-obj: ^3.0.0 - micromatch: ^4.0.2 - peerDependencies: - "@types/express": ^4.17.13 - peerDependenciesMeta: - "@types/express": - optional: true - checksum: 2ee85bc878afa6cbf34491e972ece0f5be0a3e5c98a60850cf40d2a9a5356e1fc57aab6cff33c1fc37691b0121c3a42602d2b1956c52577e87a5b77b62ae1c3a +"is-obj@npm:^2.0.0": + version: 2.0.0 + resolution: "is-obj@npm:2.0.0" + checksum: c9916ac8f4621962a42f5e80e7ffdb1d79a3fab7456ceaeea394cd9e0858d04f985a9ace45be44433bf605673c8be8810540fe4cc7f4266fc7526ced95af5a08 languageName: node linkType: hard -"http-proxy@npm:^1.18.1": - version: 1.18.1 - resolution: "http-proxy@npm:1.18.1" - dependencies: - eventemitter3: ^4.0.0 - follow-redirects: ^1.0.0 - requires-port: ^1.0.0 - checksum: 
f5bd96bf83e0b1e4226633dbb51f8b056c3e6321917df402deacec31dd7fe433914fc7a2c1831cf7ae21e69c90b3a669b8f434723e9e8b71fd68afe30737b6a5 +"is-object@npm:~0.1.2": + version: 0.1.2 + resolution: "is-object@npm:0.1.2" + checksum: 7e500b15f4748278ea0a8d43b1283e75e866c055e4a790389087ce652eab8a9343fd74710738f0fdf13a323c31330d65bdcc106f38e9bb7bc0b9c60ae3fd2a2d languageName: node linkType: hard -"https-proxy-agent@npm:5.0.1, https-proxy-agent@npm:^5.0.0": - version: 5.0.1 - resolution: "https-proxy-agent@npm:5.0.1" - dependencies: - agent-base: 6 - debug: 4 - checksum: 571fccdf38184f05943e12d37d6ce38197becdd69e58d03f43637f7fa1269cf303a7d228aa27e5b27bbd3af8f09fd938e1c91dcfefff2df7ba77c20ed8dfc765 +"is-path-cwd@npm:^2.2.0": + version: 2.2.0 + resolution: "is-path-cwd@npm:2.2.0" + checksum: 46a840921bb8cc0dc7b5b423a14220e7db338072a4495743a8230533ce78812dc152548c86f4b828411fe98c5451959f07cf841c6a19f611e46600bd699e8048 languageName: node linkType: hard -"human-signals@npm:^2.1.0": - version: 2.1.0 - resolution: "human-signals@npm:2.1.0" - checksum: b87fd89fce72391625271454e70f67fe405277415b48bcc0117ca73d31fa23a4241787afdc8d67f5a116cf37258c052f59ea82daffa72364d61351423848e3b8 +"is-path-inside@npm:^3.0.2, is-path-inside@npm:^3.0.3": + version: 3.0.3 + resolution: "is-path-inside@npm:3.0.3" + checksum: abd50f06186a052b349c15e55b182326f1936c89a78bf6c8f2b707412517c097ce04bc49a0ca221787bc44e1049f51f09a2ffb63d22899051988d3a618ba13e9 languageName: node linkType: hard -"human-signals@npm:^4.3.0": - version: 4.3.1 - resolution: "human-signals@npm:4.3.1" - checksum: 6f12958df3f21b6fdaf02d90896c271df00636a31e2bbea05bddf817a35c66b38a6fdac5863e2df85bd52f34958997f1f50350ff97249e1dff8452865d5235d1 +"is-plain-obj@npm:^2.0.0, is-plain-obj@npm:^2.1.0": + version: 2.1.0 + resolution: "is-plain-obj@npm:2.1.0" + checksum: cec9100678b0a9fe0248a81743041ed990c2d4c99f893d935545cfbc42876cbe86d207f3b895700c690ad2fa520e568c44afc1605044b535a7820c1d40e38daa languageName: node linkType: hard -"humanize-ms@npm:^1.2.1": - version: 1.2.1 - resolution: "humanize-ms@npm:1.2.1" - dependencies: - ms: ^2.0.0 - checksum: 9c7a74a2827f9294c009266c82031030eae811ca87b0da3dceb8d6071b9bde22c9f3daef0469c3c533cc67a97d8a167cd9fc0389350e5f415f61a79b171ded16 +"is-plain-obj@npm:^3.0.0": + version: 3.0.0 + resolution: "is-plain-obj@npm:3.0.0" + checksum: a6ebdf8e12ab73f33530641972a72a4b8aed6df04f762070d823808303e4f76d87d5ea5bd76f96a7bbe83d93f04ac7764429c29413bd9049853a69cb630fb21c languageName: node linkType: hard -"iconv-lite@npm:0.4.24": - version: 0.4.24 - resolution: "iconv-lite@npm:0.4.24" - dependencies: - safer-buffer: ">= 2.1.2 < 3" - checksum: bd9f120f5a5b306f0bc0b9ae1edeb1577161503f5f8252a20f1a9e56ef8775c9959fd01c55f2d3a39d9a8abaf3e30c1abeb1895f367dcbbe0a8fd1c9ca01c4f6 +"is-plain-obj@npm:^4.0.0": + version: 4.1.0 + resolution: "is-plain-obj@npm:4.1.0" + checksum: 6dc45da70d04a81f35c9310971e78a6a3c7a63547ef782e3a07ee3674695081b6ca4e977fbb8efc48dae3375e0b34558d2bcd722aec9bddfa2d7db5b041be8ce languageName: node linkType: hard -"iconv-lite@npm:^0.6.2": - version: 0.6.3 - resolution: "iconv-lite@npm:0.6.3" +"is-plain-object@npm:^2.0.4": + version: 2.0.4 + resolution: "is-plain-object@npm:2.0.4" dependencies: - safer-buffer: ">= 2.1.2 < 3.0.0" - checksum: 3f60d47a5c8fc3313317edfd29a00a692cc87a19cac0159e2ce711d0ebc9019064108323b5e493625e25594f11c6236647d8e256fbe7a58f4a3b33b89e6d30bf - languageName: node - linkType: hard - -"icss-utils@npm:^5.0.0, icss-utils@npm:^5.1.0": - version: 5.1.0 - resolution: "icss-utils@npm:5.1.0" - peerDependencies: - postcss: ^8.1.0 
- checksum: 5c324d283552b1269cfc13a503aaaa172a280f914e5b81544f3803bc6f06a3b585fb79f66f7c771a2c052db7982c18bf92d001e3b47282e3abbbb4c4cc488d68 + isobject: ^3.0.1 + checksum: 2a401140cfd86cabe25214956ae2cfee6fbd8186809555cd0e84574f88de7b17abacb2e477a6a658fa54c6083ecbda1e6ae404c7720244cd198903848fca70ca languageName: node linkType: hard -"ieee754@npm:^1.1.13, ieee754@npm:^1.2.1": - version: 1.2.1 - resolution: "ieee754@npm:1.2.1" - checksum: 5144c0c9815e54ada181d80a0b810221a253562422e7c6c3a60b1901154184f49326ec239d618c416c1c5945a2e197107aee8d986a3dd836b53dffefd99b5e7e +"is-plain-object@npm:^5.0.0": + version: 5.0.0 + resolution: "is-plain-object@npm:5.0.0" + checksum: e32d27061eef62c0847d303125440a38660517e586f2f3db7c9d179ae5b6674ab0f469d519b2e25c147a1a3bc87156d0d5f4d8821e0ce4a9ee7fe1fcf11ce45c languageName: node linkType: hard -"ignore-by-default@npm:^2.1.0": - version: 2.1.0 - resolution: "ignore-by-default@npm:2.1.0" - checksum: 2b2df4622b6a07a3e91893987be8f060dc553f7736b67e72aa2312041c450a6fa8371733d03c42f45a02e47ec824e961c2fba63a3d94fc59cbd669220a5b0d7a +"is-port-reachable@npm:4.0.0": + version: 4.0.0 + resolution: "is-port-reachable@npm:4.0.0" + checksum: 47b7e10db8edcef27fbf9e50f0de85ad368d35688790ca64a13db67260111ac5f4b98989b11af06199fa93f25d810bd09a5b21b2c2646529668638f7c34d3c04 languageName: node linkType: hard -"ignore@npm:^5.2.0, ignore@npm:^5.2.4": - version: 5.2.4 - resolution: "ignore@npm:5.2.4" - checksum: 3d4c309c6006e2621659311783eaea7ebcd41fe4ca1d78c91c473157ad6666a57a2df790fe0d07a12300d9aac2888204d7be8d59f9aaf665b1c7fcdb432517ef +"is-reference@npm:^3.0.0": + version: 3.0.2 + resolution: "is-reference@npm:3.0.2" + dependencies: + "@types/estree": "*" + checksum: ac3bf5626fe9d0afbd7454760d73c47f16b9f471401b9749721ad3b66f0a39644390382acf88ca9d029c95782c1e2ec65662855e3ba91acf52d82231247a7fd3 languageName: node linkType: hard -"image-size@npm:^1.0.1": - version: 1.0.2 - resolution: "image-size@npm:1.0.2" - dependencies: - queue: 6.0.2 - bin: - image-size: bin/image-size.js - checksum: 01745fdb47f87cecf538e69c63f9adc5bfab30a345345c2de91105f3afbd1bfcfba1256af02bf3323077b33b0004469a837e077bf0cbb9c907e9c1e9e7547585 +"is-regexp@npm:^1.0.0": + version: 1.0.0 + resolution: "is-regexp@npm:1.0.0" + checksum: be692828e24cba479ec33644326fa98959ec68ba77965e0291088c1a741feaea4919d79f8031708f85fd25e39de002b4520622b55460660b9c369e6f7187faef languageName: node linkType: hard -"immediate@npm:^3.2.3": - version: 3.3.0 - resolution: "immediate@npm:3.3.0" - checksum: 634b4305101e2452eba6c07d485bf3e415995e533c94b9c3ffbc37026fa1be34def6e4f2276b0dc2162a3f91628564a4bfb26280278b89d3ee54624e854d2f5f +"is-root@npm:^2.1.0": + version: 2.1.0 + resolution: "is-root@npm:2.1.0" + checksum: 37eea0822a2a9123feb58a9d101558ba276771a6d830f87005683349a9acff15958a9ca590a44e778c6b335660b83e85c744789080d734f6081a935a4880aee2 languageName: node linkType: hard -"immer@npm:^9.0.7": - version: 9.0.21 - resolution: "immer@npm:9.0.21" - checksum: 70e3c274165995352f6936695f0ef4723c52c92c92dd0e9afdfe008175af39fa28e76aafb3a2ca9d57d1fb8f796efc4dd1e1cc36f18d33fa5b74f3dfb0375432 +"is-stream@npm:^2.0.0": + version: 2.0.1 + resolution: "is-stream@npm:2.0.1" + checksum: b8e05ccdf96ac330ea83c12450304d4a591f9958c11fd17bed240af8d5ffe08aedafa4c0f4cfccd4d28dc9d4d129daca1023633d5c11601a6cbc77521f6fae66 languageName: node linkType: hard -"immutable@npm:^4.0.0-rc.12": - version: 4.3.4 - resolution: "immutable@npm:4.3.4" - checksum: 
de3edd964c394bab83432429d3fb0b4816b42f56050f2ca913ba520bd3068ec3e504230d0800332d3abc478616e8f55d3787424a90d0952e6aba864524f1afc3 +"is-stream@npm:^3.0.0": + version: 3.0.0 + resolution: "is-stream@npm:3.0.0" + checksum: 172093fe99119ffd07611ab6d1bcccfe8bc4aa80d864b15f43e63e54b7abc71e779acd69afdb854c4e2a67fdc16ae710e370eda40088d1cfc956a50ed82d8f16 languageName: node linkType: hard -"import-fresh@npm:^3.1.0, import-fresh@npm:^3.2.1, import-fresh@npm:^3.3.0": - version: 3.3.0 - resolution: "import-fresh@npm:3.3.0" +"is-typed-array@npm:^1.1.3": + version: 1.1.12 + resolution: "is-typed-array@npm:1.1.12" dependencies: - parent-module: ^1.0.0 - resolve-from: ^4.0.0 - checksum: 2cacfad06e652b1edc50be650f7ec3be08c5e5a6f6d12d035c440a42a8cc028e60a5b99ca08a77ab4d6b1346da7d971915828f33cdab730d3d42f08242d09baa + which-typed-array: ^1.1.11 + checksum: 4c89c4a3be07186caddadf92197b17fda663a9d259ea0d44a85f171558270d36059d1c386d34a12cba22dfade5aba497ce22778e866adc9406098c8fc4771796 languageName: node linkType: hard -"import-lazy@npm:^2.1.0": - version: 2.1.0 - resolution: "import-lazy@npm:2.1.0" - checksum: 05294f3b9dd4971d3a996f0d2f176410fb6745d491d6e73376429189f5c1c3d290548116b2960a7cf3e89c20cdf11431739d1d2d8c54b84061980795010e803a +"is-typedarray@npm:^1.0.0": + version: 1.0.0 + resolution: "is-typedarray@npm:1.0.0" + checksum: 3508c6cd0a9ee2e0df2fa2e9baabcdc89e911c7bd5cf64604586697212feec525aa21050e48affb5ffc3df20f0f5d2e2cf79b08caa64e1ccc9578e251763aef7 languageName: node linkType: hard -"imurmurhash@npm:^0.1.4": - version: 0.1.4 - resolution: "imurmurhash@npm:0.1.4" - checksum: 7cae75c8cd9a50f57dadd77482359f659eaebac0319dd9368bcd1714f55e65badd6929ca58569da2b6494ef13fdd5598cd700b1eba23f8b79c5f19d195a3ecf7 +"is-unicode-supported@npm:^0.1.0": + version: 0.1.0 + resolution: "is-unicode-supported@npm:0.1.0" + checksum: a2aab86ee7712f5c2f999180daaba5f361bdad1efadc9610ff5b8ab5495b86e4f627839d085c6530363c6d6d4ecbde340fb8e54bdb83da4ba8e0865ed5513c52 languageName: node linkType: hard -"indent-string@npm:^4.0.0": - version: 4.0.0 - resolution: "indent-string@npm:4.0.0" - checksum: 824cfb9929d031dabf059bebfe08cf3137365e112019086ed3dcff6a0a7b698cb80cf67ccccde0e25b9e2d7527aa6cc1fed1ac490c752162496caba3e6699612 +"is-whitespace-character@npm:^1.0.0": + version: 1.0.4 + resolution: "is-whitespace-character@npm:1.0.4" + checksum: adab8ad9847ccfcb6f1b7000b8f622881b5ba2a09ce8be2794a6d2b10c3af325b469fc562c9fb889f468eed27be06e227ac609d0aa1e3a59b4dbcc88e2b0418e languageName: node linkType: hard -"indent-string@npm:^5.0.0": - version: 5.0.0 - resolution: "indent-string@npm:5.0.0" - checksum: e466c27b6373440e6d84fbc19e750219ce25865cb82d578e41a6053d727e5520dc5725217d6eb1cc76005a1bb1696a0f106d84ce7ebda3033b963a38583fb3b3 +"is-word-character@npm:^1.0.0": + version: 1.0.4 + resolution: "is-word-character@npm:1.0.4" + checksum: 1821d6c6abe5bc0b3abe3fdc565d66d7c8a74ea4e93bc77b4a47d26e2e2a306d6ab7d92b353b0d2b182869e3ecaa8f4a346c62d0e31d38ebc0ceaf7cae182c3f languageName: node linkType: hard -"infima@npm:0.2.0-alpha.43": - version: 0.2.0-alpha.43 - resolution: "infima@npm:0.2.0-alpha.43" - checksum: fc5f79240e940eddd750439511767092ccb4051e5e91d253ec7630a9e7ce691812da3aa0f05e46b4c0a95dbfadeae5714fd0073f8d2df12e5aaff0697a1d6aa2 +"is-wsl@npm:^2.2.0": + version: 2.2.0 + resolution: "is-wsl@npm:2.2.0" + dependencies: + is-docker: ^2.0.0 + checksum: 20849846ae414997d290b75e16868e5261e86ff5047f104027026fd61d8b5a9b0b3ade16239f35e1a067b3c7cc02f70183cb661010ed16f4b6c7c93dad1b19d8 languageName: node linkType: hard -"inflation@npm:^2.0.0": - 
version: 2.0.0 - resolution: "inflation@npm:2.0.0" - checksum: a0494871b12275afdef9e2710ee1af1e0fc642b04613a9be69c05ef8b5e9627f3bd7d358a937fa47aa20235ee7313a4f30255048533add0ad4918beb918a586e +"is-yarn-global@npm:^0.3.0": + version: 0.3.0 + resolution: "is-yarn-global@npm:0.3.0" + checksum: bca013d65fee2862024c9fbb3ba13720ffca2fe750095174c1c80922fdda16402b5c233f5ac9e265bc12ecb5446e7b7f519a32d9541788f01d4d44e24d2bf481 languageName: node linkType: hard -"inflight@npm:^1.0.4": - version: 1.0.6 - resolution: "inflight@npm:1.0.6" - dependencies: - once: ^1.3.0 - wrappy: 1 - checksum: f4f76aa072ce19fae87ce1ef7d221e709afb59d445e05d47fba710e85470923a75de35bfae47da6de1b18afc3ce83d70facf44cfb0aff89f0a3f45c0a0244dfd +"is-yarn-global@npm:^0.4.0": + version: 0.4.1 + resolution: "is-yarn-global@npm:0.4.1" + checksum: 79ec4e6f581c53d4fefdf5f6c237f9a3ad8db29c85cdc4659e76ae345659317552052a97b7e56952aa5d94a23c798ebec8ccad72fb14d3b26dc647ddceddd716 languageName: node linkType: hard -"inherits@npm:2, inherits@npm:2.0.4, inherits@npm:^2.0.0, inherits@npm:^2.0.1, inherits@npm:^2.0.3, inherits@npm:^2.0.4, inherits@npm:~2.0.3": - version: 2.0.4 - resolution: "inherits@npm:2.0.4" - checksum: 4a48a733847879d6cf6691860a6b1e3f0f4754176e4d71494c41f3475553768b10f84b5ce1d40fbd0e34e6bfbb864ee35858ad4dd2cf31e02fc4a154b724d7f1 +"is@npm:~0.2.6": + version: 0.2.7 + resolution: "is@npm:0.2.7" + checksum: 45cea1e6deb41150b5753e18041a833657313e9c791c73f96fb9014b613346f5af2e6650858ef50ea6262c79555b65e09b13d30a268139863885025dd65f1059 languageName: node linkType: hard -"inherits@npm:2.0.3": - version: 2.0.3 - resolution: "inherits@npm:2.0.3" - checksum: 78cb8d7d850d20a5e9a7f3620db31483aa00ad5f722ce03a55b110e5a723539b3716a3b463e2b96ce3fe286f33afc7c131fa2f91407528ba80cea98a7545d4c0 +"isarray@npm:0.0.1": + version: 0.0.1 + resolution: "isarray@npm:0.0.1" + checksum: 49191f1425681df4a18c2f0f93db3adb85573bcdd6a4482539d98eac9e705d8961317b01175627e860516a2fc45f8f9302db26e5a380a97a520e272e2a40a8d4 languageName: node linkType: hard -"ini@npm:2.0.0": - version: 2.0.0 - resolution: "ini@npm:2.0.0" - checksum: e7aadc5fb2e4aefc666d74ee2160c073995a4061556b1b5b4241ecb19ad609243b9cceafe91bae49c219519394bbd31512516cb22a3b1ca6e66d869e0447e84e +"isarray@npm:~1.0.0": + version: 1.0.0 + resolution: "isarray@npm:1.0.0" + checksum: f032df8e02dce8ec565cf2eb605ea939bdccea528dbcf565cdf92bfa2da9110461159d86a537388ef1acef8815a330642d7885b29010e8f7eac967c9993b65ab languageName: node linkType: hard -"ini@npm:^1.3.5, ini@npm:~1.3.0": - version: 1.3.8 - resolution: "ini@npm:1.3.8" - checksum: dfd98b0ca3a4fc1e323e38a6c8eb8936e31a97a918d3b377649ea15bdb15d481207a0dda1021efbd86b464cae29a0d33c1d7dcaf6c5672bee17fa849bc50a1b3 +"isbinaryfile@npm:^5.0.0": + version: 5.0.0 + resolution: "isbinaryfile@npm:5.0.0" + checksum: 25cc27388d51b8322c103f5894f9e72ec04e017734e57c4b70be2666501ec7e7f6cbb4a5fcfd15260a7cac979bd1ddb7f5231f5a3098c0695c4e7c049513dfaf languageName: node linkType: hard -"inline-style-parser@npm:0.1.1": - version: 0.1.1 - resolution: "inline-style-parser@npm:0.1.1" - checksum: 5d545056a3e1f2bf864c928a886a0e1656a3517127d36917b973de581bd54adc91b4bf1febcb0da054f204b4934763f1a4e09308b4d55002327cf1d48ac5d966 +"isbuffer@npm:~0.0.0": + version: 0.0.0 + resolution: "isbuffer@npm:0.0.0" + checksum: 9796296d3c493974c1f71ccf3170cc8007217a19ce8b3b9dedffd32e8ccc3ac42473b572bbf1b24b86143e826ea157aead11fd1285389518abab76c7da5f50ed languageName: node linkType: hard -"integration-tests@workspace:compiler/integration-tests": - version: 0.0.0-use.local - resolution: 
"integration-tests@workspace:compiler/integration-tests" - dependencies: - "@noir-lang/backend_barretenberg": "workspace:*" - "@noir-lang/noir_js": "workspace:*" - "@noir-lang/noir_wasm": "workspace:*" - "@noir-lang/source-resolver": "workspace:*" - "@nomicfoundation/hardhat-chai-matchers": ^2.0.0 - "@nomicfoundation/hardhat-ethers": ^3.0.0 - "@web/dev-server-esbuild": ^0.3.6 - "@web/test-runner": ^0.15.3 - "@web/test-runner-playwright": ^0.10.0 - eslint: ^8.50.0 - eslint-plugin-prettier: ^5.0.0 - ethers: ^6.7.1 - hardhat: ^2.17.4 - prettier: 3.0.3 - smol-toml: ^1.1.2 - toml: ^3.0.0 - tslog: ^4.9.2 - languageName: unknown - linkType: soft - -"interpret@npm:^1.0.0": - version: 1.4.0 - resolution: "interpret@npm:1.4.0" - checksum: 2e5f51268b5941e4a17e4ef0575bc91ed0ab5f8515e3cf77486f7c14d13f3010df9c0959f37063dcc96e78d12dc6b0bb1b9e111cdfe69771f4656d2993d36155 +"isexe@npm:^2.0.0": + version: 2.0.0 + resolution: "isexe@npm:2.0.0" + checksum: 26bf6c5480dda5161c820c5b5c751ae1e766c587b1f951ea3fcfc973bafb7831ae5b54a31a69bd670220e42e99ec154475025a468eae58ea262f813fdc8d1c62 languageName: node linkType: hard -"invariant@npm:^2.2.4": - version: 2.2.4 - resolution: "invariant@npm:2.2.4" - dependencies: - loose-envify: ^1.0.0 - checksum: cc3182d793aad82a8d1f0af697b462939cb46066ec48bbf1707c150ad5fad6406137e91a262022c269702e01621f35ef60269f6c0d7fd178487959809acdfb14 +"isexe@npm:^3.1.1": + version: 3.1.1 + resolution: "isexe@npm:3.1.1" + checksum: 7fe1931ee4e88eb5aa524cd3ceb8c882537bc3a81b02e438b240e47012eef49c86904d0f0e593ea7c3a9996d18d0f1f3be8d3eaa92333977b0c3a9d353d5563e languageName: node linkType: hard -"io-ts@npm:1.10.4": - version: 1.10.4 - resolution: "io-ts@npm:1.10.4" - dependencies: - fp-ts: ^1.0.0 - checksum: 619134006778f7ca42693716ade7fc1a383079e7848bbeabc67a0e4ac9139cda6b2a88a052d539ab7d554033ee2ffe4dab5cb96b958c83fee2dff73d23f03e88 +"isobject@npm:^3.0.1": + version: 3.0.1 + resolution: "isobject@npm:3.0.1" + checksum: db85c4c970ce30693676487cca0e61da2ca34e8d4967c2e1309143ff910c207133a969f9e4ddb2dc6aba670aabce4e0e307146c310350b298e74a31f7d464703 languageName: node linkType: hard -"ip@npm:^1.1.5": - version: 1.1.8 - resolution: "ip@npm:1.1.8" - checksum: a2ade53eb339fb0cbe9e69a44caab10d6e3784662285eb5d2677117ee4facc33a64679051c35e0dfdb1a3983a51ce2f5d2cb36446d52e10d01881789b76e28fb +"istanbul-lib-coverage@npm:^3.0.0": + version: 3.2.2 + resolution: "istanbul-lib-coverage@npm:3.2.2" + checksum: 2367407a8d13982d8f7a859a35e7f8dd5d8f75aae4bb5484ede3a9ea1b426dc245aff28b976a2af48ee759fdd9be374ce2bd2669b644f31e76c5f46a2e29a831 languageName: node linkType: hard -"ip@npm:^2.0.0": - version: 2.0.0 - resolution: "ip@npm:2.0.0" - checksum: cfcfac6b873b701996d71ec82a7dd27ba92450afdb421e356f44044ed688df04567344c36cbacea7d01b1c39a4c732dc012570ebe9bebfb06f27314bca625349 +"istanbul-lib-report@npm:^3.0.0, istanbul-lib-report@npm:^3.0.1": + version: 3.0.1 + resolution: "istanbul-lib-report@npm:3.0.1" + dependencies: + istanbul-lib-coverage: ^3.0.0 + make-dir: ^4.0.0 + supports-color: ^7.1.0 + checksum: fd17a1b879e7faf9bb1dc8f80b2a16e9f5b7b8498fe6ed580a618c34df0bfe53d2abd35bf8a0a00e628fb7405462576427c7df20bbe4148d19c14b431c974b21 languageName: node linkType: hard -"ipaddr.js@npm:1.9.1": - version: 1.9.1 - resolution: "ipaddr.js@npm:1.9.1" - checksum: f88d3825981486f5a1942414c8d77dd6674dd71c065adcfa46f578d677edcb99fda25af42675cb59db492fdf427b34a5abfcde3982da11a8fd83a500b41cfe77 +"istanbul-reports@npm:^3.0.2": + version: 3.1.6 + resolution: "istanbul-reports@npm:3.1.6" + dependencies: + html-escaper: ^2.0.0 + 
istanbul-lib-report: ^3.0.0 + checksum: 44c4c0582f287f02341e9720997f9e82c071627e1e862895745d5f52ec72c9b9f38e1d12370015d2a71dcead794f34c7732aaef3fab80a24bc617a21c3d911d6 languageName: node linkType: hard -"ipaddr.js@npm:^2.0.1": - version: 2.1.0 - resolution: "ipaddr.js@npm:2.1.0" - checksum: 807a054f2bd720c4d97ee479d6c9e865c233bea21f139fb8dabd5a35c4226d2621c42e07b4ad94ff3f82add926a607d8d9d37c625ad0319f0e08f9f2bd1968e2 +"jackspeak@npm:^2.3.5": + version: 2.3.6 + resolution: "jackspeak@npm:2.3.6" + dependencies: + "@isaacs/cliui": ^8.0.2 + "@pkgjs/parseargs": ^0.11.0 + dependenciesMeta: + "@pkgjs/parseargs": + optional: true + checksum: 57d43ad11eadc98cdfe7496612f6bbb5255ea69fe51ea431162db302c2a11011642f50cfad57288bd0aea78384a0612b16e131944ad8ecd09d619041c8531b54 languageName: node linkType: hard -"irregular-plurals@npm:^3.3.0": - version: 3.5.0 - resolution: "irregular-plurals@npm:3.5.0" - checksum: 5b663091dc89155df7b2e9d053e8fb11941a0c4be95c4b6549ed3ea020489fdf4f75ea586c915b5b543704252679a5a6e8c6c3587da5ac3fc57b12da90a9aee7 +"jest-util@npm:^29.7.0": + version: 29.7.0 + resolution: "jest-util@npm:29.7.0" + dependencies: + "@jest/types": ^29.6.3 + "@types/node": "*" + chalk: ^4.0.0 + ci-info: ^3.2.0 + graceful-fs: ^4.2.9 + picomatch: ^2.2.3 + checksum: 042ab4980f4ccd4d50226e01e5c7376a8556b472442ca6091a8f102488c0f22e6e8b89ea874111d2328a2080083bf3225c86f3788c52af0bd0345a00eb57a3ca languageName: node linkType: hard -"is-alphabetical@npm:1.0.4, is-alphabetical@npm:^1.0.0": - version: 1.0.4 - resolution: "is-alphabetical@npm:1.0.4" - checksum: 6508cce44fd348f06705d377b260974f4ce68c74000e7da4045f0d919e568226dc3ce9685c5a2af272195384df6930f748ce9213fc9f399b5d31b362c66312cb +"jest-worker@npm:^27.4.5": + version: 27.5.1 + resolution: "jest-worker@npm:27.5.1" + dependencies: + "@types/node": "*" + merge-stream: ^2.0.0 + supports-color: ^8.0.0 + checksum: 98cd68b696781caed61c983a3ee30bf880b5bd021c01d98f47b143d4362b85d0737f8523761e2713d45e18b4f9a2b98af1eaee77afade4111bb65c77d6f7c980 languageName: node linkType: hard -"is-alphanumerical@npm:^1.0.0": - version: 1.0.4 - resolution: "is-alphanumerical@npm:1.0.4" +"jest-worker@npm:^29.1.2": + version: 29.7.0 + resolution: "jest-worker@npm:29.7.0" dependencies: - is-alphabetical: ^1.0.0 - is-decimal: ^1.0.0 - checksum: e2e491acc16fcf5b363f7c726f666a9538dba0a043665740feb45bba1652457a73441e7c5179c6768a638ed396db3437e9905f403644ec7c468fb41f4813d03f + "@types/node": "*" + jest-util: ^29.7.0 + merge-stream: ^2.0.0 + supports-color: ^8.0.0 + checksum: 30fff60af49675273644d408b650fc2eb4b5dcafc5a0a455f238322a8f9d8a98d847baca9d51ff197b6747f54c7901daa2287799230b856a0f48287d131f8c13 languageName: node linkType: hard -"is-arrayish@npm:^0.2.1": - version: 0.2.1 - resolution: "is-arrayish@npm:0.2.1" - checksum: eef4417e3c10e60e2c810b6084942b3ead455af16c4509959a27e490e7aee87cfb3f38e01bbde92220b528a0ee1a18d52b787e1458ee86174d8c7f0e58cd488f +"jiti@npm:^1.18.2, jiti@npm:^1.20.0": + version: 1.21.0 + resolution: "jiti@npm:1.21.0" + bin: + jiti: bin/jiti.js + checksum: a7bd5d63921c170eaec91eecd686388181c7828e1fa0657ab374b9372bfc1f383cf4b039e6b272383d5cb25607509880af814a39abdff967322459cca41f2961 languageName: node linkType: hard -"is-binary-path@npm:~2.1.0": - version: 2.1.0 - resolution: "is-binary-path@npm:2.1.0" +"joi@npm:^17.6.0, joi@npm:^17.9.2": + version: 17.11.0 + resolution: "joi@npm:17.11.0" dependencies: - binary-extensions: ^2.0.0 - checksum: 
84192eb88cff70d320426f35ecd63c3d6d495da9d805b19bc65b518984b7c0760280e57dbf119b7e9be6b161784a5a673ab2c6abe83abb5198a432232ad5b35c + "@hapi/hoek": ^9.0.0 + "@hapi/topo": ^5.0.0 + "@sideway/address": ^4.1.3 + "@sideway/formula": ^3.0.1 + "@sideway/pinpoint": ^2.0.0 + checksum: 3a4e9ecba345cdafe585e7ed8270a44b39718e11dff3749aa27e0001a63d578b75100c062be28e6f48f960b594864034e7a13833f33fbd7ad56d5ce6b617f9bf languageName: node linkType: hard -"is-buffer@npm:^2.0.0, is-buffer@npm:^2.0.5": - version: 2.0.5 - resolution: "is-buffer@npm:2.0.5" - checksum: 764c9ad8b523a9f5a32af29bdf772b08eb48c04d2ad0a7240916ac2688c983bf5f8504bf25b35e66240edeb9d9085461f9b5dae1f3d2861c6b06a65fe983de42 +"js-base64@npm:^3.7.5": + version: 3.7.5 + resolution: "js-base64@npm:3.7.5" + checksum: 67a78c8b1c47b73f1c6fba1957e9fe6fd9dc78ac93ac46cc2e43472dcb9cf150d126fb0e593192e88e0497354fa634d17d255add7cc6ee3c7b4d29870faa8e18 + languageName: node + linkType: hard + +"js-sdsl@npm:^4.1.4": + version: 4.4.2 + resolution: "js-sdsl@npm:4.4.2" + checksum: ba705adc1788bf3c6f6c8e5077824f2bb4f0acab5a984420ce5cc492c7fff3daddc26335ad2c9a67d4f5e3241ec790f9e5b72a625adcf20cf321d2fd85e62b8b languageName: node linkType: hard -"is-builtin-module@npm:^3.1.0": - version: 3.2.1 - resolution: "is-builtin-module@npm:3.2.1" - dependencies: - builtin-modules: ^3.3.0 - checksum: e8f0ffc19a98240bda9c7ada84d846486365af88d14616e737d280d378695c8c448a621dcafc8332dbf0fcd0a17b0763b845400709963fa9151ddffece90ae88 +"js-sha3@npm:0.8.0": + version: 0.8.0 + resolution: "js-sha3@npm:0.8.0" + checksum: 75df77c1fc266973f06cce8309ce010e9e9f07ec35ab12022ed29b7f0d9c8757f5a73e1b35aa24840dced0dea7059085aa143d817aea9e188e2a80d569d9adce languageName: node linkType: hard -"is-ci@npm:^2.0.0": - version: 2.0.0 - resolution: "is-ci@npm:2.0.0" - dependencies: - ci-info: ^2.0.0 - bin: - is-ci: bin.js - checksum: 77b869057510f3efa439bbb36e9be429d53b3f51abd4776eeea79ab3b221337fe1753d1e50058a9e2c650d38246108beffb15ccfd443929d77748d8c0cc90144 +"js-tokens@npm:^3.0.0 || ^4.0.0, js-tokens@npm:^4.0.0": + version: 4.0.0 + resolution: "js-tokens@npm:4.0.0" + checksum: 8a95213a5a77deb6cbe94d86340e8d9ace2b93bc367790b260101d2f36a2eaf4e4e22d9fa9cf459b38af3a32fb4190e638024cf82ec95ef708680e405ea7cc78 languageName: node linkType: hard -"is-core-module@npm:^2.13.0": - version: 2.13.0 - resolution: "is-core-module@npm:2.13.0" +"js-yaml@npm:4.1.0, js-yaml@npm:^4.1.0": + version: 4.1.0 + resolution: "js-yaml@npm:4.1.0" dependencies: - has: ^1.0.3 - checksum: 053ab101fb390bfeb2333360fd131387bed54e476b26860dc7f5a700bbf34a0ec4454f7c8c4d43e8a0030957e4b3db6e16d35e1890ea6fb654c833095e040355 + argparse: ^2.0.1 + bin: + js-yaml: bin/js-yaml.js + checksum: c7830dfd456c3ef2c6e355cc5a92e6700ceafa1d14bba54497b34a99f0376cecbb3e9ac14d3e5849b426d5a5140709a66237a8c991c675431271c4ce5504151a languageName: node linkType: hard -"is-decimal@npm:^1.0.0": - version: 1.0.4 - resolution: "is-decimal@npm:1.0.4" - checksum: ed483a387517856dc395c68403a10201fddcc1b63dc56513fbe2fe86ab38766120090ecdbfed89223d84ca8b1cd28b0641b93cb6597b6e8f4c097a7c24e3fb96 +"js-yaml@npm:^3.13.1": + version: 3.14.1 + resolution: "js-yaml@npm:3.14.1" + dependencies: + argparse: ^1.0.7 + esprima: ^4.0.0 + bin: + js-yaml: bin/js-yaml.js + checksum: bef146085f472d44dee30ec34e5cf36bf89164f5d585435a3d3da89e52622dff0b188a580e4ad091c3341889e14cb88cac6e4deb16dc5b1e9623bb0601fc255c languageName: node linkType: hard -"is-docker@npm:^2.0.0, is-docker@npm:^2.1.1": - version: 2.2.1 - resolution: "is-docker@npm:2.2.1" +"jsesc@npm:^2.5.1": + version: 2.5.2 + 
resolution: "jsesc@npm:2.5.2" bin: - is-docker: cli.js - checksum: 3fef7ddbf0be25958e8991ad941901bf5922ab2753c46980b60b05c1bf9c9c2402d35e6dc32e4380b980ef5e1970a5d9d5e5aa2e02d77727c3b6b5e918474c56 + jsesc: bin/jsesc + checksum: 4dc190771129e12023f729ce20e1e0bfceac84d73a85bc3119f7f938843fe25a4aeccb54b6494dce26fcf263d815f5f31acdefac7cc9329efb8422a4f4d9fa9d languageName: node linkType: hard -"is-docker@npm:^3.0.0": - version: 3.0.0 - resolution: "is-docker@npm:3.0.0" +"jsesc@npm:~0.5.0": + version: 0.5.0 + resolution: "jsesc@npm:0.5.0" bin: - is-docker: cli.js - checksum: b698118f04feb7eaf3338922bd79cba064ea54a1c3db6ec8c0c8d8ee7613e7e5854d802d3ef646812a8a3ace81182a085dfa0a71cc68b06f3fa794b9783b3c90 + jsesc: bin/jsesc + checksum: b8b44cbfc92f198ad972fba706ee6a1dfa7485321ee8c0b25f5cedd538dcb20cde3197de16a7265430fce8277a12db066219369e3d51055038946039f6e20e17 languageName: node linkType: hard -"is-error@npm:^2.2.2": - version: 2.2.2 - resolution: "is-error@npm:2.2.2" - checksum: a97b39587150f0d38f9f93f64699807fe3020fe5edbd63548f234dc2ba96fd7c776d66c062bf031dfeb93c7f48db563ff6bde588418ca041da37c659a416f055 +"json-buffer@npm:3.0.0": + version: 3.0.0 + resolution: "json-buffer@npm:3.0.0" + checksum: 0cecacb8025370686a916069a2ff81f7d55167421b6aa7270ee74e244012650dd6bce22b0852202ea7ff8624fce50ff0ec1bdf95914ccb4553426e290d5a63fa languageName: node linkType: hard -"is-extendable@npm:^0.1.0": - version: 0.1.1 - resolution: "is-extendable@npm:0.1.1" - checksum: 3875571d20a7563772ecc7a5f36cb03167e9be31ad259041b4a8f73f33f885441f778cee1f1fe0085eb4bc71679b9d8c923690003a36a6a5fdf8023e6e3f0672 +"json-buffer@npm:3.0.1": + version: 3.0.1 + resolution: "json-buffer@npm:3.0.1" + checksum: 9026b03edc2847eefa2e37646c579300a1f3a4586cfb62bf857832b60c852042d0d6ae55d1afb8926163fa54c2b01d83ae24705f34990348bdac6273a29d4581 languageName: node linkType: hard -"is-extglob@npm:^2.1.1": - version: 2.1.1 - resolution: "is-extglob@npm:2.1.1" - checksum: df033653d06d0eb567461e58a7a8c9f940bd8c22274b94bf7671ab36df5719791aae15eef6d83bbb5e23283967f2f984b8914559d4449efda578c775c4be6f85 +"json-joy@npm:^9.2.0": + version: 9.9.1 + resolution: "json-joy@npm:9.9.1" + dependencies: + arg: ^5.0.2 + hyperdyperid: ^1.2.0 + peerDependencies: + quill-delta: ^5 + rxjs: 7 + tslib: 2 + bin: + jj: bin/jj.js + json-pack: bin/json-pack.js + json-pack-test: bin/json-pack-test.js + json-patch: bin/json-patch.js + json-patch-test: bin/json-patch-test.js + json-pointer: bin/json-pointer.js + json-pointer-test: bin/json-pointer-test.js + json-unpack: bin/json-unpack.js + checksum: d165398682f00019796225faf365cd8d060f3e086af39bb5081c30907b7e52eaf13697d1c0f6ee2b010fe255ae1fd776e05ad7d6ee5fb549e98fe982f560884b languageName: node linkType: hard -"is-fullwidth-code-point@npm:^3.0.0": - version: 3.0.0 - resolution: "is-fullwidth-code-point@npm:3.0.0" - checksum: 44a30c29457c7fb8f00297bce733f0a64cd22eca270f83e58c105e0d015e45c019491a4ab2faef91ab51d4738c670daff901c799f6a700e27f7314029e99e348 +"json-parse-even-better-errors@npm:^2.3.0, json-parse-even-better-errors@npm:^2.3.1": + version: 2.3.1 + resolution: "json-parse-even-better-errors@npm:2.3.1" + checksum: 798ed4cf3354a2d9ccd78e86d2169515a0097a5c133337807cdf7f1fc32e1391d207ccfc276518cc1d7d8d4db93288b8a50ba4293d212ad1336e52a8ec0a941f languageName: node linkType: hard -"is-fullwidth-code-point@npm:^4.0.0": - version: 4.0.0 - resolution: "is-fullwidth-code-point@npm:4.0.0" - checksum: 8ae89bf5057bdf4f57b346fb6c55e9c3dd2549983d54191d722d5c739397a903012cc41a04ee3403fd872e811243ef91a7c5196da7b5841dc6b6aae31a264a8d 
+"json-schema-traverse@npm:^0.4.1": + version: 0.4.1 + resolution: "json-schema-traverse@npm:0.4.1" + checksum: 7486074d3ba247769fda17d5181b345c9fb7d12e0da98b22d1d71a5db9698d8b4bd900a3ec1a4ffdd60846fc2556274a5c894d0c48795f14cb03aeae7b55260b languageName: node linkType: hard -"is-generator-function@npm:^1.0.7": - version: 1.0.10 - resolution: "is-generator-function@npm:1.0.10" - dependencies: - has-tostringtag: ^1.0.0 - checksum: d54644e7dbaccef15ceb1e5d91d680eb5068c9ee9f9eb0a9e04173eb5542c9b51b5ab52c5537f5703e48d5fddfd376817c1ca07a84a407b7115b769d4bdde72b +"json-schema-traverse@npm:^1.0.0": + version: 1.0.0 + resolution: "json-schema-traverse@npm:1.0.0" + checksum: 02f2f466cdb0362558b2f1fd5e15cce82ef55d60cd7f8fa828cf35ba74330f8d767fcae5c5c2adb7851fa811766c694b9405810879bc4e1ddd78a7c0e03658ad languageName: node linkType: hard -"is-glob@npm:^4.0.0, is-glob@npm:^4.0.1, is-glob@npm:^4.0.3, is-glob@npm:~4.0.1": - version: 4.0.3 - resolution: "is-glob@npm:4.0.3" - dependencies: - is-extglob: ^2.1.1 - checksum: d381c1319fcb69d341cc6e6c7cd588e17cd94722d9a32dbd60660b993c4fb7d0f19438674e68dfec686d09b7c73139c9166b47597f846af387450224a8101ab4 +"json-stable-stringify-without-jsonify@npm:^1.0.1": + version: 1.0.1 + resolution: "json-stable-stringify-without-jsonify@npm:1.0.1" + checksum: cff44156ddce9c67c44386ad5cddf91925fe06b1d217f2da9c4910d01f358c6e3989c4d5a02683c7a5667f9727ff05831f7aa8ae66c8ff691c556f0884d49215 languageName: node linkType: hard -"is-hex-prefixed@npm:1.0.0": - version: 1.0.0 - resolution: "is-hex-prefixed@npm:1.0.0" - checksum: 5ac58e6e528fb029cc43140f6eeb380fad23d0041cc23154b87f7c9a1b728bcf05909974e47248fd0b7fcc11ba33cf7e58d64804883056fabd23e2b898be41de +"json5@npm:^2.1.2, json5@npm:^2.2.3": + version: 2.2.3 + resolution: "json5@npm:2.2.3" + bin: + json5: lib/cli.js + checksum: 2a7436a93393830bce797d4626275152e37e877b265e94ca69c99e3d20c2b9dab021279146a39cdb700e71b2dd32a4cebd1514cd57cee102b1af906ce5040349 languageName: node linkType: hard -"is-hexadecimal@npm:^1.0.0": - version: 1.0.4 - resolution: "is-hexadecimal@npm:1.0.4" - checksum: a452e047587b6069332d83130f54d30da4faf2f2ebaa2ce6d073c27b5703d030d58ed9e0b729c8e4e5b52c6f1dab26781bb77b7bc6c7805f14f320e328ff8cd5 +"jsonc-parser@npm:^3.2.0": + version: 3.2.0 + resolution: "jsonc-parser@npm:3.2.0" + checksum: 946dd9a5f326b745aa326d48a7257e3f4a4b62c5e98ec8e49fa2bdd8d96cef7e6febf1399f5c7016114fd1f68a1c62c6138826d5d90bc650448e3cf0951c53c7 languageName: node linkType: hard -"is-inside-container@npm:^1.0.0": - version: 1.0.0 - resolution: "is-inside-container@npm:1.0.0" +"jsonfile@npm:^2.1.0": + version: 2.4.0 + resolution: "jsonfile@npm:2.4.0" dependencies: - is-docker: ^3.0.0 - bin: - is-inside-container: cli.js - checksum: c50b75a2ab66ab3e8b92b3bc534e1ea72ca25766832c0623ac22d134116a98bcf012197d1caabe1d1c4bd5f84363d4aa5c36bb4b585fbcaf57be172cd10a1a03 + graceful-fs: ^4.1.6 + dependenciesMeta: + graceful-fs: + optional: true + checksum: f5064aabbc9e35530dc471d8b203ae1f40dbe949ddde4391c6f6a6d310619a15f0efdae5587df594d1d70c555193aaeee9d2ed4aec9ffd5767bd5e4e62d49c3d languageName: node linkType: hard -"is-installed-globally@npm:^0.4.0": - version: 0.4.0 - resolution: "is-installed-globally@npm:0.4.0" +"jsonfile@npm:^4.0.0": + version: 4.0.0 + resolution: "jsonfile@npm:4.0.0" dependencies: - global-dirs: ^3.0.0 - is-path-inside: ^3.0.2 - checksum: 3359840d5982d22e9b350034237b2cda2a12bac1b48a721912e1ab8e0631dd07d45a2797a120b7b87552759a65ba03e819f1bd63f2d7ab8657ec0b44ee0bf399 + graceful-fs: ^4.1.6 + dependenciesMeta: + graceful-fs: + optional: true 
+ checksum: 6447d6224f0d31623eef9b51185af03ac328a7553efcee30fa423d98a9e276ca08db87d71e17f2310b0263fd3ffa6c2a90a6308367f661dc21580f9469897c9e languageName: node linkType: hard -"is-lambda@npm:^1.0.1": - version: 1.0.1 - resolution: "is-lambda@npm:1.0.1" - checksum: 93a32f01940220532e5948538699ad610d5924ac86093fcee83022252b363eb0cc99ba53ab084a04e4fb62bf7b5731f55496257a4c38adf87af9c4d352c71c35 +"jsonfile@npm:^6.0.1": + version: 6.1.0 + resolution: "jsonfile@npm:6.1.0" + dependencies: + graceful-fs: ^4.1.6 + universalify: ^2.0.0 + dependenciesMeta: + graceful-fs: + optional: true + checksum: 7af3b8e1ac8fe7f1eccc6263c6ca14e1966fcbc74b618d3c78a0a2075579487547b94f72b7a1114e844a1e15bb00d440e5d1720bfc4612d790a6f285d5ea8354 languageName: node linkType: hard -"is-module@npm:^1.0.0": - version: 1.0.0 - resolution: "is-module@npm:1.0.0" - checksum: 8cd5390730c7976fb4e8546dd0b38865ee6f7bacfa08dfbb2cc07219606755f0b01709d9361e01f13009bbbd8099fa2927a8ed665118a6105d66e40f1b838c3f +"just-extend@npm:^4.0.2": + version: 4.2.1 + resolution: "just-extend@npm:4.2.1" + checksum: ff9fdede240fad313efeeeb68a660b942e5586d99c0058064c78884894a2690dc09bba44c994ad4e077e45d913fef01a9240c14a72c657b53687ac58de53b39c languageName: node linkType: hard -"is-npm@npm:^5.0.0": - version: 5.0.0 - resolution: "is-npm@npm:5.0.0" - checksum: 9baff02b0c69a3d3c79b162cb2f9e67fb40ef6d172c16601b2e2471c21e9a4fa1fc9885a308d7bc6f3a3cd2a324c27fa0bf284c133c3349bb22571ab70d041cc +"katex@npm:^0.16.0": + version: 0.16.9 + resolution: "katex@npm:0.16.9" + dependencies: + commander: ^8.3.0 + bin: + katex: cli.js + checksum: 861194dfd4d86505e657f688fb73048d46ac498edafce71199502a35b03c0ecc35ba930c631be79c4a09d90a0d23476673cd52f6bc367c7a161854d64005fa95 languageName: node linkType: hard -"is-number@npm:^7.0.0": - version: 7.0.0 - resolution: "is-number@npm:7.0.0" - checksum: 456ac6f8e0f3111ed34668a624e45315201dff921e5ac181f8ec24923b99e9f32ca1a194912dc79d539c97d33dba17dc635202ff0b2cf98326f608323276d27a +"keccak@npm:^3.0.0, keccak@npm:^3.0.2": + version: 3.0.4 + resolution: "keccak@npm:3.0.4" + dependencies: + node-addon-api: ^2.0.0 + node-gyp: latest + node-gyp-build: ^4.2.0 + readable-stream: ^3.6.0 + checksum: 2bf27b97b2f24225b1b44027de62be547f5c7326d87d249605665abd0c8c599d774671c35504c62c9b922cae02758504c6f76a73a84234d23af8a2211afaaa11 languageName: node linkType: hard -"is-obj@npm:^1.0.1": - version: 1.0.1 - resolution: "is-obj@npm:1.0.1" - checksum: 3ccf0efdea12951e0b9c784e2b00e77e87b2f8bd30b42a498548a8afcc11b3287342a2030c308e473e93a7a19c9ea7854c99a8832a476591c727df2a9c79796c +"keygrip@npm:~1.1.0": + version: 1.1.0 + resolution: "keygrip@npm:1.1.0" + dependencies: + tsscmp: 1.0.6 + checksum: 078cd16a463d187121f0a27c1c9c95c52ad392b620f823431689f345a0501132cee60f6e96914b07d570105af470b96960402accd6c48a0b1f3cd8fac4fa2cae languageName: node linkType: hard -"is-obj@npm:^2.0.0": - version: 2.0.0 - resolution: "is-obj@npm:2.0.0" - checksum: c9916ac8f4621962a42f5e80e7ffdb1d79a3fab7456ceaeea394cd9e0858d04f985a9ace45be44433bf605673c8be8810540fe4cc7f4266fc7526ced95af5a08 +"keyv@npm:^3.0.0": + version: 3.1.0 + resolution: "keyv@npm:3.1.0" + dependencies: + json-buffer: 3.0.0 + checksum: bb7e8f3acffdbafbc2dd5b63f377fe6ec4c0e2c44fc82720449ef8ab54f4a7ce3802671ed94c0f475ae0a8549703353a2124561fcf3317010c141b32ca1ce903 languageName: node linkType: hard -"is-path-cwd@npm:^2.2.0": - version: 2.2.0 - resolution: "is-path-cwd@npm:2.2.0" - checksum: 
46a840921bb8cc0dc7b5b423a14220e7db338072a4495743a8230533ce78812dc152548c86f4b828411fe98c5451959f07cf841c6a19f611e46600bd699e8048 +"keyv@npm:^4.5.3, keyv@npm:^4.5.4": + version: 4.5.4 + resolution: "keyv@npm:4.5.4" + dependencies: + json-buffer: 3.0.1 + checksum: 74a24395b1c34bd44ad5cb2b49140d087553e170625240b86755a6604cd65aa16efdbdeae5cdb17ba1284a0fbb25ad06263755dbc71b8d8b06f74232ce3cdd72 languageName: node linkType: hard -"is-path-inside@npm:^3.0.2, is-path-inside@npm:^3.0.3": - version: 3.0.3 - resolution: "is-path-inside@npm:3.0.3" - checksum: abd50f06186a052b349c15e55b182326f1936c89a78bf6c8f2b707412517c097ce04bc49a0ca221787bc44e1049f51f09a2ffb63d22899051988d3a618ba13e9 +"kind-of@npm:^6.0.0, kind-of@npm:^6.0.2": + version: 6.0.3 + resolution: "kind-of@npm:6.0.3" + checksum: 3ab01e7b1d440b22fe4c31f23d8d38b4d9b91d9f291df683476576493d5dfd2e03848a8b05813dd0c3f0e835bc63f433007ddeceb71f05cb25c45ae1b19c6d3b languageName: node linkType: hard -"is-plain-obj@npm:^2.0.0, is-plain-obj@npm:^2.1.0": - version: 2.1.0 - resolution: "is-plain-obj@npm:2.1.0" - checksum: cec9100678b0a9fe0248a81743041ed990c2d4c99f893d935545cfbc42876cbe86d207f3b895700c690ad2fa520e568c44afc1605044b535a7820c1d40e38daa +"klaw-sync@npm:^6.0.0": + version: 6.0.0 + resolution: "klaw-sync@npm:6.0.0" + dependencies: + graceful-fs: ^4.1.11 + checksum: 0da397f8961313c3ef8f79fb63af9002cde5a8fb2aeb1a37351feff0dd6006129c790400c3f5c3b4e757bedcabb13d21ec0a5eaef5a593d59515d4f2c291e475 languageName: node linkType: hard -"is-plain-obj@npm:^3.0.0": - version: 3.0.0 - resolution: "is-plain-obj@npm:3.0.0" - checksum: a6ebdf8e12ab73f33530641972a72a4b8aed6df04f762070d823808303e4f76d87d5ea5bd76f96a7bbe83d93f04ac7764429c29413bd9049853a69cb630fb21c +"klaw@npm:^1.0.0": + version: 1.3.1 + resolution: "klaw@npm:1.3.1" + dependencies: + graceful-fs: ^4.1.9 + dependenciesMeta: + graceful-fs: + optional: true + checksum: 8f69e4797c26e7c3f2426bfa85f38a3da3c2cb1b4c6bd850d2377aed440d41ce9d806f2885c2e2e224372c56af4b1d43b8a499adecf9a05e7373dc6b8b7c52e4 languageName: node linkType: hard -"is-plain-object@npm:^2.0.4": - version: 2.0.4 - resolution: "is-plain-object@npm:2.0.4" - dependencies: - isobject: ^3.0.1 - checksum: 2a401140cfd86cabe25214956ae2cfee6fbd8186809555cd0e84574f88de7b17abacb2e477a6a658fa54c6083ecbda1e6ae404c7720244cd198903848fca70ca +"kleur@npm:^3.0.3": + version: 3.0.3 + resolution: "kleur@npm:3.0.3" + checksum: df82cd1e172f957bae9c536286265a5cdbd5eeca487cb0a3b2a7b41ef959fc61f8e7c0e9aeea9c114ccf2c166b6a8dd45a46fd619c1c569d210ecd2765ad5169 languageName: node linkType: hard -"is-plain-object@npm:^5.0.0": - version: 5.0.0 - resolution: "is-plain-object@npm:5.0.0" - checksum: e32d27061eef62c0847d303125440a38660517e586f2f3db7c9d179ae5b6674ab0f469d519b2e25c147a1a3bc87156d0d5f4d8821e0ce4a9ee7fe1fcf11ce45c +"kleur@npm:^4.0.3": + version: 4.1.5 + resolution: "kleur@npm:4.1.5" + checksum: 1dc476e32741acf0b1b5b0627ffd0d722e342c1b0da14de3e8ae97821327ca08f9fb944542fb3c126d90ac5f27f9d804edbe7c585bf7d12ef495d115e0f22c12 languageName: node linkType: hard -"is-promise@npm:^4.0.0": - version: 4.0.0 - resolution: "is-promise@npm:4.0.0" - checksum: 0b46517ad47b00b6358fd6553c83ec1f6ba9acd7ffb3d30a0bf519c5c69e7147c132430452351b8a9fc198f8dd6c4f76f8e6f5a7f100f8c77d57d9e0f4261a8a +"koa-compose@npm:^4.1.0": + version: 4.1.0 + resolution: "koa-compose@npm:4.1.0" + checksum: 46cb16792d96425e977c2ae4e5cb04930280740e907242ec9c25e3fb8b4a1d7b54451d7432bc24f40ec62255edea71894d2ceeb8238501842b4e48014f2e83db languageName: node linkType: hard -"is-regexp@npm:^1.0.0": - version: 
1.0.0 - resolution: "is-regexp@npm:1.0.0" - checksum: be692828e24cba479ec33644326fa98959ec68ba77965e0291088c1a741feaea4919d79f8031708f85fd25e39de002b4520622b55460660b9c369e6f7187faef +"koa-convert@npm:^2.0.0": + version: 2.0.0 + resolution: "koa-convert@npm:2.0.0" + dependencies: + co: ^4.6.0 + koa-compose: ^4.1.0 + checksum: 7385b3391995f59c1312142e110d5dff677f9850dbfbcf387cd36a7b0af03b5d26e82b811eb9bb008b4f3e661cdab1f8817596e46b1929da2cf6e97a2f7456ed languageName: node linkType: hard -"is-root@npm:^2.1.0": - version: 2.1.0 - resolution: "is-root@npm:2.1.0" - checksum: 37eea0822a2a9123feb58a9d101558ba276771a6d830f87005683349a9acff15958a9ca590a44e778c6b335660b83e85c744789080d734f6081a935a4880aee2 +"koa-etag@npm:^4.0.0": + version: 4.0.0 + resolution: "koa-etag@npm:4.0.0" + dependencies: + etag: ^1.8.1 + checksum: b5f413574e1edbd60fbbd0d31720e66565d51bfcb407d1bc3f48d9dd5b45fa5a9e4f69a60e749fad7397348e90de23e943307578d007a69da30faaae432deaf6 languageName: node linkType: hard -"is-stream@npm:^2.0.0": - version: 2.0.1 - resolution: "is-stream@npm:2.0.1" - checksum: b8e05ccdf96ac330ea83c12450304d4a591f9958c11fd17bed240af8d5ffe08aedafa4c0f4cfccd4d28dc9d4d129daca1023633d5c11601a6cbc77521f6fae66 +"koa-send@npm:^5.0.0, koa-send@npm:^5.0.1": + version: 5.0.1 + resolution: "koa-send@npm:5.0.1" + dependencies: + debug: ^4.1.1 + http-errors: ^1.7.3 + resolve-path: ^1.4.0 + checksum: a9fbaadbe0f50efd157a733df4a1cc2b3b79b0cdf12e67c718641e6038d1792c0bebe40913e6d4ceb707d970301155be3859b98d1ef08b0fd1766f7326b82853 languageName: node linkType: hard -"is-stream@npm:^3.0.0": - version: 3.0.0 - resolution: "is-stream@npm:3.0.0" - checksum: 172093fe99119ffd07611ab6d1bcccfe8bc4aa80d864b15f43e63e54b7abc71e779acd69afdb854c4e2a67fdc16ae710e370eda40088d1cfc956a50ed82d8f16 +"koa-static@npm:^5.0.0": + version: 5.0.0 + resolution: "koa-static@npm:5.0.0" + dependencies: + debug: ^3.1.0 + koa-send: ^5.0.0 + checksum: 8d9b9c4d2b3b13e8818e804245d784099c4b353b55ddd7dbeeb90f27a2e9f5b6f86bd16a4909e337cb89db4d332d9002e6c0f5056caf75749cab62f93c1f0cc5 languageName: node linkType: hard -"is-typedarray@npm:^1.0.0": - version: 1.0.0 - resolution: "is-typedarray@npm:1.0.0" - checksum: 3508c6cd0a9ee2e0df2fa2e9baabcdc89e911c7bd5cf64604586697212feec525aa21050e48affb5ffc3df20f0f5d2e2cf79b08caa64e1ccc9578e251763aef7 +"koa@npm:^2.13.0": + version: 2.14.2 + resolution: "koa@npm:2.14.2" + dependencies: + accepts: ^1.3.5 + cache-content-type: ^1.0.0 + content-disposition: ~0.5.2 + content-type: ^1.0.4 + cookies: ~0.8.0 + debug: ^4.3.2 + delegates: ^1.0.0 + depd: ^2.0.0 + destroy: ^1.0.4 + encodeurl: ^1.0.2 + escape-html: ^1.0.3 + fresh: ~0.5.2 + http-assert: ^1.3.0 + http-errors: ^1.6.3 + is-generator-function: ^1.0.7 + koa-compose: ^4.1.0 + koa-convert: ^2.0.0 + on-finished: ^2.3.0 + only: ~0.0.2 + parseurl: ^1.3.2 + statuses: ^1.5.0 + type-is: ^1.6.16 + vary: ^1.1.2 + checksum: 17fe3b8f5e0b4759004a942cc6ba2a9507299943a697dff9766b85f41f45caed4077ca2645ac9ad254d3359fffedfc4c9ebdd7a70493e5df8cdfac159a8ee835 languageName: node linkType: hard -"is-unicode-supported@npm:^0.1.0": - version: 0.1.0 - resolution: "is-unicode-supported@npm:0.1.0" - checksum: a2aab86ee7712f5c2f999180daaba5f361bdad1efadc9610ff5b8ab5495b86e4f627839d085c6530363c6d6d4ecbde340fb8e54bdb83da4ba8e0865ed5513c52 +"latest-version@npm:^5.1.0": + version: 5.1.0 + resolution: "latest-version@npm:5.1.0" + dependencies: + package-json: ^6.3.0 + checksum: fbc72b071eb66c40f652441fd783a9cca62f08bf42433651937f078cd9ef94bf728ec7743992777826e4e89305aef24f234b515e6030503a2cbee7fc9bdc2c0f 
languageName: node linkType: hard -"is-unicode-supported@npm:^1.2.0": - version: 1.3.0 - resolution: "is-unicode-supported@npm:1.3.0" - checksum: 20a1fc161afafaf49243551a5ac33b6c4cf0bbcce369fcd8f2951fbdd000c30698ce320de3ee6830497310a8f41880f8066d440aa3eb0a853e2aa4836dd89abc +"latest-version@npm:^7.0.0": + version: 7.0.0 + resolution: "latest-version@npm:7.0.0" + dependencies: + package-json: ^8.1.0 + checksum: 1f0deba00d5a34394cce4463c938811f51bbb539b131674f4bb2062c63f2cc3b80bccd56ecade3bd5932d04a34cf0a5a8a2ccc4ec9e5e6b285a9a7b3e27d0d66 languageName: node linkType: hard -"is-whitespace-character@npm:^1.0.0": - version: 1.0.4 - resolution: "is-whitespace-character@npm:1.0.4" - checksum: adab8ad9847ccfcb6f1b7000b8f622881b5ba2a09ce8be2794a6d2b10c3af325b469fc562c9fb889f468eed27be06e227ac609d0aa1e3a59b4dbcc88e2b0418e +"launch-editor@npm:^2.6.0": + version: 2.6.1 + resolution: "launch-editor@npm:2.6.1" + dependencies: + picocolors: ^1.0.0 + shell-quote: ^1.8.1 + checksum: e06d193075ac09f7f8109f10cabe464a211bf7ed4cbe75f83348d6f67bf4d9f162f06e7a1ab3e1cd7fc250b5342c3b57080618aff2e646dc34248fe499227601 languageName: node linkType: hard -"is-word-character@npm:^1.0.0": - version: 1.0.4 - resolution: "is-word-character@npm:1.0.4" - checksum: 1821d6c6abe5bc0b3abe3fdc565d66d7c8a74ea4e93bc77b4a47d26e2e2a306d6ab7d92b353b0d2b182869e3ecaa8f4a346c62d0e31d38ebc0ceaf7cae182c3f +"level-blobs@npm:^0.1.7": + version: 0.1.7 + resolution: "level-blobs@npm:0.1.7" + dependencies: + level-peek: 1.0.6 + once: ^1.3.0 + readable-stream: ^1.0.26-4 + checksum: e3cf78ef0bc64ff350edb4e247b2689cd4f5facf1119694ca8c96c28a05a38dc9d88e0bd065b18af65330bc22f5d588719a5c3e63adaa5feba5ea7913f87bebe languageName: node linkType: hard -"is-wsl@npm:^2.2.0": - version: 2.2.0 - resolution: "is-wsl@npm:2.2.0" - dependencies: - is-docker: ^2.0.0 - checksum: 20849846ae414997d290b75e16868e5261e86ff5047f104027026fd61d8b5a9b0b3ade16239f35e1a067b3c7cc02f70183cb661010ed16f4b6c7c93dad1b19d8 +"level-filesystem@npm:^1.0.1": + version: 1.2.0 + resolution: "level-filesystem@npm:1.2.0" + dependencies: + concat-stream: ^1.4.4 + errno: ^0.1.1 + fwd-stream: ^1.0.4 + level-blobs: ^0.1.7 + level-peek: ^1.0.6 + level-sublevel: ^5.2.0 + octal: ^1.0.0 + once: ^1.3.0 + xtend: ^2.2.0 + checksum: a29e6a9d8c1879d43610113d1bcb59368685ec0ae413fcf0f8dcbb0a0c26b88fcf16f7481acb2b4650e5951ba0635e73a2c8fbe25cd599c50f80949a5547a367 languageName: node linkType: hard -"is-yarn-global@npm:^0.3.0": - version: 0.3.0 - resolution: "is-yarn-global@npm:0.3.0" - checksum: bca013d65fee2862024c9fbb3ba13720ffca2fe750095174c1c80922fdda16402b5c233f5ac9e265bc12ecb5446e7b7f519a32d9541788f01d4d44e24d2bf481 +"level-fix-range@npm:2.0": + version: 2.0.0 + resolution: "level-fix-range@npm:2.0.0" + dependencies: + clone: ~0.1.9 + checksum: 250cefa69e1035d1412b4ba3e5cab83cceb894aa833fb0a93417d8d6230c60f6f8154feffbd0f116461ddd441b909e7df1323355d3e1769b3bb20a55729145b5 languageName: node linkType: hard -"isarray@npm:0.0.1": - version: 0.0.1 - resolution: "isarray@npm:0.0.1" - checksum: 49191f1425681df4a18c2f0f93db3adb85573bcdd6a4482539d98eac9e705d8961317b01175627e860516a2fc45f8f9302db26e5a380a97a520e272e2a40a8d4 +"level-fix-range@npm:~1.0.2": + version: 1.0.2 + resolution: "level-fix-range@npm:1.0.2" + checksum: 6c9a3894ea08947fae79c41b75e8b9d57979523b656bec43c589f2dc4455276a150df445d9a7ca880a7c58c2ef19f5cea7f661d777993b870f4943af6b31d5bb languageName: node linkType: hard -"isarray@npm:~1.0.0": - version: 1.0.0 - resolution: "isarray@npm:1.0.0" - checksum: 
f032df8e02dce8ec565cf2eb605ea939bdccea528dbcf565cdf92bfa2da9110461159d86a537388ef1acef8815a330642d7885b29010e8f7eac967c9993b65ab +"level-hooks@npm:>=4.4.0 <5": + version: 4.5.0 + resolution: "level-hooks@npm:4.5.0" + dependencies: + string-range: ~1.2 + checksum: f198ad2e0901a4719e324e67f546097589af79665ebaaabee7122fda18a41ada3158bb1816b8b82430f30c68610125e4e20b5c09ec3ba7ae262d97dba34f48ab languageName: node linkType: hard -"isbinaryfile@npm:^5.0.0": - version: 5.0.0 - resolution: "isbinaryfile@npm:5.0.0" - checksum: 25cc27388d51b8322c103f5894f9e72ec04e017734e57c4b70be2666501ec7e7f6cbb4a5fcfd15260a7cac979bd1ddb7f5231f5a3098c0695c4e7c049513dfaf +"level-js@npm:^2.1.3": + version: 2.2.4 + resolution: "level-js@npm:2.2.4" + dependencies: + abstract-leveldown: ~0.12.0 + idb-wrapper: ^1.5.0 + isbuffer: ~0.0.0 + ltgt: ^2.1.2 + typedarray-to-buffer: ~1.0.0 + xtend: ~2.1.2 + checksum: 4fed784fcfad4bc6ec97d9c3897e95eaa30326fcdab9f4c7437624d10fa875fa84aafcc2acac0d53181af506cbc012c03f413b4da12ff83758d3bcbb699f8c8e languageName: node linkType: hard -"isexe@npm:^2.0.0": - version: 2.0.0 - resolution: "isexe@npm:2.0.0" - checksum: 26bf6c5480dda5161c820c5b5c751ae1e766c587b1f951ea3fcfc973bafb7831ae5b54a31a69bd670220e42e99ec154475025a468eae58ea262f813fdc8d1c62 +"level-peek@npm:1.0.6, level-peek@npm:^1.0.6": + version: 1.0.6 + resolution: "level-peek@npm:1.0.6" + dependencies: + level-fix-range: ~1.0.2 + checksum: e07d5f8b80675727204d9a226a249139da9e354e633b9d57b7a5186a7b85be445e550ca628f5133bf7a220a9311a193ded5a3f83588dc4eaa53ffb86b426154a languageName: node linkType: hard -"isobject@npm:^3.0.1": - version: 3.0.1 - resolution: "isobject@npm:3.0.1" - checksum: db85c4c970ce30693676487cca0e61da2ca34e8d4967c2e1309143ff910c207133a969f9e4ddb2dc6aba670aabce4e0e307146c310350b298e74a31f7d464703 +"level-sublevel@npm:^5.2.0": + version: 5.2.3 + resolution: "level-sublevel@npm:5.2.3" + dependencies: + level-fix-range: 2.0 + level-hooks: ">=4.4.0 <5" + string-range: ~1.2.1 + xtend: ~2.0.4 + checksum: f0fdffc2f9ca289aa183a1bf7f300a8f92e4f01be60eab37ab36e1f6ec33ed449519d8f69504a616e82f3ddca13a15fa4e19af1dcc1beba9044a4c60b6cd94bf languageName: node linkType: hard -"istanbul-lib-coverage@npm:^3.0.0": - version: 3.2.0 - resolution: "istanbul-lib-coverage@npm:3.2.0" - checksum: a2a545033b9d56da04a8571ed05c8120bf10e9bce01cf8633a3a2b0d1d83dff4ac4fe78d6d5673c27fc29b7f21a41d75f83a36be09f82a61c367b56aa73c1ff9 +"level-supports@npm:^4.0.0": + version: 4.0.1 + resolution: "level-supports@npm:4.0.1" + checksum: d4552b42bb8cdeada07b0f6356c7a90fefe76279147331f291aceae26e3e56d5f927b09ce921647c0230bfe03ddfbdcef332be921e5c2194421ae2bfa3cf6368 languageName: node linkType: hard -"istanbul-lib-report@npm:^3.0.0, istanbul-lib-report@npm:^3.0.1": - version: 3.0.1 - resolution: "istanbul-lib-report@npm:3.0.1" +"level-transcoder@npm:^1.0.1": + version: 1.0.1 + resolution: "level-transcoder@npm:1.0.1" dependencies: - istanbul-lib-coverage: ^3.0.0 - make-dir: ^4.0.0 - supports-color: ^7.1.0 - checksum: fd17a1b879e7faf9bb1dc8f80b2a16e9f5b7b8498fe6ed580a618c34df0bfe53d2abd35bf8a0a00e628fb7405462576427c7df20bbe4148d19c14b431c974b21 + buffer: ^6.0.3 + module-error: ^1.0.1 + checksum: 304f08d802faf3491a533b6d87ad8be3cabfd27f2713bbe9d4c633bf50fcb9460eab5a6776bf015e101ead7ba1c1853e05e7f341112f17a9d0cb37ee5a421a25 languageName: node linkType: hard -"istanbul-reports@npm:^3.0.2": - version: 3.1.6 - resolution: "istanbul-reports@npm:3.1.6" +"level@npm:^8.0.0": + version: 8.0.0 + resolution: "level@npm:8.0.0" dependencies: - html-escaper: ^2.0.0 - 
istanbul-lib-report: ^3.0.0 - checksum: 44c4c0582f287f02341e9720997f9e82c071627e1e862895745d5f52ec72c9b9f38e1d12370015d2a71dcead794f34c7732aaef3fab80a24bc617a21c3d911d6 + browser-level: ^1.0.1 + classic-level: ^1.2.0 + checksum: 13eb25bd71bfdca6cd714d1233adf9da97de9a8a4bf9f28d62a390b5c96d0250abaf983eb90eb8c4e89c7a985bb330750683d106f12670e5ea8fba1d7e608a1f languageName: node linkType: hard -"jackspeak@npm:^2.0.3": - version: 2.3.3 - resolution: "jackspeak@npm:2.3.3" +"levelup@npm:^0.18.2": + version: 0.18.6 + resolution: "levelup@npm:0.18.6" dependencies: - "@isaacs/cliui": ^8.0.2 - "@pkgjs/parseargs": ^0.11.0 - dependenciesMeta: - "@pkgjs/parseargs": - optional: true - checksum: 4313a7c0cc44c7753c4cb9869935f0b06f4cf96827515f63f58ff46b3d2f6e29aba6b3b5151778397c3f5ae67ef8bfc48871967bd10343c27e90cff198ec7808 + bl: ~0.8.1 + deferred-leveldown: ~0.2.0 + errno: ~0.1.1 + prr: ~0.0.0 + readable-stream: ~1.0.26 + semver: ~2.3.1 + xtend: ~3.0.0 + checksum: 80e140dd83dc94050e283fc02874ae85116cb560d81e14fee0ac111f86006887835ec905dca7a081414c07eca202245a580f1e02f696367b777ecc23a9e05b86 languageName: node linkType: hard -"jackspeak@npm:^2.3.5": - version: 2.3.6 - resolution: "jackspeak@npm:2.3.6" - dependencies: - "@isaacs/cliui": ^8.0.2 - "@pkgjs/parseargs": ^0.11.0 - dependenciesMeta: - "@pkgjs/parseargs": - optional: true - checksum: 57d43ad11eadc98cdfe7496612f6bbb5255ea69fe51ea431162db302c2a11011642f50cfad57288bd0aea78384a0612b16e131944ad8ecd09d619041c8531b54 +"leven@npm:^3.1.0": + version: 3.1.0 + resolution: "leven@npm:3.1.0" + checksum: 638401d534585261b6003db9d99afd244dfe82d75ddb6db5c0df412842d5ab30b2ef18de471aaec70fe69a46f17b4ae3c7f01d8a4e6580ef7adb9f4273ad1e55 languageName: node linkType: hard -"jest-util@npm:^29.7.0": - version: 29.7.0 - resolution: "jest-util@npm:29.7.0" +"levn@npm:^0.4.1": + version: 0.4.1 + resolution: "levn@npm:0.4.1" dependencies: - "@jest/types": ^29.6.3 - "@types/node": "*" - chalk: ^4.0.0 - ci-info: ^3.2.0 - graceful-fs: ^4.2.9 - picomatch: ^2.2.3 - checksum: 042ab4980f4ccd4d50226e01e5c7376a8556b472442ca6091a8f102488c0f22e6e8b89ea874111d2328a2080083bf3225c86f3788c52af0bd0345a00eb57a3ca + prelude-ls: ^1.2.1 + type-check: ~0.4.0 + checksum: 12c5021c859bd0f5248561bf139121f0358285ec545ebf48bb3d346820d5c61a4309535c7f387ed7d84361cf821e124ce346c6b7cef8ee09a67c1473b46d0fc4 languageName: node linkType: hard -"jest-worker@npm:^27.4.5": - version: 27.5.1 - resolution: "jest-worker@npm:27.5.1" +"lighthouse-logger@npm:^1.0.0": + version: 1.4.2 + resolution: "lighthouse-logger@npm:1.4.2" dependencies: - "@types/node": "*" - merge-stream: ^2.0.0 - supports-color: ^8.0.0 - checksum: 98cd68b696781caed61c983a3ee30bf880b5bd021c01d98f47b143d4362b85d0737f8523761e2713d45e18b4f9a2b98af1eaee77afade4111bb65c77d6f7c980 + debug: ^2.6.9 + marky: ^1.2.2 + checksum: ba6b73d93424318fab58b4e07c9ed246e3e969a3313f26b69515ed4c06457dd9a0b11bc706948398fdaef26aa4ba5e65cb848c37ce59f470d3c6c450b9b79a33 languageName: node linkType: hard -"jest-worker@npm:^29.1.2": - version: 29.7.0 - resolution: "jest-worker@npm:29.7.0" - dependencies: - "@types/node": "*" - jest-util: ^29.7.0 - merge-stream: ^2.0.0 - supports-color: ^8.0.0 - checksum: 30fff60af49675273644d408b650fc2eb4b5dcafc5a0a455f238322a8f9d8a98d847baca9d51ff197b6747f54c7901daa2287799230b856a0f48287d131f8c13 +"lilconfig@npm:^2.0.3": + version: 2.1.0 + resolution: "lilconfig@npm:2.1.0" + checksum: 8549bb352b8192375fed4a74694cd61ad293904eee33f9d4866c2192865c44c4eb35d10782966242634e0cbc1e91fe62b1247f148dc5514918e3a966da7ea117 languageName: node linkType: 
hard -"jiti@npm:^1.18.2": - version: 1.20.0 - resolution: "jiti@npm:1.20.0" - bin: - jiti: bin/jiti.js - checksum: 7924062b5675142e3e272a27735be84b7bfc0a0eb73217fc2dcafa034f37c4f7b4b9ffc07dd98bcff0f739a8811ce1544db205ae7e97b1c86f0df92c65ce3c72 +"lines-and-columns@npm:^1.1.6": + version: 1.2.4 + resolution: "lines-and-columns@npm:1.2.4" + checksum: 0c37f9f7fa212b38912b7145e1cd16a5f3cd34d782441c3e6ca653485d326f58b3caccda66efce1c5812bde4961bbde3374fae4b0d11bf1226152337f3894aa5 languageName: node linkType: hard -"joi@npm:^17.6.0": - version: 17.11.0 - resolution: "joi@npm:17.11.0" - dependencies: - "@hapi/hoek": ^9.0.0 - "@hapi/topo": ^5.0.0 - "@sideway/address": ^4.1.3 - "@sideway/formula": ^3.0.1 - "@sideway/pinpoint": ^2.0.0 - checksum: 3a4e9ecba345cdafe585e7ed8270a44b39718e11dff3749aa27e0001a63d578b75100c062be28e6f48f960b594864034e7a13833f33fbd7ad56d5ce6b617f9bf +"loader-runner@npm:^4.2.0": + version: 4.3.0 + resolution: "loader-runner@npm:4.3.0" + checksum: a90e00dee9a16be118ea43fec3192d0b491fe03a32ed48a4132eb61d498f5536a03a1315531c19d284392a8726a4ecad71d82044c28d7f22ef62e029bf761569 languageName: node linkType: hard -"js-sdsl@npm:^4.1.4": - version: 4.4.2 - resolution: "js-sdsl@npm:4.4.2" - checksum: ba705adc1788bf3c6f6c8e5077824f2bb4f0acab5a984420ce5cc492c7fff3daddc26335ad2c9a67d4f5e3241ec790f9e5b72a625adcf20cf321d2fd85e62b8b +"loader-utils@npm:^2.0.0": + version: 2.0.4 + resolution: "loader-utils@npm:2.0.4" + dependencies: + big.js: ^5.2.2 + emojis-list: ^3.0.0 + json5: ^2.1.2 + checksum: a5281f5fff1eaa310ad5e1164095689443630f3411e927f95031ab4fb83b4a98f388185bb1fe949e8ab8d4247004336a625e9255c22122b815bb9a4c5d8fc3b7 languageName: node linkType: hard -"js-sha3@npm:0.8.0": - version: 0.8.0 - resolution: "js-sha3@npm:0.8.0" - checksum: 75df77c1fc266973f06cce8309ce010e9e9f07ec35ab12022ed29b7f0d9c8757f5a73e1b35aa24840dced0dea7059085aa143d817aea9e188e2a80d569d9adce +"loader-utils@npm:^3.2.0": + version: 3.2.1 + resolution: "loader-utils@npm:3.2.1" + checksum: 4e3ea054cdc8be1ab1f1238f49f42fdf0483039eff920fb1d442039f3f0ad4ebd11fb8e584ccdf2cb7e3c56b3d40c1832416e6408a55651b843da288960cc792 languageName: node linkType: hard -"js-string-escape@npm:^1.0.1": - version: 1.0.1 - resolution: "js-string-escape@npm:1.0.1" - checksum: f11e0991bf57e0c183b55c547acec85bd2445f043efc9ea5aa68b41bd2a3e7d3ce94636cb233ae0d84064ba4c1a505d32e969813c5b13f81e7d4be12c59256fe +"locate-path@npm:^2.0.0": + version: 2.0.0 + resolution: "locate-path@npm:2.0.0" + dependencies: + p-locate: ^2.0.0 + path-exists: ^3.0.0 + checksum: 02d581edbbbb0fa292e28d96b7de36b5b62c2fa8b5a7e82638ebb33afa74284acf022d3b1e9ae10e3ffb7658fbc49163fcd5e76e7d1baaa7801c3e05a81da755 languageName: node linkType: hard -"js-tokens@npm:^3.0.0 || ^4.0.0, js-tokens@npm:^4.0.0": - version: 4.0.0 - resolution: "js-tokens@npm:4.0.0" - checksum: 8a95213a5a77deb6cbe94d86340e8d9ace2b93bc367790b260101d2f36a2eaf4e4e22d9fa9cf459b38af3a32fb4190e638024cf82ec95ef708680e405ea7cc78 +"locate-path@npm:^3.0.0": + version: 3.0.0 + resolution: "locate-path@npm:3.0.0" + dependencies: + p-locate: ^3.0.0 + path-exists: ^3.0.0 + checksum: 53db3996672f21f8b0bf2a2c645ae2c13ffdae1eeecfcd399a583bce8516c0b88dcb4222ca6efbbbeb6949df7e46860895be2c02e8d3219abd373ace3bfb4e11 languageName: node linkType: hard -"js-yaml@npm:4.1.0, js-yaml@npm:^4.1.0": - version: 4.1.0 - resolution: "js-yaml@npm:4.1.0" +"locate-path@npm:^5.0.0": + version: 5.0.0 + resolution: "locate-path@npm:5.0.0" dependencies: - argparse: ^2.0.1 - bin: - js-yaml: bin/js-yaml.js - checksum: 
c7830dfd456c3ef2c6e355cc5a92e6700ceafa1d14bba54497b34a99f0376cecbb3e9ac14d3e5849b426d5a5140709a66237a8c991c675431271c4ce5504151a + p-locate: ^4.1.0 + checksum: 83e51725e67517287d73e1ded92b28602e3ae5580b301fe54bfb76c0c723e3f285b19252e375712316774cf52006cb236aed5704692c32db0d5d089b69696e30 languageName: node linkType: hard -"js-yaml@npm:^3.13.1, js-yaml@npm:^3.14.1": - version: 3.14.1 - resolution: "js-yaml@npm:3.14.1" +"locate-path@npm:^6.0.0": + version: 6.0.0 + resolution: "locate-path@npm:6.0.0" dependencies: - argparse: ^1.0.7 - esprima: ^4.0.0 - bin: - js-yaml: bin/js-yaml.js - checksum: bef146085f472d44dee30ec34e5cf36bf89164f5d585435a3d3da89e52622dff0b188a580e4ad091c3341889e14cb88cac6e4deb16dc5b1e9623bb0601fc255c + p-locate: ^5.0.0 + checksum: 72eb661788a0368c099a184c59d2fee760b3831c9c1c33955e8a19ae4a21b4116e53fa736dc086cdeb9fce9f7cc508f2f92d2d3aae516f133e16a2bb59a39f5a languageName: node linkType: hard -"jsesc@npm:^2.5.1": - version: 2.5.2 - resolution: "jsesc@npm:2.5.2" - bin: - jsesc: bin/jsesc - checksum: 4dc190771129e12023f729ce20e1e0bfceac84d73a85bc3119f7f938843fe25a4aeccb54b6494dce26fcf263d815f5f31acdefac7cc9329efb8422a4f4d9fa9d +"locate-path@npm:^7.1.0": + version: 7.2.0 + resolution: "locate-path@npm:7.2.0" + dependencies: + p-locate: ^6.0.0 + checksum: c1b653bdf29beaecb3d307dfb7c44d98a2a98a02ebe353c9ad055d1ac45d6ed4e1142563d222df9b9efebc2bcb7d4c792b507fad9e7150a04c29530b7db570f8 languageName: node linkType: hard -"jsesc@npm:~0.5.0": - version: 0.5.0 - resolution: "jsesc@npm:0.5.0" - bin: - jsesc: bin/jsesc - checksum: b8b44cbfc92f198ad972fba706ee6a1dfa7485321ee8c0b25f5cedd538dcb20cde3197de16a7265430fce8277a12db066219369e3d51055038946039f6e20e17 +"lodash.assignwith@npm:^4.2.0": + version: 4.2.0 + resolution: "lodash.assignwith@npm:4.2.0" + checksum: 014a88e398802ca4eaae314afb67f32eb2cab6f01e61490dbbb74694263f79715341ab8ddf4b344093a2253b506d347f67731f0499e457d9c0128be1d2caf6dd languageName: node linkType: hard -"json-buffer@npm:3.0.0": - version: 3.0.0 - resolution: "json-buffer@npm:3.0.0" - checksum: 0cecacb8025370686a916069a2ff81f7d55167421b6aa7270ee74e244012650dd6bce22b0852202ea7ff8624fce50ff0ec1bdf95914ccb4553426e290d5a63fa +"lodash.camelcase@npm:^4.3.0": + version: 4.3.0 + resolution: "lodash.camelcase@npm:4.3.0" + checksum: cb9227612f71b83e42de93eccf1232feeb25e705bdb19ba26c04f91e885bfd3dd5c517c4a97137658190581d3493ea3973072ca010aab7e301046d90740393d1 languageName: node linkType: hard -"json-buffer@npm:3.0.1": - version: 3.0.1 - resolution: "json-buffer@npm:3.0.1" - checksum: 9026b03edc2847eefa2e37646c579300a1f3a4586cfb62bf857832b60c852042d0d6ae55d1afb8926163fa54c2b01d83ae24705f34990348bdac6273a29d4581 +"lodash.debounce@npm:^4.0.8": + version: 4.0.8 + resolution: "lodash.debounce@npm:4.0.8" + checksum: a3f527d22c548f43ae31c861ada88b2637eb48ac6aa3eb56e82d44917971b8aa96fbb37aa60efea674dc4ee8c42074f90f7b1f772e9db375435f6c83a19b3bc6 languageName: node linkType: hard -"json-parse-even-better-errors@npm:^2.3.0, json-parse-even-better-errors@npm:^2.3.1": - version: 2.3.1 - resolution: "json-parse-even-better-errors@npm:2.3.1" - checksum: 798ed4cf3354a2d9ccd78e86d2169515a0097a5c133337807cdf7f1fc32e1391d207ccfc276518cc1d7d8d4db93288b8a50ba4293d212ad1336e52a8ec0a941f +"lodash.get@npm:^4.4.2": + version: 4.4.2 + resolution: "lodash.get@npm:4.4.2" + checksum: e403047ddb03181c9d0e92df9556570e2b67e0f0a930fcbbbd779370972368f5568e914f913e93f3b08f6d492abc71e14d4e9b7a18916c31fa04bd2306efe545 languageName: node linkType: hard -"json-schema-traverse@npm:^0.4.1": - version: 0.4.1 - 
resolution: "json-schema-traverse@npm:0.4.1" - checksum: 7486074d3ba247769fda17d5181b345c9fb7d12e0da98b22d1d71a5db9698d8b4bd900a3ec1a4ffdd60846fc2556274a5c894d0c48795f14cb03aeae7b55260b +"lodash.isequal@npm:^4.5.0": + version: 4.5.0 + resolution: "lodash.isequal@npm:4.5.0" + checksum: da27515dc5230eb1140ba65ff8de3613649620e8656b19a6270afe4866b7bd461d9ba2ac8a48dcc57f7adac4ee80e1de9f965d89d4d81a0ad52bb3eec2609644 languageName: node linkType: hard -"json-schema-traverse@npm:^1.0.0": - version: 1.0.0 - resolution: "json-schema-traverse@npm:1.0.0" - checksum: 02f2f466cdb0362558b2f1fd5e15cce82ef55d60cd7f8fa828cf35ba74330f8d767fcae5c5c2adb7851fa811766c694b9405810879bc4e1ddd78a7c0e03658ad +"lodash.memoize@npm:^4.1.2": + version: 4.1.2 + resolution: "lodash.memoize@npm:4.1.2" + checksum: 9ff3942feeccffa4f1fafa88d32f0d24fdc62fd15ded5a74a5f950ff5f0c6f61916157246744c620173dddf38d37095a92327d5fd3861e2063e736a5c207d089 languageName: node linkType: hard -"json-stable-stringify-without-jsonify@npm:^1.0.1": - version: 1.0.1 - resolution: "json-stable-stringify-without-jsonify@npm:1.0.1" - checksum: cff44156ddce9c67c44386ad5cddf91925fe06b1d217f2da9c4910d01f358c6e3989c4d5a02683c7a5667f9727ff05831f7aa8ae66c8ff691c556f0884d49215 +"lodash.merge@npm:^4.6.2": + version: 4.6.2 + resolution: "lodash.merge@npm:4.6.2" + checksum: ad580b4bdbb7ca1f7abf7e1bce63a9a0b98e370cf40194b03380a46b4ed799c9573029599caebc1b14e3f24b111aef72b96674a56cfa105e0f5ac70546cdc005 languageName: node linkType: hard -"json5@npm:^2.1.2, json5@npm:^2.2.3": - version: 2.2.3 - resolution: "json5@npm:2.2.3" - bin: - json5: lib/cli.js - checksum: 2a7436a93393830bce797d4626275152e37e877b265e94ca69c99e3d20c2b9dab021279146a39cdb700e71b2dd32a4cebd1514cd57cee102b1af906ce5040349 +"lodash.uniq@npm:4.5.0, lodash.uniq@npm:^4.5.0": + version: 4.5.0 + resolution: "lodash.uniq@npm:4.5.0" + checksum: a4779b57a8d0f3c441af13d9afe7ecff22dd1b8ce1129849f71d9bbc8e8ee4e46dfb4b7c28f7ad3d67481edd6e51126e4e2a6ee276e25906d10f7140187c392d languageName: node linkType: hard -"jsonc-parser@npm:^3.2.0": - version: 3.2.0 - resolution: "jsonc-parser@npm:3.2.0" - checksum: 946dd9a5f326b745aa326d48a7257e3f4a4b62c5e98ec8e49fa2bdd8d96cef7e6febf1399f5c7016114fd1f68a1c62c6138826d5d90bc650448e3cf0951c53c7 +"lodash@npm:^4.17.11, lodash@npm:^4.17.14, lodash@npm:^4.17.19, lodash@npm:^4.17.20, lodash@npm:^4.17.21": + version: 4.17.21 + resolution: "lodash@npm:4.17.21" + checksum: eb835a2e51d381e561e508ce932ea50a8e5a68f4ebdd771ea240d3048244a8d13658acbd502cd4829768c56f2e16bdd4340b9ea141297d472517b83868e677f7 languageName: node linkType: hard -"jsonfile@npm:^2.1.0": - version: 2.4.0 - resolution: "jsonfile@npm:2.4.0" +"log-symbols@npm:4.1.0": + version: 4.1.0 + resolution: "log-symbols@npm:4.1.0" dependencies: - graceful-fs: ^4.1.6 - dependenciesMeta: - graceful-fs: - optional: true - checksum: f5064aabbc9e35530dc471d8b203ae1f40dbe949ddde4391c6f6a6d310619a15f0efdae5587df594d1d70c555193aaeee9d2ed4aec9ffd5767bd5e4e62d49c3d + chalk: ^4.1.0 + is-unicode-supported: ^0.1.0 + checksum: fce1497b3135a0198803f9f07464165e9eb83ed02ceb2273930a6f8a508951178d8cf4f0378e9d28300a2ed2bc49050995d2bd5f53ab716bb15ac84d58c6ef74 languageName: node linkType: hard -"jsonfile@npm:^4.0.0": +"log-update@npm:^4.0.0": version: 4.0.0 - resolution: "jsonfile@npm:4.0.0" + resolution: "log-update@npm:4.0.0" dependencies: - graceful-fs: ^4.1.6 - dependenciesMeta: - graceful-fs: - optional: true - checksum: 
6447d6224f0d31623eef9b51185af03ac328a7553efcee30fa423d98a9e276ca08db87d71e17f2310b0263fd3ffa6c2a90a6308367f661dc21580f9469897c9e + ansi-escapes: ^4.3.0 + cli-cursor: ^3.1.0 + slice-ansi: ^4.0.0 + wrap-ansi: ^6.2.0 + checksum: ae2f85bbabc1906034154fb7d4c4477c79b3e703d22d78adee8b3862fa913942772e7fa11713e3d96fb46de4e3cabefbf5d0a544344f03b58d3c4bff52aa9eb2 languageName: node linkType: hard -"jsonfile@npm:^6.0.1": - version: 6.1.0 - resolution: "jsonfile@npm:6.1.0" - dependencies: - graceful-fs: ^4.1.6 - universalify: ^2.0.0 - dependenciesMeta: - graceful-fs: - optional: true - checksum: 7af3b8e1ac8fe7f1eccc6263c6ca14e1966fcbc74b618d3c78a0a2075579487547b94f72b7a1114e844a1e15bb00d440e5d1720bfc4612d790a6f285d5ea8354 +"longest-streak@npm:^3.0.0": + version: 3.1.0 + resolution: "longest-streak@npm:3.1.0" + checksum: d7f952ed004cbdb5c8bcfc4f7f5c3d65449e6c5a9e9be4505a656e3df5a57ee125f284286b4bf8ecea0c21a7b3bf2b8f9001ad506c319b9815ad6a63a47d0fd0 languageName: node linkType: hard -"katex@npm:^0.13.0": - version: 0.13.24 - resolution: "katex@npm:0.13.24" +"loose-envify@npm:^1.0.0, loose-envify@npm:^1.1.0, loose-envify@npm:^1.2.0, loose-envify@npm:^1.3.1, loose-envify@npm:^1.4.0": + version: 1.4.0 + resolution: "loose-envify@npm:1.4.0" dependencies: - commander: ^8.0.0 + js-tokens: ^3.0.0 || ^4.0.0 bin: - katex: cli.js - checksum: 1b7c8295867073d0db4f6fb41ef1c0e3418b8e23924ff61b446b36134cb74cdadc7242dfbfb922d9c32f0b15eda6160a08cd30948c4e78141966ca2991a1726b + loose-envify: cli.js + checksum: 6517e24e0cad87ec9888f500c5b5947032cdfe6ef65e1c1936a0c48a524b81e65542c9c3edc91c97d5bddc806ee2a985dbc79be89215d613b1de5db6d1cfe6f4 languageName: node linkType: hard -"keccak@npm:^3.0.0, keccak@npm:^3.0.2": - version: 3.0.4 - resolution: "keccak@npm:3.0.4" +"loupe@npm:^2.3.6": + version: 2.3.7 + resolution: "loupe@npm:2.3.7" dependencies: - node-addon-api: ^2.0.0 - node-gyp: latest - node-gyp-build: ^4.2.0 - readable-stream: ^3.6.0 - checksum: 2bf27b97b2f24225b1b44027de62be547f5c7326d87d249605665abd0c8c599d774671c35504c62c9b922cae02758504c6f76a73a84234d23af8a2211afaaa11 + get-func-name: ^2.0.1 + checksum: 96c058ec7167598e238bb7fb9def2f9339215e97d6685d9c1e3e4bdb33d14600e11fe7a812cf0c003dfb73ca2df374f146280b2287cae9e8d989e9d7a69a203b + languageName: node + linkType: hard + +"lower-case@npm:^2.0.2": + version: 2.0.2 + resolution: "lower-case@npm:2.0.2" + dependencies: + tslib: ^2.0.3 + checksum: 83a0a5f159ad7614bee8bf976b96275f3954335a84fad2696927f609ddae902802c4f3312d86668722e668bef41400254807e1d3a7f2e8c3eede79691aa1f010 + languageName: node + linkType: hard + +"lowercase-keys@npm:^1.0.0, lowercase-keys@npm:^1.0.1": + version: 1.0.1 + resolution: "lowercase-keys@npm:1.0.1" + checksum: 4d045026595936e09953e3867722e309415ff2c80d7701d067546d75ef698dac218a4f53c6d1d0e7368b47e45fd7529df47e6cb56fbb90523ba599f898b3d147 + languageName: node + linkType: hard + +"lowercase-keys@npm:^2.0.0": + version: 2.0.0 + resolution: "lowercase-keys@npm:2.0.0" + checksum: 24d7ebd56ccdf15ff529ca9e08863f3c54b0b9d1edb97a3ae1af34940ae666c01a1e6d200707bce730a8ef76cb57cc10e65f245ecaaf7e6bc8639f2fb460ac23 languageName: node linkType: hard -"keygrip@npm:~1.1.0": - version: 1.1.0 - resolution: "keygrip@npm:1.1.0" - dependencies: - tsscmp: 1.0.6 - checksum: 078cd16a463d187121f0a27c1c9c95c52ad392b620f823431689f345a0501132cee60f6e96914b07d570105af470b96960402accd6c48a0b1f3cd8fac4fa2cae +"lowercase-keys@npm:^3.0.0": + version: 3.0.0 + resolution: "lowercase-keys@npm:3.0.0" + checksum: 
67a3f81409af969bc0c4ca0e76cd7d16adb1e25aa1c197229587eaf8671275c8c067cd421795dbca4c81be0098e4c426a086a05e30de8a9c587b7a13c0c7ccc5 languageName: node linkType: hard -"keyv@npm:^3.0.0": - version: 3.1.0 - resolution: "keyv@npm:3.1.0" +"lru-cache@npm:^10.0.1, lru-cache@npm:^9.1.1 || ^10.0.0": + version: 10.1.0 + resolution: "lru-cache@npm:10.1.0" + checksum: 58056d33e2500fbedce92f8c542e7c11b50d7d086578f14b7074d8c241422004af0718e08a6eaae8705cee09c77e39a61c1c79e9370ba689b7010c152e6a76ab + languageName: node + linkType: hard + +"lru-cache@npm:^5.1.1": + version: 5.1.1 + resolution: "lru-cache@npm:5.1.1" dependencies: - json-buffer: 3.0.0 - checksum: bb7e8f3acffdbafbc2dd5b63f377fe6ec4c0e2c44fc82720449ef8ab54f4a7ce3802671ed94c0f475ae0a8549703353a2124561fcf3317010c141b32ca1ce903 + yallist: ^3.0.2 + checksum: c154ae1cbb0c2206d1501a0e94df349653c92c8cbb25236d7e85190bcaf4567a03ac6eb43166fabfa36fd35623694da7233e88d9601fbf411a9a481d85dbd2cb languageName: node linkType: hard -"keyv@npm:^4.5.3": - version: 4.5.3 - resolution: "keyv@npm:4.5.3" +"lru-cache@npm:^6.0.0": + version: 6.0.0 + resolution: "lru-cache@npm:6.0.0" dependencies: - json-buffer: 3.0.1 - checksum: 3ffb4d5b72b6b4b4af443bbb75ca2526b23c750fccb5ac4c267c6116888b4b65681015c2833cb20d26cf3e6e32dac6b988c77f7f022e1a571b7d90f1442257da + yallist: ^4.0.0 + checksum: f97f499f898f23e4585742138a22f22526254fdba6d75d41a1c2526b3b6cc5747ef59c5612ba7375f42aca4f8461950e925ba08c991ead0651b4918b7c978297 languageName: node linkType: hard -"kind-of@npm:^6.0.0, kind-of@npm:^6.0.2": - version: 6.0.3 - resolution: "kind-of@npm:6.0.3" - checksum: 3ab01e7b1d440b22fe4c31f23d8d38b4d9b91d9f291df683476576493d5dfd2e03848a8b05813dd0c3f0e835bc63f433007ddeceb71f05cb25c45ae1b19c6d3b +"lru-cache@npm:^7.14.1": + version: 7.18.3 + resolution: "lru-cache@npm:7.18.3" + checksum: e550d772384709deea3f141af34b6d4fa392e2e418c1498c078de0ee63670f1f46f5eee746e8ef7e69e1c895af0d4224e62ee33e66a543a14763b0f2e74c1356 languageName: node linkType: hard -"klaw-sync@npm:^6.0.0": - version: 6.0.0 - resolution: "klaw-sync@npm:6.0.0" - dependencies: - graceful-fs: ^4.1.11 - checksum: 0da397f8961313c3ef8f79fb63af9002cde5a8fb2aeb1a37351feff0dd6006129c790400c3f5c3b4e757bedcabb13d21ec0a5eaef5a593d59515d4f2c291e475 +"lru-cache@npm:^8.0.4": + version: 8.0.5 + resolution: "lru-cache@npm:8.0.5" + checksum: 87d72196d8f46e8299c4ab576ed2ec8a07e3cbef517dc9874399c0b2470bd9bf62aacec3b67f84ed6d74aaa1ef31636d048edf996f76248fd17db72bfb631609 languageName: node linkType: hard -"klaw@npm:^1.0.0": - version: 1.3.1 - resolution: "klaw@npm:1.3.1" - dependencies: - graceful-fs: ^4.1.9 - dependenciesMeta: - graceful-fs: - optional: true - checksum: 8f69e4797c26e7c3f2426bfa85f38a3da3c2cb1b4c6bd850d2377aed440d41ce9d806f2885c2e2e224372c56af4b1d43b8a499adecf9a05e7373dc6b8b7c52e4 +"lru_map@npm:^0.3.3": + version: 0.3.3 + resolution: "lru_map@npm:0.3.3" + checksum: ca9dd43c65ed7a4f117c548028101c5b6855e10923ea9d1f635af53ad20c5868ff428c364d454a7b57fe391b89c704982275410c3c5099cca5aeee00d76e169a languageName: node linkType: hard -"kleur@npm:^3.0.3": - version: 3.0.3 - resolution: "kleur@npm:3.0.3" - checksum: df82cd1e172f957bae9c536286265a5cdbd5eeca487cb0a3b2a7b41ef959fc61f8e7c0e9aeea9c114ccf2c166b6a8dd45a46fd619c1c569d210ecd2765ad5169 +"ltgt@npm:^2.1.2": + version: 2.2.1 + resolution: "ltgt@npm:2.2.1" + checksum: 7e3874296f7538bc8087b428ac4208008d7b76916354b34a08818ca7c83958c1df10ec427eeeaad895f6b81e41e24745b18d30f89abcc21d228b94f6961d50a2 languageName: node linkType: hard -"kleur@npm:^4.0.3": - version: 4.1.5 - resolution: 
"kleur@npm:4.1.5" - checksum: 1dc476e32741acf0b1b5b0627ffd0d722e342c1b0da14de3e8ae97821327ca08f9fb944542fb3c126d90ac5f27f9d804edbe7c585bf7d12ef495d115e0f22c12 +"lunr-languages@npm:^1.4.0": + version: 1.14.0 + resolution: "lunr-languages@npm:1.14.0" + checksum: 05dd6338af6897932f64f9cb735d5b48f9905d892499b22a3f3abc279b2ac71a6bce0fdfe59c01464c6ad3f8e44e2956ba0637f092535239793bbadf4540e72d languageName: node linkType: hard -"koa-compose@npm:^4.1.0": - version: 4.1.0 - resolution: "koa-compose@npm:4.1.0" - checksum: 46cb16792d96425e977c2ae4e5cb04930280740e907242ec9c25e3fb8b4a1d7b54451d7432bc24f40ec62255edea71894d2ceeb8238501842b4e48014f2e83db +"lunr@npm:^2.3.9": + version: 2.3.9 + resolution: "lunr@npm:2.3.9" + checksum: 176719e24fcce7d3cf1baccce9dd5633cd8bdc1f41ebe6a180112e5ee99d80373fe2454f5d4624d437e5a8319698ca6837b9950566e15d2cae5f2a543a3db4b8 languageName: node linkType: hard -"koa-convert@npm:^2.0.0": - version: 2.0.0 - resolution: "koa-convert@npm:2.0.0" +"make-dir@npm:^3.0.0, make-dir@npm:^3.0.2, make-dir@npm:^3.1.0": + version: 3.1.0 + resolution: "make-dir@npm:3.1.0" dependencies: - co: ^4.6.0 - koa-compose: ^4.1.0 - checksum: 7385b3391995f59c1312142e110d5dff677f9850dbfbcf387cd36a7b0af03b5d26e82b811eb9bb008b4f3e661cdab1f8817596e46b1929da2cf6e97a2f7456ed + semver: ^6.0.0 + checksum: 484200020ab5a1fdf12f393fe5f385fc8e4378824c940fba1729dcd198ae4ff24867bc7a5646331e50cead8abff5d9270c456314386e629acec6dff4b8016b78 languageName: node linkType: hard -"koa-etag@npm:^4.0.0": +"make-dir@npm:^4.0.0": version: 4.0.0 - resolution: "koa-etag@npm:4.0.0" + resolution: "make-dir@npm:4.0.0" dependencies: - etag: ^1.8.1 - checksum: b5f413574e1edbd60fbbd0d31720e66565d51bfcb407d1bc3f48d9dd5b45fa5a9e4f69a60e749fad7397348e90de23e943307578d007a69da30faaae432deaf6 + semver: ^7.5.3 + checksum: bf0731a2dd3aab4db6f3de1585cea0b746bb73eb5a02e3d8d72757e376e64e6ada190b1eddcde5b2f24a81b688a9897efd5018737d05e02e2a671dda9cff8a8a languageName: node linkType: hard -"koa-send@npm:^5.0.0, koa-send@npm:^5.0.1": - version: 5.0.1 - resolution: "koa-send@npm:5.0.1" - dependencies: - debug: ^4.1.1 - http-errors: ^1.7.3 - resolve-path: ^1.4.0 - checksum: a9fbaadbe0f50efd157a733df4a1cc2b3b79b0cdf12e67c718641e6038d1792c0bebe40913e6d4ceb707d970301155be3859b98d1ef08b0fd1766f7326b82853 +"make-error@npm:^1.1.1": + version: 1.3.6 + resolution: "make-error@npm:1.3.6" + checksum: b86e5e0e25f7f777b77fabd8e2cbf15737972869d852a22b7e73c17623928fccb826d8e46b9951501d3f20e51ad74ba8c59ed584f610526a48f8ccf88aaec402 languageName: node linkType: hard -"koa-static@npm:^5.0.0": - version: 5.0.0 - resolution: "koa-static@npm:5.0.0" +"make-fetch-happen@npm:^13.0.0": + version: 13.0.0 + resolution: "make-fetch-happen@npm:13.0.0" dependencies: - debug: ^3.1.0 - koa-send: ^5.0.0 - checksum: 8d9b9c4d2b3b13e8818e804245d784099c4b353b55ddd7dbeeb90f27a2e9f5b6f86bd16a4909e337cb89db4d332d9002e6c0f5056caf75749cab62f93c1f0cc5 + "@npmcli/agent": ^2.0.0 + cacache: ^18.0.0 + http-cache-semantics: ^4.1.1 + is-lambda: ^1.0.1 + minipass: ^7.0.2 + minipass-fetch: ^3.0.0 + minipass-flush: ^1.0.5 + minipass-pipeline: ^1.2.4 + negotiator: ^0.6.3 + promise-retry: ^2.0.1 + ssri: ^10.0.0 + checksum: 7c7a6d381ce919dd83af398b66459a10e2fe8f4504f340d1d090d3fa3d1b0c93750220e1d898114c64467223504bd258612ba83efbc16f31b075cd56de24b4af languageName: node linkType: hard -"koa@npm:^2.13.0": - version: 2.14.2 - resolution: "koa@npm:2.14.2" - dependencies: - accepts: ^1.3.5 - cache-content-type: ^1.0.0 - content-disposition: ~0.5.2 - content-type: ^1.0.4 - cookies: ~0.8.0 - debug: ^4.3.2 - 
delegates: ^1.0.0 - depd: ^2.0.0 - destroy: ^1.0.4 - encodeurl: ^1.0.2 - escape-html: ^1.0.3 - fresh: ~0.5.2 - http-assert: ^1.3.0 - http-errors: ^1.6.3 - is-generator-function: ^1.0.7 - koa-compose: ^4.1.0 - koa-convert: ^2.0.0 - on-finished: ^2.3.0 - only: ~0.0.2 - parseurl: ^1.3.2 - statuses: ^1.5.0 - type-is: ^1.6.16 - vary: ^1.1.2 - checksum: 17fe3b8f5e0b4759004a942cc6ba2a9507299943a697dff9766b85f41f45caed4077ca2645ac9ad254d3359fffedfc4c9ebdd7a70493e5df8cdfac159a8ee835 +"map-stream@npm:~0.1.0": + version: 0.1.0 + resolution: "map-stream@npm:0.1.0" + checksum: 38abbe4eb883888031e6b2fc0630bc583c99396be16b8ace5794b937b682a8a081f03e8b15bfd4914d1bc88318f0e9ac73ba3512ae65955cd449f63256ddb31d languageName: node linkType: hard -"latest-version@npm:^5.1.0": - version: 5.1.0 - resolution: "latest-version@npm:5.1.0" - dependencies: - package-json: ^6.3.0 - checksum: fbc72b071eb66c40f652441fd783a9cca62f08bf42433651937f078cd9ef94bf728ec7743992777826e4e89305aef24f234b515e6030503a2cbee7fc9bdc2c0f +"mark.js@npm:^8.11.1": + version: 8.11.1 + resolution: "mark.js@npm:8.11.1" + checksum: aa6b9ae1c67245348d5b7abd253ef2acd6bb05c6be358d7d192416d964e42665fc10e0e865591c6f93ab9b57e8da1f23c23216e8ebddb580905ea7a0c0df15d4 languageName: node linkType: hard -"launch-editor@npm:^2.6.0": - version: 2.6.1 - resolution: "launch-editor@npm:2.6.1" - dependencies: - picocolors: ^1.0.0 - shell-quote: ^1.8.1 - checksum: e06d193075ac09f7f8109f10cabe464a211bf7ed4cbe75f83348d6f67bf4d9f162f06e7a1ab3e1cd7fc250b5342c3b57080618aff2e646dc34248fe499227601 +"markdown-escapes@npm:^1.0.0": + version: 1.0.4 + resolution: "markdown-escapes@npm:1.0.4" + checksum: 6833a93d72d3f70a500658872312c6fa8015c20cc835a85ae6901fa232683fbc6ed7118ebe920fea7c80039a560f339c026597d96eee0e9de602a36921804997 languageName: node linkType: hard -"level-supports@npm:^4.0.0": - version: 4.0.1 - resolution: "level-supports@npm:4.0.1" - checksum: d4552b42bb8cdeada07b0f6356c7a90fefe76279147331f291aceae26e3e56d5f927b09ce921647c0230bfe03ddfbdcef332be921e5c2194421ae2bfa3cf6368 +"markdown-extensions@npm:^2.0.0": + version: 2.0.0 + resolution: "markdown-extensions@npm:2.0.0" + checksum: ec4ffcb0768f112e778e7ac74cb8ef22a966c168c3e6c29829f007f015b0a0b5c79c73ee8599a0c72e440e7f5cfdbf19e80e2d77b9a313b8f66e180a330cf1b2 languageName: node linkType: hard -"level-transcoder@npm:^1.0.1": - version: 1.0.1 - resolution: "level-transcoder@npm:1.0.1" - dependencies: - buffer: ^6.0.3 - module-error: ^1.0.1 - checksum: 304f08d802faf3491a533b6d87ad8be3cabfd27f2713bbe9d4c633bf50fcb9460eab5a6776bf015e101ead7ba1c1853e05e7f341112f17a9d0cb37ee5a421a25 +"markdown-table@npm:^3.0.0": + version: 3.0.3 + resolution: "markdown-table@npm:3.0.3" + checksum: 8fcd3d9018311120fbb97115987f8b1665a603f3134c93fbecc5d1463380c8036f789e2a62c19432058829e594fff8db9ff81c88f83690b2f8ed6c074f8d9e10 languageName: node linkType: hard -"level@npm:^8.0.0": - version: 8.0.0 - resolution: "level@npm:8.0.0" - dependencies: - browser-level: ^1.0.1 - classic-level: ^1.2.0 - checksum: 13eb25bd71bfdca6cd714d1233adf9da97de9a8a4bf9f28d62a390b5c96d0250abaf983eb90eb8c4e89c7a985bb330750683d106f12670e5ea8fba1d7e608a1f +"marked@npm:^4.3.0": + version: 4.3.0 + resolution: "marked@npm:4.3.0" + bin: + marked: bin/marked.js + checksum: 0db6817893952c3ec710eb9ceafb8468bf5ae38cb0f92b7b083baa13d70b19774674be04db5b817681fa7c5c6a088f61300815e4dd75a59696f4716ad69f6260 languageName: node linkType: hard -"leven@npm:^3.1.0": - version: 3.1.0 - resolution: "leven@npm:3.1.0" - checksum: 
638401d534585261b6003db9d99afd244dfe82d75ddb6db5c0df412842d5ab30b2ef18de471aaec70fe69a46f17b4ae3c7f01d8a4e6580ef7adb9f4273ad1e55 +"marky@npm:^1.2.2": + version: 1.2.5 + resolution: "marky@npm:1.2.5" + checksum: 823b946677749551cdfc3b5221685478b5d1b9cc0dc03eff977c6f9a615fb05c67559f9556cb3c0fcb941a9ea0e195e37befd83026443396ccee8b724f54f4c5 languageName: node linkType: hard -"levn@npm:^0.4.1": - version: 0.4.1 - resolution: "levn@npm:0.4.1" +"mcl-wasm@npm:^0.7.1": + version: 0.7.9 + resolution: "mcl-wasm@npm:0.7.9" + checksum: 6b6ed5084156b98b2db70b223e1ba2c01953970b48a2e0c4ea3eeb9296610e6b3bfb2a2cce9e92e2d7ad61778b5f5a630e705e663835e915ba188c174a0a37fa + languageName: node + linkType: hard + +"md5.js@npm:^1.3.4": + version: 1.3.5 + resolution: "md5.js@npm:1.3.5" dependencies: - prelude-ls: ^1.2.1 - type-check: ~0.4.0 - checksum: 12c5021c859bd0f5248561bf139121f0358285ec545ebf48bb3d346820d5c61a4309535c7f387ed7d84361cf821e124ce346c6b7cef8ee09a67c1473b46d0fc4 + hash-base: ^3.0.0 + inherits: ^2.0.1 + safe-buffer: ^5.1.2 + checksum: 098494d885684bcc4f92294b18ba61b7bd353c23147fbc4688c75b45cb8590f5a95fd4584d742415dcc52487f7a1ef6ea611cfa1543b0dc4492fe026357f3f0c languageName: node linkType: hard -"lighthouse-logger@npm:^1.0.0": - version: 1.4.2 - resolution: "lighthouse-logger@npm:1.4.2" +"mdast-squeeze-paragraphs@npm:^4.0.0": + version: 4.0.0 + resolution: "mdast-squeeze-paragraphs@npm:4.0.0" dependencies: - debug: ^2.6.9 - marky: ^1.2.2 - checksum: ba6b73d93424318fab58b4e07c9ed246e3e969a3313f26b69515ed4c06457dd9a0b11bc706948398fdaef26aa4ba5e65cb848c37ce59f470d3c6c450b9b79a33 + unist-util-remove: ^2.0.0 + checksum: dfe8ec8e8a62171f020e82b088cc35cb9da787736dc133a3b45ce8811782a93e69bf06d147072e281079f09fac67be8a36153ffffd9bfbf89ed284e4c4f56f75 languageName: node linkType: hard -"lilconfig@npm:^2.0.3": - version: 2.1.0 - resolution: "lilconfig@npm:2.1.0" - checksum: 8549bb352b8192375fed4a74694cd61ad293904eee33f9d4866c2192865c44c4eb35d10782966242634e0cbc1e91fe62b1247f148dc5514918e3a966da7ea117 +"mdast-util-definitions@npm:^4.0.0": + version: 4.0.0 + resolution: "mdast-util-definitions@npm:4.0.0" + dependencies: + unist-util-visit: ^2.0.0 + checksum: 2325f20b82b3fb8cb5fda77038ee0bbdd44f82cfca7c48a854724b58bc1fe5919630a3ce7c45e210726df59d46c881d020b2da7a493bfd1ee36eb2bbfef5d78e languageName: node linkType: hard -"lines-and-columns@npm:^1.1.6": - version: 1.2.4 - resolution: "lines-and-columns@npm:1.2.4" - checksum: 0c37f9f7fa212b38912b7145e1cd16a5f3cd34d782441c3e6ca653485d326f58b3caccda66efce1c5812bde4961bbde3374fae4b0d11bf1226152337f3894aa5 +"mdast-util-directive@npm:^3.0.0": + version: 3.0.0 + resolution: "mdast-util-directive@npm:3.0.0" + dependencies: + "@types/mdast": ^4.0.0 + "@types/unist": ^3.0.0 + devlop: ^1.0.0 + mdast-util-from-markdown: ^2.0.0 + mdast-util-to-markdown: ^2.0.0 + parse-entities: ^4.0.0 + stringify-entities: ^4.0.0 + unist-util-visit-parents: ^6.0.0 + checksum: 593afdc4f39f99bb198f3774bf4648cb546cb99a055e40c82262a7faab10926d2529a725d0d3945300ed0a1f07c6c84215a3f76b899a89b3f410ec7375bbab17 languageName: node linkType: hard -"load-json-file@npm:^7.0.0": - version: 7.0.1 - resolution: "load-json-file@npm:7.0.1" - checksum: a560288da6891778321ef993e4bdbdf05374a4f3a3aeedd5ba6b64672798c830d748cfc59a2ec9891a3db30e78b3d04172e0dcb0d4828168289a393147ca0e74 +"mdast-util-find-and-replace@npm:^3.0.0, mdast-util-find-and-replace@npm:^3.0.1": + version: 3.0.1 + resolution: "mdast-util-find-and-replace@npm:3.0.1" + dependencies: + "@types/mdast": ^4.0.0 + escape-string-regexp: ^5.0.0 + 
unist-util-is: ^6.0.0 + unist-util-visit-parents: ^6.0.0 + checksum: 05d5c4ff02e31db2f8a685a13bcb6c3f44e040bd9dfa54c19a232af8de5268334c8755d79cb456ed4cced1300c4fb83e88444c7ae8ee9ff16869a580f29d08cd languageName: node linkType: hard -"loader-runner@npm:^4.2.0": - version: 4.3.0 - resolution: "loader-runner@npm:4.3.0" - checksum: a90e00dee9a16be118ea43fec3192d0b491fe03a32ed48a4132eb61d498f5536a03a1315531c19d284392a8726a4ecad71d82044c28d7f22ef62e029bf761569 +"mdast-util-from-markdown@npm:^2.0.0": + version: 2.0.0 + resolution: "mdast-util-from-markdown@npm:2.0.0" + dependencies: + "@types/mdast": ^4.0.0 + "@types/unist": ^3.0.0 + decode-named-character-reference: ^1.0.0 + devlop: ^1.0.0 + mdast-util-to-string: ^4.0.0 + micromark: ^4.0.0 + micromark-util-decode-numeric-character-reference: ^2.0.0 + micromark-util-decode-string: ^2.0.0 + micromark-util-normalize-identifier: ^2.0.0 + micromark-util-symbol: ^2.0.0 + micromark-util-types: ^2.0.0 + unist-util-stringify-position: ^4.0.0 + checksum: 4e8d8a46b4b588486c41b80c39da333a91593bc8d60cd7421c6cd3c22003b8e5a62478292fb7bc97b9255b6301a2250cca32340ef43c309156e215453c5b92be languageName: node linkType: hard -"loader-utils@npm:^2.0.0": - version: 2.0.4 - resolution: "loader-utils@npm:2.0.4" +"mdast-util-frontmatter@npm:^2.0.0": + version: 2.0.1 + resolution: "mdast-util-frontmatter@npm:2.0.1" dependencies: - big.js: ^5.2.2 - emojis-list: ^3.0.0 - json5: ^2.1.2 - checksum: a5281f5fff1eaa310ad5e1164095689443630f3411e927f95031ab4fb83b4a98f388185bb1fe949e8ab8d4247004336a625e9255c22122b815bb9a4c5d8fc3b7 + "@types/mdast": ^4.0.0 + devlop: ^1.0.0 + escape-string-regexp: ^5.0.0 + mdast-util-from-markdown: ^2.0.0 + mdast-util-to-markdown: ^2.0.0 + micromark-extension-frontmatter: ^2.0.0 + checksum: 86a7c8d9eb183be2621d6d9134b9d33df2a3647e3255f68a9796e2425e25643ffae00a501e36c57d9c10973087b94aa5a2ffd865d33cdd274cc9b88cd2d90a2e languageName: node linkType: hard -"loader-utils@npm:^3.2.0": - version: 3.2.1 - resolution: "loader-utils@npm:3.2.1" - checksum: 4e3ea054cdc8be1ab1f1238f49f42fdf0483039eff920fb1d442039f3f0ad4ebd11fb8e584ccdf2cb7e3c56b3d40c1832416e6408a55651b843da288960cc792 +"mdast-util-gfm-autolink-literal@npm:^2.0.0": + version: 2.0.0 + resolution: "mdast-util-gfm-autolink-literal@npm:2.0.0" + dependencies: + "@types/mdast": ^4.0.0 + ccount: ^2.0.0 + devlop: ^1.0.0 + mdast-util-find-and-replace: ^3.0.0 + micromark-util-character: ^2.0.0 + checksum: 10322662e5302964bed7c9829c5fd3b0c9899d4f03e63fb8620ab141cf4f3de9e61fcb4b44d46aacc8a23f82bcd5d900980a211825dfe026b1dab5fdbc3e8742 languageName: node linkType: hard -"locate-path@npm:^2.0.0": +"mdast-util-gfm-footnote@npm:^2.0.0": version: 2.0.0 - resolution: "locate-path@npm:2.0.0" + resolution: "mdast-util-gfm-footnote@npm:2.0.0" dependencies: - p-locate: ^2.0.0 - path-exists: ^3.0.0 - checksum: 02d581edbbbb0fa292e28d96b7de36b5b62c2fa8b5a7e82638ebb33afa74284acf022d3b1e9ae10e3ffb7658fbc49163fcd5e76e7d1baaa7801c3e05a81da755 + "@types/mdast": ^4.0.0 + devlop: ^1.1.0 + mdast-util-from-markdown: ^2.0.0 + mdast-util-to-markdown: ^2.0.0 + micromark-util-normalize-identifier: ^2.0.0 + checksum: 45d26b40e7a093712e023105791129d76e164e2168d5268e113298a22de30c018162683fb7893cdc04ab246dac0087eed708b2a136d1d18ed2b32b3e0cae4a79 languageName: node linkType: hard -"locate-path@npm:^3.0.0": - version: 3.0.0 - resolution: "locate-path@npm:3.0.0" +"mdast-util-gfm-strikethrough@npm:^2.0.0": + version: 2.0.0 + resolution: "mdast-util-gfm-strikethrough@npm:2.0.0" dependencies: - p-locate: ^3.0.0 - path-exists: ^3.0.0 - checksum: 
53db3996672f21f8b0bf2a2c645ae2c13ffdae1eeecfcd399a583bce8516c0b88dcb4222ca6efbbbeb6949df7e46860895be2c02e8d3219abd373ace3bfb4e11 + "@types/mdast": ^4.0.0 + mdast-util-from-markdown: ^2.0.0 + mdast-util-to-markdown: ^2.0.0 + checksum: fe9b1d0eba9b791ff9001c008744eafe3dd7a81b085f2bf521595ce4a8e8b1b44764ad9361761ad4533af3e5d913d8ad053abec38172031d9ee32a8ebd1c7dbd languageName: node linkType: hard -"locate-path@npm:^5.0.0": - version: 5.0.0 - resolution: "locate-path@npm:5.0.0" +"mdast-util-gfm-table@npm:^2.0.0": + version: 2.0.0 + resolution: "mdast-util-gfm-table@npm:2.0.0" dependencies: - p-locate: ^4.1.0 - checksum: 83e51725e67517287d73e1ded92b28602e3ae5580b301fe54bfb76c0c723e3f285b19252e375712316774cf52006cb236aed5704692c32db0d5d089b69696e30 + "@types/mdast": ^4.0.0 + devlop: ^1.0.0 + markdown-table: ^3.0.0 + mdast-util-from-markdown: ^2.0.0 + mdast-util-to-markdown: ^2.0.0 + checksum: 063a627fd0993548fd63ca0c24c437baf91ba7d51d0a38820bd459bc20bf3d13d7365ef8d28dca99176dd5eb26058f7dde51190479c186dfe6af2e11202957c9 languageName: node linkType: hard -"locate-path@npm:^6.0.0": - version: 6.0.0 - resolution: "locate-path@npm:6.0.0" +"mdast-util-gfm-task-list-item@npm:^2.0.0": + version: 2.0.0 + resolution: "mdast-util-gfm-task-list-item@npm:2.0.0" dependencies: - p-locate: ^5.0.0 - checksum: 72eb661788a0368c099a184c59d2fee760b3831c9c1c33955e8a19ae4a21b4116e53fa736dc086cdeb9fce9f7cc508f2f92d2d3aae516f133e16a2bb59a39f5a + "@types/mdast": ^4.0.0 + devlop: ^1.0.0 + mdast-util-from-markdown: ^2.0.0 + mdast-util-to-markdown: ^2.0.0 + checksum: 37db90c59b15330fc54d790404abf5ef9f2f83e8961c53666fe7de4aab8dd5e6b3c296b6be19797456711a89a27840291d8871ff0438e9b4e15c89d170efe072 languageName: node linkType: hard -"locate-path@npm:^7.1.0": - version: 7.2.0 - resolution: "locate-path@npm:7.2.0" +"mdast-util-gfm@npm:^3.0.0": + version: 3.0.0 + resolution: "mdast-util-gfm@npm:3.0.0" dependencies: - p-locate: ^6.0.0 - checksum: c1b653bdf29beaecb3d307dfb7c44d98a2a98a02ebe353c9ad055d1ac45d6ed4e1142563d222df9b9efebc2bcb7d4c792b507fad9e7150a04c29530b7db570f8 + mdast-util-from-markdown: ^2.0.0 + mdast-util-gfm-autolink-literal: ^2.0.0 + mdast-util-gfm-footnote: ^2.0.0 + mdast-util-gfm-strikethrough: ^2.0.0 + mdast-util-gfm-table: ^2.0.0 + mdast-util-gfm-task-list-item: ^2.0.0 + mdast-util-to-markdown: ^2.0.0 + checksum: 62039d2f682ae3821ea1c999454863d31faf94d67eb9b746589c7e136076d7fb35fabc67e02f025c7c26fd7919331a0ee1aabfae24f565d9a6a9ebab3371c626 languageName: node linkType: hard -"lodash.assignwith@npm:^4.2.0": - version: 4.2.0 - resolution: "lodash.assignwith@npm:4.2.0" - checksum: 014a88e398802ca4eaae314afb67f32eb2cab6f01e61490dbbb74694263f79715341ab8ddf4b344093a2253b506d347f67731f0499e457d9c0128be1d2caf6dd +"mdast-util-math@npm:^3.0.0": + version: 3.0.0 + resolution: "mdast-util-math@npm:3.0.0" + dependencies: + "@types/hast": ^3.0.0 + "@types/mdast": ^4.0.0 + devlop: ^1.0.0 + longest-streak: ^3.0.0 + mdast-util-from-markdown: ^2.0.0 + mdast-util-to-markdown: ^2.1.0 + unist-util-remove-position: ^5.0.0 + checksum: dc7dfb14aec2ec143420f2d92f80c5e6d69293d7cfb6b8180e7f411ce4e1314b5cf4a8d3345eefe06ab0ddd95e3c7801c4174b343fd2c26741180ca4dbad5371 languageName: node linkType: hard -"lodash.camelcase@npm:^4.3.0": - version: 4.3.0 - resolution: "lodash.camelcase@npm:4.3.0" - checksum: cb9227612f71b83e42de93eccf1232feeb25e705bdb19ba26c04f91e885bfd3dd5c517c4a97137658190581d3493ea3973072ca010aab7e301046d90740393d1 +"mdast-util-mdx-expression@npm:^2.0.0": + version: 2.0.0 + resolution: "mdast-util-mdx-expression@npm:2.0.0" + 
dependencies: + "@types/estree-jsx": ^1.0.0 + "@types/hast": ^3.0.0 + "@types/mdast": ^4.0.0 + devlop: ^1.0.0 + mdast-util-from-markdown: ^2.0.0 + mdast-util-to-markdown: ^2.0.0 + checksum: 4e1183000e183e07a7264e192889b4fd57372806103031c71b9318967f85fd50a5dd0f92ef14f42c331e77410808f5de3341d7bc8ad4ee91b7fa8f0a30043a8a languageName: node linkType: hard -"lodash.curry@npm:^4.0.1": - version: 4.1.1 - resolution: "lodash.curry@npm:4.1.1" - checksum: 9192b70fe7df4d1ff780c0260bee271afa9168c93fe4fa24bc861900240531b59781b5fdaadf4644fea8f4fbcd96f0700539ab294b579ffc1022c6c15dcc462a +"mdast-util-mdx-jsx@npm:^3.0.0": + version: 3.0.0 + resolution: "mdast-util-mdx-jsx@npm:3.0.0" + dependencies: + "@types/estree-jsx": ^1.0.0 + "@types/hast": ^3.0.0 + "@types/mdast": ^4.0.0 + "@types/unist": ^3.0.0 + ccount: ^2.0.0 + devlop: ^1.1.0 + mdast-util-from-markdown: ^2.0.0 + mdast-util-to-markdown: ^2.0.0 + parse-entities: ^4.0.0 + stringify-entities: ^4.0.0 + unist-util-remove-position: ^5.0.0 + unist-util-stringify-position: ^4.0.0 + vfile-message: ^4.0.0 + checksum: 48fe1ba617205f3776ca2030d195adbdb42bb6c53326534db3f5bdd28abe7895103af8c4dfda7cbe2911e8cd71921bc8a82fe40856565e57af8b4f8a79c8c126 + languageName: node + linkType: hard + +"mdast-util-mdx@npm:^3.0.0": + version: 3.0.0 + resolution: "mdast-util-mdx@npm:3.0.0" + dependencies: + mdast-util-from-markdown: ^2.0.0 + mdast-util-mdx-expression: ^2.0.0 + mdast-util-mdx-jsx: ^3.0.0 + mdast-util-mdxjs-esm: ^2.0.0 + mdast-util-to-markdown: ^2.0.0 + checksum: e2b007d826fcd49fd57ed03e190753c8b0f7d9eff6c7cb26ba609cde15cd3a472c0cd5e4a1ee3e39a40f14be22fdb57de243e093cea0c064d6f3366cff3e3af2 languageName: node linkType: hard -"lodash.debounce@npm:^4.0.8": - version: 4.0.8 - resolution: "lodash.debounce@npm:4.0.8" - checksum: a3f527d22c548f43ae31c861ada88b2637eb48ac6aa3eb56e82d44917971b8aa96fbb37aa60efea674dc4ee8c42074f90f7b1f772e9db375435f6c83a19b3bc6 +"mdast-util-mdxjs-esm@npm:^2.0.0": + version: 2.0.1 + resolution: "mdast-util-mdxjs-esm@npm:2.0.1" + dependencies: + "@types/estree-jsx": ^1.0.0 + "@types/hast": ^3.0.0 + "@types/mdast": ^4.0.0 + devlop: ^1.0.0 + mdast-util-from-markdown: ^2.0.0 + mdast-util-to-markdown: ^2.0.0 + checksum: 1f9dad04d31d59005332e9157ea9510dc1d03092aadbc607a10475c7eec1c158b475aa0601a3a4f74e13097ca735deb8c2d9d37928ddef25d3029fd7c9e14dc3 languageName: node linkType: hard -"lodash.escape@npm:^4.0.1": - version: 4.0.1 - resolution: "lodash.escape@npm:4.0.1" - checksum: fcb54f457497256964d619d5cccbd80a961916fca60df3fe0fa3e7f052715c2944c0ed5aefb4f9e047d127d44aa2d55555f3350cb42c6549e9e293fb30b41e7f +"mdast-util-phrasing@npm:^4.0.0": + version: 4.0.0 + resolution: "mdast-util-phrasing@npm:4.0.0" + dependencies: + "@types/mdast": ^4.0.0 + unist-util-is: ^6.0.0 + checksum: 95d5d8e18d5ea6dbfe2ee4ed1045961372efae9077e5c98e10bfef7025ee3fd9449f9a82840068ff50aa98fa43af0a0a14898ae10b5e46e96edde01e2797df34 languageName: node linkType: hard -"lodash.flatten@npm:^4.4.0": - version: 4.4.0 - resolution: "lodash.flatten@npm:4.4.0" - checksum: 0ac34a393d4b795d4b7421153d27c13ae67e08786c9cbb60ff5b732210d46f833598eee3fb3844bb10070e8488efe390ea53bb567377e0cb47e9e630bf0811cb +"mdast-util-to-hast@npm:10.0.1": + version: 10.0.1 + resolution: "mdast-util-to-hast@npm:10.0.1" + dependencies: + "@types/mdast": ^3.0.0 + "@types/unist": ^2.0.0 + mdast-util-definitions: ^4.0.0 + mdurl: ^1.0.0 + unist-builder: ^2.0.0 + unist-util-generated: ^1.0.0 + unist-util-position: ^3.0.0 + unist-util-visit: ^2.0.0 + checksum: 
e5f385757df7e9b37db4d6f326bf7b4fc1b40f9ad01fc335686578f44abe0ba46d3e60af4d5e5b763556d02e65069ef9a09c49db049b52659203a43e7fa9084d languageName: node linkType: hard -"lodash.flow@npm:^3.3.0": - version: 3.5.0 - resolution: "lodash.flow@npm:3.5.0" - checksum: a9a62ad344e3c5a1f42bc121da20f64dd855aaafecee24b1db640f29b88bd165d81c37ff7e380a7191de6f70b26f5918abcebbee8396624f78f3618a0b18634c +"mdast-util-to-hast@npm:^13.0.0": + version: 13.0.2 + resolution: "mdast-util-to-hast@npm:13.0.2" + dependencies: + "@types/hast": ^3.0.0 + "@types/mdast": ^4.0.0 + "@ungap/structured-clone": ^1.0.0 + devlop: ^1.0.0 + micromark-util-sanitize-uri: ^2.0.0 + trim-lines: ^3.0.0 + unist-util-position: ^5.0.0 + unist-util-visit: ^5.0.0 + checksum: 8fef6c3752476461d9c00b1dea4f141bc7d980e1b3bac7bd965bc68f532b6d30fb1c9e810433327c167176e68e071b8f4ab5a45355954857dc095c878421f35e languageName: node linkType: hard -"lodash.invokemap@npm:^4.6.0": - version: 4.6.0 - resolution: "lodash.invokemap@npm:4.6.0" - checksum: 646ceebbefbcb6da301f8c2868254680fd0bcdc6ada470495d9ae49c9c32938829c1b38a38c95d0258409a9655f85db404b16e648381c7450b7ed3d9c52d8808 +"mdast-util-to-markdown@npm:^2.0.0, mdast-util-to-markdown@npm:^2.1.0": + version: 2.1.0 + resolution: "mdast-util-to-markdown@npm:2.1.0" + dependencies: + "@types/mdast": ^4.0.0 + "@types/unist": ^3.0.0 + longest-streak: ^3.0.0 + mdast-util-phrasing: ^4.0.0 + mdast-util-to-string: ^4.0.0 + micromark-util-decode-string: ^2.0.0 + unist-util-visit: ^5.0.0 + zwitch: ^2.0.0 + checksum: 3a2cf3957e23b34e2e092e6e76ae72ee0b8745955bd811baba6814cf3a3d916c3fd52264b4b58f3bb3d512a428f84a1e998b6fc7e28434e388a9ae8fb6a9c173 languageName: node linkType: hard -"lodash.isequal@npm:^4.5.0": - version: 4.5.0 - resolution: "lodash.isequal@npm:4.5.0" - checksum: da27515dc5230eb1140ba65ff8de3613649620e8656b19a6270afe4866b7bd461d9ba2ac8a48dcc57f7adac4ee80e1de9f965d89d4d81a0ad52bb3eec2609644 +"mdast-util-to-string@npm:^2.0.0": + version: 2.0.0 + resolution: "mdast-util-to-string@npm:2.0.0" + checksum: 0b2113ada10e002fbccb014170506dabe2f2ddacaacbe4bc1045c33f986652c5a162732a2c057c5335cdb58419e2ad23e368e5be226855d4d4e280b81c4e9ec2 languageName: node linkType: hard -"lodash.memoize@npm:^4.1.2": - version: 4.1.2 - resolution: "lodash.memoize@npm:4.1.2" - checksum: 9ff3942feeccffa4f1fafa88d32f0d24fdc62fd15ded5a74a5f950ff5f0c6f61916157246744c620173dddf38d37095a92327d5fd3861e2063e736a5c207d089 +"mdast-util-to-string@npm:^4.0.0": + version: 4.0.0 + resolution: "mdast-util-to-string@npm:4.0.0" + dependencies: + "@types/mdast": ^4.0.0 + checksum: 35489fb5710d58cbc2d6c8b6547df161a3f81e0f28f320dfb3548a9393555daf07c310c0c497708e67ed4dfea4a06e5655799e7d631ca91420c288b4525d6c29 languageName: node linkType: hard -"lodash.merge@npm:^4.6.2": - version: 4.6.2 - resolution: "lodash.merge@npm:4.6.2" - checksum: ad580b4bdbb7ca1f7abf7e1bce63a9a0b98e370cf40194b03380a46b4ed799c9573029599caebc1b14e3f24b111aef72b96674a56cfa105e0f5ac70546cdc005 +"mdn-data@npm:2.0.14": + version: 2.0.14 + resolution: "mdn-data@npm:2.0.14" + checksum: 9d0128ed425a89f4cba8f787dca27ad9408b5cb1b220af2d938e2a0629d17d879a34d2cb19318bdb26c3f14c77dd5dfbae67211f5caaf07b61b1f2c5c8c7dc16 languageName: node linkType: hard -"lodash.pullall@npm:^4.2.0": - version: 4.2.0 - resolution: "lodash.pullall@npm:4.2.0" - checksum: 7a5fbaedf186ec197ce1e0b9ba1d88a89773ebaf6a8291c7d273838cac59cb3b339cf36ef00e94172862ee84d2304c38face161846f08f5581d0553dcbdcd090 +"mdurl@npm:^1.0.0": + version: 1.0.1 + resolution: "mdurl@npm:1.0.1" + checksum: 
71731ecba943926bfbf9f9b51e28b5945f9411c4eda80894221b47cc105afa43ba2da820732b436f0798fd3edbbffcd1fc1415843c41a87fea08a41cc1e3d02b languageName: node linkType: hard -"lodash.uniq@npm:4.5.0, lodash.uniq@npm:^4.5.0": - version: 4.5.0 - resolution: "lodash.uniq@npm:4.5.0" - checksum: a4779b57a8d0f3c441af13d9afe7ecff22dd1b8ce1129849f71d9bbc8e8ee4e46dfb4b7c28f7ad3d67481edd6e51126e4e2a6ee276e25906d10f7140187c392d +"media-typer@npm:0.3.0": + version: 0.3.0 + resolution: "media-typer@npm:0.3.0" + checksum: af1b38516c28ec95d6b0826f6c8f276c58aec391f76be42aa07646b4e39d317723e869700933ca6995b056db4b09a78c92d5440dc23657e6764be5d28874bba1 languageName: node linkType: hard -"lodash.uniqby@npm:^4.7.0": - version: 4.7.0 - resolution: "lodash.uniqby@npm:4.7.0" - checksum: 659264545a95726d1493123345aad8cbf56e17810fa9a0b029852c6d42bc80517696af09d99b23bef1845d10d95e01b8b4a1da578f22aeba7a30d3e0022a4938 +"memfs@npm:^3.1.2, memfs@npm:^3.4.3": + version: 3.5.3 + resolution: "memfs@npm:3.5.3" + dependencies: + fs-monkey: ^1.0.4 + checksum: 18dfdeacad7c8047b976a6ccd58bc98ba76e122ad3ca0e50a21837fe2075fc0d9aafc58ab9cf2576c2b6889da1dd2503083f2364191b695273f40969db2ecc44 languageName: node linkType: hard -"lodash@npm:^4.17.11, lodash@npm:^4.17.14, lodash@npm:^4.17.15, lodash@npm:^4.17.19, lodash@npm:^4.17.20, lodash@npm:^4.17.21": - version: 4.17.21 - resolution: "lodash@npm:4.17.21" - checksum: eb835a2e51d381e561e508ce932ea50a8e5a68f4ebdd771ea240d3048244a8d13658acbd502cd4829768c56f2e16bdd4340b9ea141297d472517b83868e677f7 +"memfs@npm:^4.6.0": + version: 4.6.0 + resolution: "memfs@npm:4.6.0" + dependencies: + json-joy: ^9.2.0 + thingies: ^1.11.1 + peerDependencies: + tslib: 2 + checksum: b32a35bee9f96dc011605f3bb39e74e6d2a5de51c952a77bb38a0dfabd3381c40ae382d27f385aa290edee8081597fb1a3b41a07bb3f775fd55312dc30ac1d9d languageName: node linkType: hard -"log-symbols@npm:4.1.0": - version: 4.1.0 - resolution: "log-symbols@npm:4.1.0" +"memory-level@npm:^1.0.0": + version: 1.0.0 + resolution: "memory-level@npm:1.0.0" dependencies: - chalk: ^4.1.0 - is-unicode-supported: ^0.1.0 - checksum: fce1497b3135a0198803f9f07464165e9eb83ed02ceb2273930a6f8a508951178d8cf4f0378e9d28300a2ed2bc49050995d2bd5f53ab716bb15ac84d58c6ef74 + abstract-level: ^1.0.0 + functional-red-black-tree: ^1.0.1 + module-error: ^1.0.1 + checksum: 80b1b7aedaf936e754adbcd7b9303018c3684fb32f9992fd967c448f145d177f16c724fbba9ed3c3590a9475fd563151eae664d69b83d2ad48714852e9fc5c72 languageName: node linkType: hard -"log-update@npm:^4.0.0": - version: 4.0.0 - resolution: "log-update@npm:4.0.0" - dependencies: - ansi-escapes: ^4.3.0 - cli-cursor: ^3.1.0 - slice-ansi: ^4.0.0 - wrap-ansi: ^6.2.0 - checksum: ae2f85bbabc1906034154fb7d4c4477c79b3e703d22d78adee8b3862fa913942772e7fa11713e3d96fb46de4e3cabefbf5d0a544344f03b58d3c4bff52aa9eb2 +"memorystream@npm:^0.3.1": + version: 0.3.1 + resolution: "memorystream@npm:0.3.1" + checksum: f18b42440d24d09516d01466c06adf797df7873f0d40aa7db02e5fb9ed83074e5e65412d0720901d7069363465f82dc4f8bcb44f0cde271567a61426ce6ca2e9 languageName: node linkType: hard -"loose-envify@npm:^1.0.0, loose-envify@npm:^1.1.0, loose-envify@npm:^1.2.0, loose-envify@npm:^1.3.1, loose-envify@npm:^1.4.0": - version: 1.4.0 - resolution: "loose-envify@npm:1.4.0" - dependencies: - js-tokens: ^3.0.0 || ^4.0.0 - bin: - loose-envify: cli.js - checksum: 6517e24e0cad87ec9888f500c5b5947032cdfe6ef65e1c1936a0c48a524b81e65542c9c3edc91c97d5bddc806ee2a985dbc79be89215d613b1de5db6d1cfe6f4 +"merge-descriptors@npm:1.0.1": + version: 1.0.1 + resolution: "merge-descriptors@npm:1.0.1" + 
checksum: 5abc259d2ae25bb06d19ce2b94a21632583c74e2a9109ee1ba7fd147aa7362b380d971e0251069f8b3eb7d48c21ac839e21fa177b335e82c76ec172e30c31a26 languageName: node linkType: hard -"loupe@npm:^2.3.1": - version: 2.3.6 - resolution: "loupe@npm:2.3.6" - dependencies: - get-func-name: ^2.0.0 - checksum: cc83f1b124a1df7384601d72d8d1f5fe95fd7a8185469fec48bb2e4027e45243949e7a013e8d91051a138451ff0552310c32aa9786e60b6a30d1e801bdc2163f +"merge-stream@npm:^2.0.0": + version: 2.0.0 + resolution: "merge-stream@npm:2.0.0" + checksum: 6fa4dcc8d86629705cea944a4b88ef4cb0e07656ebf223fa287443256414283dd25d91c1cd84c77987f2aec5927af1a9db6085757cb43d90eb170ebf4b47f4f4 languageName: node linkType: hard -"lower-case@npm:^2.0.2": - version: 2.0.2 - resolution: "lower-case@npm:2.0.2" - dependencies: - tslib: ^2.0.3 - checksum: 83a0a5f159ad7614bee8bf976b96275f3954335a84fad2696927f609ddae902802c4f3312d86668722e668bef41400254807e1d3a7f2e8c3eede79691aa1f010 +"merge2@npm:^1.3.0, merge2@npm:^1.4.1": + version: 1.4.1 + resolution: "merge2@npm:1.4.1" + checksum: 7268db63ed5169466540b6fb947aec313200bcf6d40c5ab722c22e242f651994619bcd85601602972d3c85bd2cc45a358a4c61937e9f11a061919a1da569b0c2 languageName: node linkType: hard -"lowercase-keys@npm:^1.0.0, lowercase-keys@npm:^1.0.1": - version: 1.0.1 - resolution: "lowercase-keys@npm:1.0.1" - checksum: 4d045026595936e09953e3867722e309415ff2c80d7701d067546d75ef698dac218a4f53c6d1d0e7368b47e45fd7529df47e6cb56fbb90523ba599f898b3d147 +"methods@npm:~1.1.2": + version: 1.1.2 + resolution: "methods@npm:1.1.2" + checksum: 0917ff4041fa8e2f2fda5425a955fe16ca411591fbd123c0d722fcf02b73971ed6f764d85f0a6f547ce49ee0221ce2c19a5fa692157931cecb422984f1dcd13a languageName: node linkType: hard -"lowercase-keys@npm:^2.0.0": +"micromark-core-commonmark@npm:^2.0.0": version: 2.0.0 - resolution: "lowercase-keys@npm:2.0.0" - checksum: 24d7ebd56ccdf15ff529ca9e08863f3c54b0b9d1edb97a3ae1af34940ae666c01a1e6d200707bce730a8ef76cb57cc10e65f245ecaaf7e6bc8639f2fb460ac23 + resolution: "micromark-core-commonmark@npm:2.0.0" + dependencies: + decode-named-character-reference: ^1.0.0 + devlop: ^1.0.0 + micromark-factory-destination: ^2.0.0 + micromark-factory-label: ^2.0.0 + micromark-factory-space: ^2.0.0 + micromark-factory-title: ^2.0.0 + micromark-factory-whitespace: ^2.0.0 + micromark-util-character: ^2.0.0 + micromark-util-chunked: ^2.0.0 + micromark-util-classify-character: ^2.0.0 + micromark-util-html-tag-name: ^2.0.0 + micromark-util-normalize-identifier: ^2.0.0 + micromark-util-resolve-all: ^2.0.0 + micromark-util-subtokenize: ^2.0.0 + micromark-util-symbol: ^2.0.0 + micromark-util-types: ^2.0.0 + checksum: 9c12fb580cf4ce71f60872043bd2794efe129f44d7b2b73afa155bbc0a66b7bc35655ba8cef438a6bd068441837ed3b6dc6ad7e5a18f815462c1750793e03a42 + languageName: node + linkType: hard + +"micromark-extension-directive@npm:^3.0.0": + version: 3.0.0 + resolution: "micromark-extension-directive@npm:3.0.0" + dependencies: + devlop: ^1.0.0 + micromark-factory-space: ^2.0.0 + micromark-factory-whitespace: ^2.0.0 + micromark-util-character: ^2.0.0 + micromark-util-symbol: ^2.0.0 + micromark-util-types: ^2.0.0 + parse-entities: ^4.0.0 + checksum: 8350106bdf039a544cba64cf7932261a710e07d73d43d6c645dd2b16577f30ebd04abf762e8ca74266f5de19938e1eeff6c237d79f8244dea23aef7f90df2c31 languageName: node linkType: hard -"lru-cache@npm:^5.1.1": - version: 5.1.1 - resolution: "lru-cache@npm:5.1.1" +"micromark-extension-frontmatter@npm:^2.0.0": + version: 2.0.0 + resolution: "micromark-extension-frontmatter@npm:2.0.0" dependencies: - yallist: ^3.0.2 - 
checksum: c154ae1cbb0c2206d1501a0e94df349653c92c8cbb25236d7e85190bcaf4567a03ac6eb43166fabfa36fd35623694da7233e88d9601fbf411a9a481d85dbd2cb + fault: ^2.0.0 + micromark-util-character: ^2.0.0 + micromark-util-symbol: ^2.0.0 + micromark-util-types: ^2.0.0 + checksum: f68032df38c00ae47de15b63bcd72515bfcce39de4a9262a3a1ac9c5990f253f8e41bdc65fd17ec4bb3d144c32529ce0829571331e4901a9a413f1a53785d1e8 languageName: node linkType: hard -"lru-cache@npm:^6.0.0": - version: 6.0.0 - resolution: "lru-cache@npm:6.0.0" +"micromark-extension-gfm-autolink-literal@npm:^2.0.0": + version: 2.0.0 + resolution: "micromark-extension-gfm-autolink-literal@npm:2.0.0" dependencies: - yallist: ^4.0.0 - checksum: f97f499f898f23e4585742138a22f22526254fdba6d75d41a1c2526b3b6cc5747ef59c5612ba7375f42aca4f8461950e925ba08c991ead0651b4918b7c978297 + micromark-util-character: ^2.0.0 + micromark-util-sanitize-uri: ^2.0.0 + micromark-util-symbol: ^2.0.0 + micromark-util-types: ^2.0.0 + checksum: fa16d59528239262d6d04d539a052baf1f81275954ec8bfadea40d81bfc25667d5c8e68b225a5358626df5e30a3933173a67fdad2fed011d37810a10b770b0b2 languageName: node linkType: hard -"lru-cache@npm:^7.7.1": - version: 7.18.3 - resolution: "lru-cache@npm:7.18.3" - checksum: e550d772384709deea3f141af34b6d4fa392e2e418c1498c078de0ee63670f1f46f5eee746e8ef7e69e1c895af0d4224e62ee33e66a543a14763b0f2e74c1356 +"micromark-extension-gfm-footnote@npm:^2.0.0": + version: 2.0.0 + resolution: "micromark-extension-gfm-footnote@npm:2.0.0" + dependencies: + devlop: ^1.0.0 + micromark-core-commonmark: ^2.0.0 + micromark-factory-space: ^2.0.0 + micromark-util-character: ^2.0.0 + micromark-util-normalize-identifier: ^2.0.0 + micromark-util-sanitize-uri: ^2.0.0 + micromark-util-symbol: ^2.0.0 + micromark-util-types: ^2.0.0 + checksum: a426fddecfac6144fc622b845cd2dc09d46faa75be5b76ff022cb76a03301b1d4929a5e5e41e071491787936be65e03d0b03c7aebc0e0136b3cdbfadadd6632c languageName: node linkType: hard -"lru-cache@npm:^8.0.4": - version: 8.0.5 - resolution: "lru-cache@npm:8.0.5" - checksum: 87d72196d8f46e8299c4ab576ed2ec8a07e3cbef517dc9874399c0b2470bd9bf62aacec3b67f84ed6d74aaa1ef31636d048edf996f76248fd17db72bfb631609 +"micromark-extension-gfm-strikethrough@npm:^2.0.0": + version: 2.0.0 + resolution: "micromark-extension-gfm-strikethrough@npm:2.0.0" + dependencies: + devlop: ^1.0.0 + micromark-util-chunked: ^2.0.0 + micromark-util-classify-character: ^2.0.0 + micromark-util-resolve-all: ^2.0.0 + micromark-util-symbol: ^2.0.0 + micromark-util-types: ^2.0.0 + checksum: 4e35fbbf364bfce08066b70acd94b9d393a8fd09a5afbe0bae70d0c8a174640b1ba86ab6b78ee38f411a813e2a718b07959216cf0063d823ba1c569a7694e5ad languageName: node linkType: hard -"lru-cache@npm:^9.1.1 || ^10.0.0": - version: 10.0.1 - resolution: "lru-cache@npm:10.0.1" - checksum: 06f8d0e1ceabd76bb6f644a26dbb0b4c471b79c7b514c13c6856113879b3bf369eb7b497dad4ff2b7e2636db202412394865b33c332100876d838ad1372f0181 +"micromark-extension-gfm-table@npm:^2.0.0": + version: 2.0.0 + resolution: "micromark-extension-gfm-table@npm:2.0.0" + dependencies: + devlop: ^1.0.0 + micromark-factory-space: ^2.0.0 + micromark-util-character: ^2.0.0 + micromark-util-symbol: ^2.0.0 + micromark-util-types: ^2.0.0 + checksum: 71484dcf8db7b189da0528f472cc81e4d6d1a64ae43bbe7fcb7e2e1dba758a0a4f785f9f1afb9459fe5b4a02bbe023d78c95c05204414a14083052eb8219e5eb languageName: node linkType: hard -"lru_map@npm:^0.3.3": - version: 0.3.3 - resolution: "lru_map@npm:0.3.3" - checksum: 
ca9dd43c65ed7a4f117c548028101c5b6855e10923ea9d1f635af53ad20c5868ff428c364d454a7b57fe391b89c704982275410c3c5099cca5aeee00d76e169a +"micromark-extension-gfm-tagfilter@npm:^2.0.0": + version: 2.0.0 + resolution: "micromark-extension-gfm-tagfilter@npm:2.0.0" + dependencies: + micromark-util-types: ^2.0.0 + checksum: cf21552f4a63592bfd6c96ae5d64a5f22bda4e77814e3f0501bfe80e7a49378ad140f827007f36044666f176b3a0d5fea7c2e8e7973ce4b4579b77789f01ae95 languageName: node linkType: hard -"lunr-languages@npm:^1.4.0": - version: 1.14.0 - resolution: "lunr-languages@npm:1.14.0" - checksum: 05dd6338af6897932f64f9cb735d5b48f9905d892499b22a3f3abc279b2ac71a6bce0fdfe59c01464c6ad3f8e44e2956ba0637f092535239793bbadf4540e72d +"micromark-extension-gfm-task-list-item@npm:^2.0.0": + version: 2.0.1 + resolution: "micromark-extension-gfm-task-list-item@npm:2.0.1" + dependencies: + devlop: ^1.0.0 + micromark-factory-space: ^2.0.0 + micromark-util-character: ^2.0.0 + micromark-util-symbol: ^2.0.0 + micromark-util-types: ^2.0.0 + checksum: 80e569ab1a1d1f89d86af91482e9629e24b7e3f019c9d7989190f36a9367c6de723b2af48e908c1b73479f35b2215d3d38c1fdbf02ab01eb2fc90a59d1cf4465 + languageName: node + linkType: hard + +"micromark-extension-gfm@npm:^3.0.0": + version: 3.0.0 + resolution: "micromark-extension-gfm@npm:3.0.0" + dependencies: + micromark-extension-gfm-autolink-literal: ^2.0.0 + micromark-extension-gfm-footnote: ^2.0.0 + micromark-extension-gfm-strikethrough: ^2.0.0 + micromark-extension-gfm-table: ^2.0.0 + micromark-extension-gfm-tagfilter: ^2.0.0 + micromark-extension-gfm-task-list-item: ^2.0.0 + micromark-util-combine-extensions: ^2.0.0 + micromark-util-types: ^2.0.0 + checksum: 2060fa62666a09532d6b3a272d413bc1b25bbb262f921d7402795ac021e1362c8913727e33d7528d5b4ccaf26922ec51208c43f795a702964817bc986de886c9 languageName: node linkType: hard -"lunr@npm:^2.3.9": - version: 2.3.9 - resolution: "lunr@npm:2.3.9" - checksum: 176719e24fcce7d3cf1baccce9dd5633cd8bdc1f41ebe6a180112e5ee99d80373fe2454f5d4624d437e5a8319698ca6837b9950566e15d2cae5f2a543a3db4b8 +"micromark-extension-math@npm:^3.0.0": + version: 3.0.0 + resolution: "micromark-extension-math@npm:3.0.0" + dependencies: + "@types/katex": ^0.16.0 + devlop: ^1.0.0 + katex: ^0.16.0 + micromark-factory-space: ^2.0.0 + micromark-util-character: ^2.0.0 + micromark-util-symbol: ^2.0.0 + micromark-util-types: ^2.0.0 + checksum: 14d313ea58f711d69c567951870f8d8517384cd90318d8b608b4eabb2fea2278c7de1ca38fa86067704cba679b5a82be243680436c1bc26c4c4978b28fdf647c languageName: node linkType: hard -"make-dir@npm:^3.0.0, make-dir@npm:^3.0.2, make-dir@npm:^3.1.0": - version: 3.1.0 - resolution: "make-dir@npm:3.1.0" +"micromark-extension-mdx-expression@npm:^3.0.0": + version: 3.0.0 + resolution: "micromark-extension-mdx-expression@npm:3.0.0" dependencies: - semver: ^6.0.0 - checksum: 484200020ab5a1fdf12f393fe5f385fc8e4378824c940fba1729dcd198ae4ff24867bc7a5646331e50cead8abff5d9270c456314386e629acec6dff4b8016b78 + "@types/estree": ^1.0.0 + devlop: ^1.0.0 + micromark-factory-mdx-expression: ^2.0.0 + micromark-factory-space: ^2.0.0 + micromark-util-character: ^2.0.0 + micromark-util-events-to-acorn: ^2.0.0 + micromark-util-symbol: ^2.0.0 + micromark-util-types: ^2.0.0 + checksum: abd6ba0acdebc03bc0836c51a1ec4ca28e0be86f10420dd8cfbcd6c10dd37cd3f31e7c8b9792e9276e7526748883f4a30d0803d72b6285dae47d4e5348c23a10 languageName: node linkType: hard -"make-dir@npm:^4.0.0": - version: 4.0.0 - resolution: "make-dir@npm:4.0.0" +"micromark-extension-mdx-jsx@npm:^3.0.0": + version: 3.0.0 + resolution: 
"micromark-extension-mdx-jsx@npm:3.0.0" dependencies: - semver: ^7.5.3 - checksum: bf0731a2dd3aab4db6f3de1585cea0b746bb73eb5a02e3d8d72757e376e64e6ada190b1eddcde5b2f24a81b688a9897efd5018737d05e02e2a671dda9cff8a8a + "@types/acorn": ^4.0.0 + "@types/estree": ^1.0.0 + devlop: ^1.0.0 + estree-util-is-identifier-name: ^3.0.0 + micromark-factory-mdx-expression: ^2.0.0 + micromark-factory-space: ^2.0.0 + micromark-util-character: ^2.0.0 + micromark-util-symbol: ^2.0.0 + micromark-util-types: ^2.0.0 + vfile-message: ^4.0.0 + checksum: 5e2f45d381d1ce43afadc5376427b42ef8cd2a574ca3658473254eabe84db99ef1abc03055b3d86728fac7f1edfb1076e6f2f322ed8bfb1f2f14cafc2c8f0d0e languageName: node linkType: hard -"make-error@npm:^1.1.1": - version: 1.3.6 - resolution: "make-error@npm:1.3.6" - checksum: b86e5e0e25f7f777b77fabd8e2cbf15737972869d852a22b7e73c17623928fccb826d8e46b9951501d3f20e51ad74ba8c59ed584f610526a48f8ccf88aaec402 +"micromark-extension-mdx-md@npm:^2.0.0": + version: 2.0.0 + resolution: "micromark-extension-mdx-md@npm:2.0.0" + dependencies: + micromark-util-types: ^2.0.0 + checksum: 7daf03372fd7faddf3f0ac87bdb0debb0bb770f33b586f72251e1072b222ceee75400ab6194c0e130dbf1e077369a5b627be6e9130d7a2e9e6b849f0d18ff246 languageName: node linkType: hard -"make-fetch-happen@npm:^11.0.3": - version: 11.1.1 - resolution: "make-fetch-happen@npm:11.1.1" +"micromark-extension-mdxjs-esm@npm:^3.0.0": + version: 3.0.0 + resolution: "micromark-extension-mdxjs-esm@npm:3.0.0" dependencies: - agentkeepalive: ^4.2.1 - cacache: ^17.0.0 - http-cache-semantics: ^4.1.1 - http-proxy-agent: ^5.0.0 - https-proxy-agent: ^5.0.0 - is-lambda: ^1.0.1 - lru-cache: ^7.7.1 - minipass: ^5.0.0 - minipass-fetch: ^3.0.0 - minipass-flush: ^1.0.5 - minipass-pipeline: ^1.2.4 - negotiator: ^0.6.3 - promise-retry: ^2.0.1 - socks-proxy-agent: ^7.0.0 - ssri: ^10.0.0 - checksum: 7268bf274a0f6dcf0343829489a4506603ff34bd0649c12058753900b0eb29191dce5dba12680719a5d0a983d3e57810f594a12f3c18494e93a1fbc6348a4540 + "@types/estree": ^1.0.0 + devlop: ^1.0.0 + micromark-core-commonmark: ^2.0.0 + micromark-util-character: ^2.0.0 + micromark-util-events-to-acorn: ^2.0.0 + micromark-util-symbol: ^2.0.0 + micromark-util-types: ^2.0.0 + unist-util-position-from-estree: ^2.0.0 + vfile-message: ^4.0.0 + checksum: fb33d850200afce567b95c90f2f7d42259bd33eea16154349e4fa77c3ec934f46c8e5c111acea16321dce3d9f85aaa4c49afe8b810e31b34effc11617aeee8f6 languageName: node linkType: hard -"map-age-cleaner@npm:^0.1.3": - version: 0.1.3 - resolution: "map-age-cleaner@npm:0.1.3" +"micromark-extension-mdxjs@npm:^3.0.0": + version: 3.0.0 + resolution: "micromark-extension-mdxjs@npm:3.0.0" dependencies: - p-defer: ^1.0.0 - checksum: cb2804a5bcb3cbdfe4b59066ea6d19f5e7c8c196cd55795ea4c28f792b192e4c442426ae52524e5e1acbccf393d3bddacefc3d41f803e66453f6c4eda3650bc1 + acorn: ^8.0.0 + acorn-jsx: ^5.0.0 + micromark-extension-mdx-expression: ^3.0.0 + micromark-extension-mdx-jsx: ^3.0.0 + micromark-extension-mdx-md: ^2.0.0 + micromark-extension-mdxjs-esm: ^3.0.0 + micromark-util-combine-extensions: ^2.0.0 + micromark-util-types: ^2.0.0 + checksum: 7da6f0fb0e1e0270a2f5ad257e7422cc16e68efa7b8214c63c9d55bc264cb872e9ca4ac9a71b9dfd13daf52e010f730bac316086f4340e4fcc6569ec699915bf languageName: node linkType: hard -"map-stream@npm:~0.1.0": - version: 0.1.0 - resolution: "map-stream@npm:0.1.0" - checksum: 38abbe4eb883888031e6b2fc0630bc583c99396be16b8ace5794b937b682a8a081f03e8b15bfd4914d1bc88318f0e9ac73ba3512ae65955cd449f63256ddb31d +"micromark-factory-destination@npm:^2.0.0": + version: 2.0.0 + resolution: 
"micromark-factory-destination@npm:2.0.0" + dependencies: + micromark-util-character: ^2.0.0 + micromark-util-symbol: ^2.0.0 + micromark-util-types: ^2.0.0 + checksum: d36e65ed1c072ff4148b016783148ba7c68a078991154625723e24bda3945160268fb91079fb28618e1613c2b6e70390a8ddc544c45410288aa27b413593071a languageName: node linkType: hard -"mark.js@npm:^8.11.1": - version: 8.11.1 - resolution: "mark.js@npm:8.11.1" - checksum: aa6b9ae1c67245348d5b7abd253ef2acd6bb05c6be358d7d192416d964e42665fc10e0e865591c6f93ab9b57e8da1f23c23216e8ebddb580905ea7a0c0df15d4 +"micromark-factory-label@npm:^2.0.0": + version: 2.0.0 + resolution: "micromark-factory-label@npm:2.0.0" + dependencies: + devlop: ^1.0.0 + micromark-util-character: ^2.0.0 + micromark-util-symbol: ^2.0.0 + micromark-util-types: ^2.0.0 + checksum: c021dbd0ed367610d35f2bae21209bc804d1a6d1286ffce458fd6a717f4d7fe581a7cba7d5c2d7a63757c44eb927c80d6a571d6ea7969fae1b48ab6461d109c4 languageName: node linkType: hard -"markdown-escapes@npm:^1.0.0": - version: 1.0.4 - resolution: "markdown-escapes@npm:1.0.4" - checksum: 6833a93d72d3f70a500658872312c6fa8015c20cc835a85ae6901fa232683fbc6ed7118ebe920fea7c80039a560f339c026597d96eee0e9de602a36921804997 +"micromark-factory-mdx-expression@npm:^2.0.0": + version: 2.0.1 + resolution: "micromark-factory-mdx-expression@npm:2.0.1" + dependencies: + "@types/estree": ^1.0.0 + devlop: ^1.0.0 + micromark-util-character: ^2.0.0 + micromark-util-events-to-acorn: ^2.0.0 + micromark-util-symbol: ^2.0.0 + micromark-util-types: ^2.0.0 + unist-util-position-from-estree: ^2.0.0 + vfile-message: ^4.0.0 + checksum: 2ba0ae939d0174a5e5331b1a4c203b96862ccf06e8903d6bdcc2d51f75515e52d407cd394afcd182f9ff0e877dc2a14e3fa430ced0131e156650d45104de8311 languageName: node linkType: hard -"marked@npm:^4.3.0": - version: 4.3.0 - resolution: "marked@npm:4.3.0" - bin: - marked: bin/marked.js - checksum: 0db6817893952c3ec710eb9ceafb8468bf5ae38cb0f92b7b083baa13d70b19774674be04db5b817681fa7c5c6a088f61300815e4dd75a59696f4716ad69f6260 +"micromark-factory-space@npm:^1.0.0": + version: 1.1.0 + resolution: "micromark-factory-space@npm:1.1.0" + dependencies: + micromark-util-character: ^1.0.0 + micromark-util-types: ^1.0.0 + checksum: b58435076b998a7e244259a4694eb83c78915581206b6e7fc07b34c6abd36a1726ade63df8972fbf6c8fa38eecb9074f4e17be8d53f942e3b3d23d1a0ecaa941 languageName: node linkType: hard -"marky@npm:^1.2.2": - version: 1.2.5 - resolution: "marky@npm:1.2.5" - checksum: 823b946677749551cdfc3b5221685478b5d1b9cc0dc03eff977c6f9a615fb05c67559f9556cb3c0fcb941a9ea0e195e37befd83026443396ccee8b724f54f4c5 +"micromark-factory-space@npm:^2.0.0": + version: 2.0.0 + resolution: "micromark-factory-space@npm:2.0.0" + dependencies: + micromark-util-character: ^2.0.0 + micromark-util-types: ^2.0.0 + checksum: 4ffdcdc2f759887bbb356500cb460b3915ecddcb5d85c3618d7df68ad05d13ed02b1153ee1845677b7d8126df8f388288b84fcf0d943bd9c92bcc71cd7222e37 languageName: node linkType: hard -"matcher@npm:^5.0.0": - version: 5.0.0 - resolution: "matcher@npm:5.0.0" +"micromark-factory-title@npm:^2.0.0": + version: 2.0.0 + resolution: "micromark-factory-title@npm:2.0.0" dependencies: - escape-string-regexp: ^5.0.0 - checksum: 28f191c2d23fee0f6f32fd0181d9fe173b0ab815a919edba55605438a2f9fa40372e002574a1b17add981b0a8669c75bc6194318d065ed2dceffd8b160c38118 + micromark-factory-space: ^2.0.0 + micromark-util-character: ^2.0.0 + micromark-util-symbol: ^2.0.0 + micromark-util-types: ^2.0.0 + checksum: 
39e1ac23af3554e6e652e56065579bc7faf21ade7b8704b29c175871b4152b7109b790bb3cae0f7e088381139c6bac9553b8400772c3d322e4fa635f813a3578 languageName: node linkType: hard -"mcl-wasm@npm:^0.7.1": - version: 0.7.9 - resolution: "mcl-wasm@npm:0.7.9" - checksum: 6b6ed5084156b98b2db70b223e1ba2c01953970b48a2e0c4ea3eeb9296610e6b3bfb2a2cce9e92e2d7ad61778b5f5a630e705e663835e915ba188c174a0a37fa +"micromark-factory-whitespace@npm:^2.0.0": + version: 2.0.0 + resolution: "micromark-factory-whitespace@npm:2.0.0" + dependencies: + micromark-factory-space: ^2.0.0 + micromark-util-character: ^2.0.0 + micromark-util-symbol: ^2.0.0 + micromark-util-types: ^2.0.0 + checksum: 9587c2546d1a58b4d5472b42adf05463f6212d0449455285662d63cd8eaed89c6b159ac82713fcee5f9dd88628c24307d9533cccd8971a2f3f4d48702f8f850a languageName: node linkType: hard -"md5-hex@npm:^3.0.1": - version: 3.0.1 - resolution: "md5-hex@npm:3.0.1" +"micromark-util-character@npm:^1.0.0, micromark-util-character@npm:^1.1.0": + version: 1.2.0 + resolution: "micromark-util-character@npm:1.2.0" dependencies: - blueimp-md5: ^2.10.0 - checksum: 6799a19e8bdd3e0c2861b94c1d4d858a89220488d7885c1fa236797e367d0c2e5f2b789e05309307083503f85be3603a9686a5915568a473137d6b4117419cc2 + micromark-util-symbol: ^1.0.0 + micromark-util-types: ^1.0.0 + checksum: 089e79162a19b4a28731736246579ab7e9482ac93cd681c2bfca9983dcff659212ef158a66a5957e9d4b1dba957d1b87b565d85418a5b009f0294f1f07f2aaac languageName: node linkType: hard -"md5.js@npm:^1.3.4": - version: 1.3.5 - resolution: "md5.js@npm:1.3.5" +"micromark-util-character@npm:^2.0.0": + version: 2.0.1 + resolution: "micromark-util-character@npm:2.0.1" dependencies: - hash-base: ^3.0.0 - inherits: ^2.0.1 - safe-buffer: ^5.1.2 - checksum: 098494d885684bcc4f92294b18ba61b7bd353c23147fbc4688c75b45cb8590f5a95fd4584d742415dcc52487f7a1ef6ea611cfa1543b0dc4492fe026357f3f0c + micromark-util-symbol: ^2.0.0 + micromark-util-types: ^2.0.0 + checksum: 318d6e16fdcbe9d89e18b8e7796568d986abbb10a9f3037b7ac9b92a236bcc962f3cd380e26a7c49df40fd1d9ca33eb546268956345b662f4c4ca4962c7695f2 languageName: node linkType: hard -"mdast-squeeze-paragraphs@npm:^4.0.0": - version: 4.0.0 - resolution: "mdast-squeeze-paragraphs@npm:4.0.0" +"micromark-util-chunked@npm:^2.0.0": + version: 2.0.0 + resolution: "micromark-util-chunked@npm:2.0.0" dependencies: - unist-util-remove: ^2.0.0 - checksum: dfe8ec8e8a62171f020e82b088cc35cb9da787736dc133a3b45ce8811782a93e69bf06d147072e281079f09fac67be8a36153ffffd9bfbf89ed284e4c4f56f75 + micromark-util-symbol: ^2.0.0 + checksum: 324f95cccdae061332a8241936eaba6ef0782a1e355bac5c607ad2564fd3744929be7dc81651315a2921535747a33243e6a5606bcb64b7a56d49b6d74ea1a3d4 languageName: node linkType: hard -"mdast-util-definitions@npm:^4.0.0": - version: 4.0.0 - resolution: "mdast-util-definitions@npm:4.0.0" +"micromark-util-classify-character@npm:^2.0.0": + version: 2.0.0 + resolution: "micromark-util-classify-character@npm:2.0.0" dependencies: - unist-util-visit: ^2.0.0 - checksum: 2325f20b82b3fb8cb5fda77038ee0bbdd44f82cfca7c48a854724b58bc1fe5919630a3ce7c45e210726df59d46c881d020b2da7a493bfd1ee36eb2bbfef5d78e + micromark-util-character: ^2.0.0 + micromark-util-symbol: ^2.0.0 + micromark-util-types: ^2.0.0 + checksum: 086e52904deffebb793fb1c08c94aabb8901f76958142dfc3a6282890ebaa983b285e69bd602b9d507f1b758ed38e75a994d2ad9fbbefa7de2584f67a16af405 languageName: node linkType: hard -"mdast-util-to-hast@npm:10.0.1": - version: 10.0.1 - resolution: "mdast-util-to-hast@npm:10.0.1" +"micromark-util-combine-extensions@npm:^2.0.0": + version: 2.0.0 + resolution: 
"micromark-util-combine-extensions@npm:2.0.0" dependencies: - "@types/mdast": ^3.0.0 - "@types/unist": ^2.0.0 - mdast-util-definitions: ^4.0.0 - mdurl: ^1.0.0 - unist-builder: ^2.0.0 - unist-util-generated: ^1.0.0 - unist-util-position: ^3.0.0 - unist-util-visit: ^2.0.0 - checksum: e5f385757df7e9b37db4d6f326bf7b4fc1b40f9ad01fc335686578f44abe0ba46d3e60af4d5e5b763556d02e65069ef9a09c49db049b52659203a43e7fa9084d + micromark-util-chunked: ^2.0.0 + micromark-util-types: ^2.0.0 + checksum: 107c47700343f365b4ed81551e18bc3458b573c500e56ac052b2490bd548adc475216e41d2271633a8867fac66fc22ba3e0a2d74a31ed79b9870ca947eb4e3ba languageName: node linkType: hard -"mdast-util-to-string@npm:^2.0.0": +"micromark-util-decode-numeric-character-reference@npm:^2.0.0": + version: 2.0.1 + resolution: "micromark-util-decode-numeric-character-reference@npm:2.0.1" + dependencies: + micromark-util-symbol: ^2.0.0 + checksum: 9512507722efd2033a9f08715eeef787fbfe27e23edf55db21423d46d82ab46f76c89b4f960be3f5e50a2d388d89658afc0647989cf256d051e9ea01277a1adb + languageName: node + linkType: hard + +"micromark-util-decode-string@npm:^2.0.0": version: 2.0.0 - resolution: "mdast-util-to-string@npm:2.0.0" - checksum: 0b2113ada10e002fbccb014170506dabe2f2ddacaacbe4bc1045c33f986652c5a162732a2c057c5335cdb58419e2ad23e368e5be226855d4d4e280b81c4e9ec2 + resolution: "micromark-util-decode-string@npm:2.0.0" + dependencies: + decode-named-character-reference: ^1.0.0 + micromark-util-character: ^2.0.0 + micromark-util-decode-numeric-character-reference: ^2.0.0 + micromark-util-symbol: ^2.0.0 + checksum: a75daf32a4a6b549e9f19b4d833ebfeb09a32a9a1f9ce50f35dec6b6a3e4f9f121f49024ba7f9c91c55ebe792f7c7a332fc9604795181b6a612637df0df5b959 languageName: node linkType: hard -"mdn-data@npm:2.0.14": - version: 2.0.14 - resolution: "mdn-data@npm:2.0.14" - checksum: 9d0128ed425a89f4cba8f787dca27ad9408b5cb1b220af2d938e2a0629d17d879a34d2cb19318bdb26c3f14c77dd5dfbae67211f5caaf07b61b1f2c5c8c7dc16 +"micromark-util-encode@npm:^2.0.0": + version: 2.0.0 + resolution: "micromark-util-encode@npm:2.0.0" + checksum: 853a3f33fce72aaf4ffa60b7f2b6fcfca40b270b3466e1b96561b02185d2bd8c01dd7948bc31a24ac014f4cc854e545ca9a8e9cf7ea46262f9d24c9e88551c66 languageName: node linkType: hard -"mdurl@npm:^1.0.0": - version: 1.0.1 - resolution: "mdurl@npm:1.0.1" - checksum: 71731ecba943926bfbf9f9b51e28b5945f9411c4eda80894221b47cc105afa43ba2da820732b436f0798fd3edbbffcd1fc1415843c41a87fea08a41cc1e3d02b +"micromark-util-events-to-acorn@npm:^2.0.0": + version: 2.0.2 + resolution: "micromark-util-events-to-acorn@npm:2.0.2" + dependencies: + "@types/acorn": ^4.0.0 + "@types/estree": ^1.0.0 + "@types/unist": ^3.0.0 + devlop: ^1.0.0 + estree-util-visit: ^2.0.0 + micromark-util-symbol: ^2.0.0 + micromark-util-types: ^2.0.0 + vfile-message: ^4.0.0 + checksum: bcb3eeac52a4ae5c3ca3d8cff514de3a7d1f272d9a94cce26a08c578bef64df4d61820874c01207e92fcace9eae5c9a7ecdddef0c6e10014b255a07b7880bf94 languageName: node linkType: hard -"media-typer@npm:0.3.0": - version: 0.3.0 - resolution: "media-typer@npm:0.3.0" - checksum: af1b38516c28ec95d6b0826f6c8f276c58aec391f76be42aa07646b4e39d317723e869700933ca6995b056db4b09a78c92d5440dc23657e6764be5d28874bba1 +"micromark-util-html-tag-name@npm:^2.0.0": + version: 2.0.0 + resolution: "micromark-util-html-tag-name@npm:2.0.0" + checksum: d786d4486f93eb0ac5b628779809ca97c5dc60f3c9fc03eb565809831db181cf8cb7f05f9ac76852f3eb35461af0f89fa407b46f3a03f4f97a96754d8dc540d8 languageName: node linkType: hard -"mem@npm:^9.0.2": - version: 9.0.2 - resolution: "mem@npm:9.0.2" 
+"micromark-util-normalize-identifier@npm:^2.0.0": + version: 2.0.0 + resolution: "micromark-util-normalize-identifier@npm:2.0.0" dependencies: - map-age-cleaner: ^0.1.3 - mimic-fn: ^4.0.0 - checksum: 07829bb182af0e3ecf748dc2edb1c3b10a256ef10458f7e24d06561a2adc2b3ef34d14abe81678bbcedb46faa477e7370223f118b1a5e1252da5fe43496f3967 + micromark-util-symbol: ^2.0.0 + checksum: b36da2d3fd102053dadd953ce5c558328df12a63a8ac0e5aad13d4dda8e43b6a5d4a661baafe0a1cd8a260bead4b4a8e6e0e74193dd651e8484225bd4f4e68aa languageName: node linkType: hard -"memfs@npm:^3.1.2, memfs@npm:^3.4.3": - version: 3.5.3 - resolution: "memfs@npm:3.5.3" +"micromark-util-resolve-all@npm:^2.0.0": + version: 2.0.0 + resolution: "micromark-util-resolve-all@npm:2.0.0" dependencies: - fs-monkey: ^1.0.4 - checksum: 18dfdeacad7c8047b976a6ccd58bc98ba76e122ad3ca0e50a21837fe2075fc0d9aafc58ab9cf2576c2b6889da1dd2503083f2364191b695273f40969db2ecc44 + micromark-util-types: ^2.0.0 + checksum: 31fe703b85572cb3f598ebe32750e59516925c7ff1f66cfe6afaebe0771a395a9eaa770787f2523d3c46082ea80e6c14f83643303740b3d650af7c96ebd30ccc languageName: node linkType: hard -"memory-level@npm:^1.0.0": - version: 1.0.0 - resolution: "memory-level@npm:1.0.0" +"micromark-util-sanitize-uri@npm:^2.0.0": + version: 2.0.0 + resolution: "micromark-util-sanitize-uri@npm:2.0.0" dependencies: - abstract-level: ^1.0.0 - functional-red-black-tree: ^1.0.1 - module-error: ^1.0.1 - checksum: 80b1b7aedaf936e754adbcd7b9303018c3684fb32f9992fd967c448f145d177f16c724fbba9ed3c3590a9475fd563151eae664d69b83d2ad48714852e9fc5c72 + micromark-util-character: ^2.0.0 + micromark-util-encode: ^2.0.0 + micromark-util-symbol: ^2.0.0 + checksum: ea4c28bbffcf2430e9aff2d18554296789a8b0a1f54ac24020d1dde76624a7f93e8f2a83e88cd5a846b6d2c4287b71b1142d1b89fa7f1b0363a9b33711a141fe languageName: node linkType: hard -"memorystream@npm:^0.3.1": - version: 0.3.1 - resolution: "memorystream@npm:0.3.1" - checksum: f18b42440d24d09516d01466c06adf797df7873f0d40aa7db02e5fb9ed83074e5e65412d0720901d7069363465f82dc4f8bcb44f0cde271567a61426ce6ca2e9 +"micromark-util-subtokenize@npm:^2.0.0": + version: 2.0.0 + resolution: "micromark-util-subtokenize@npm:2.0.0" + dependencies: + devlop: ^1.0.0 + micromark-util-chunked: ^2.0.0 + micromark-util-symbol: ^2.0.0 + micromark-util-types: ^2.0.0 + checksum: 77d9c7d59c05a20468d49ce2a3640e9cb268c083ccad02322f26c84e1094c25b44f4b8139ef0a247ca11a4fef7620c5bf82fbffd98acdb2989e79cbe7bd8f1db languageName: node linkType: hard -"merge-descriptors@npm:1.0.1": - version: 1.0.1 - resolution: "merge-descriptors@npm:1.0.1" - checksum: 5abc259d2ae25bb06d19ce2b94a21632583c74e2a9109ee1ba7fd147aa7362b380d971e0251069f8b3eb7d48c21ac839e21fa177b335e82c76ec172e30c31a26 +"micromark-util-symbol@npm:^1.0.0, micromark-util-symbol@npm:^1.0.1": + version: 1.1.0 + resolution: "micromark-util-symbol@npm:1.1.0" + checksum: 02414a753b79f67ff3276b517eeac87913aea6c028f3e668a19ea0fc09d98aea9f93d6222a76ca783d20299af9e4b8e7c797fe516b766185dcc6e93290f11f88 languageName: node linkType: hard -"merge-stream@npm:^2.0.0": +"micromark-util-symbol@npm:^2.0.0": version: 2.0.0 - resolution: "merge-stream@npm:2.0.0" - checksum: 6fa4dcc8d86629705cea944a4b88ef4cb0e07656ebf223fa287443256414283dd25d91c1cd84c77987f2aec5927af1a9db6085757cb43d90eb170ebf4b47f4f4 + resolution: "micromark-util-symbol@npm:2.0.0" + checksum: fa4a05bff575d9fbf0ad96a1013003e3bb6087ed6b34b609a141b6c0d2137b57df594aca409a95f4c5fda199f227b56a7d8b1f82cea0768df161d8a3a3660764 languageName: node linkType: hard -"merge2@npm:^1.3.0, merge2@npm:^1.4.1": - version: 
1.4.1 - resolution: "merge2@npm:1.4.1" - checksum: 7268db63ed5169466540b6fb947aec313200bcf6d40c5ab722c22e242f651994619bcd85601602972d3c85bd2cc45a358a4c61937e9f11a061919a1da569b0c2 +"micromark-util-types@npm:^1.0.0": + version: 1.1.0 + resolution: "micromark-util-types@npm:1.1.0" + checksum: b0ef2b4b9589f15aec2666690477a6a185536927ceb7aa55a0f46475852e012d75a1ab945187e5c7841969a842892164b15d58ff8316b8e0d6cc920cabd5ede7 languageName: node linkType: hard -"methods@npm:~1.1.2": - version: 1.1.2 - resolution: "methods@npm:1.1.2" - checksum: 0917ff4041fa8e2f2fda5425a955fe16ca411591fbd123c0d722fcf02b73971ed6f764d85f0a6f547ce49ee0221ce2c19a5fa692157931cecb422984f1dcd13a +"micromark-util-types@npm:^2.0.0": + version: 2.0.0 + resolution: "micromark-util-types@npm:2.0.0" + checksum: 819fef3ab5770c37893d2a60381fb2694396c8d22803b6e103c830c3a1bc1490363c2b0470bb2acaaddad776dfbc2fc1fcfde39cb63c4f54d95121611672e3d0 languageName: node linkType: hard -"micromatch@npm:^4.0.2, micromatch@npm:^4.0.4, micromatch@npm:^4.0.5": +"micromark@npm:^4.0.0": + version: 4.0.0 + resolution: "micromark@npm:4.0.0" + dependencies: + "@types/debug": ^4.0.0 + debug: ^4.0.0 + decode-named-character-reference: ^1.0.0 + devlop: ^1.0.0 + micromark-core-commonmark: ^2.0.0 + micromark-factory-space: ^2.0.0 + micromark-util-character: ^2.0.0 + micromark-util-chunked: ^2.0.0 + micromark-util-combine-extensions: ^2.0.0 + micromark-util-decode-numeric-character-reference: ^2.0.0 + micromark-util-encode: ^2.0.0 + micromark-util-normalize-identifier: ^2.0.0 + micromark-util-resolve-all: ^2.0.0 + micromark-util-sanitize-uri: ^2.0.0 + micromark-util-subtokenize: ^2.0.0 + micromark-util-symbol: ^2.0.0 + micromark-util-types: ^2.0.0 + checksum: b84ab5ab1a0b28c063c52e9c2c9d7d44b954507235c10c9492d66e0b38f7de24bf298f914a1fbdf109f2a57a88cf0412de217c84cfac5fd60e3e42a74dbac085 + languageName: node + linkType: hard + +"micromatch@npm:^4.0.0, micromatch@npm:^4.0.2, micromatch@npm:^4.0.4, micromatch@npm:^4.0.5": version: 4.0.5 resolution: "micromatch@npm:4.0.5" dependencies: @@ -12422,7 +16103,21 @@ __metadata: languageName: node linkType: hard -"mini-css-extract-plugin@npm:^2.6.1": +"mimic-response@npm:^3.1.0": + version: 3.1.0 + resolution: "mimic-response@npm:3.1.0" + checksum: 25739fee32c17f433626bf19f016df9036b75b3d84a3046c7d156e72ec963dd29d7fc8a302f55a3d6c5a4ff24259676b15d915aad6480815a969ff2ec0836867 + languageName: node + linkType: hard + +"mimic-response@npm:^4.0.0": + version: 4.0.0 + resolution: "mimic-response@npm:4.0.0" + checksum: 33b804cc961efe206efdb1fca6a22540decdcfce6c14eb5c0c50e5ae9022267ab22ce8f5568b1f7247ba67500fe20d523d81e0e9f009b321ccd9d472e78d1850 + languageName: node + linkType: hard + +"mini-css-extract-plugin@npm:^2.6.1, mini-css-extract-plugin@npm:^2.7.6": version: 2.7.6 resolution: "mini-css-extract-plugin@npm:2.7.6" dependencies: @@ -12481,12 +16176,12 @@ __metadata: languageName: node linkType: hard -"minipass-collect@npm:^1.0.2": - version: 1.0.2 - resolution: "minipass-collect@npm:1.0.2" +"minipass-collect@npm:^2.0.1": + version: 2.0.1 + resolution: "minipass-collect@npm:2.0.1" dependencies: - minipass: ^3.0.0 - checksum: 14df761028f3e47293aee72888f2657695ec66bd7d09cae7ad558da30415fdc4752bbfee66287dcc6fd5e6a2fa3466d6c484dc1cbd986525d9393b9523d97f10 + minipass: ^7.0.3 + checksum: b251bceea62090f67a6cced7a446a36f4cd61ee2d5cea9aee7fff79ba8030e416327a1c5aa2908dc22629d06214b46d88fdab8c51ac76bacbf5703851b5ad342 languageName: node linkType: hard @@ -12548,10 +16243,10 @@ __metadata: languageName: node linkType: hard 
-"minipass@npm:^5.0.0 || ^6.0.2 || ^7.0.0, minipass@npm:^7.0.3": - version: 7.0.3 - resolution: "minipass@npm:7.0.3" - checksum: 6f1614f5b5b55568a46bca5fec0e7c46dac027691db27d0e1923a8192866903144cd962ac772c0e9f89b608ea818b702709c042bce98e190d258847d85461531 +"minipass@npm:^5.0.0 || ^6.0.2 || ^7.0.0, minipass@npm:^7.0.2, minipass@npm:^7.0.3": + version: 7.0.4 + resolution: "minipass@npm:7.0.4" + checksum: 87585e258b9488caf2e7acea242fd7856bbe9a2c84a7807643513a338d66f368c7d518200ad7b70a508664d408aa000517647b2930c259a8b1f9f0984f344a21 languageName: node linkType: hard @@ -12608,6 +16303,15 @@ __metadata: languageName: node linkType: hard +"mocha-each@npm:^2.0.1": + version: 2.0.1 + resolution: "mocha-each@npm:2.0.1" + dependencies: + sprintf-js: ^1.0.3 + checksum: 0de01ce517c2f7e7c3e19ef3f444809913f2f1602cb2571e6a3f8cb7ef3040f4f01b0f9f11a317e4ec1aeb9d39ceae4947c96668560cf638fe4d02ea549c2d4c + languageName: node + linkType: hard + "mocha@npm:^10.0.0, mocha@npm:^10.2.0": version: 10.2.0 resolution: "mocha@npm:10.2.0" @@ -12647,6 +16351,34 @@ __metadata: languageName: node linkType: hard +"monaco-editor-textmate@npm:^4.0.0": + version: 4.0.0 + resolution: "monaco-editor-textmate@npm:4.0.0" + peerDependencies: + monaco-editor: 0.x.x + monaco-textmate: ^3.0.0 + checksum: 9d3f5f24982f928c5f4b7c5c5170a549cb19eb1471eb157aa07bd97cf15cd75dd941585eeb6924b05c54109a55d48adc45191eda9db5d2793b1d8c462181c100 + languageName: node + linkType: hard + +"monaco-editor@npm:^0.44.0": + version: 0.44.0 + resolution: "monaco-editor@npm:0.44.0" + checksum: 6e561b23e5e9090cbdbb820dae5895a8bf9d537acc09281756a8c428960da0481461c72f387cc9a2e14bff69ab4359186c98df2dd29d6d109f1ab7189b573a35 + languageName: node + linkType: hard + +"monaco-textmate@npm:^3.0.1": + version: 3.0.1 + resolution: "monaco-textmate@npm:3.0.1" + dependencies: + fast-plist: ^0.1.2 + peerDependencies: + onigasm: ^2.0.0 + checksum: 0f2ec07ee3e9a37bb880e2aaef802f82cb666660b40fc3c7c3e35553d740aed34ae94399b06296fddf1f96efdaf27eaf347b39cb14bc08ccfb65162c52771d56 + languageName: node + linkType: hard + "mri@npm:^1.1.0": version: 1.2.0 resolution: "mri@npm:1.2.0" @@ -12675,7 +16407,7 @@ __metadata: languageName: node linkType: hard -"ms@npm:2.1.3, ms@npm:^2.0.0, ms@npm:^2.1.1, ms@npm:^2.1.3": +"ms@npm:2.1.3, ms@npm:^2.1.1": version: 2.1.3 resolution: "ms@npm:2.1.3" checksum: aa92de608021b242401676e35cfa5aa42dd70cbdc082b916da7fb925c542173e36bce97ea3e804923fe92c0ad991434e4a38327e15a1b5b5f945d66df615ae6d @@ -12710,12 +16442,12 @@ __metadata: languageName: node linkType: hard -"nanoid@npm:^3.1.25, nanoid@npm:^3.3.6": - version: 3.3.6 - resolution: "nanoid@npm:3.3.6" +"nanoid@npm:^3.1.25, nanoid@npm:^3.3.7": + version: 3.3.7 + resolution: "nanoid@npm:3.3.7" bin: nanoid: bin/nanoid.cjs - checksum: 7d0eda657002738aa5206107bd0580aead6c95c460ef1bdd0b1a87a9c7ae6277ac2e9b945306aaa5b32c6dcb7feaf462d0f552e7f8b5718abfc6ead5c94a71b3 + checksum: d36c427e530713e4ac6567d488b489a36582ef89da1d6d4e3b87eded11eb10d7042a877958c6f104929809b2ab0bafa17652b076cdf84324aa75b30b722204f2 languageName: node linkType: hard @@ -12747,6 +16479,26 @@ __metadata: languageName: node linkType: hard +"netmask@npm:^2.0.2": + version: 2.0.2 + resolution: "netmask@npm:2.0.2" + checksum: c65cb8d3f7ea5669edddb3217e4c96910a60d0d9a4b52d9847ff6b28b2d0277cd8464eee0ef85133cdee32605c57940cacdd04a9a019079b091b6bba4cb0ec22 + languageName: node + linkType: hard + +"nise@npm:^5.1.5": + version: 5.1.5 + resolution: "nise@npm:5.1.5" + dependencies: + "@sinonjs/commons": ^2.0.0 + "@sinonjs/fake-timers": ^10.0.2 + 
"@sinonjs/text-encoding": ^0.7.1 + just-extend: ^4.0.2 + path-to-regexp: ^1.7.0 + checksum: c763dc62c5796cafa5c9268e14a5b34db6e6fa2f1dbc57a891fe5d7ea632a87868e22b5bb34965006f984630793ea11368351e94971163228d9e20b2e88edce8 + languageName: node + linkType: hard + "no-case@npm:^3.0.4": version: 3.0.4 resolution: "no-case@npm:3.0.4" @@ -12782,6 +16534,18 @@ __metadata: languageName: node linkType: hard +"node-emoji@npm:^2.1.0": + version: 2.1.3 + resolution: "node-emoji@npm:2.1.3" + dependencies: + "@sindresorhus/is": ^4.6.0 + char-regex: ^1.0.2 + emojilib: ^2.4.0 + skin-tone: ^2.0.0 + checksum: 9ae5a1fb12fd5ce6885f251f345986115de4bb82e7d06fdc943845fb19260d89d0aaaccbaf85cae39fe7aaa1fc391640558865ba690c9bb8a7236c3ac10bbab0 + languageName: node + linkType: hard + "node-fetch@npm:2.6.7": version: 2.6.7 resolution: "node-fetch@npm:2.6.7" @@ -12829,59 +16593,51 @@ __metadata: linkType: hard "node-gyp-build@npm:^4.2.0, node-gyp-build@npm:^4.3.0": - version: 4.6.1 - resolution: "node-gyp-build@npm:4.6.1" + version: 4.7.1 + resolution: "node-gyp-build@npm:4.7.1" bin: node-gyp-build: bin.js node-gyp-build-optional: optional.js node-gyp-build-test: build-test.js - checksum: c3676d337b36803bc7792e35bf7fdcda7cdcb7e289b8f9855a5535702a82498eb976842fefcf487258c58005ca32ce3d537fbed91280b04409161dcd7232a882 + checksum: 2ef8248021489db03be3e8098977cdc797b80a9b12b77c6dcb89b0dc89b8c62e6a482672ee298f61021740ae7f080fb33154cfec8fb158cec620f57b0fae87c0 languageName: node linkType: hard "node-gyp@npm:latest": - version: 9.4.0 - resolution: "node-gyp@npm:9.4.0" + version: 10.0.1 + resolution: "node-gyp@npm:10.0.1" dependencies: env-paths: ^2.2.0 exponential-backoff: ^3.1.1 - glob: ^7.1.4 + glob: ^10.3.10 graceful-fs: ^4.2.6 - make-fetch-happen: ^11.0.3 - nopt: ^6.0.0 - npmlog: ^6.0.0 - rimraf: ^3.0.2 + make-fetch-happen: ^13.0.0 + nopt: ^7.0.0 + proc-log: ^3.0.0 semver: ^7.3.5 tar: ^6.1.2 - which: ^2.0.2 + which: ^4.0.0 bin: node-gyp: bin/node-gyp.js - checksum: 78b404e2e0639d64e145845f7f5a3cb20c0520cdaf6dda2f6e025e9b644077202ea7de1232396ba5bde3fee84cdc79604feebe6ba3ec84d464c85d407bb5da99 - languageName: node - linkType: hard - -"node-releases@npm:^2.0.13": - version: 2.0.13 - resolution: "node-releases@npm:2.0.13" - checksum: 17ec8f315dba62710cae71a8dad3cd0288ba943d2ece43504b3b1aa8625bf138637798ab470b1d9035b0545996f63000a8a926e0f6d35d0996424f8b6d36dda3 + checksum: 60a74e66d364903ce02049966303a57f898521d139860ac82744a5fdd9f7b7b3b61f75f284f3bfe6e6add3b8f1871ce305a1d41f775c7482de837b50c792223f languageName: node linkType: hard -"nofilter@npm:^3.1.0": - version: 3.1.0 - resolution: "nofilter@npm:3.1.0" - checksum: 58aa85a5b4b35cbb6e42de8a8591c5e338061edc9f3e7286f2c335e9e9b9b8fa7c335ae45daa8a1f3433164dc0b9a3d187fa96f9516e04a17a1f9ce722becc4f +"node-releases@npm:^2.0.14": + version: 2.0.14 + resolution: "node-releases@npm:2.0.14" + checksum: 59443a2f77acac854c42d321bf1b43dea0aef55cd544c6a686e9816a697300458d4e82239e2d794ea05f7bbbc8a94500332e2d3ac3f11f52e4b16cbe638b3c41 languageName: node linkType: hard -"nopt@npm:^6.0.0": - version: 6.0.0 - resolution: "nopt@npm:6.0.0" +"nopt@npm:^7.0.0": + version: 7.2.0 + resolution: "nopt@npm:7.2.0" dependencies: - abbrev: ^1.0.0 + abbrev: ^2.0.0 bin: nopt: bin/nopt.js - checksum: 82149371f8be0c4b9ec2f863cc6509a7fd0fa729929c009f3a58e4eb0c9e4cae9920e8f1f8eb46e7d032fec8fb01bede7f0f41a67eb3553b7b8e14fa53de1dac + checksum: a9c0f57fb8cb9cc82ae47192ca2b7ef00e199b9480eed202482c962d61b59a7fbe7541920b2a5839a97b42ee39e288c0aed770e38057a608d7f579389dfde410 languageName: node linkType: hard @@ 
-12913,6 +16669,13 @@ __metadata: languageName: node linkType: hard +"normalize-url@npm:^8.0.0": + version: 8.0.0 + resolution: "normalize-url@npm:8.0.0" + checksum: 24c20b75ebfd526d8453084692720b49d111c63c0911f1b7447427829597841eef5a8ba3f6bb93d6654007b991c1f5cd85da2c907800e439e2e2ec6c2abd0fc0 + languageName: node + linkType: hard + "npm-run-path@npm:^4.0.1": version: 4.0.1 resolution: "npm-run-path@npm:4.0.1" @@ -12926,20 +16689,8 @@ __metadata: version: 5.1.0 resolution: "npm-run-path@npm:5.1.0" dependencies: - path-key: ^4.0.0 - checksum: dc184eb5ec239d6a2b990b43236845332ef12f4e0beaa9701de724aa797fe40b6bbd0157fb7639d24d3ab13f5d5cf22d223a19c6300846b8126f335f788bee66 - languageName: node - linkType: hard - -"npmlog@npm:^6.0.0": - version: 6.0.2 - resolution: "npmlog@npm:6.0.2" - dependencies: - are-we-there-yet: ^3.0.0 - console-control-strings: ^1.1.0 - gauge: ^4.0.3 - set-blocking: ^2.0.0 - checksum: ae238cd264a1c3f22091cdd9e2b106f684297d3c184f1146984ecbe18aaa86343953f26b9520dedd1b1372bc0316905b736c1932d778dbeb1fcf5a1001390e2a + path-key: ^4.0.0 + checksum: dc184eb5ec239d6a2b990b43236845332ef12f4e0beaa9701de724aa797fe40b6bbd0157fb7639d24d3ab13f5d5cf22d223a19c6300846b8126f335f788bee66 languageName: node linkType: hard @@ -12959,7 +16710,7 @@ __metadata: languageName: node linkType: hard -"object-assign@npm:^4.1.0, object-assign@npm:^4.1.1": +"object-assign@npm:^4.1.1": version: 4.1.1 resolution: "object-assign@npm:4.1.1" checksum: fcc6e4ea8c7fe48abfbb552578b1c53e0d194086e2e6bbbf59e0a536381a292f39943c6e9628af05b5528aa5e3318bb30d6b2e53cadaf5b8fe9e12c4b69af23f @@ -12967,9 +16718,19 @@ __metadata: linkType: hard "object-inspect@npm:^1.9.0": - version: 1.12.3 - resolution: "object-inspect@npm:1.12.3" - checksum: dabfd824d97a5f407e6d5d24810d888859f6be394d8b733a77442b277e0808860555176719c5905e765e3743a7cada6b8b0a3b85e5331c530fd418cc8ae991db + version: 1.13.1 + resolution: "object-inspect@npm:1.13.1" + checksum: 7d9fa9221de3311dcb5c7c307ee5dc011cdd31dc43624b7c184b3840514e118e05ef0002be5388304c416c0eb592feb46e983db12577fc47e47d5752fbbfb61f + languageName: node + linkType: hard + +"object-is@npm:^1.1.5": + version: 1.1.5 + resolution: "object-is@npm:1.1.5" + dependencies: + call-bind: ^1.0.2 + define-properties: ^1.1.3 + checksum: 989b18c4cba258a6b74dc1d74a41805c1a1425bce29f6cabb50dcb1a6a651ea9104a1b07046739a49a5bb1bc49727bcb00efd5c55f932f6ea04ec8927a7901fe languageName: node linkType: hard @@ -12980,15 +16741,33 @@ __metadata: languageName: node linkType: hard -"object.assign@npm:^4.1.0": - version: 4.1.4 - resolution: "object.assign@npm:4.1.4" +"object-keys@npm:~0.2.0": + version: 0.2.0 + resolution: "object-keys@npm:0.2.0" dependencies: - call-bind: ^1.0.2 - define-properties: ^1.1.4 + foreach: ~2.0.1 + indexof: ~0.0.1 + is: ~0.2.6 + checksum: 4b96bab88fe9df22a03aec3c59a084bdffc789ad1318a39081e6b8389af6b9ab8571dd3776eed3ec5831137d057fb7ba76911552c6a6efd59b5d126ac3b6e432 + languageName: node + linkType: hard + +"object-keys@npm:~0.4.0": + version: 0.4.0 + resolution: "object-keys@npm:0.4.0" + checksum: 1be3ebe9b48c0d5eda8e4a30657d887a748cb42435e0e2eaf49faf557bdd602cd2b7558b8ce90a4eb2b8592d16b875a1900bce859cbb0f35b21c67e11a45313c + languageName: node + linkType: hard + +"object.assign@npm:^4.1.0, object.assign@npm:^4.1.4": + version: 4.1.5 + resolution: "object.assign@npm:4.1.5" + dependencies: + call-bind: ^1.0.5 + define-properties: ^1.2.1 has-symbols: ^1.0.3 object-keys: ^1.1.1 - checksum: 
76cab513a5999acbfe0ff355f15a6a125e71805fcf53de4e9d4e082e1989bdb81d1e329291e1e4e0ae7719f0e4ef80e88fb2d367ae60500d79d25a6224ac8864 + checksum: f9aeac0541661370a1fc86e6a8065eb1668d3e771f7dbb33ee54578201336c057b21ee61207a186dd42db0c62201d91aac703d20d12a79fc79c353eed44d4e25 languageName: node linkType: hard @@ -13006,6 +16785,13 @@ __metadata: languageName: node linkType: hard +"octal@npm:^1.0.0": + version: 1.0.0 + resolution: "octal@npm:1.0.0" + checksum: d648917f4f0a1042d7a4e230262aed00274c9791fe4795e9a2ce3b64ab7f2ca93e62cd55ca5ad4e4bd3fc375ca84d6919d7bf417be461790c1042503ac2c2310 + languageName: node + linkType: hard + "on-finished@npm:2.4.1, on-finished@npm:^2.3.0": version: 2.4.1 resolution: "on-finished@npm:2.4.1" @@ -13049,6 +16835,15 @@ __metadata: languageName: node linkType: hard +"onigasm@npm:^2.2.5": + version: 2.2.5 + resolution: "onigasm@npm:2.2.5" + dependencies: + lru-cache: ^5.1.1 + checksum: 97aedde610ef561f05853609d6a5b720ec1e123f867bdac1b38b5aeb3bc90ed60209678c75a5f0f9821aa793c720b6d17aabfb956e26ab115ee9b81d6e56bdf7 + languageName: node + linkType: hard + "only@npm:~0.0.2": version: 0.0.2 resolution: "only@npm:0.0.2" @@ -13132,19 +16927,10 @@ __metadata: languageName: node linkType: hard -"p-defer@npm:^1.0.0": - version: 1.0.0 - resolution: "p-defer@npm:1.0.0" - checksum: 4271b935c27987e7b6f229e5de4cdd335d808465604644cb7b4c4c95bef266735859a93b16415af8a41fd663ee9e3b97a1a2023ca9def613dba1bad2a0da0c7b - languageName: node - linkType: hard - -"p-event@npm:^5.0.1": - version: 5.0.1 - resolution: "p-event@npm:5.0.1" - dependencies: - p-timeout: ^5.0.2 - checksum: 3bdd8df6092e6b149f25e9c2eb1c0843b3b4279b07be2a2c72c02b65b267a8908c2040fefd606f2497b0f2bcefcd214f8ca5a74f0c883515d400ccf1d88d5683 +"p-cancelable@npm:^3.0.0": + version: 3.0.0 + resolution: "p-cancelable@npm:3.0.0" + checksum: 2b5ae34218f9c2cf7a7c18e5d9a726ef9b165ef07e6c959f6738371509e747334b5f78f3bcdeb03d8a12dcb978faf641fd87eb21486ed7d36fb823b8ddef3219 languageName: node linkType: hard @@ -13238,15 +17024,6 @@ __metadata: languageName: node linkType: hard -"p-map@npm:^5.5.0": - version: 5.5.0 - resolution: "p-map@npm:5.5.0" - dependencies: - aggregate-error: ^4.0.0 - checksum: 065cb6fca6b78afbd070dd9224ff160dc23eea96e57863c09a0c8ea7ce921043f76854be7ee0abc295cff1ac9adcf700e79a1fbe3b80b625081087be58e7effb - languageName: node - linkType: hard - "p-retry@npm:^4.5.0": version: 4.6.2 resolution: "p-retry@npm:4.6.2" @@ -13257,13 +17034,6 @@ __metadata: languageName: node linkType: hard -"p-timeout@npm:^5.0.2": - version: 5.1.0 - resolution: "p-timeout@npm:5.1.0" - checksum: f5cd4e17301ff1ff1d8dbf2817df0ad88c6bba99349fc24d8d181827176ad4f8aca649190b8a5b1a428dfd6ddc091af4606835d3e0cb0656e04045da5c9e270c - languageName: node - linkType: hard - "p-try@npm:^1.0.0": version: 1.0.0 resolution: "p-try@npm:1.0.0" @@ -13278,6 +17048,33 @@ __metadata: languageName: node linkType: hard +"pac-proxy-agent@npm:^7.0.0": + version: 7.0.1 + resolution: "pac-proxy-agent@npm:7.0.1" + dependencies: + "@tootallnate/quickjs-emscripten": ^0.23.0 + agent-base: ^7.0.2 + debug: ^4.3.4 + get-uri: ^6.0.1 + http-proxy-agent: ^7.0.0 + https-proxy-agent: ^7.0.2 + pac-resolver: ^7.0.0 + socks-proxy-agent: ^8.0.2 + checksum: 3d4aa48ec1c19db10158ecc1c4c9a9f77792294412d225ceb3dfa45d5a06950dca9755e2db0d9b69f12769119bea0adf2b24390d9c73c8d81df75e28245ae451 + languageName: node + linkType: hard + +"pac-resolver@npm:^7.0.0": + version: 7.0.0 + resolution: "pac-resolver@npm:7.0.0" + dependencies: + degenerator: ^5.0.0 + ip: ^1.1.8 + netmask: ^2.0.2 + checksum: 
fa3a898c09848e93e35f5e23443fea36ddb393a851c76a23664a5bf3fcbe58ff77a0bcdae1e4f01b9ea87ea493c52e14d97a0fe39f92474d14cd45559c6e3cde + languageName: node + linkType: hard + "package-json@npm:^6.3.0": version: 6.5.0 resolution: "package-json@npm:6.5.0" @@ -13290,6 +17087,18 @@ __metadata: languageName: node linkType: hard +"package-json@npm:^8.1.0": + version: 8.1.1 + resolution: "package-json@npm:8.1.1" + dependencies: + got: ^12.1.0 + registry-auth-token: ^5.0.1 + registry-url: ^6.0.0 + semver: ^7.3.7 + checksum: 28bec6f42bf9fba66b7c8fea07576fc23d08ec7923433f7835d6cd8654e72169d74f9738b3785107d18a476ae76712e0daeb1dddcd6930e69f9e4b47eba7c0ca + languageName: node + linkType: hard + "param-case@npm:^3.0.4": version: 3.0.4 resolution: "param-case@npm:3.0.4" @@ -13309,6 +17118,15 @@ __metadata: languageName: node linkType: hard +"parent-module@npm:^2.0.0": + version: 2.0.0 + resolution: "parent-module@npm:2.0.0" + dependencies: + callsites: ^3.1.0 + checksum: f131f13d687a938556a01033561fb1b274b39921eb4425c7a691f0d91dcfbe9b19759c2b8d425a3ee7c8a46874e57fa418a690643880c3c7c56827aba12f78dd + languageName: node + linkType: hard + "parse-entities@npm:^2.0.0": version: 2.0.0 resolution: "parse-entities@npm:2.0.0" @@ -13323,6 +17141,22 @@ __metadata: languageName: node linkType: hard +"parse-entities@npm:^4.0.0": + version: 4.0.1 + resolution: "parse-entities@npm:4.0.1" + dependencies: + "@types/unist": ^2.0.0 + character-entities: ^2.0.0 + character-entities-legacy: ^3.0.0 + character-reference-invalid: ^2.0.0 + decode-named-character-reference: ^1.0.0 + is-alphanumerical: ^2.0.0 + is-decimal: ^2.0.0 + is-hexadecimal: ^2.0.0 + checksum: 32a6ff5b9acb9d2c4d71537308521fd265e685b9215691df73feedd9edfe041bb6da9f89bd0c35c4a2bc7d58e3e76e399bb6078c2fd7d2a343ff1dd46edbf1bd + languageName: node + linkType: hard + "parse-json@npm:^5.0.0, parse-json@npm:^5.2.0": version: 5.2.0 resolution: "parse-json@npm:5.2.0" @@ -13335,13 +17169,6 @@ __metadata: languageName: node linkType: hard -"parse-ms@npm:^3.0.0": - version: 3.0.0 - resolution: "parse-ms@npm:3.0.0" - checksum: fc602bba093835562321a67a9d6c8c9687ca4f26a09459a77e07ebd7efddd1a5766725ec60eb0c83a2abe67f7a23808f7deb1c1226727776eaf7f9607ae09db2 - languageName: node - linkType: hard - "parse-numeric-range@npm:^1.3.0": version: 1.3.0 resolution: "parse-numeric-range@npm:1.3.0" @@ -13392,6 +17219,13 @@ __metadata: languageName: node linkType: hard +"path-browserify@npm:^1.0.1": + version: 1.0.1 + resolution: "path-browserify@npm:1.0.1" + checksum: c6d7fa376423fe35b95b2d67990060c3ee304fc815ff0a2dc1c6c3cfaff2bd0d572ee67e18f19d0ea3bbe32e8add2a05021132ac40509416459fffee35200699 + languageName: node + linkType: hard + "path-exists@npm:^3.0.0": version: 3.0.0 resolution: "path-exists@npm:3.0.0" @@ -13524,6 +17358,17 @@ __metadata: languageName: node linkType: hard +"periscopic@npm:^3.0.0": + version: 3.1.0 + resolution: "periscopic@npm:3.1.0" + dependencies: + "@types/estree": ^1.0.0 + estree-walker: ^3.0.0 + is-reference: ^3.0.0 + checksum: 2153244352e58a0d76e7e8d9263e66fe74509495f809af388da20045fb30aa3e93f2f94468dc0b9166ecf206fcfc0d73d2c7641c6fbedc07b1de858b710142cb + languageName: node + linkType: hard + "picocolors@npm:^1.0.0": version: 1.0.0 resolution: "picocolors@npm:1.0.0" @@ -13538,17 +17383,7 @@ __metadata: languageName: node linkType: hard -"pkg-conf@npm:^4.0.0": - version: 4.0.0 - resolution: "pkg-conf@npm:4.0.0" - dependencies: - find-up: ^6.0.0 - load-json-file: ^7.0.0 - checksum: 
6da0c064a74f6c7ae80d7d68c5853e14f7e762a2a80c6ca9e0aa827002b90b69c86fefe3bac830b10a6f1739e7f96a1f728637f2a141e50b0fdafe92a2c3eab6 - languageName: node - linkType: hard - -"pkg-dir@npm:^4.1.0": +"pkg-dir@npm:^4.1.0, pkg-dir@npm:^4.2.0": version: 4.2.0 resolution: "pkg-dir@npm:4.2.0" dependencies: @@ -13557,6 +17392,15 @@ __metadata: languageName: node linkType: hard +"pkg-dir@npm:^7.0.0": + version: 7.0.0 + resolution: "pkg-dir@npm:7.0.0" + dependencies: + find-up: ^6.3.0 + checksum: 94298b20a446bfbbd66604474de8a0cdd3b8d251225170970f15d9646f633e056c80520dd5b4c1d1050c9fed8f6a9e5054b141c93806439452efe72e57562c03 + languageName: node + linkType: hard + "pkg-up@npm:^3.1.0": version: 3.1.0 resolution: "pkg-up@npm:3.1.0" @@ -13566,36 +17410,27 @@ __metadata: languageName: node linkType: hard -"playwright-core@npm:1.38.0": - version: 1.38.0 - resolution: "playwright-core@npm:1.38.0" +"playwright-core@npm:1.40.1": + version: 1.40.1 + resolution: "playwright-core@npm:1.40.1" bin: playwright-core: cli.js - checksum: 9eb43fc6c3cb392d5f35b0fd0b7291b38a8cbdc3cbb944a8261f744f30d09196dfa3b5d84aa02ffc09af87d08d31b385b007b6af20d0b6cd50a29344f3b0db8d + checksum: 84d92fb9b86e3c225b16b6886bf858eb5059b4e60fa1205ff23336e56a06dcb2eac62650992dede72f406c8e70a7b6a5303e511f9b4bc0b85022ede356a01ee0 languageName: node linkType: hard "playwright@npm:^1.22.2": - version: 1.38.0 - resolution: "playwright@npm:1.38.0" + version: 1.40.1 + resolution: "playwright@npm:1.40.1" dependencies: fsevents: 2.3.2 - playwright-core: 1.38.0 + playwright-core: 1.40.1 dependenciesMeta: fsevents: optional: true bin: playwright: cli.js - checksum: c5356690a391d5dd41f814d4e2694b93ba9e79381ce63de752da1c6c59b1f9c69bc6be853d973d0542d73a44a6b15f7c0081a164a64cd27b6b31207710c0ab34 - languageName: node - linkType: hard - -"plur@npm:^5.1.0": - version: 5.1.0 - resolution: "plur@npm:5.1.0" - dependencies: - irregular-plurals: ^3.3.0 - checksum: 57e400dc4b926768fb0abab7f8688fe17e85673712134546e7beaaee188bae7e0504976e847d7e41d0d6103ff2fd61204095f03c2a45de19a8bad15aecb45cc1 + checksum: 9e36791c1b4a649c104aa365fdd9d049924eeb518c5967c0e921aa38b9b00994aa6ee54784d6c2af194b3b494b6f69772673081ef53c6c4a4b2065af9955c4ba languageName: node linkType: hard @@ -13695,7 +17530,7 @@ __metadata: languageName: node linkType: hard -"postcss-loader@npm:^7.0.0": +"postcss-loader@npm:^7.0.0, postcss-loader@npm:^7.3.3": version: 7.3.3 resolution: "postcss-loader@npm:7.3.3" dependencies: @@ -13994,7 +17829,7 @@ __metadata: languageName: node linkType: hard -"postcss-sort-media-queries@npm:^4.2.1": +"postcss-sort-media-queries@npm:^4.2.1, postcss-sort-media-queries@npm:^4.4.1": version: 4.4.1 resolution: "postcss-sort-media-queries@npm:4.4.1" dependencies: @@ -14044,14 +17879,14 @@ __metadata: languageName: node linkType: hard -"postcss@npm:^8.3.11, postcss@npm:^8.4.14, postcss@npm:^8.4.17, postcss@npm:^8.4.21": - version: 8.4.31 - resolution: "postcss@npm:8.4.31" +"postcss@npm:^8.4.14, postcss@npm:^8.4.17, postcss@npm:^8.4.21, postcss@npm:^8.4.26": + version: 8.4.32 + resolution: "postcss@npm:8.4.32" dependencies: - nanoid: ^3.3.6 + nanoid: ^3.3.7 picocolors: ^1.0.0 source-map-js: ^1.0.2 - checksum: 1d8611341b073143ad90486fcdfeab49edd243377b1f51834dc4f6d028e82ce5190e4f11bb2633276864503654fb7cab28e67abdc0fbf9d1f88cad4a0ff0beea + checksum: 220d9d0bf5d65be7ed31006c523bfb11619461d296245c1231831f90150aeb4a31eab9983ac9c5c89759a3ca8b60b3e0d098574964e1691673c3ce5c494305ae languageName: node linkType: hard @@ -14078,7 +17913,16 @@ __metadata: languageName: node linkType: hard 
-"prettier@npm:*, prettier@npm:3.0.3": +"prettier@npm:*": + version: 3.1.1 + resolution: "prettier@npm:3.1.1" + bin: + prettier: bin/prettier.cjs + checksum: e386855e3a1af86a748e16953f168be555ce66d6233f4ba54eb6449b88eb0c6b2ca79441b11eae6d28a7f9a5c96440ce50864b9d5f6356d331d39d6bb66c648e + languageName: node + linkType: hard + +"prettier@npm:3.0.3": version: 3.0.3 resolution: "prettier@npm:3.0.3" bin: @@ -14097,15 +17941,6 @@ __metadata: languageName: node linkType: hard -"pretty-ms@npm:^8.0.0": - version: 8.0.0 - resolution: "pretty-ms@npm:8.0.0" - dependencies: - parse-ms: ^3.0.0 - checksum: b7d2a8182887af0e5ab93f9df331f10db9b8eda86855e2de115eb01a6c501bde5631a8813b1b0abdd7d045e79b08ae875369a8fd279a3dacd6d9e572bdd3bfa6 - languageName: node - linkType: hard - "pretty-time@npm:^1.1.0": version: 1.1.0 resolution: "pretty-time@npm:1.1.0" @@ -14113,22 +17948,32 @@ __metadata: languageName: node linkType: hard -"prism-react-renderer@npm:^1.3.5": - version: 1.3.5 - resolution: "prism-react-renderer@npm:1.3.5" +"prism-react-renderer@npm:^2.1.0, prism-react-renderer@npm:^2.3.0": + version: 2.3.0 + resolution: "prism-react-renderer@npm:2.3.0" + dependencies: + "@types/prismjs": ^1.26.0 + clsx: ^2.0.0 peerDependencies: - react: ">=0.14.9" - checksum: c18806dcbc4c0b4fd6fd15bd06b4f7c0a6da98d93af235c3e970854994eb9b59e23315abb6cfc29e69da26d36709a47e25da85ab27fed81b6812f0a52caf6dfa + react: ">=16.0.0" + checksum: 29b24eb5015c09e1b7e3fa2941584ead6fceb5556fdfbe7c34548d96886e0b291290bda93a421aab8b26ce6aae677387aac294982d11349a050843f6dbbc7449 languageName: node linkType: hard -"prismjs@npm:^1.28.0": +"prismjs@npm:^1.29.0": version: 1.29.0 resolution: "prismjs@npm:1.29.0" checksum: 007a8869d4456ff8049dc59404e32d5666a07d99c3b0e30a18bd3b7676dfa07d1daae9d0f407f20983865fd8da56de91d09cb08e6aa61f5bc420a27c0beeaf93 languageName: node linkType: hard +"proc-log@npm:^3.0.0": + version: 3.0.0 + resolution: "proc-log@npm:3.0.0" + checksum: 02b64e1b3919e63df06f836b98d3af002b5cd92655cab18b5746e37374bfb73e03b84fe305454614b34c25b485cc687a9eebdccf0242cda8fda2475dd2c97e02 + languageName: node + linkType: hard + "process-nextick-args@npm:~2.0.0": version: 2.0.1 resolution: "process-nextick-args@npm:2.0.1" @@ -14136,6 +17981,13 @@ __metadata: languageName: node linkType: hard +"process@npm:^0.11.10": + version: 0.11.10 + resolution: "process@npm:0.11.10" + checksum: bfcce49814f7d172a6e6a14d5fa3ac92cc3d0c3b9feb1279774708a719e19acd673995226351a082a9ae99978254e320ccda4240ddc474ba31a76c79491ca7c3 + languageName: node + linkType: hard + "progress@npm:2.0.3": version: 2.0.3 resolution: "progress@npm:2.0.3" @@ -14153,15 +18005,6 @@ __metadata: languageName: node linkType: hard -"promise@npm:^7.1.1": - version: 7.3.1 - resolution: "promise@npm:7.3.1" - dependencies: - asap: ~2.0.3 - checksum: 475bb069130179fbd27ed2ab45f26d8862376a137a57314cf53310bdd85cc986a826fd585829be97ebc0aaf10e9d8e68be1bfe5a4a0364144b1f9eedfa940cf1 - languageName: node - linkType: hard - "prompts@npm:^2.4.2": version: 2.4.2 resolution: "prompts@npm:2.4.2" @@ -14192,6 +18035,20 @@ __metadata: languageName: node linkType: hard +"property-information@npm:^6.0.0": + version: 6.4.0 + resolution: "property-information@npm:6.4.0" + checksum: b5aed9a40e87730995f3ceed29839f137fa73b2a4cccfb8ed72ab8bddb8881cad05c3487c4aa168d7cb49a53db8089790c9f00f59d15b8380d2bb5383cdd1f24 + languageName: node + linkType: hard + +"proto-list@npm:~1.2.1": + version: 1.2.4 + resolution: "proto-list@npm:1.2.4" + checksum: 
4d4826e1713cbfa0f15124ab0ae494c91b597a3c458670c9714c36e8baddf5a6aad22842776f2f5b137f259c8533e741771445eb8df82e861eea37a6eaba03f7 + languageName: node + linkType: hard + "proxy-addr@npm:~2.0.7": version: 2.0.7 resolution: "proxy-addr@npm:2.0.7" @@ -14202,6 +18059,22 @@ __metadata: languageName: node linkType: hard +"proxy-agent@npm:6.3.0": + version: 6.3.0 + resolution: "proxy-agent@npm:6.3.0" + dependencies: + agent-base: ^7.0.2 + debug: ^4.3.4 + http-proxy-agent: ^7.0.0 + https-proxy-agent: ^7.0.0 + lru-cache: ^7.14.1 + pac-proxy-agent: ^7.0.0 + proxy-from-env: ^1.1.0 + socks-proxy-agent: ^8.0.1 + checksum: e3fb0633d665e352ed4efe23ae5616b8301423dfa4ff1c5975d093da8a636181a97391f7a91c6a7ffae17c1a305df855e95507f73bcdafda8876198c64b88f5b + languageName: node + linkType: hard + "proxy-from-env@npm:1.1.0, proxy-from-env@npm:^1.1.0": version: 1.1.0 resolution: "proxy-from-env@npm:1.1.0" @@ -14209,6 +18082,20 @@ __metadata: languageName: node linkType: hard +"prr@npm:~0.0.0": + version: 0.0.0 + resolution: "prr@npm:0.0.0" + checksum: 6552d9d92d9d55ec1afb8952ad80f81bbb1b4379f24ff7c506ad083ea701caf1bf6d4b092a2baeb98ec3f312c5a49d8bdf1d9b20a6db2998d05c2d52aa6a82e7 + languageName: node + linkType: hard + +"prr@npm:~1.0.1": + version: 1.0.1 + resolution: "prr@npm:1.0.1" + checksum: 3bca2db0479fd38f8c4c9439139b0c42dcaadcc2fbb7bb8e0e6afaa1383457f1d19aea9e5f961d5b080f1cfc05bfa1fe9e45c97a1d3fd6d421950a73d3108381 + languageName: node + linkType: hard + "ps-tree@npm:^1.2.0": version: 1.2.0 resolution: "ps-tree@npm:1.2.0" @@ -14230,7 +18117,7 @@ __metadata: languageName: node linkType: hard -"punycode@npm:^1.3.2": +"punycode@npm:^1.3.2, punycode@npm:^1.4.1": version: 1.4.1 resolution: "punycode@npm:1.4.1" checksum: fa6e698cb53db45e4628559e557ddaf554103d2a96a1d62892c8f4032cd3bc8871796cae9eabc1bc700e2b6677611521ce5bb1d9a27700086039965d0cf34518 @@ -14238,9 +18125,9 @@ __metadata: linkType: hard "punycode@npm:^2.1.0, punycode@npm:^2.1.1": - version: 2.3.0 - resolution: "punycode@npm:2.3.0" - checksum: 39f760e09a2a3bbfe8f5287cf733ecdad69d6af2fe6f97ca95f24b8921858b91e9ea3c9eeec6e08cede96181b3bb33f95c6ffd8c77e63986508aa2e8159fa200 + version: 2.3.1 + resolution: "punycode@npm:2.3.1" + checksum: bb0a0ceedca4c3c57a9b981b90601579058903c62be23c5e8e843d2c2d4148a3ecf029d5133486fb0e1822b098ba8bba09e89d6b21742d02fa26bda6441a6fb2 languageName: node linkType: hard @@ -14253,6 +18140,15 @@ __metadata: languageName: node linkType: hard +"pupa@npm:^3.1.0": + version: 3.1.0 + resolution: "pupa@npm:3.1.0" + dependencies: + escape-goat: ^4.0.0 + checksum: 0e4f4ab6bbdce600fa6d23b1833f1af57b2641246ff4cbe10f9d66e4e5479b0de2864a88d5bd629eef59524eda3c6680726acd7f3f873d9ed46b7f095d0bb5f6 + languageName: node + linkType: hard + "puppeteer-core@npm:^19.8.1": version: 19.11.1 resolution: "puppeteer-core@npm:19.11.1" @@ -14277,10 +18173,22 @@ __metadata: languageName: node linkType: hard -"pure-color@npm:^1.2.0": - version: 1.3.0 - resolution: "pure-color@npm:1.3.0" - checksum: 646d8bed6e6eab89affdd5e2c11f607a85b631a7fb03c061dfa658eb4dc4806881a15feed2ac5fd8c0bad8c00c632c640d5b1cb8b9a972e6e947393a1329371b +"puppeteer-core@npm:^20.0.0": + version: 20.9.0 + resolution: "puppeteer-core@npm:20.9.0" + dependencies: + "@puppeteer/browsers": 1.4.6 + chromium-bidi: 0.4.16 + cross-fetch: 4.0.0 + debug: 4.3.4 + devtools-protocol: 0.0.1147663 + ws: 8.13.0 + peerDependencies: + typescript: ">= 4.7.4" + peerDependenciesMeta: + typescript: + optional: true + checksum: 
d298598445b0f2032c02d0ed7d1d18a8d2d2fcaf6fc31fc96e93e2669a7fc6fbee0338bd9b8c8f8822887f18a8fb680b77bb56e96fe1928baadb52292bbd93b4 languageName: node linkType: hard @@ -14293,7 +18201,7 @@ __metadata: languageName: node linkType: hard -"qs@npm:^6.5.2": +"qs@npm:^6.11.2, qs@npm:^6.5.2": version: 6.11.2 resolution: "qs@npm:6.11.2" dependencies: @@ -14309,6 +18217,13 @@ __metadata: languageName: node linkType: hard +"queue-tick@npm:^1.0.1": + version: 1.0.1 + resolution: "queue-tick@npm:1.0.1" + checksum: 57c3292814b297f87f792fbeb99ce982813e4e54d7a8bdff65cf53d5c084113913289d4a48ec8bbc964927a74b847554f9f4579df43c969a6c8e0f026457ad01 + languageName: node + linkType: hard + "queue@npm:6.0.2": version: 6.0.2 resolution: "queue@npm:6.0.2" @@ -14318,6 +18233,13 @@ __metadata: languageName: node linkType: hard +"quick-lru@npm:^5.1.1": + version: 5.1.1 + resolution: "quick-lru@npm:5.1.1" + checksum: a516faa25574be7947969883e6068dbe4aa19e8ef8e8e0fd96cddd6d36485e9106d85c0041a27153286b0770b381328f4072aa40d3b18a19f5f7d2b78b94b5ed + languageName: node + linkType: hard + "randombytes@npm:^2.1.0": version: 2.1.0 resolution: "randombytes@npm:2.1.0" @@ -14365,7 +18287,7 @@ __metadata: languageName: node linkType: hard -"rc@npm:1.2.8, rc@npm:^1.2.8": +"rc@npm:1.2.8, rc@npm:^1.0.1, rc@npm:^1.1.6, rc@npm:^1.2.8": version: 1.2.8 resolution: "rc@npm:1.2.8" dependencies: @@ -14379,18 +18301,6 @@ __metadata: languageName: node linkType: hard -"react-base16-styling@npm:^0.6.0": - version: 0.6.0 - resolution: "react-base16-styling@npm:0.6.0" - dependencies: - base16: ^1.0.0 - lodash.curry: ^4.0.1 - lodash.flow: ^3.3.0 - pure-color: ^1.2.0 - checksum: 00a12dddafc8a9025cca933b0dcb65fca41c81fa176d1fc3a6a9d0242127042e2c0a604f4c724a3254dd2c6aeb5ef55095522ff22f5462e419641c1341a658e4 - languageName: node - linkType: hard - "react-dev-utils@npm:^12.0.1": version: 12.0.1 resolution: "react-dev-utils@npm:12.0.1" @@ -14423,16 +18333,15 @@ __metadata: languageName: node linkType: hard -"react-dom@npm:^17.0.2": - version: 17.0.2 - resolution: "react-dom@npm:17.0.2" +"react-dom@npm:^18.2.0": + version: 18.2.0 + resolution: "react-dom@npm:18.2.0" dependencies: loose-envify: ^1.1.0 - object-assign: ^4.1.1 - scheduler: ^0.20.2 + scheduler: ^0.23.0 peerDependencies: - react: 17.0.2 - checksum: 1c1eaa3bca7c7228d24b70932e3d7c99e70d1d04e13bb0843bbf321582bc25d7961d6b8a6978a58a598af2af496d1cedcfb1bf65f6b0960a0a8161cb8dab743c + react: ^18.2.0 + checksum: 7d323310bea3a91be2965f9468d552f201b1c27891e45ddc2d6b8f717680c95a75ae0bc1e3f5cf41472446a2589a75aed4483aee8169287909fcd59ad149e8cc languageName: node linkType: hard @@ -14443,14 +18352,28 @@ __metadata: languageName: node linkType: hard -"react-fast-compare@npm:^3.2.0": +"react-fast-compare@npm:^3.2.0, react-fast-compare@npm:^3.2.2": version: 3.2.2 resolution: "react-fast-compare@npm:3.2.2" checksum: 2071415b4f76a3e6b55c84611c4d24dcb12ffc85811a2840b5a3f1ff2d1a99be1020d9437ee7c6e024c9f4cbb84ceb35e48cf84f28fcb00265ad2dfdd3947704 languageName: node linkType: hard -"react-helmet-async@npm:*, react-helmet-async@npm:^1.3.0": +"react-helmet-async@npm:*": + version: 2.0.3 + resolution: "react-helmet-async@npm:2.0.3" + dependencies: + invariant: ^2.2.4 + react-fast-compare: ^3.2.2 + shallowequal: ^1.1.0 + peerDependencies: + react: ^16.6.0 || ^17.0.0 || ^18.0.0 + react-dom: ^16.6.0 || ^17.0.0 || ^18.0.0 + checksum: dfb951174c1812b1aaa8099181024ea8fbdc5cb65e5ab56e465753e2dc5a7c7756e9ab86ba382157ff27dc5b33353a2a000f7f088653815e4aa576daec01ec33 + languageName: node + linkType: hard + 
+"react-helmet-async@npm:^1.3.0": version: 1.3.0 resolution: "react-helmet-async@npm:1.3.0" dependencies: @@ -14473,25 +18396,12 @@ __metadata: languageName: node linkType: hard -"react-json-view@npm:^1.21.3": - version: 1.21.3 - resolution: "react-json-view@npm:1.21.3" - dependencies: - flux: ^4.0.1 - react-base16-styling: ^0.6.0 - react-lifecycles-compat: ^3.0.4 - react-textarea-autosize: ^8.3.2 +"react-json-view-lite@npm:^1.2.0": + version: 1.2.1 + resolution: "react-json-view-lite@npm:1.2.1" peerDependencies: - react: ^17.0.0 || ^16.3.0 || ^15.5.4 - react-dom: ^17.0.0 || ^16.3.0 || ^15.5.4 - checksum: 5718bcd9210ad5b06eb9469cf8b9b44be9498845a7702e621343618e8251f26357e6e1c865532cf170db6165df1cb30202787e057309d8848c220bc600ec0d1a - languageName: node - linkType: hard - -"react-lifecycles-compat@npm:^3.0.4": - version: 3.0.4 - resolution: "react-lifecycles-compat@npm:3.0.4" - checksum: a904b0fc0a8eeb15a148c9feb7bc17cec7ef96e71188280061fc340043fd6d8ee3ff233381f0e8f95c1cf926210b2c4a31f38182c8f35ac55057e453d6df204f + react: ^16.13.1 || ^17.0.0 || ^18.0.0 + checksum: 9441260033ec07991b0121281d846f9d3e8db9061ea0dc3938f7003630f515d47870a465d90f1f9300ebe406679986657a4062c009c2a2084e2e145e6fa05a47 languageName: node linkType: hard @@ -14519,7 +18429,7 @@ __metadata: languageName: node linkType: hard -"react-router-dom@npm:^5.3.3": +"react-router-dom@npm:^5.3.3, react-router-dom@npm:^5.3.4": version: 5.3.4 resolution: "react-router-dom@npm:5.3.4" dependencies: @@ -14536,7 +18446,7 @@ __metadata: languageName: node linkType: hard -"react-router@npm:5.3.4, react-router@npm:^5.3.3": +"react-router@npm:5.3.4, react-router@npm:^5.3.3, react-router@npm:^5.3.4": version: 5.3.4 resolution: "react-router@npm:5.3.4" dependencies: @@ -14555,30 +18465,50 @@ __metadata: languageName: node linkType: hard -"react-textarea-autosize@npm:^8.3.2": - version: 8.5.3 - resolution: "react-textarea-autosize@npm:8.5.3" +"react-spinners@npm:^0.13.8": + version: 0.13.8 + resolution: "react-spinners@npm:0.13.8" + peerDependencies: + react: ^16.0.0 || ^17.0.0 || ^18.0.0 + react-dom: ^16.0.0 || ^17.0.0 || ^18.0.0 + checksum: cb58f916bbf00c4f7fc33c1a75a8c0f91a98a42af26944340a0e29e4b3a97937a03f0292bc444eb5f5f0bea099344b580f9c04a81524246dadbf0571bdb57e62 + languageName: node + linkType: hard + +"react-toastify@npm:^9.1.3": + version: 9.1.3 + resolution: "react-toastify@npm:9.1.3" dependencies: - "@babel/runtime": ^7.20.13 - use-composed-ref: ^1.3.0 - use-latest: ^1.2.1 + clsx: ^1.1.1 peerDependencies: - react: ^16.8.0 || ^17.0.0 || ^18.0.0 - checksum: b317c3763f37a89621bbafd0e6e2d068e7876790a5ae77f497adfd6ba9334ceea138c8a0b7d907bae0f79c765cb24e8b2ca2b8033b4144c0bce28571a3658921 + react: ">=16" + react-dom: ">=16" + checksum: e8bd92c5cbf831b43a042644ab9bc69abe6ceb3ce91ba71f5cd2d8b6a2c9885ca52770e1f1ba64c5632607f6df962db344a26c7fba57606faf5aa0e7bfc8535f languageName: node linkType: hard -"react@npm:^17.0.2": - version: 17.0.2 - resolution: "react@npm:17.0.2" +"react@npm:^18.2.0": + version: 18.2.0 + resolution: "react@npm:18.2.0" dependencies: loose-envify: ^1.1.0 - object-assign: ^4.1.1 - checksum: b254cc17ce3011788330f7bbf383ab653c6848902d7936a87b09d835d091e3f295f7e9dd1597c6daac5dc80f90e778c8230218ba8ad599f74adcc11e33b9d61b + checksum: 88e38092da8839b830cda6feef2e8505dec8ace60579e46aa5490fc3dc9bba0bd50336507dc166f43e3afc1c42939c09fe33b25fae889d6f402721dcd78fca1b + languageName: node + linkType: hard + +"readable-stream@npm:^1.0.26-4": + version: 1.1.14 + resolution: "readable-stream@npm:1.1.14" + dependencies: + core-util-is: 
~1.0.0 + inherits: ~2.0.1 + isarray: 0.0.1 + string_decoder: ~0.10.x + checksum: 17dfeae3e909945a4a1abc5613ea92d03269ef54c49288599507fc98ff4615988a1c39a999dcf9aacba70233d9b7040bc11a5f2bfc947e262dedcc0a8b32b5a0 languageName: node linkType: hard -"readable-stream@npm:^2.0.1": +"readable-stream@npm:^2.0.1, readable-stream@npm:^2.2.2": version: 2.3.8 resolution: "readable-stream@npm:2.3.8" dependencies: @@ -14604,6 +18534,31 @@ __metadata: languageName: node linkType: hard +"readable-stream@npm:^4.4.2": + version: 4.4.2 + resolution: "readable-stream@npm:4.4.2" + dependencies: + abort-controller: ^3.0.0 + buffer: ^6.0.3 + events: ^3.3.0 + process: ^0.11.10 + string_decoder: ^1.3.0 + checksum: 6f4063763dbdb52658d22d3f49ca976420e1fbe16bbd241f744383715845350b196a2f08b8d6330f8e219153dff34b140aeefd6296da828e1041a7eab1f20d5e + languageName: node + linkType: hard + +"readable-stream@npm:~1.0.26, readable-stream@npm:~1.0.26-4": + version: 1.0.34 + resolution: "readable-stream@npm:1.0.34" + dependencies: + core-util-is: ~1.0.0 + inherits: ~2.0.1 + isarray: 0.0.1 + string_decoder: ~0.10.x + checksum: 85042c537e4f067daa1448a7e257a201070bfec3dd2706abdbd8ebc7f3418eb4d3ed4b8e5af63e2544d69f88ab09c28d5da3c0b77dc76185fddd189a59863b60 + languageName: node + linkType: hard + "readdirp@npm:~3.6.0": version: 3.6.0 resolution: "readdirp@npm:3.6.0" @@ -14629,6 +18584,15 @@ __metadata: languageName: node linkType: hard +"rechoir@npm:^0.7.0": + version: 0.7.1 + resolution: "rechoir@npm:0.7.1" + dependencies: + resolve: ^1.9.0 + checksum: 2a04aab4e28c05fcd6ee6768446bc8b859d8f108e71fc7f5bcbc5ef25e53330ce2c11d10f82a24591a2df4c49c4f61feabe1fd11f844c66feedd4cd7bb61146a + languageName: node + linkType: hard + "recursive-readdir@npm:^2.2.2": version: 2.2.3 resolution: "recursive-readdir@npm:2.2.3" @@ -14691,6 +18655,16 @@ __metadata: languageName: node linkType: hard +"registry-auth-token@npm:3.3.2": + version: 3.3.2 + resolution: "registry-auth-token@npm:3.3.2" + dependencies: + rc: ^1.1.6 + safe-buffer: ^5.0.1 + checksum: c9d7ae160a738f1fa825556e3669e6c771d2c0239ce37679f7e8646157a97d0a76464738be075002a1f754ef9bfb913b689f4bbfd5296d28f136fbf98c8c2217 + languageName: node + linkType: hard + "registry-auth-token@npm:^4.0.0": version: 4.2.2 resolution: "registry-auth-token@npm:4.2.2" @@ -14700,6 +18674,24 @@ __metadata: languageName: node linkType: hard +"registry-auth-token@npm:^5.0.1": + version: 5.0.2 + resolution: "registry-auth-token@npm:5.0.2" + dependencies: + "@pnpm/npm-conf": ^2.1.0 + checksum: 0d7683b71ee418993e7872b389024b13645c4295eb7bb850d10728eaf46065db24ea4d47dc6cbb71a60d1aa4bef077b0d8b7363c9ac9d355fdba47bebdfb01dd + languageName: node + linkType: hard + +"registry-url@npm:3.1.0": + version: 3.1.0 + resolution: "registry-url@npm:3.1.0" + dependencies: + rc: ^1.0.1 + checksum: 6d223da41b04e1824f5faa63905c6f2e43b216589d72794111573f017352b790aef42cd1f826463062f89d804abb2027e3d9665d2a9a0426a11eedd04d470af3 + languageName: node + linkType: hard + "registry-url@npm:^5.0.0": version: 5.1.0 resolution: "registry-url@npm:5.1.0" @@ -14709,6 +18701,15 @@ __metadata: languageName: node linkType: hard +"registry-url@npm:^6.0.0": + version: 6.0.1 + resolution: "registry-url@npm:6.0.1" + dependencies: + rc: 1.2.8 + checksum: 33712aa1b489aab7aba2191c1cdadfdd71f5bf166d4792d81744a6be332c160bd7d9273af8269d8a01284b9562f14a5b31b7abcf7ad9306c44887ecff51c89ab + languageName: node + linkType: hard + "regjsparser@npm:^0.9.1": version: 0.9.1 resolution: "regjsparser@npm:0.9.1" @@ -14720,27 +18721,29 @@ __metadata: languageName: node 
linkType: hard -"rehype-katex@npm:^5.0.0": - version: 5.0.0 - resolution: "rehype-katex@npm:5.0.0" +"rehype-katex@npm:^7.0.0": + version: 7.0.0 + resolution: "rehype-katex@npm:7.0.0" dependencies: - "@types/katex": ^0.11.0 - hast-util-to-text: ^2.0.0 - katex: ^0.13.0 - rehype-parse: ^7.0.0 - unified: ^9.0.0 - unist-util-visit: ^2.0.0 - checksum: b20e24c5326a718581619761057a30d03615519eccd0693ada2c7c710064dceaf08f038ae3a1131550f1f7c47ca54a254ba8e45547da384867e956ceca73f6bf + "@types/hast": ^3.0.0 + "@types/katex": ^0.16.0 + hast-util-from-html-isomorphic: ^2.0.0 + hast-util-to-text: ^4.0.0 + katex: ^0.16.0 + unist-util-visit-parents: ^6.0.0 + vfile: ^6.0.0 + checksum: 3184cf7635e63039a5d455e27718cbc99998cc7bfcc15422badf5da892887f4200f3ee54a6617aa231aa15d46cb678716c112b6b7f9cef11a8653e5d518ad6f0 languageName: node linkType: hard -"rehype-parse@npm:^7.0.0": - version: 7.0.1 - resolution: "rehype-parse@npm:7.0.1" +"rehype-raw@npm:^7.0.0": + version: 7.0.0 + resolution: "rehype-raw@npm:7.0.0" dependencies: - hast-util-from-parse5: ^6.0.0 - parse5: ^6.0.0 - checksum: c3c914aa9281853290eff6b09e0bed6843934e788b957e25219e91f0bf244a183d2f5e042c7d21543276571f9b49a6bae90f4640b8f885f2773392ffa57baf4b + "@types/hast": ^3.0.0 + hast-util-raw: ^9.0.0 + vfile: ^6.0.0 + checksum: f9e28dcbf4c6c7d91a97c10a840310f18ef3268aa45abb3e0428b6b191ff3c4fa8f753b910d768588a2dac5c7da7e557b4ddc3f1b6cd252e8d20cb62d60c65ed languageName: node linkType: hard @@ -14760,6 +18763,18 @@ __metadata: languageName: unknown linkType: soft +"remark-directive@npm:^3.0.0": + version: 3.0.0 + resolution: "remark-directive@npm:3.0.0" + dependencies: + "@types/mdast": ^4.0.0 + mdast-util-directive: ^3.0.0 + micromark-extension-directive: ^3.0.0 + unified: ^11.0.0 + checksum: 744d12bbe924bd0492a2481cbaf9250aa6622c0d2cc090bb7bc39975e355c8a46ae13cc4793204ada39f0af64c953f6b730a55420a50375e0f74a5dd5d201089 + languageName: node + linkType: hard + "remark-emoji@npm:^2.2.0": version: 2.2.0 resolution: "remark-emoji@npm:2.2.0" @@ -14771,6 +18786,19 @@ __metadata: languageName: node linkType: hard +"remark-emoji@npm:^4.0.0": + version: 4.0.1 + resolution: "remark-emoji@npm:4.0.1" + dependencies: + "@types/mdast": ^4.0.2 + emoticon: ^4.0.1 + mdast-util-find-and-replace: ^3.0.1 + node-emoji: ^2.1.0 + unified: ^11.0.4 + checksum: 2c02d8c0b694535a9f0c4fe39180cb89a8fbd07eb873c94842c34dfde566b8a6703df9d28fe175a8c28584f96252121de722862baa756f2d875f2f1a4352c1f4 + languageName: node + linkType: hard + "remark-footnotes@npm:2.0.0": version: 2.0.0 resolution: "remark-footnotes@npm:2.0.0" @@ -14778,10 +18806,41 @@ __metadata: languageName: node linkType: hard -"remark-math@npm:^3.0.1": - version: 3.0.1 - resolution: "remark-math@npm:3.0.1" - checksum: 690256f27f2b42dadcf41806fec443056e09592454622ae77f03b1a8474e8c83cc7610e694be7e17de92c96cc272c61209e59a6e7a24e3af6ede47d48b185ccd +"remark-frontmatter@npm:^5.0.0": + version: 5.0.0 + resolution: "remark-frontmatter@npm:5.0.0" + dependencies: + "@types/mdast": ^4.0.0 + mdast-util-frontmatter: ^2.0.0 + micromark-extension-frontmatter: ^2.0.0 + unified: ^11.0.0 + checksum: b36e11d528d1d0172489c74ce7961bb6073f7272e71ea1349f765fc79c4246a758aef949174d371a088c48e458af776fcfbb3b043c49cd1120ca8239aeafe16a + languageName: node + linkType: hard + +"remark-gfm@npm:^4.0.0": + version: 4.0.0 + resolution: "remark-gfm@npm:4.0.0" + dependencies: + "@types/mdast": ^4.0.0 + mdast-util-gfm: ^3.0.0 + micromark-extension-gfm: ^3.0.0 + remark-parse: ^11.0.0 + remark-stringify: ^11.0.0 + unified: ^11.0.0 + checksum: 
84bea84e388061fbbb697b4b666089f5c328aa04d19dc544c229b607446bc10902e46b67b9594415a1017bbbd7c811c1f0c30d36682c6d1a6718b66a1558261b + languageName: node + linkType: hard + +"remark-math@npm:^6.0.0": + version: 6.0.0 + resolution: "remark-math@npm:6.0.0" + dependencies: + "@types/mdast": ^4.0.0 + mdast-util-math: ^3.0.0 + micromark-extension-math: ^3.0.0 + unified: ^11.0.0 + checksum: fef489acb6cae6e40af05012367dc22a846ce16301e8a96006c6d78935887bdb3e6c5018b6514884ecee57f9c7a51f97a10862526ab0a0f5f7b7d339fe0eb20f languageName: node linkType: hard @@ -14801,6 +18860,16 @@ __metadata: languageName: node linkType: hard +"remark-mdx@npm:^3.0.0": + version: 3.0.0 + resolution: "remark-mdx@npm:3.0.0" + dependencies: + mdast-util-mdx: ^3.0.0 + micromark-extension-mdxjs: ^3.0.0 + checksum: 8b9b3e5297e5cb4c312553f42c3720280ada96ae60ede880606924a0aad2e773106aba1ef45a0c179c218f8da6f58dac3c789a9c4f791649ca7a183706cde5b8 + languageName: node + linkType: hard + "remark-parse@npm:8.0.3": version: 8.0.3 resolution: "remark-parse@npm:8.0.3" @@ -14825,6 +18894,31 @@ __metadata: languageName: node linkType: hard +"remark-parse@npm:^11.0.0": + version: 11.0.0 + resolution: "remark-parse@npm:11.0.0" + dependencies: + "@types/mdast": ^4.0.0 + mdast-util-from-markdown: ^2.0.0 + micromark-util-types: ^2.0.0 + unified: ^11.0.0 + checksum: d83d245290fa84bb04fb3e78111f09c74f7417e7c012a64dd8dc04fccc3699036d828fbd8eeec8944f774b6c30cc1d925c98f8c46495ebcee7c595496342ab7f + languageName: node + linkType: hard + +"remark-rehype@npm:^11.0.0": + version: 11.0.0 + resolution: "remark-rehype@npm:11.0.0" + dependencies: + "@types/hast": ^3.0.0 + "@types/mdast": ^4.0.0 + mdast-util-to-hast: ^13.0.0 + unified: ^11.0.0 + vfile: ^6.0.0 + checksum: 0ff0fd948759cbde9d507ca1581028d0b89da0b5f610b35a6cb0a511f8d11621449b6eca573b11ddaea77afd37edd4755f3f1eb086ad49a6f7b970b4a4634e13 + languageName: node + linkType: hard + "remark-squeeze-paragraphs@npm:4.0.0": version: 4.0.0 resolution: "remark-squeeze-paragraphs@npm:4.0.0" @@ -14834,6 +18928,17 @@ __metadata: languageName: node linkType: hard +"remark-stringify@npm:^11.0.0": + version: 11.0.0 + resolution: "remark-stringify@npm:11.0.0" + dependencies: + "@types/mdast": ^4.0.0 + mdast-util-to-markdown: ^2.0.0 + unified: ^11.0.0 + checksum: 59e07460eb629d6c3b3c0f438b0b236e7e6858fd5ab770303078f5a556ec00354d9c7fb9ef6d5f745a4617ac7da1ab618b170fbb4dac120e183fecd9cc86bce6 + languageName: node + linkType: hard + "renderkid@npm:^3.0.0": version: 3.0.0 resolution: "renderkid@npm:3.0.0" @@ -14847,7 +18952,7 @@ __metadata: languageName: node linkType: hard -"repeat-string@npm:^1.0.0, repeat-string@npm:^1.5.4": +"repeat-string@npm:^1.5.4, repeat-string@npm:^1.6.1": version: 1.6.1 resolution: "repeat-string@npm:1.6.1" checksum: 1b809fc6db97decdc68f5b12c4d1a671c8e3f65ec4a40c238bc5200e44e85bcc52a54f78268ab9c29fcf5fe4f1343e805420056d1f30fa9a9ee4c2d93e3cc6c0 @@ -14882,6 +18987,13 @@ __metadata: languageName: node linkType: hard +"resolve-alpn@npm:^1.2.0": + version: 1.2.1 + resolution: "resolve-alpn@npm:1.2.1" + checksum: f558071fcb2c60b04054c99aebd572a2af97ef64128d59bef7ab73bd50d896a222a056de40ffc545b633d99b304c259ea9d0c06830d5c867c34f0bfa60b8eae0 + languageName: node + linkType: hard + "resolve-cwd@npm:^3.0.0": version: 3.0.0 resolution: "resolve-cwd@npm:3.0.0" @@ -14922,6 +19034,13 @@ __metadata: languageName: node linkType: hard +"resolve-pkg-maps@npm:^1.0.0": + version: 1.0.0 + resolution: "resolve-pkg-maps@npm:1.0.0" + checksum: 
1012afc566b3fdb190a6309cc37ef3b2dcc35dff5fa6683a9d00cd25c3247edfbc4691b91078c97adc82a29b77a2660c30d791d65dab4fc78bfc473f60289977 + languageName: node + linkType: hard + "resolve@npm:1.17.0": version: 1.17.0 resolution: "resolve@npm:1.17.0" @@ -14931,7 +19050,7 @@ __metadata: languageName: node linkType: hard -"resolve@npm:^1.1.6, resolve@npm:^1.14.2, resolve@npm:^1.3.2": +"resolve@npm:^1.1.6, resolve@npm:^1.14.2, resolve@npm:^1.19.0, resolve@npm:^1.22.1, resolve@npm:^1.3.2, resolve@npm:^1.9.0": version: 1.22.8 resolution: "resolve@npm:1.22.8" dependencies: @@ -14944,19 +19063,6 @@ __metadata: languageName: node linkType: hard -"resolve@npm:^1.19.0": - version: 1.22.6 - resolution: "resolve@npm:1.22.6" - dependencies: - is-core-module: ^2.13.0 - path-parse: ^1.0.7 - supports-preserve-symlinks-flag: ^1.0.0 - bin: - resolve: bin/resolve - checksum: d13bf66d4e2ee30d291491f16f2fa44edd4e0cefb85d53249dd6f93e70b2b8c20ec62f01b18662e3cd40e50a7528f18c4087a99490048992a3bb954cf3201a5b - languageName: node - linkType: hard - "resolve@patch:resolve@1.17.0#~builtin": version: 1.17.0 resolution: "resolve@patch:resolve@npm%3A1.17.0#~builtin::version=1.17.0&hash=c3c19d" @@ -14966,7 +19072,7 @@ __metadata: languageName: node linkType: hard -"resolve@patch:resolve@^1.1.6#~builtin, resolve@patch:resolve@^1.14.2#~builtin, resolve@patch:resolve@^1.3.2#~builtin": +"resolve@patch:resolve@^1.1.6#~builtin, resolve@patch:resolve@^1.14.2#~builtin, resolve@patch:resolve@^1.19.0#~builtin, resolve@patch:resolve@^1.22.1#~builtin, resolve@patch:resolve@^1.3.2#~builtin, resolve@patch:resolve@^1.9.0#~builtin": version: 1.22.8 resolution: "resolve@patch:resolve@npm%3A1.22.8#~builtin::version=1.22.8&hash=c3c19d" dependencies: @@ -14979,19 +19085,6 @@ __metadata: languageName: node linkType: hard -"resolve@patch:resolve@^1.19.0#~builtin": - version: 1.22.6 - resolution: "resolve@patch:resolve@npm%3A1.22.6#~builtin::version=1.22.6&hash=c3c19d" - dependencies: - is-core-module: ^2.13.0 - path-parse: ^1.0.7 - supports-preserve-symlinks-flag: ^1.0.0 - bin: - resolve: bin/resolve - checksum: 9d3b3c67aefd12cecbe5f10ca4d1f51ea190891096497c43f301b086883b426466918c3a64f1bbf1788fabb52b579d58809614006c5d0b49186702b3b8fb746a - languageName: node - linkType: hard - "responselike@npm:^1.0.2": version: 1.0.2 resolution: "responselike@npm:1.0.2" @@ -15001,6 +19094,15 @@ __metadata: languageName: node linkType: hard +"responselike@npm:^3.0.0": + version: 3.0.0 + resolution: "responselike@npm:3.0.0" + dependencies: + lowercase-keys: ^3.0.0 + checksum: e0cc9be30df4f415d6d83cdede3c5c887cd4a73e7cc1708bcaab1d50a28d15acb68460ac5b02bcc55a42f3d493729c8856427dcf6e57e6e128ad05cba4cfb95e + languageName: node + linkType: hard + "restore-cursor@npm:^3.1.0": version: 3.1.0 resolution: "restore-cursor@npm:3.1.0" @@ -15054,6 +19156,17 @@ __metadata: languageName: node linkType: hard +"rimraf@npm:^5.0.5": + version: 5.0.5 + resolution: "rimraf@npm:5.0.5" + dependencies: + glob: ^10.3.7 + bin: + rimraf: dist/esm/bin.mjs + checksum: d66eef829b2e23b16445f34e73d75c7b7cf4cbc8834b04720def1c8f298eb0753c3d76df77325fad79d0a2c60470525d95f89c2475283ad985fd7441c32732d1 + languageName: node + linkType: hard + "ripemd160@npm:^2.0.0, ripemd160@npm:^2.0.1": version: 2.0.2 resolution: "ripemd160@npm:2.0.2" @@ -15089,24 +19202,78 @@ __metadata: languageName: node linkType: hard +"rollup@npm:^4.4.0": + version: 4.9.4 + resolution: "rollup@npm:4.9.4" + dependencies: + "@rollup/rollup-android-arm-eabi": 4.9.4 + "@rollup/rollup-android-arm64": 4.9.4 + "@rollup/rollup-darwin-arm64": 
4.9.4 + "@rollup/rollup-darwin-x64": 4.9.4 + "@rollup/rollup-linux-arm-gnueabihf": 4.9.4 + "@rollup/rollup-linux-arm64-gnu": 4.9.4 + "@rollup/rollup-linux-arm64-musl": 4.9.4 + "@rollup/rollup-linux-riscv64-gnu": 4.9.4 + "@rollup/rollup-linux-x64-gnu": 4.9.4 + "@rollup/rollup-linux-x64-musl": 4.9.4 + "@rollup/rollup-win32-arm64-msvc": 4.9.4 + "@rollup/rollup-win32-ia32-msvc": 4.9.4 + "@rollup/rollup-win32-x64-msvc": 4.9.4 + "@types/estree": 1.0.5 + fsevents: ~2.3.2 + dependenciesMeta: + "@rollup/rollup-android-arm-eabi": + optional: true + "@rollup/rollup-android-arm64": + optional: true + "@rollup/rollup-darwin-arm64": + optional: true + "@rollup/rollup-darwin-x64": + optional: true + "@rollup/rollup-linux-arm-gnueabihf": + optional: true + "@rollup/rollup-linux-arm64-gnu": + optional: true + "@rollup/rollup-linux-arm64-musl": + optional: true + "@rollup/rollup-linux-riscv64-gnu": + optional: true + "@rollup/rollup-linux-x64-gnu": + optional: true + "@rollup/rollup-linux-x64-musl": + optional: true + "@rollup/rollup-win32-arm64-msvc": + optional: true + "@rollup/rollup-win32-ia32-msvc": + optional: true + "@rollup/rollup-win32-x64-msvc": + optional: true + fsevents: + optional: true + bin: + rollup: dist/bin/rollup + checksum: 134b1fd8886a1dc86b2cadada979174e736a39aec12d069261fe8b799ad0c4aa3213188ea49adeee155669315016617260e43eea754436c50121aa359899da4d + languageName: node + linkType: hard + "rtl-detect@npm:^1.0.4": - version: 1.0.4 - resolution: "rtl-detect@npm:1.0.4" - checksum: d562535baa0db62f57f0a1d4676297bff72fd6b94e88f0f0900d5c3e810ab512c5c4cadffd3e05fbe8d9c74310c919afa3ea8c1001c244e5555e8eef12d02d6f + version: 1.1.2 + resolution: "rtl-detect@npm:1.1.2" + checksum: 4a43a1e5df0617eb86d5485640b318787d12b86acf53d840a3b2ff701ee941e95479d4e9ae97e907569ec763d1c47218cb87639bc87bcdad60a85747e5270cf0 languageName: node linkType: hard -"rtlcss@npm:^3.5.0": - version: 3.5.0 - resolution: "rtlcss@npm:3.5.0" +"rtlcss@npm:^4.1.0": + version: 4.1.1 + resolution: "rtlcss@npm:4.1.1" dependencies: - find-up: ^5.0.0 + escalade: ^3.1.1 picocolors: ^1.0.0 - postcss: ^8.3.11 + postcss: ^8.4.21 strip-json-comments: ^3.1.1 bin: rtlcss: bin/rtlcss.js - checksum: a3763cad2cb58ce1b950de155097c3c294e7aefc8bf328b58d0cc8d5efb88bf800865edc158a78ace6d1f7f99fea6fd66fb4a354d859b172dadd3dab3e0027b3 + checksum: dcf37d76265b5c84d610488afa68a2506d008f95feac968b35ccae9aa49e7019ae0336a80363303f8f8bbf60df3ecdeb60413548b049114a24748319b68aefde languageName: node linkType: hard @@ -15190,13 +19357,12 @@ __metadata: languageName: node linkType: hard -"scheduler@npm:^0.20.2": - version: 0.20.2 - resolution: "scheduler@npm:0.20.2" +"scheduler@npm:^0.23.0": + version: 0.23.0 + resolution: "scheduler@npm:0.23.0" dependencies: loose-envify: ^1.1.0 - object-assign: ^4.1.1 - checksum: c4b35cf967c8f0d3e65753252d0f260271f81a81e427241295c5a7b783abf4ea9e905f22f815ab66676f5313be0a25f47be582254db8f9241b259213e999b8fc + checksum: d79192eeaa12abef860c195ea45d37cbf2bbf5f66e3c4dcd16f54a7da53b17788a70d109ee3d3dde1a0fd50e6a8fc171f4300356c5aee4fc0171de526bf35f8a languageName: node linkType: hard @@ -15282,11 +19448,12 @@ __metadata: linkType: hard "selfsigned@npm:^2.1.1": - version: 2.1.1 - resolution: "selfsigned@npm:2.1.1" + version: 2.4.1 + resolution: "selfsigned@npm:2.4.1" dependencies: + "@types/node-forge": ^1.3.0 node-forge: ^1 - checksum: aa9ce2150a54838978d5c0aee54d7ebe77649a32e4e690eb91775f71fdff773874a4fbafd0ac73d8ec3b702ff8a395c604df4f8e8868528f36fd6c15076fb43a + checksum: 
38b91c56f1d7949c0b77f9bbe4545b19518475cae15e7d7f0043f87b1626710b011ce89879a88969651f650a19d213bb15b7d5b4c2877df9eeeff7ba8f8b9bfa languageName: node linkType: hard @@ -15299,6 +19466,15 @@ __metadata: languageName: node linkType: hard +"semver-diff@npm:^4.0.0": + version: 4.0.0 + resolution: "semver-diff@npm:4.0.0" + dependencies: + semver: ^7.3.5 + checksum: 4a958d6f76c7e7858268e1e2cf936712542441c9e003e561b574167279eee0a9bd55cc7eae1bfb31d3e7ad06a9fc370e7dd412fcfefec8c0daf1ce5aea623559 + languageName: node + linkType: hard + "semver@npm:^5.4.1, semver@npm:^5.5.0": version: 5.7.2 resolution: "semver@npm:5.7.2" @@ -15328,6 +19504,15 @@ __metadata: languageName: node linkType: hard +"semver@npm:~2.3.1": + version: 2.3.2 + resolution: "semver@npm:2.3.2" + bin: + semver: ./bin/semver + checksum: e0649fb18a1da909df7b5a6f586314a7f6e052385fc1e6eafa7084dd77c0787e755ab35ca491f9eec986fe1d0d6d36eae85a21eb7e2ed32ae5906796acb92c56 + languageName: node + linkType: hard + "send@npm:0.18.0": version: 0.18.0 resolution: "send@npm:0.18.0" @@ -15349,15 +19534,6 @@ __metadata: languageName: node linkType: hard -"serialize-error@npm:^7.0.1": - version: 7.0.1 - resolution: "serialize-error@npm:7.0.1" - dependencies: - type-fest: ^0.13.1 - checksum: e0aba4dca2fc9fe74ae1baf38dbd99190e1945445a241ba646290f2176cdb2032281a76443b02ccf0caf30da5657d510746506368889a593b9835a497fc0732e - languageName: node - linkType: hard - "serialize-javascript@npm:6.0.0": version: 6.0.0 resolution: "serialize-javascript@npm:6.0.0" @@ -15376,7 +19552,7 @@ __metadata: languageName: node linkType: hard -"serve-handler@npm:^6.1.3": +"serve-handler@npm:6.1.5, serve-handler@npm:^6.1.3, serve-handler@npm:^6.1.5": version: 6.1.5 resolution: "serve-handler@npm:6.1.5" dependencies: @@ -15419,10 +19595,36 @@ __metadata: languageName: node linkType: hard -"set-blocking@npm:^2.0.0": - version: 2.0.0 - resolution: "set-blocking@npm:2.0.0" - checksum: 6e65a05f7cf7ebdf8b7c75b101e18c0b7e3dff4940d480efed8aad3a36a4005140b660fa1d804cb8bce911cac290441dc728084a30504d3516ac2ff7ad607b02 +"serve@npm:^14.2.1": + version: 14.2.1 + resolution: "serve@npm:14.2.1" + dependencies: + "@zeit/schemas": 2.29.0 + ajv: 8.11.0 + arg: 5.0.2 + boxen: 7.0.0 + chalk: 5.0.1 + chalk-template: 0.4.0 + clipboardy: 3.0.0 + compression: 1.7.4 + is-port-reachable: 4.0.0 + serve-handler: 6.1.5 + update-check: 1.5.4 + bin: + serve: build/main.js + checksum: c39a517b5d795a0a5c2f9fb9ff088b7e4962c579e34ace5b85dd62f93e0eacbc8a90359792c153c444a83258ffda392113dff7bfd10d41ced574a2d1886c2994 + languageName: node + linkType: hard + +"set-function-length@npm:^1.1.1": + version: 1.1.1 + resolution: "set-function-length@npm:1.1.1" + dependencies: + define-data-property: ^1.1.1 + get-intrinsic: ^1.2.1 + gopd: ^1.0.1 + has-property-descriptors: ^1.0.0 + checksum: c131d7569cd7e110cafdfbfbb0557249b538477624dfac4fc18c376d879672fa52563b74029ca01f8f4583a8acb35bb1e873d573a24edb80d978a7ee607c6e06 languageName: node linkType: hard @@ -15512,14 +19714,14 @@ __metadata: linkType: hard "shiki@npm:^0.14.1": - version: 0.14.5 - resolution: "shiki@npm:0.14.5" + version: 0.14.6 + resolution: "shiki@npm:0.14.6" dependencies: ansi-sequence-parser: ^1.1.0 jsonc-parser: ^3.2.0 vscode-oniguruma: ^1.7.0 vscode-textmate: ^8.0.0 - checksum: 41d847817cfc9bb6d8bf190316896698d250303656546446659cc02caed8dcc171b10cd113bb5da82425b51d0032e87aafcdc36c3dd61dadc123170b438da736 + checksum: 24d9e29f93546118f1c07659e8a38f7dbf57b06479a0a9245e0823f7c988c825be53ad0b83a820e2eda5edbf3f0c6d17fad398b825130de1c11973e0bd7075fc languageName: node 
linkType: hard @@ -15548,6 +19750,20 @@ __metadata: languageName: node linkType: hard +"sinon@npm:^17.0.1": + version: 17.0.1 + resolution: "sinon@npm:17.0.1" + dependencies: + "@sinonjs/commons": ^3.0.0 + "@sinonjs/fake-timers": ^11.2.2 + "@sinonjs/samsam": ^8.0.0 + diff: ^5.1.0 + nise: ^5.1.5 + supports-color: ^7.2.0 + checksum: a807c2997d6eabdcaa4409df9fd9816a3e839f96d7e5d76610a33f5e1b60cf37616c6288f0f580262da17ea4ee626c6d1600325bf423e30c5a7f0d9a203e26c0 + languageName: node + linkType: hard + "sirv@npm:^2.0.3": version: 2.0.3 resolution: "sirv@npm:2.0.3" @@ -15580,6 +19796,15 @@ __metadata: languageName: node linkType: hard +"skin-tone@npm:^2.0.0": + version: 2.0.0 + resolution: "skin-tone@npm:2.0.0" + dependencies: + unicode-emoji-modifier-base: ^1.0.0 + checksum: 19de157586b8019cacc55eb25d9d640f00fc02415761f3e41a4527142970fd4e7f6af0333bc90e879858766c20a976107bb386ffd4c812289c01d51f2c8d182c + languageName: node + linkType: hard + "slash@npm:^3.0.0": version: 3.0.0 resolution: "slash@npm:3.0.0" @@ -15605,16 +19830,6 @@ __metadata: languageName: node linkType: hard -"slice-ansi@npm:^5.0.0": - version: 5.0.0 - resolution: "slice-ansi@npm:5.0.0" - dependencies: - ansi-styles: ^6.0.0 - is-fullwidth-code-point: ^4.0.0 - checksum: 7e600a2a55e333a21ef5214b987c8358fe28bfb03c2867ff2cbf919d62143d1812ac27b4297a077fdaf27a03da3678e49551c93e35f9498a3d90221908a1180e - languageName: node - linkType: hard - "smart-buffer@npm:^4.2.0": version: 4.2.0 resolution: "smart-buffer@npm:4.2.0" @@ -15623,9 +19838,9 @@ __metadata: linkType: hard "smol-toml@npm:^1.1.2": - version: 1.1.2 - resolution: "smol-toml@npm:1.1.2" - checksum: aa984b8d7aadb9c7db601533f81bcc0e7b6773fe56b69e87fb06224ac0c22702900b927becf0f5af0250b29746648344b57ecbcc203e82d667d46a19e1a61d77 + version: 1.1.3 + resolution: "smol-toml@npm:1.1.3" + checksum: 7948c0823b3d8db9ebf7d6dc706f75ea2bc55fa0233e6fccabc2ae3be0f1410da4617d9e3db0708a6434cfd35e13c2bc453300fa4ab11dde3da5df1c166feb39 languageName: node linkType: hard @@ -15640,18 +19855,18 @@ __metadata: languageName: node linkType: hard -"socks-proxy-agent@npm:^7.0.0": - version: 7.0.0 - resolution: "socks-proxy-agent@npm:7.0.0" +"socks-proxy-agent@npm:^8.0.1, socks-proxy-agent@npm:^8.0.2": + version: 8.0.2 + resolution: "socks-proxy-agent@npm:8.0.2" dependencies: - agent-base: ^6.0.2 - debug: ^4.3.3 - socks: ^2.6.2 - checksum: 720554370154cbc979e2e9ce6a6ec6ced205d02757d8f5d93fe95adae454fc187a5cbfc6b022afab850a5ce9b4c7d73e0f98e381879cf45f66317a4895953846 + agent-base: ^7.0.2 + debug: ^4.3.4 + socks: ^2.7.1 + checksum: 4fb165df08f1f380881dcd887b3cdfdc1aba3797c76c1e9f51d29048be6e494c5b06d68e7aea2e23df4572428f27a3ec22b3d7c75c570c5346507433899a4b6d languageName: node linkType: hard -"socks@npm:^2.6.2": +"socks@npm:^2.7.1": version: 2.7.1 resolution: "socks@npm:2.7.1" dependencies: @@ -15711,14 +19926,14 @@ __metadata: languageName: node linkType: hard -"source-map@npm:^0.6.0, source-map@npm:^0.6.1, source-map@npm:~0.6.0": +"source-map@npm:^0.6.0, source-map@npm:^0.6.1, source-map@npm:~0.6.0, source-map@npm:~0.6.1": version: 0.6.1 resolution: "source-map@npm:0.6.1" checksum: 59ce8640cf3f3124f64ac289012c2b8bd377c238e316fb323ea22fbfe83da07d81e000071d7242cad7a23cd91c7de98e4df8830ec3f133cb6133a5f6e9f67bc2 languageName: node linkType: hard -"source-map@npm:^0.7.3": +"source-map@npm:^0.7.0, source-map@npm:^0.7.3, source-map@npm:^0.7.4": version: 0.7.4 resolution: "source-map@npm:0.7.4" checksum: 
01cc5a74b1f0e1d626a58d36ad6898ea820567e87f18dfc9d24a9843a351aaa2ec09b87422589906d6ff1deed29693e176194dc88bcae7c9a852dc74b311dbf5 @@ -15732,6 +19947,13 @@ __metadata: languageName: node linkType: hard +"space-separated-tokens@npm:^2.0.0": + version: 2.0.2 + resolution: "space-separated-tokens@npm:2.0.2" + checksum: 202e97d7ca1ba0758a0aa4fe226ff98142073bcceeff2da3aad037968878552c3bbce3b3231970025375bbba5aee00c5b8206eda408da837ab2dc9c0f26be990 + languageName: node + linkType: hard + "spdy-transport@npm:^3.0.0": version: 3.0.0 resolution: "spdy-transport@npm:3.0.0" @@ -15768,6 +19990,13 @@ __metadata: languageName: node linkType: hard +"sprintf-js@npm:^1.0.3": + version: 1.1.3 + resolution: "sprintf-js@npm:1.1.3" + checksum: a3fdac7b49643875b70864a9d9b469d87a40dfeaf5d34d9d0c5b1cda5fd7d065531fcb43c76357d62254c57184a7b151954156563a4d6a747015cfb41021cad0 + languageName: node + linkType: hard + "sprintf-js@npm:~1.0.2": version: 1.0.3 resolution: "sprintf-js@npm:1.0.3" @@ -15775,6 +20004,13 @@ __metadata: languageName: node linkType: hard +"srcset@npm:^4.0.0": + version: 4.0.0 + resolution: "srcset@npm:4.0.0" + checksum: aceb898c9281101ef43bfbf96bf04dfae828e1bf942a45df6fad74ae9f8f0a425f4bca1480e0d22879beb40dd2bc6947e0e1e5f4d307a714666196164bc5769d + languageName: node + linkType: hard + "ssri@npm:^10.0.0": version: 10.0.5 resolution: "ssri@npm:10.0.5" @@ -15791,15 +20027,6 @@ __metadata: languageName: node linkType: hard -"stack-utils@npm:^2.0.6": - version: 2.0.6 - resolution: "stack-utils@npm:2.0.6" - dependencies: - escape-string-regexp: ^2.0.0 - checksum: 052bf4d25bbf5f78e06c1d5e67de2e088b06871fa04107ca8d3f0e9d9263326e2942c8bedee3545795fc77d787d443a538345eef74db2f8e35db3558c6f91ff7 - languageName: node - linkType: hard - "stacktrace-parser@npm:^0.1.10": version: 0.1.10 resolution: "stacktrace-parser@npm:0.1.10" @@ -15809,6 +20036,13 @@ __metadata: languageName: node linkType: hard +"state-local@npm:^1.0.6": + version: 1.0.7 + resolution: "state-local@npm:1.0.7" + checksum: d1afcf1429e7e6eb08685b3a94be8797db847369316d4776fd51f3962b15b984dacc7f8e401ad20968e5798c9565b4b377afedf4e4c4d60fe7495e1cbe14a251 + languageName: node + linkType: hard + "state-toggle@npm:^1.0.0": version: 1.0.3 resolution: "state-toggle@npm:1.0.3" @@ -15831,9 +20065,9 @@ __metadata: linkType: hard "std-env@npm:^3.0.1": - version: 3.4.3 - resolution: "std-env@npm:3.4.3" - checksum: bef186fb2baddda31911234b1e58fa18f181eb6930616aaec3b54f6d5db65f2da5daaa5f3b326b98445a7d50ca81d6fe8809ab4ebab85ecbe4a802f1b40921bf + version: 3.6.0 + resolution: "std-env@npm:3.6.0" + checksum: ec344e93af17fd1b71eb28aeb4712f72790b9f2363981fc91ad1a91c9c7967c1ab89271819242d1b3bdbd57f10ac8ef0559d561ccf081a5377f9b3cd8c9b2259 languageName: node linkType: hard @@ -15853,6 +20087,16 @@ __metadata: languageName: node linkType: hard +"streamx@npm:^2.15.0": + version: 2.15.6 + resolution: "streamx@npm:2.15.6" + dependencies: + fast-fifo: ^1.1.0 + queue-tick: ^1.0.1 + checksum: 37a245f5cee4c33fcb8b018ccb935bad6eab423f05b0d14d018e63dbd2670bb109a69442e961a195b750c2c774f613c19476d11bd727d645eedb655d2dba234b + languageName: node + linkType: hard + "string-format@npm:^2.0.0": version: 2.0.0 resolution: "string-format@npm:2.0.0" @@ -15860,6 +20104,13 @@ __metadata: languageName: node linkType: hard +"string-range@npm:~1.2, string-range@npm:~1.2.1": + version: 1.2.2 + resolution: "string-range@npm:1.2.2" + checksum: 7118cc83a7e63fca5fd8bef9b61464bfc51197b5f6dc475c9e1d24a93ce02fa27f7adb4cd7adac5daf599bde442b383608078f9b051bddb108d3b45840923097 + languageName: node + 
linkType: hard + "string-to-stream@npm:^3.0.1": version: 3.0.1 resolution: "string-to-stream@npm:3.0.1" @@ -15869,7 +20120,7 @@ __metadata: languageName: node linkType: hard -"string-width-cjs@npm:string-width@^4.2.0, string-width@npm:^1.0.2 || 2 || 3 || 4, string-width@npm:^4.0.0, string-width@npm:^4.1.0, string-width@npm:^4.2.0, string-width@npm:^4.2.2, string-width@npm:^4.2.3": +"string-width-cjs@npm:string-width@^4.2.0, string-width@npm:^4.0.0, string-width@npm:^4.1.0, string-width@npm:^4.2.0, string-width@npm:^4.2.2, string-width@npm:^4.2.3": version: 4.2.3 resolution: "string-width@npm:4.2.3" dependencies: @@ -15880,7 +20131,7 @@ __metadata: languageName: node linkType: hard -"string-width@npm:^5.0.0, string-width@npm:^5.0.1, string-width@npm:^5.1.2": +"string-width@npm:^5.0.1, string-width@npm:^5.1.2": version: 5.1.2 resolution: "string-width@npm:5.1.2" dependencies: @@ -15891,7 +20142,7 @@ __metadata: languageName: node linkType: hard -"string_decoder@npm:^1.1.1": +"string_decoder@npm:^1.1.1, string_decoder@npm:^1.3.0": version: 1.3.0 resolution: "string_decoder@npm:1.3.0" dependencies: @@ -15900,6 +20151,13 @@ __metadata: languageName: node linkType: hard +"string_decoder@npm:~0.10.x": + version: 0.10.31 + resolution: "string_decoder@npm:0.10.31" + checksum: fe00f8e303647e5db919948ccb5ce0da7dea209ab54702894dd0c664edd98e5d4df4b80d6fabf7b9e92b237359d21136c95bf068b2f7760b772ca974ba970202 + languageName: node + linkType: hard + "string_decoder@npm:~1.1.1": version: 1.1.1 resolution: "string_decoder@npm:1.1.1" @@ -15909,6 +20167,16 @@ __metadata: languageName: node linkType: hard +"stringify-entities@npm:^4.0.0": + version: 4.0.3 + resolution: "stringify-entities@npm:4.0.3" + dependencies: + character-entities-html4: ^2.0.0 + character-entities-legacy: ^3.0.0 + checksum: 59e8f523b403bf7d415690e72ae52982decd6ea5426bd8b3f5c66225ddde73e766c0c0d91627df082d0794e30b19dd907ffb5864cef3602e4098d6777d7ca3c2 + languageName: node + linkType: hard + "stringify-object@npm:^3.3.0": version: 3.3.0 resolution: "stringify-object@npm:3.3.0" @@ -15929,7 +20197,7 @@ __metadata: languageName: node linkType: hard -"strip-ansi@npm:^7.0.1": +"strip-ansi@npm:^7.0.1, strip-ansi@npm:^7.1.0": version: 7.1.0 resolution: "strip-ansi@npm:7.1.0" dependencies: @@ -15991,6 +20259,24 @@ __metadata: languageName: node linkType: hard +"style-to-object@npm:^0.4.0": + version: 0.4.4 + resolution: "style-to-object@npm:0.4.4" + dependencies: + inline-style-parser: 0.1.1 + checksum: 41656c06f93ac0a7ac260ebc2f9d09a8bd74b8ec1836f358cc58e169235835a3a356977891d2ebbd76f0e08a53616929069199f9cce543214d3dc98346e19c9a + languageName: node + linkType: hard + +"style-to-object@npm:^1.0.0": + version: 1.0.5 + resolution: "style-to-object@npm:1.0.5" + dependencies: + inline-style-parser: 0.2.2 + checksum: 6201063204b6a94645f81b189452b2ca3e63d61867ec48523f4d52609c81e96176739fa12020d97fbbf023efb57a6f7ec3a15fb3a7fb7eb3ffea0b52b9dd6b8c + languageName: node + linkType: hard + "stylehacks@npm:^5.1.1": version: 5.1.1 resolution: "stylehacks@npm:5.1.1" @@ -16010,18 +20296,6 @@ __metadata: languageName: node linkType: hard -"supertap@npm:^3.0.1": - version: 3.0.1 - resolution: "supertap@npm:3.0.1" - dependencies: - indent-string: ^5.0.0 - js-yaml: ^3.14.1 - serialize-error: ^7.0.1 - strip-ansi: ^7.0.1 - checksum: ee3d71c1d25f7f15d4a849e72b0c5f430df7cd8f702cf082fdbec5642a9546be6557766745655fa3a3e9c88f7c7eed849f2d74457b5b72cb9d94a779c0c8a948 - languageName: node - linkType: hard - "supports-color@npm:8.1.1, supports-color@npm:^8.0.0": version: 8.1.1 
resolution: "supports-color@npm:8.1.1" @@ -16040,7 +20314,7 @@ __metadata: languageName: node linkType: hard -"supports-color@npm:^7.1.0": +"supports-color@npm:^7.1.0, supports-color@npm:^7.2.0": version: 7.2.0 resolution: "supports-color@npm:7.2.0" dependencies: @@ -16081,12 +20355,12 @@ __metadata: linkType: hard "synckit@npm:^0.8.5": - version: 0.8.5 - resolution: "synckit@npm:0.8.5" + version: 0.8.6 + resolution: "synckit@npm:0.8.6" dependencies: - "@pkgr/utils": ^2.3.1 - tslib: ^2.5.0 - checksum: 8a9560e5d8f3d94dc3cf5f7b9c83490ffa30d320093560a37b88f59483040771fd1750e76b9939abfbb1b5a23fd6dfbae77f6b338abffe7cae7329cd9b9bb86b + "@pkgr/utils": ^2.4.2 + tslib: ^2.6.2 + checksum: 7c1f4991d0afd63c090c0537f1cf8619dd5777a40cf83bf46beadbf4eb0f9e400d92044e90a177a305df4bcb56efbaf1b689877f301f2672d865b6eecf1be75a languageName: node linkType: hard @@ -16145,6 +20419,17 @@ __metadata: languageName: node linkType: hard +"tar-fs@npm:3.0.4": + version: 3.0.4 + resolution: "tar-fs@npm:3.0.4" + dependencies: + mkdirp-classic: ^0.5.2 + pump: ^3.0.0 + tar-stream: ^3.1.5 + checksum: dcf4054f9e92ca0efe61c2b3f612914fb259a47900aa908a63106513a6d006c899b426ada53eb88d9dbbf089b5724c8e90b96a2c4ca6171845fa14203d734e30 + languageName: node + linkType: hard + "tar-stream@npm:^2.1.4": version: 2.2.0 resolution: "tar-stream@npm:2.2.0" @@ -16158,6 +20443,17 @@ __metadata: languageName: node linkType: hard +"tar-stream@npm:^3.1.5": + version: 3.1.6 + resolution: "tar-stream@npm:3.1.6" + dependencies: + b4a: ^1.6.4 + fast-fifo: ^1.2.0 + streamx: ^2.15.0 + checksum: f3627f918581976e954ff03cb8d370551053796b82564f8c7ca8fac84c48e4d042026d0854fc222171a34ff9c682b72fae91be9c9b0a112d4c54f9e4f443e9c5 + languageName: node + linkType: hard + "tar@npm:^6.1.11, tar@npm:^6.1.2": version: 6.2.0 resolution: "tar@npm:6.2.0" @@ -16172,14 +20468,7 @@ __metadata: languageName: node linkType: hard -"temp-dir@npm:^3.0.0": - version: 3.0.0 - resolution: "temp-dir@npm:3.0.0" - checksum: 577211e995d1d584dd60f1469351d45e8a5b4524e4a9e42d3bdd12cfde1d0bb8f5898311bef24e02aaafb69514c1feb58c7b4c33dcec7129da3b0861a4ca935b - languageName: node - linkType: hard - -"terser-webpack-plugin@npm:^5.3.3, terser-webpack-plugin@npm:^5.3.7": +"terser-webpack-plugin@npm:^5.3.3, terser-webpack-plugin@npm:^5.3.7, terser-webpack-plugin@npm:^5.3.9": version: 5.3.9 resolution: "terser-webpack-plugin@npm:5.3.9" dependencies: @@ -16201,9 +20490,9 @@ __metadata: languageName: node linkType: hard -"terser@npm:^5.10.0, terser@npm:^5.16.8": - version: 5.21.0 - resolution: "terser@npm:5.21.0" +"terser@npm:^5.10.0, terser@npm:^5.15.1, terser@npm:^5.16.8": + version: 5.26.0 + resolution: "terser@npm:5.26.0" dependencies: "@jridgewell/source-map": ^0.3.3 acorn: ^8.8.2 @@ -16211,7 +20500,7 @@ __metadata: source-map-support: ~0.5.20 bin: terser: bin/terser - checksum: 130f1567af1ffa4ddb067651bb284a01b45b5c83e82b3a072a5ff94b0b00ac35090f89c8714631a4a45972f65187bc149fc7144380611f437e1e3d9e174b136b + checksum: 02a9bb896f04df828025af8f0eced36c315d25d310b6c2418e7dad2bed19ddeb34a9cea9b34e7c24789830fa51e1b6a9be26679980987a9c817a7e6d9cd4154b languageName: node linkType: hard @@ -16222,6 +20511,15 @@ __metadata: languageName: node linkType: hard +"thingies@npm:^1.11.1": + version: 1.15.0 + resolution: "thingies@npm:1.15.0" + peerDependencies: + tslib: ^2 + checksum: 9721bc0cbcf1565ba1abd70cfe5141a638c88dded74abb24d9b21749e16034af37dfa68edd37e7d1bb1b1c9c8bfb301c4699a8e4bc998a3a9c18052edb829bb2 + languageName: node + linkType: hard + "through@npm:2, through@npm:^2.3.8, through@npm:~2.3, 
through@npm:~2.3.1": version: 2.3.8 resolution: "through@npm:2.3.8" @@ -16236,13 +20534,6 @@ __metadata: languageName: node linkType: hard -"time-zone@npm:^1.0.0": - version: 1.0.0 - resolution: "time-zone@npm:1.0.0" - checksum: e46f5a69b8c236dcd8e91e29d40d4e7a3495ed4f59888c3f84ce1d9678e20461421a6ba41233509d47dd94bc18f1a4377764838b21b584663f942b3426dcbce8 - languageName: node - linkType: hard - "tiny-invariant@npm:^1.0.2": version: 1.3.1 resolution: "tiny-invariant@npm:1.3.1" @@ -16333,6 +20624,13 @@ __metadata: languageName: node linkType: hard +"trim-lines@npm:^3.0.0": + version: 3.0.1 + resolution: "trim-lines@npm:3.0.1" + checksum: e241da104682a0e0d807222cc1496b92e716af4db7a002f4aeff33ae6a0024fef93165d49eab11aa07c71e1347c42d46563f91dfaa4d3fb945aa535cdead53ed + languageName: node + linkType: hard + "trim-trailing-lines@npm:^1.0.0": version: 1.1.4 resolution: "trim-trailing-lines@npm:1.1.4" @@ -16354,6 +20652,13 @@ __metadata: languageName: node linkType: hard +"trough@npm:^2.0.0": + version: 2.1.0 + resolution: "trough@npm:2.1.0" + checksum: a577bb561c2b401cc0e1d9e188fcfcdf63b09b151ff56a668da12197fe97cac15e3d77d5b51f426ccfd94255744a9118e9e9935afe81a3644fa1be9783c82886 + languageName: node + linkType: hard + "ts-api-utils@npm:^1.0.1": version: 1.0.3 resolution: "ts-api-utils@npm:1.0.3" @@ -16377,9 +20682,25 @@ __metadata: languageName: node linkType: hard +"ts-loader@npm:^9.5.1": + version: 9.5.1 + resolution: "ts-loader@npm:9.5.1" + dependencies: + chalk: ^4.1.0 + enhanced-resolve: ^5.0.0 + micromatch: ^4.0.0 + semver: ^7.3.4 + source-map: ^0.7.4 + peerDependencies: + typescript: "*" + webpack: ^5.0.0 + checksum: 7cf396e656d905388ea2a9b5e82f16d3c955fda8d3df2fbf219f4bee16ff50a3c995c44ae3e584634e9443f056cec70bb3151add3917ffb4588ecd7394bac0ec + languageName: node + linkType: hard + "ts-node@npm:^10.9.1": - version: 10.9.1 - resolution: "ts-node@npm:10.9.1" + version: 10.9.2 + resolution: "ts-node@npm:10.9.2" dependencies: "@cspotcode/source-map-support": ^0.8.0 "@tsconfig/node10": ^1.0.7 @@ -16411,7 +20732,7 @@ __metadata: ts-node-script: dist/bin-script.js ts-node-transpile-only: dist/bin-transpile.js ts-script: dist/bin-script-deprecated.js - checksum: 090adff1302ab20bd3486e6b4799e90f97726ed39e02b39e566f8ab674fd5bd5f727f43615debbfc580d33c6d9d1c6b1b3ce7d8e3cca3e20530a145ffa232c35 + checksum: fde256c9073969e234526e2cfead42591b9a2aec5222bac154b0de2fa9e4ceb30efcd717ee8bc785a56f3a119bdd5aa27b333d9dbec94ed254bd26f8944c67ac languageName: node linkType: hard @@ -16451,7 +20772,7 @@ __metadata: languageName: node linkType: hard -"tslib@npm:^2.0.3, tslib@npm:^2.1.0, tslib@npm:^2.4.0, tslib@npm:^2.5.0, tslib@npm:^2.6.0": +"tslib@npm:^2.0.1, tslib@npm:^2.0.3, tslib@npm:^2.1.0, tslib@npm:^2.4.0, tslib@npm:^2.5.0, tslib@npm:^2.6.0, tslib@npm:^2.6.2": version: 2.6.2 resolution: "tslib@npm:2.6.2" checksum: 329ea56123005922f39642318e3d1f0f8265d1e7fcb92c633e0809521da75eeaca28d2cf96d7248229deb40e5c19adf408259f4b9640afd20d13aecc1430f3ad @@ -16479,6 +20800,22 @@ __metadata: languageName: node linkType: hard +"tsx@npm:^4.6.2": + version: 4.6.2 + resolution: "tsx@npm:4.6.2" + dependencies: + esbuild: ~0.18.20 + fsevents: ~2.3.3 + get-tsconfig: ^4.7.2 + dependenciesMeta: + fsevents: + optional: true + bin: + tsx: dist/cli.mjs + checksum: a9f13bdb67bfb316bbfc92303d8464323ab1b673aa93ea97271c211a8ba7c59274d4b32eeec5ad8fbd0b04260a092a3ad2116abeb6913254ba456010ff685743 + languageName: node + linkType: hard + "tweetnacl-util@npm:^0.15.1": version: 0.15.1 resolution: "tweetnacl-util@npm:0.15.1" @@ -16502,20 +20839,13 
@@ __metadata: languageName: node linkType: hard -"type-detect@npm:^4.0.0, type-detect@npm:^4.0.5": +"type-detect@npm:4.0.8, type-detect@npm:^4.0.0, type-detect@npm:^4.0.8": version: 4.0.8 resolution: "type-detect@npm:4.0.8" checksum: 62b5628bff67c0eb0b66afa371bd73e230399a8d2ad30d852716efcc4656a7516904570cd8631a49a3ce57c10225adf5d0cbdcb47f6b0255fe6557c453925a15 languageName: node linkType: hard -"type-fest@npm:^0.13.1": - version: 0.13.1 - resolution: "type-fest@npm:0.13.1" - checksum: e6bf2e3c449f27d4ef5d56faf8b86feafbc3aec3025fc9a5fbe2db0a2587c44714521f9c30d8516a833c8c506d6263f5cc11267522b10c6ccdb6cc55b0a9d1c4 - languageName: node - linkType: hard - "type-fest@npm:^0.20.2": version: 0.20.2 resolution: "type-fest@npm:0.20.2" @@ -16537,7 +20867,14 @@ __metadata: languageName: node linkType: hard -"type-fest@npm:^2.5.0": +"type-fest@npm:^1.0.1": + version: 1.4.0 + resolution: "type-fest@npm:1.4.0" + checksum: b011c3388665b097ae6a109a437a04d6f61d81b7357f74cbcb02246f2f5bd72b888ae33631b99871388122ba0a87f4ff1c94078e7119ff22c70e52c0ff828201 + languageName: node + linkType: hard + +"type-fest@npm:^2.13.0, type-fest@npm:^2.5.0": version: 2.19.0 resolution: "type-fest@npm:2.19.0" checksum: a4ef07ece297c9fba78fc1bd6d85dff4472fe043ede98bd4710d2615d15776902b595abf62bd78339ed6278f021235fb28a96361f8be86ed754f778973a0d278 @@ -16563,6 +20900,20 @@ __metadata: languageName: node linkType: hard +"typedarray-to-buffer@npm:~1.0.0": + version: 1.0.4 + resolution: "typedarray-to-buffer@npm:1.0.4" + checksum: ac6989c456a0b175c8362b3ebbd8a74af7b9bcc94f9dc9ffd34436569cd29aea6a1e0e5f5752d0d5bd855a55b2520e960d1d4cb9c9149f863ce09220540df17f + languageName: node + linkType: hard + +"typedarray@npm:^0.0.6": + version: 0.0.6 + resolution: "typedarray@npm:0.0.6" + checksum: 33b39f3d0e8463985eeaeeacc3cb2e28bc3dfaf2a5ed219628c0b629d5d7b810b0eb2165f9f607c34871d5daa92ba1dc69f49051cf7d578b4cbd26c340b9d1b1 + languageName: node + linkType: hard + "typedoc-plugin-frontmatter@npm:^0.0.2": version: 0.0.2 resolution: "typedoc-plugin-frontmatter@npm:0.0.2" @@ -16591,28 +20942,18 @@ __metadata: linkType: hard "typedoc@npm:^0.25.0": - version: 0.25.2 - resolution: "typedoc@npm:0.25.2" + version: 0.25.4 + resolution: "typedoc@npm:0.25.4" dependencies: lunr: ^2.3.9 marked: ^4.3.0 minimatch: ^9.0.3 shiki: ^0.14.1 peerDependencies: - typescript: 4.6.x || 4.7.x || 4.8.x || 4.9.x || 5.0.x || 5.1.x || 5.2.x + typescript: 4.6.x || 4.7.x || 4.8.x || 4.9.x || 5.0.x || 5.1.x || 5.2.x || 5.3.x bin: typedoc: bin/typedoc - checksum: 5b6e24bae7498bb542aaba495378ed5a3e13c76eb04a1ae95b506f76bda4d517847101fb05a7eab3f6b79357d1e2ac6f4747d39792395329b72e463f7effda65 - languageName: node - linkType: hard - -"typescript@npm:4.9.4": - version: 4.9.4 - resolution: "typescript@npm:4.9.4" - bin: - tsc: bin/tsc - tsserver: bin/tsserver - checksum: e782fb9e0031cb258a80000f6c13530288c6d63f1177ed43f770533fdc15740d271554cdae86701c1dd2c83b082cea808b07e97fd68b38a172a83dbf9e0d0ef9 + checksum: 6d441baa277c0db4d577db2932a7af316d175415841e2faf2e68e3eda6ad60356c54f56374f89c5233d7bd5c057b0337455e5d484d8463e1445e67c37a6d94eb languageName: node linkType: hard @@ -16627,22 +20968,22 @@ __metadata: linkType: hard "typescript@npm:^5.0.4, typescript@npm:^5.2.2": - version: 5.2.2 - resolution: "typescript@npm:5.2.2" + version: 5.3.3 + resolution: "typescript@npm:5.3.3" bin: tsc: bin/tsc tsserver: bin/tsserver - checksum: 7912821dac4d962d315c36800fe387cdc0a6298dba7ec171b350b4a6e988b51d7b8f051317786db1094bd7431d526b648aba7da8236607febb26cf5b871d2d3c + checksum: 
2007ccb6e51bbbf6fde0a78099efe04dc1c3dfbdff04ca3b6a8bc717991862b39fd6126c0c3ebf2d2d98ac5e960bcaa873826bb2bb241f14277034148f41f6a2 languageName: node linkType: hard -"typescript@patch:typescript@4.9.4#~builtin": - version: 4.9.4 - resolution: "typescript@patch:typescript@npm%3A4.9.4#~builtin::version=4.9.4&hash=289587" +"typescript@npm:~5.2.2": + version: 5.2.2 + resolution: "typescript@npm:5.2.2" bin: tsc: bin/tsc tsserver: bin/tsserver - checksum: 2160f7ad975c59b2f5816817d3916be1d156c5688a7517602b3b640c5015e740f4ba933996ac85371d68f7bbdd41602150fb8b68334122ac637fdb5418085e7a + checksum: 7912821dac4d962d315c36800fe387cdc0a6298dba7ec171b350b4a6e988b51d7b8f051317786db1094bd7431d526b648aba7da8236607febb26cf5b871d2d3c languageName: node linkType: hard @@ -16657,6 +20998,16 @@ __metadata: linkType: hard "typescript@patch:typescript@^5.0.4#~builtin, typescript@patch:typescript@^5.2.2#~builtin": + version: 5.3.3 + resolution: "typescript@patch:typescript@npm%3A5.3.3#~builtin::version=5.3.3&hash=f3b441" + bin: + tsc: bin/tsc + tsserver: bin/tsserver + checksum: f61375590b3162599f0f0d5b8737877ac0a7bc52761dbb585d67e7b8753a3a4c42d9a554c4cc929f591ffcf3a2b0602f65ae3ce74714fd5652623a816862b610 + languageName: node + linkType: hard + +"typescript@patch:typescript@~5.2.2#~builtin": version: 5.2.2 resolution: "typescript@patch:typescript@npm%3A5.2.2#~builtin::version=5.2.2&hash=f3b441" bin: @@ -16687,10 +21038,10 @@ __metadata: languageName: node linkType: hard -"ua-parser-js@npm:^1.0.33, ua-parser-js@npm:^1.0.35": - version: 1.0.36 - resolution: "ua-parser-js@npm:1.0.36" - checksum: 5b2c8a5e3443dfbba7624421805de946457c26ae167cb2275781a2729d1518f7067c9d5c74c3b0acac4b9ff3278cae4eace08ca6eecb63848bc3b2f6a63cc975 +"ua-parser-js@npm:^1.0.33": + version: 1.0.37 + resolution: "ua-parser-js@npm:1.0.37" + checksum: 4d481c720d523366d7762dc8a46a1b58967d979aacf786f9ceceb1cd767de069f64a4bdffb63956294f1c0696eb465ddb950f28ba90571709e33521b4bd75e07 languageName: node linkType: hard @@ -16704,12 +21055,19 @@ __metadata: languageName: node linkType: hard +"undici-types@npm:~5.26.4": + version: 5.26.5 + resolution: "undici-types@npm:5.26.5" + checksum: 3192ef6f3fd5df652f2dc1cd782b49d6ff14dc98e5dced492aa8a8c65425227da5da6aafe22523c67f035a272c599bb89cfe803c1db6311e44bed3042fc25487 + languageName: node + linkType: hard + "undici@npm:^5.14.0": - version: 5.25.3 - resolution: "undici@npm:5.25.3" + version: 5.28.2 + resolution: "undici@npm:5.28.2" dependencies: "@fastify/busboy": ^2.0.0 - checksum: 65b814b7d8b06dab2d41c250d123663fe94edb78cf1a891cf3476569ea66dc425c7d4ba52b91d6f8ed6eba24613dd28e4a5070c372063532c3b997cd343ccc96 + checksum: f9e9335803f962fff07c3c11c6d50bbc76248bacf97035047155adb29c3622a65bd6bff23a22218189740133149d22e63b68131d8c40e78ac6cb4b6d686a6dfa languageName: node linkType: hard @@ -16730,6 +21088,13 @@ __metadata: languageName: node linkType: hard +"unicode-emoji-modifier-base@npm:^1.0.0": + version: 1.0.0 + resolution: "unicode-emoji-modifier-base@npm:1.0.0" + checksum: 6e1521d35fa69493207eb8b41f8edb95985d8b3faf07c01d820a1830b5e8403e20002563e2f84683e8e962a49beccae789f0879356bf92a4ec7a4dd8e2d16fdb + languageName: node + linkType: hard + "unicode-match-property-ecmascript@npm:^2.0.0": version: 2.0.0 resolution: "unicode-match-property-ecmascript@npm:2.0.0" @@ -16768,7 +21133,22 @@ __metadata: languageName: node linkType: hard -"unified@npm:^9.0.0, unified@npm:^9.2.2": +"unified@npm:^11.0.0, unified@npm:^11.0.3, unified@npm:^11.0.4": + version: 11.0.4 + resolution: "unified@npm:11.0.4" + dependencies: + 
"@types/unist": ^3.0.0 + bail: ^2.0.0 + devlop: ^1.0.0 + extend: ^3.0.0 + is-plain-obj: ^4.0.0 + trough: ^2.0.0 + vfile: ^6.0.0 + checksum: cfb023913480ac2bd5e787ffb8c27782c43e6be4a55f8f1c288233fce46a7ebe7718ccc5adb80bf8d56b7ef85f5fc32239c7bfccda006f9f2382e0cc2e2a77e4 + languageName: node + linkType: hard + +"unified@npm:^9.2.2": version: 9.2.2 resolution: "unified@npm:9.2.2" dependencies: @@ -16809,6 +21189,15 @@ __metadata: languageName: node linkType: hard +"unique-string@npm:^3.0.0": + version: 3.0.0 + resolution: "unique-string@npm:3.0.0" + dependencies: + crypto-random-string: ^4.0.0 + checksum: 1a1e2e7d02eab1bb10f720475da735e1990c8a5ff34edd1a3b6bc31590cb4210b7a1233d779360cc622ce11c211e43afa1628dd658f35d3e6a89964b622940df + languageName: node + linkType: hard + "unist-builder@npm:2.0.3, unist-builder@npm:^2.0.0": version: 2.0.3 resolution: "unist-builder@npm:2.0.3" @@ -16816,12 +21205,13 @@ __metadata: languageName: node linkType: hard -"unist-util-find-after@npm:^3.0.0": - version: 3.0.0 - resolution: "unist-util-find-after@npm:3.0.0" +"unist-util-find-after@npm:^5.0.0": + version: 5.0.0 + resolution: "unist-util-find-after@npm:5.0.0" dependencies: - unist-util-is: ^4.0.0 - checksum: daa9a28f6cdf533a72ce7ec4864dbe0f11f0fd3efd337b54c08a8a9a47cdc8d10a299cd984d7f512a57e97af012df052210a51aab7c9afd6b1e24da3b2d0a714 + "@types/unist": ^3.0.0 + unist-util-is: ^6.0.0 + checksum: e64bd5ebee7ac021cf990bf33e9ec29fc6452159187d4a7fa0f77334bea8e378fea7a7fb0bcf957300b2ffdba902ff25b62c165fc8b86309613da35ad793ada0 languageName: node linkType: hard @@ -16839,6 +21229,24 @@ __metadata: languageName: node linkType: hard +"unist-util-is@npm:^6.0.0": + version: 6.0.0 + resolution: "unist-util-is@npm:6.0.0" + dependencies: + "@types/unist": ^3.0.0 + checksum: f630a925126594af9993b091cf807b86811371e465b5049a6283e08537d3e6ba0f7e248e1e7dab52cfe33f9002606acef093441137181b327f6fe504884b20e2 + languageName: node + linkType: hard + +"unist-util-position-from-estree@npm:^2.0.0": + version: 2.0.0 + resolution: "unist-util-position-from-estree@npm:2.0.0" + dependencies: + "@types/unist": ^3.0.0 + checksum: d3b3048a5727c2367f64ef6dcc5b20c4717215ef8b1372ff9a7c426297c5d1e5776409938acd01531213e2cd2543218d16e73f9f862f318e9496e2c73bb18354 + languageName: node + linkType: hard + "unist-util-position@npm:^3.0.0": version: 3.1.0 resolution: "unist-util-position@npm:3.1.0" @@ -16846,6 +21254,15 @@ __metadata: languageName: node linkType: hard +"unist-util-position@npm:^5.0.0": + version: 5.0.0 + resolution: "unist-util-position@npm:5.0.0" + dependencies: + "@types/unist": ^3.0.0 + checksum: f89b27989b19f07878de9579cd8db2aa0194c8360db69e2c99bd2124a480d79c08f04b73a64daf01a8fb3af7cba65ff4b45a0b978ca243226084ad5f5d441dde + languageName: node + linkType: hard + "unist-util-remove-position@npm:^2.0.0": version: 2.0.1 resolution: "unist-util-remove-position@npm:2.0.1" @@ -16855,6 +21272,16 @@ __metadata: languageName: node linkType: hard +"unist-util-remove-position@npm:^5.0.0": + version: 5.0.0 + resolution: "unist-util-remove-position@npm:5.0.0" + dependencies: + "@types/unist": ^3.0.0 + unist-util-visit: ^5.0.0 + checksum: 8aabdb9d0e3e744141bc123d8f87b90835d521209ad3c6c4619d403b324537152f0b8f20dda839b40c3aa0abfbf1828b3635a7a8bb159c3ed469e743023510ee + languageName: node + linkType: hard + "unist-util-remove@npm:^2.0.0": version: 2.1.0 resolution: "unist-util-remove@npm:2.1.0" @@ -16873,6 +21300,15 @@ __metadata: languageName: node linkType: hard +"unist-util-stringify-position@npm:^4.0.0": + version: 4.0.0 + resolution: 
"unist-util-stringify-position@npm:4.0.0" + dependencies: + "@types/unist": ^3.0.0 + checksum: e2e7aee4b92ddb64d314b4ac89eef7a46e4c829cbd3ee4aee516d100772b490eb6b4974f653ba0717a0071ca6ea0770bf22b0a2ea62c65fcba1d071285e96324 + languageName: node + linkType: hard + "unist-util-visit-parents@npm:^3.0.0": version: 3.1.1 resolution: "unist-util-visit-parents@npm:3.1.1" @@ -16883,6 +21319,16 @@ __metadata: languageName: node linkType: hard +"unist-util-visit-parents@npm:^6.0.0": + version: 6.0.1 + resolution: "unist-util-visit-parents@npm:6.0.1" + dependencies: + "@types/unist": ^3.0.0 + unist-util-is: ^6.0.0 + checksum: 08927647c579f63b91aafcbec9966dc4a7d0af1e5e26fc69f4e3e6a01215084835a2321b06f3cbe7bf7914a852830fc1439f0fc3d7153d8804ac3ef851ddfa20 + languageName: node + linkType: hard + "unist-util-visit@npm:2.0.3, unist-util-visit@npm:^2.0.0, unist-util-visit@npm:^2.0.3": version: 2.0.3 resolution: "unist-util-visit@npm:2.0.3" @@ -16894,6 +21340,17 @@ __metadata: languageName: node linkType: hard +"unist-util-visit@npm:^5.0.0": + version: 5.0.0 + resolution: "unist-util-visit@npm:5.0.0" + dependencies: + "@types/unist": ^3.0.0 + unist-util-is: ^6.0.0 + unist-util-visit-parents: ^6.0.0 + checksum: 9ec42e618e7e5d0202f3c191cd30791b51641285732767ee2e6bcd035931032e3c1b29093f4d7fd0c79175bbc1f26f24f26ee49770d32be76f8730a652a857e6 + languageName: node + linkType: hard + "universalify@npm:^0.1.0": version: 0.1.2 resolution: "universalify@npm:0.1.2" @@ -16902,9 +21359,9 @@ __metadata: linkType: hard "universalify@npm:^2.0.0": - version: 2.0.0 - resolution: "universalify@npm:2.0.0" - checksum: 2406a4edf4a8830aa6813278bab1f953a8e40f2f63a37873ffa9a3bc8f9745d06cc8e88f3572cb899b7e509013f7f6fcc3e37e8a6d914167a5381d8440518c44 + version: 2.0.1 + resolution: "universalify@npm:2.0.1" + checksum: ecd8469fe0db28e7de9e5289d32bd1b6ba8f7183db34f3bfc4ca53c49891c2d6aa05f3fb3936a81285a905cc509fb641a0c3fc131ec786167eff41236ae32e60 languageName: node linkType: hard @@ -16922,6 +21379,15 @@ __metadata: languageName: node linkType: hard +"unzipit@npm:^1.4.3": + version: 1.4.3 + resolution: "unzipit@npm:1.4.3" + dependencies: + uzip-module: ^1.0.2 + checksum: ce29348edab7b5fb5b7b4d43437f48e35812ac8b3cc2d76efd1acfcad6dd1b96b4f96bfd03250a724b87ba99dd531d7727ad24b590acf727dde79f54f5e779ed + languageName: node + linkType: hard + "update-browserslist-db@npm:^1.0.13": version: 1.0.13 resolution: "update-browserslist-db@npm:1.0.13" @@ -16936,6 +21402,16 @@ __metadata: languageName: node linkType: hard +"update-check@npm:1.5.4": + version: 1.5.4 + resolution: "update-check@npm:1.5.4" + dependencies: + registry-auth-token: 3.3.2 + registry-url: 3.1.0 + checksum: 2c9f7de6f030364c5ea02a341e5ae2dfe76da6559b32d40dd3b047b3ac0927408cf92d322c51cd8e009688210a85ccbf1eba449762a65a0d1b14f3cdf1ea5c48 + languageName: node + linkType: hard + "update-notifier@npm:^5.1.0": version: 5.1.0 resolution: "update-notifier@npm:5.1.0" @@ -16958,6 +21434,28 @@ __metadata: languageName: node linkType: hard +"update-notifier@npm:^6.0.2": + version: 6.0.2 + resolution: "update-notifier@npm:6.0.2" + dependencies: + boxen: ^7.0.0 + chalk: ^5.0.1 + configstore: ^6.0.0 + has-yarn: ^3.0.0 + import-lazy: ^4.0.0 + is-ci: ^3.0.1 + is-installed-globally: ^0.4.0 + is-npm: ^6.0.0 + is-yarn-global: ^0.4.0 + latest-version: ^7.0.0 + pupa: ^3.1.0 + semver: ^7.3.7 + semver-diff: ^4.0.0 + xdg-basedir: ^5.1.0 + checksum: 4bae7b3eca7b2068b6b87dde88c9dad24831fa913a5b83ecb39a7e4702c93e8b05fd9bcac5f1a005178f6e5dc859e0b3817ddda833d2a7ab92c6485e078b3cc8 + languageName: node + linkType: 
hard + "uri-js@npm:^4.2.2": version: 4.4.1 resolution: "uri-js@npm:4.4.1" @@ -16993,47 +21491,13 @@ __metadata: languageName: node linkType: hard -"use-composed-ref@npm:^1.3.0": - version: 1.3.0 - resolution: "use-composed-ref@npm:1.3.0" - peerDependencies: - react: ^16.8.0 || ^17.0.0 || ^18.0.0 - checksum: f771cbadfdc91e03b7ab9eb32d0fc0cc647755711801bf507e891ad38c4bbc5f02b2509acadf9c965ec9c5f2f642fd33bdfdfb17b0873c4ad0a9b1f5e5e724bf - languageName: node - linkType: hard - -"use-isomorphic-layout-effect@npm:^1.1.1": - version: 1.1.2 - resolution: "use-isomorphic-layout-effect@npm:1.1.2" - peerDependencies: - react: ^16.8.0 || ^17.0.0 || ^18.0.0 - peerDependenciesMeta: - "@types/react": - optional: true - checksum: a6532f7fc9ae222c3725ff0308aaf1f1ddbd3c00d685ef9eee6714fd0684de5cb9741b432fbf51e61a784e2955424864f7ea9f99734a02f237b17ad3e18ea5cb - languageName: node - linkType: hard - -"use-latest@npm:^1.2.1": - version: 1.2.1 - resolution: "use-latest@npm:1.2.1" +"url@npm:^0.11.3": + version: 0.11.3 + resolution: "url@npm:0.11.3" dependencies: - use-isomorphic-layout-effect: ^1.1.1 - peerDependencies: - react: ^16.8.0 || ^17.0.0 || ^18.0.0 - peerDependenciesMeta: - "@types/react": - optional: true - checksum: ed3f2ddddf6f21825e2ede4c2e0f0db8dcce5129802b69d1f0575fc1b42380436e8c76a6cd885d4e9aa8e292e60fb8b959c955f33c6a9123b83814a1a1875367 - languageName: node - linkType: hard - -"use-sync-external-store@npm:^1.2.0": - version: 1.2.0 - resolution: "use-sync-external-store@npm:1.2.0" - peerDependencies: - react: ^16.8.0 || ^17.0.0 || ^18.0.0 - checksum: 5c639e0f8da3521d605f59ce5be9e094ca772bd44a4ce7322b055a6f58eeed8dda3c94cabd90c7a41fb6fa852210092008afe48f7038792fd47501f33299116a + punycode: ^1.4.1 + qs: ^6.11.2 + checksum: f9e7886f46a16f96d2e42fbcc5d682c231c55ef5442c1ff66150c0f6556f6e3a97d094a84f51be15ec2432711d212eb60426659ce418f5fcadeaa3f601532c4e languageName: node linkType: hard @@ -17044,6 +21508,19 @@ __metadata: languageName: node linkType: hard +"util@npm:^0.12.5": + version: 0.12.5 + resolution: "util@npm:0.12.5" + dependencies: + inherits: ^2.0.3 + is-arguments: ^1.0.4 + is-generator-function: ^1.0.7 + is-typed-array: ^1.1.3 + which-typed-array: ^1.1.2 + checksum: 705e51f0de5b446f4edec10739752ac25856541e0254ea1e7e45e5b9f9b0cb105bc4bd415736a6210edc68245a7f903bf085ffb08dd7deb8a0e847f60538a38a + languageName: node + linkType: hard + "utila@npm:~0.4": version: 0.4.0 resolution: "utila@npm:0.4.0" @@ -17088,6 +21565,13 @@ __metadata: languageName: node linkType: hard +"uzip-module@npm:^1.0.2": + version: 1.0.3 + resolution: "uzip-module@npm:1.0.3" + checksum: fc286c44a04d75055577fae8293d3fee499d1e850f87e88c158b1e3657f4794a3a40ca2d34f73474ff82917176dd5ca9d1c0d1e375a083714e11afabd3afa423 + languageName: node + linkType: hard + "v8-compile-cache-lib@npm:^3.0.1": version: 3.0.1 resolution: "v8-compile-cache-lib@npm:3.0.1" @@ -17096,13 +21580,13 @@ __metadata: linkType: hard "v8-to-istanbul@npm:^9.0.1": - version: 9.1.0 - resolution: "v8-to-istanbul@npm:9.1.0" + version: 9.2.0 + resolution: "v8-to-istanbul@npm:9.2.0" dependencies: "@jridgewell/trace-mapping": ^0.3.12 "@types/istanbul-lib-coverage": ^2.0.1 - convert-source-map: ^1.6.0 - checksum: 2069d59ee46cf8d83b4adfd8a5c1a90834caffa9f675e4360f1157ffc8578ef0f763c8f32d128334424159bb6b01f3876acd39cd13297b2769405a9da241f8d1 + convert-source-map: ^2.0.0 + checksum: 31ef98c6a31b1dab6be024cf914f235408cd4c0dc56a5c744a5eea1a9e019ba279e1b6f90d695b78c3186feed391ed492380ccf095009e2eb91f3d058f0b4491 languageName: node linkType: hard @@ -17127,6 +21611,16 @@ 
__metadata: languageName: node linkType: hard +"vfile-location@npm:^5.0.0": + version: 5.0.2 + resolution: "vfile-location@npm:5.0.2" + dependencies: + "@types/unist": ^3.0.0 + vfile: ^6.0.0 + checksum: b61c048cedad3555b4f007f390412c6503f58a6a130b58badf4ee340c87e0d7421e9c86bbc1494c57dedfccadb60f5176cc60ba3098209d99fb3a3d8804e4c38 + languageName: node + linkType: hard + "vfile-message@npm:^2.0.0": version: 2.0.4 resolution: "vfile-message@npm:2.0.4" @@ -17137,6 +21631,16 @@ __metadata: languageName: node linkType: hard +"vfile-message@npm:^4.0.0": + version: 4.0.2 + resolution: "vfile-message@npm:4.0.2" + dependencies: + "@types/unist": ^3.0.0 + unist-util-stringify-position: ^4.0.0 + checksum: 964e7e119f4c0e0270fc269119c41c96da20afa01acb7c9809a88365c8e0c64aa692fafbd952669382b978002ecd7ad31ef4446d85e8a22cdb62f6df20186c2d + languageName: node + linkType: hard + "vfile@npm:^4.0.0": version: 4.2.1 resolution: "vfile@npm:4.2.1" @@ -17149,6 +21653,24 @@ __metadata: languageName: node linkType: hard +"vfile@npm:^6.0.0, vfile@npm:^6.0.1": + version: 6.0.1 + resolution: "vfile@npm:6.0.1" + dependencies: + "@types/unist": ^3.0.0 + unist-util-stringify-position: ^4.0.0 + vfile-message: ^4.0.0 + checksum: 05ccee73aeb00402bc8a5d0708af299e9f4a33f5132805449099295085e3ca3b0d018328bad9ff44cf2e6f4cd364f1d558d3fb9b394243a25b2739207edcb0ed + languageName: node + linkType: hard + +"vscode-languageserver-textdocument@npm:^1.0.11": + version: 1.0.11 + resolution: "vscode-languageserver-textdocument@npm:1.0.11" + checksum: ea7cdc9d4ffaae5952071fa11d17d714215a76444e6936c9359f94b9ba3222a52a55edb5bd5928bd3e9712b900a9f175bb3565ec1c8923234fe3bd327584bafb + languageName: node + linkType: hard + "vscode-oniguruma@npm:^1.7.0": version: 1.7.0 resolution: "vscode-oniguruma@npm:1.7.0" @@ -17163,6 +21685,13 @@ __metadata: languageName: node linkType: hard +"vscode-uri@npm:^3.0.8": + version: 3.0.8 + resolution: "vscode-uri@npm:3.0.8" + checksum: 514249126850c0a41a7d8c3c2836cab35983b9dc1938b903cfa253b9e33974c1416d62a00111385adcfa2b98df456437ab704f709a2ecca76a90134ef5eb4832 + languageName: node + linkType: hard + "wait-on@npm:^6.0.1": version: 6.0.1 resolution: "wait-on@npm:6.0.1" @@ -17178,7 +21707,7 @@ __metadata: languageName: node linkType: hard -"watchpack@npm:^2.4.0": +"watchpack@npm:^2.1.1, watchpack@npm:^2.4.0": version: 2.4.0 resolution: "watchpack@npm:2.4.0" dependencies: @@ -17204,6 +21733,13 @@ __metadata: languageName: node linkType: hard +"web-namespaces@npm:^2.0.0": + version: 2.0.1 + resolution: "web-namespaces@npm:2.0.1" + checksum: b6d9f02f1a43d0ef0848a812d89c83801d5bbad57d8bb61f02eb6d7eb794c3736f6cc2e1191664bb26136594c8218ac609f4069722c6f56d9fc2d808fa9271c6 + languageName: node + linkType: hard + "web-streams-polyfill@npm:^3.0.3": version: 3.2.1 resolution: "web-streams-polyfill@npm:3.2.1" @@ -17225,30 +21761,59 @@ __metadata: languageName: node linkType: hard -"webpack-bundle-analyzer@npm:^4.5.0": - version: 4.9.1 - resolution: "webpack-bundle-analyzer@npm:4.9.1" +"webpack-bundle-analyzer@npm:^4.5.0, webpack-bundle-analyzer@npm:^4.9.0": + version: 4.10.1 + resolution: "webpack-bundle-analyzer@npm:4.10.1" dependencies: "@discoveryjs/json-ext": 0.5.7 acorn: ^8.0.4 acorn-walk: ^8.0.0 commander: ^7.2.0 + debounce: ^1.2.1 escape-string-regexp: ^4.0.0 gzip-size: ^6.0.0 + html-escaper: ^2.0.2 is-plain-object: ^5.0.0 - lodash.debounce: ^4.0.8 - lodash.escape: ^4.0.1 - lodash.flatten: ^4.4.0 - lodash.invokemap: ^4.6.0 - lodash.pullall: ^4.2.0 - lodash.uniqby: ^4.7.0 opener: ^1.5.2 picocolors: ^1.0.0 sirv: ^2.0.3 ws: 
^7.3.1 bin: webpack-bundle-analyzer: lib/bin/analyzer.js - checksum: 7e891c28d5a903242893e55ecc714fa01d7ad6bedade143235c07091b235915349812fa048968462781d59187507962f38b6c61ed7d25fb836ba0ac0ee919a39 + checksum: 77f48f10a493b1cc95674526472978a2de32412ddbf556bd3903738f14890611426f19477352993efe5a9fd6ca16711eb912d986f2221b17ba6eeca1b6f71fb6 + languageName: node + linkType: hard + +"webpack-cli@npm:^4.7.2": + version: 4.10.0 + resolution: "webpack-cli@npm:4.10.0" + dependencies: + "@discoveryjs/json-ext": ^0.5.0 + "@webpack-cli/configtest": ^1.2.0 + "@webpack-cli/info": ^1.5.0 + "@webpack-cli/serve": ^1.7.0 + colorette: ^2.0.14 + commander: ^7.0.0 + cross-spawn: ^7.0.3 + fastest-levenshtein: ^1.0.12 + import-local: ^3.0.2 + interpret: ^2.2.0 + rechoir: ^0.7.0 + webpack-merge: ^5.7.3 + peerDependencies: + webpack: 4.x.x || 5.x.x + peerDependenciesMeta: + "@webpack-cli/generators": + optional: true + "@webpack-cli/migrate": + optional: true + webpack-bundle-analyzer: + optional: true + webpack-dev-server: + optional: true + bin: + webpack-cli: bin/cli.js + checksum: 2ff5355ac348e6b40f2630a203b981728834dca96d6d621be96249764b2d0fc01dd54edfcc37f02214d02935de2cf0eefd6ce689d970d154ef493f01ba922390 languageName: node linkType: hard @@ -17267,7 +21832,7 @@ __metadata: languageName: node linkType: hard -"webpack-dev-server@npm:^4.9.3": +"webpack-dev-server@npm:^4.15.1, webpack-dev-server@npm:^4.9.3": version: 4.15.1 resolution: "webpack-dev-server@npm:4.15.1" dependencies: @@ -17314,13 +21879,14 @@ __metadata: languageName: node linkType: hard -"webpack-merge@npm:^5.8.0": - version: 5.9.0 - resolution: "webpack-merge@npm:5.9.0" +"webpack-merge@npm:^5.7.3, webpack-merge@npm:^5.8.0, webpack-merge@npm:^5.9.0": + version: 5.10.0 + resolution: "webpack-merge@npm:5.10.0" dependencies: clone-deep: ^4.0.1 + flat: ^5.0.2 wildcard: ^2.0.0 - checksum: 64fe2c23aacc5f19684452a0e84ec02c46b990423aee6fcc5c18d7d471155bd14e9a6adb02bd3656eb3e0ac2532c8e97d69412ad14c97eeafe32fa6d10050872 + checksum: 1fe8bf5309add7298e1ac72fb3f2090e1dfa80c48c7e79fa48aa60b5961332c7d0d61efa8851acb805e6b91a4584537a347bc106e05e9aec87fa4f7088c62f2f languageName: node linkType: hard @@ -17331,7 +21897,7 @@ __metadata: languageName: node linkType: hard -"webpack@npm:^5.73.0": +"webpack@npm:^5.49.0, webpack@npm:^5.73.0, webpack@npm:^5.88.1": version: 5.89.0 resolution: "webpack@npm:5.89.0" dependencies: @@ -17400,13 +21966,6 @@ __metadata: languageName: node linkType: hard -"well-known-symbols@npm:^2.0.0": - version: 2.0.0 - resolution: "well-known-symbols@npm:2.0.0" - checksum: 4f54bbc3012371cb4d228f436891b8e7536d34ac61a57541890257e96788608e096231e0121ac24d08ef2f908b3eb2dc0adba35023eaeb2a7df655da91415402 - languageName: node - linkType: hard - "whatwg-url@npm:^11.0.0": version: 11.0.0 resolution: "whatwg-url@npm:11.0.0" @@ -17427,6 +21986,19 @@ __metadata: languageName: node linkType: hard +"which-typed-array@npm:^1.1.11, which-typed-array@npm:^1.1.2": + version: 1.1.13 + resolution: "which-typed-array@npm:1.1.13" + dependencies: + available-typed-arrays: ^1.0.5 + call-bind: ^1.0.4 + for-each: ^0.3.3 + gopd: ^1.0.1 + has-tostringtag: ^1.0.0 + checksum: 3828a0d5d72c800e369d447e54c7620742a4cc0c9baf1b5e8c17e9b6ff90d8d861a3a6dd4800f1953dbf80e5e5cec954a289e5b4a223e3bee4aeb1f8c5f33309 + languageName: node + linkType: hard + "which@npm:^1.3.1": version: 1.3.1 resolution: "which@npm:1.3.1" @@ -17449,12 +22021,14 @@ __metadata: languageName: node linkType: hard -"wide-align@npm:^1.1.5": - version: 1.1.5 - resolution: "wide-align@npm:1.1.5" 
+"which@npm:^4.0.0": + version: 4.0.0 + resolution: "which@npm:4.0.0" dependencies: - string-width: ^1.0.2 || 2 || 3 || 4 - checksum: d5fc37cd561f9daee3c80e03b92ed3e84d80dde3365a8767263d03dacfc8fa06b065ffe1df00d8c2a09f731482fcacae745abfbb478d4af36d0a891fad4834d3 + isexe: ^3.1.1 + bin: + node-which: bin/which.js + checksum: f17e84c042592c21e23c8195108cff18c64050b9efb8459589116999ea9da6dd1509e6a1bac3aeebefd137be00fabbb61b5c2bc0aa0f8526f32b58ee2f545651 languageName: node linkType: hard @@ -17547,7 +22121,7 @@ __metadata: languageName: node linkType: hard -"write-file-atomic@npm:^3.0.0": +"write-file-atomic@npm:^3.0.0, write-file-atomic@npm:^3.0.3": version: 3.0.3 resolution: "write-file-atomic@npm:3.0.3" dependencies: @@ -17559,16 +22133,6 @@ __metadata: languageName: node linkType: hard -"write-file-atomic@npm:^5.0.1": - version: 5.0.1 - resolution: "write-file-atomic@npm:5.0.1" - dependencies: - imurmurhash: ^0.1.4 - signal-exit: ^4.0.1 - checksum: 8dbb0e2512c2f72ccc20ccedab9986c7d02d04039ed6e8780c987dc4940b793339c50172a1008eed7747001bfacc0ca47562668a069a7506c46c77d7ba3926a9 - languageName: node - linkType: hard - "ws@npm:7.4.6": version: 7.4.6 resolution: "ws@npm:7.4.6" @@ -17630,8 +22194,8 @@ __metadata: linkType: hard "ws@npm:^8.13.0": - version: 8.14.2 - resolution: "ws@npm:8.14.2" + version: 8.15.0 + resolution: "ws@npm:8.15.0" peerDependencies: bufferutil: ^4.0.1 utf-8-validate: ">=5.0.2" @@ -17640,7 +22204,7 @@ __metadata: optional: true utf-8-validate: optional: true - checksum: 3ca0dad26e8cc6515ff392b622a1467430814c463b3368b0258e33696b1d4bed7510bc7030f7b72838b9fdeb8dbd8839cbf808367d6aae2e1d668ce741d4308b + checksum: ca15c590aa49bc0197223b8ab7d15e7362ae6c4011d91ed0e5cd5867cdd5497fd71470ea36314833b4b078929fe64dc4ba7748b1e58e50a0f8e41f147db2b464 languageName: node linkType: hard @@ -17651,6 +22215,13 @@ __metadata: languageName: node linkType: hard +"xdg-basedir@npm:^5.0.1, xdg-basedir@npm:^5.1.0": + version: 5.1.0 + resolution: "xdg-basedir@npm:5.1.0" + checksum: b60e8a2c663ccb1dac77c2d913f3b96de48dafbfa083657171d3d50e10820b8a04bb4edfe9f00808c8c20e5f5355e1927bea9029f03136e29265cb98291e1fea + languageName: node + linkType: hard + "xml-js@npm:^1.6.11": version: 1.6.11 resolution: "xml-js@npm:1.6.11" @@ -17662,6 +22233,13 @@ __metadata: languageName: node linkType: hard +"xtend@npm:^2.2.0": + version: 2.2.0 + resolution: "xtend@npm:2.2.0" + checksum: 9fcd1ddabefdb3c68a698b08177525ad14a6df3423b13bad9a53900d19374e476a43c219b0756d39675776b2326a35fe477c547cfb8a05ae9fea4ba2235bebe2 + languageName: node + linkType: hard + "xtend@npm:^4.0.0, xtend@npm:^4.0.1": version: 4.0.2 resolution: "xtend@npm:4.0.2" @@ -17669,6 +22247,32 @@ __metadata: languageName: node linkType: hard +"xtend@npm:~2.0.4": + version: 2.0.6 + resolution: "xtend@npm:2.0.6" + dependencies: + is-object: ~0.1.2 + object-keys: ~0.2.0 + checksum: 414531e51cbc56d4676ae2b3a4070052e0c7a36caf7ee74f2e8449fe0fc1752b971a776fca5b85ec02ef3d0a33b8e75491d900474b8407f3f4bba3f49325a785 + languageName: node + linkType: hard + +"xtend@npm:~2.1.2": + version: 2.1.2 + resolution: "xtend@npm:2.1.2" + dependencies: + object-keys: ~0.4.0 + checksum: a8b79f31502c163205984eaa2b196051cd2fab0882b49758e30f2f9018255bc6c462e32a090bf3385d1bda04755ad8cc0052a09e049b0038f49eb9b950d9c447 + languageName: node + linkType: hard + +"xtend@npm:~3.0.0": + version: 3.0.0 + resolution: "xtend@npm:3.0.0" + checksum: ecdc4dd74f26e561dbc13d4148fcc7b8f46f49b9259862fc31e42b7cede9eee62af9d869050a7b8e089475e858744a74ceae3f0da2943755ef712f3277ad2e50 + languageName: node + 
linkType: hard + "y18n@npm:^5.0.5": version: 5.0.8 resolution: "y18n@npm:5.0.8" @@ -17697,17 +22301,10 @@ __metadata: languageName: node linkType: hard -"yaml@npm:^2.1.1": - version: 2.3.2 - resolution: "yaml@npm:2.3.2" - checksum: acd80cc24df12c808c6dec8a0176d404ef9e6f08ad8786f746ecc9d8974968c53c6e8a67fdfabcc5f99f3dc59b6bb0994b95646ff03d18e9b1dcd59eccc02146 - languageName: node - linkType: hard - -"yaml@npm:^2.2.2": - version: 2.3.3 - resolution: "yaml@npm:2.3.3" - checksum: cdfd132e7e0259f948929efe8835923df05c013c273c02bb7a2de9b46ac3af53c2778a35b32c7c0f877cc355dc9340ed564018c0242bfbb1278c2a3e53a0e99e +"yaml@npm:^2.1.1, yaml@npm:^2.2.2, yaml@npm:^2.3.4": + version: 2.3.4 + resolution: "yaml@npm:2.3.4" + checksum: e6d1dae1c6383bcc8ba11796eef3b8c02d5082911c6723efeeb5ba50fc8e881df18d645e64de68e421b577296000bea9c75d6d9097c2f6699da3ae0406c030d8 languageName: node linkType: hard @@ -17774,7 +22371,7 @@ __metadata: languageName: node linkType: hard -"yargs@npm:^17.7.1, yargs@npm:^17.7.2": +"yargs@npm:^17.7.1": version: 17.7.2 resolution: "yargs@npm:17.7.2" dependencies: @@ -17834,6 +22431,13 @@ __metadata: languageName: node linkType: hard +"zwitch@npm:^2.0.0": + version: 2.0.4 + resolution: "zwitch@npm:2.0.4" + checksum: f22ec5fc2d5f02c423c93d35cdfa83573a3a3bd98c66b927c368ea4d0e7252a500df2a90a6b45522be536a96a73404393c958e945fdba95e6832c200791702b6 + languageName: node + linkType: hard + "zx@npm:7.1.1": version: 7.1.1 resolution: "zx@npm:7.1.1"