From e934e872d5a2fb3ca46646436de25777a33c4737 Mon Sep 17 00:00:00 2001 From: Alex Gherghisan Date: Thu, 27 Jun 2024 19:42:41 +0100 Subject: [PATCH 01/29] fix: TS LSP being slow (#7181) Potential fix for Typescript LSP being slow. Based on https://github.com/microsoft/TypeScript/issues/42761#issuecomment-778368320 and https://github.com/microsoft/TypeScript/issues/52994 --------- Co-authored-by: Santiago Palladino --- .vscode/settings.json | 1 + yarn-project/.earthlyignore | 7 +- yarn-project/.gitignore | 7 +- yarn-project/Earthfile | 15 ++-- yarn-project/accounts/.prettierignore | 2 +- yarn-project/accounts/package.json | 5 +- yarn-project/accounts/package.local.json | 12 +++- .../accounts/scripts/copy-contracts.sh | 12 +++- yarn-project/accounts/src/ecdsa/artifact.ts | 2 +- yarn-project/accounts/src/schnorr/artifact.ts | 2 +- .../accounts/src/single_key/artifact.ts | 2 +- yarn-project/accounts/tsconfig.json | 2 +- yarn-project/noir-contracts.js/package.json | 10 +-- .../noir-contracts.js/package.local.json | 4 +- .../scripts/generate-types.sh | 9 +++ yarn-project/noir-contracts.js/tsconfig.json | 5 +- .../.prettierignore | 2 +- .../noir-protocol-circuits-types/package.json | 7 +- .../package.local.json | 12 +++- .../noir-protocol-circuits-types/src/index.ts | 68 +++++++++---------- .../src/scripts/generate_declaration_files.ts | 17 +++++ .../src/scripts/generate_ts_from_abi.ts | 2 +- .../tsconfig.json | 6 +- .../protocol-contracts/.prettierignore | 2 +- yarn-project/protocol-contracts/package.json | 5 +- .../protocol-contracts/package.local.json | 12 +++- .../scripts/copy-contracts.sh | 13 +++- .../src/auth-registry/artifact.ts | 2 +- .../src/class-registerer/artifact.ts | 2 +- .../src/gas-token/artifact.ts | 2 +- .../src/instance-deployer/artifact.ts | 2 +- .../src/key-registry/artifact.ts | 2 +- .../src/multi-call-entrypoint/artifact.ts | 2 +- yarn-project/protocol-contracts/tsconfig.json | 2 +- .../simulator/src/public/public_kernel.ts | 3 +- yarn-project/simulator/src/rollup/rollup.ts | 4 +- yarn-project/tsconfig.json | 5 +- yarn-project/types/src/noir/index.ts | 2 + 38 files changed, 167 insertions(+), 104 deletions(-) create mode 100644 yarn-project/noir-protocol-circuits-types/src/scripts/generate_declaration_files.ts diff --git a/.vscode/settings.json b/.vscode/settings.json index 86a0427f790..630178ad32a 100644 --- a/.vscode/settings.json +++ b/.vscode/settings.json @@ -169,4 +169,5 @@ "**/noir/noir-repo/docs/versioned_docs/**": true }, "cmake.sourceDirectory": "${workspaceFolder}/barretenberg/cpp", + "typescript.tsserver.maxTsServerMemory": 4096, } diff --git a/yarn-project/.earthlyignore b/yarn-project/.earthlyignore index 90a25913737..bd9d331e9a6 100644 --- a/yarn-project/.earthlyignore +++ b/yarn-project/.earthlyignore @@ -39,10 +39,9 @@ cmake-build-debug **/tsconfig.tsbuildinfo **/.eslintcache **/target -accounts/src/artifacts +accounts/artifacts aztec-faucet/data* aztec-node/data* -aztec-js/src/account_contract/artifacts aztec/log circuits.js/fixtures/*.json docs/dist @@ -51,8 +50,8 @@ end-to-end/log end-to-end/data end-to-end/src/web/main.js end-to-end/src/web/main.js.LICENSE.txt -entry-points/src/artifacts l1-contracts/generated +noir-protocol-circuits-types/artifacts builder/target/ builder/proofs/ builder/Prover.toml @@ -64,7 +63,7 @@ builder/Verifier.toml builder/src/target builder/src/crs builder/src/types -protocol-contracts/src/artifacts +protocol-contracts/artifacts scripts/tmp noir-contracts.js/src noir-contracts.js/artifacts/ diff --git a/yarn-project/.gitignore 
b/yarn-project/.gitignore index e92d481c252..e044af6d3bd 100644 --- a/yarn-project/.gitignore +++ b/yarn-project/.gitignore @@ -12,10 +12,9 @@ **/.debounce-* **/.tsc.pid **/*.result -accounts/src/artifacts +accounts/artifacts aztec-faucet/data* aztec-node/data* -aztec-js/src/account_contract/artifacts aztec/log circuits.js/fixtures/*.json circuits.js/src/structs/kernel/private_kernel_reset_circuit_private_inputs_variants.ts @@ -25,9 +24,9 @@ end-to-end/log end-to-end/data end-to-end/src/web/main.js end-to-end/src/web/main.js.LICENSE.txt -entry-points/src/artifacts l1-artifacts/generated l1-contracts/generated +noir-protocol-circuits-types/artifacts builder/target/ builder/proofs/ builder/Prover.toml @@ -40,7 +39,7 @@ builder/src/target builder/src/crs builder/src/types noir-protocol-circuits-types/src/types/ -protocol-contracts/src/artifacts +protocol-contracts/artifacts scripts/tmp noir-contracts.js/src noir-contracts.js/artifacts/ diff --git a/yarn-project/Earthfile b/yarn-project/Earthfile index 1f4e2fdc790..162949f4a43 100644 --- a/yarn-project/Earthfile +++ b/yarn-project/Earthfile @@ -73,8 +73,7 @@ bb-cli: ../barretenberg/ts/dest/browser \ aztec.js/dest/main.js \ end-to-end \ - **/src \ - **/artifacts + **/src # yarn symlinks the binary to node_modules/.bin ENTRYPOINT ["/usr/src/yarn-project/node_modules/.bin/bb-cli"] @@ -124,8 +123,7 @@ txe: ../l1-contracts \ ../barretenberg/ts/src \ ../barretenberg/ts/dest/node-cjs \ - ../barretenberg/ts/dest/browser \ - **/artifacts + ../barretenberg/ts/dest/browser SAVE ARTIFACT /usr/src /usr/src aztec-prod: @@ -140,8 +138,7 @@ aztec-prod: ../barretenberg/ts/dest/browser \ aztec.js/dest/main.js \ end-to-end \ - **/src \ - **/artifacts + **/src COPY --dir +rollup-verifier-contract/usr/src/bb /usr/src SAVE ARTIFACT /usr/src /usr/src @@ -172,8 +169,7 @@ aztec-faucet-build: ../barretenberg/ts/dest/browser \ aztec.js/dest/main.js \ end-to-end \ - **/src \ - **/artifacts + **/src SAVE ARTIFACT /usr/src /usr/src aztec-faucet: @@ -193,8 +189,7 @@ end-to-end-prod: ../l1-contracts \ ../barretenberg/ts/src \ ../barretenberg/ts/dest/node-cjs \ - ../barretenberg/ts/dest/browser \ - **/artifacts + ../barretenberg/ts/dest/browser COPY --dir +rollup-verifier-contract/usr/src/bb /usr/src SAVE ARTIFACT /usr/src /usr/src diff --git a/yarn-project/accounts/.prettierignore b/yarn-project/accounts/.prettierignore index 2ade63ee6f9..eb6b23ceb90 100644 --- a/yarn-project/accounts/.prettierignore +++ b/yarn-project/accounts/.prettierignore @@ -1 +1 @@ -src/artifacts/*.json \ No newline at end of file +artifacts/*.json \ No newline at end of file diff --git a/yarn-project/accounts/package.json b/yarn-project/accounts/package.json index 90d2d36ab83..b1209964ae4 100644 --- a/yarn-project/accounts/package.json +++ b/yarn-project/accounts/package.json @@ -28,7 +28,7 @@ "generate:noir-contracts": "./scripts/copy-contracts.sh", "build:dev": "tsc -b --watch", "build:ts": "tsc -b", - "clean": "rm -rf ./dest .tsbuildinfo ./src/artifacts", + "clean": "rm -rf ./dest .tsbuildinfo ./artifacts", "formatting": "run -T prettier --check ./src && run -T eslint ./src", "formatting:fix": "run -T eslint --fix ./src && run -T prettier -w ./src", "test": "NODE_NO_WARNINGS=1 node --experimental-vm-modules ../node_modules/.bin/jest --passWithNoTests" @@ -91,7 +91,8 @@ "files": [ "dest", "src", - "!*.test.*" + "!*.test.*", + "artifacts" ], "engines": { "node": ">=18" diff --git a/yarn-project/accounts/package.local.json b/yarn-project/accounts/package.local.json index c5987104cfc..6e3a34a9358 
100644 --- a/yarn-project/accounts/package.local.json +++ b/yarn-project/accounts/package.local.json @@ -5,6 +5,12 @@ "generate:noir-contracts": "./scripts/copy-contracts.sh", "build:dev": "tsc -b --watch", "build:ts": "tsc -b", - "clean": "rm -rf ./dest .tsbuildinfo ./src/artifacts" - } -} + "clean": "rm -rf ./dest .tsbuildinfo ./artifacts" + }, + "files": [ + "dest", + "src", + "artifacts", + "!*.test.*" + ] +} \ No newline at end of file diff --git a/yarn-project/accounts/scripts/copy-contracts.sh b/yarn-project/accounts/scripts/copy-contracts.sh index 8b945155a9d..5984357006a 100755 --- a/yarn-project/accounts/scripts/copy-contracts.sh +++ b/yarn-project/accounts/scripts/copy-contracts.sh @@ -1,9 +1,17 @@ #! /bin/bash set -euo pipefail -mkdir -p ./src/artifacts +mkdir -p ./artifacts contracts=(schnorr_account_contract-SchnorrAccount ecdsa_account_contract-EcdsaAccount schnorr_single_key_account_contract-SchnorrSingleKeyAccount) +decl=$(cat < ./artifacts/${contract#*-}.d.json.ts done \ No newline at end of file diff --git a/yarn-project/accounts/src/ecdsa/artifact.ts b/yarn-project/accounts/src/ecdsa/artifact.ts index 54ec3212dd0..a38c97a094f 100644 --- a/yarn-project/accounts/src/ecdsa/artifact.ts +++ b/yarn-project/accounts/src/ecdsa/artifact.ts @@ -1,5 +1,5 @@ import { type NoirCompiledContract, loadContractArtifact } from '@aztec/aztec.js'; -import EcdsaAccountContractJson from '../artifacts/EcdsaAccount.json' assert { type: 'json' }; +import EcdsaAccountContractJson from '../../artifacts/EcdsaAccount.json' assert { type: 'json' }; export const EcdsaAccountContractArtifact = loadContractArtifact(EcdsaAccountContractJson as NoirCompiledContract); diff --git a/yarn-project/accounts/src/schnorr/artifact.ts b/yarn-project/accounts/src/schnorr/artifact.ts index f7cac3337f7..88c1c5d1e4e 100644 --- a/yarn-project/accounts/src/schnorr/artifact.ts +++ b/yarn-project/accounts/src/schnorr/artifact.ts @@ -1,5 +1,5 @@ import { type NoirCompiledContract, loadContractArtifact } from '@aztec/aztec.js'; -import SchnorrAccountContractJson from '../artifacts/SchnorrAccount.json' assert { type: 'json' }; +import SchnorrAccountContractJson from '../../artifacts/SchnorrAccount.json' assert { type: 'json' }; export const SchnorrAccountContractArtifact = loadContractArtifact(SchnorrAccountContractJson as NoirCompiledContract); diff --git a/yarn-project/accounts/src/single_key/artifact.ts b/yarn-project/accounts/src/single_key/artifact.ts index 55a819dc570..f48ee9a14e8 100644 --- a/yarn-project/accounts/src/single_key/artifact.ts +++ b/yarn-project/accounts/src/single_key/artifact.ts @@ -1,6 +1,6 @@ import { type NoirCompiledContract, loadContractArtifact } from '@aztec/aztec.js'; -import SchnorrSingleKeyAccountContractJson from '../artifacts/SchnorrSingleKeyAccount.json' assert { type: 'json' }; +import SchnorrSingleKeyAccountContractJson from '../../artifacts/SchnorrSingleKeyAccount.json' assert { type: 'json' }; export const SchnorrSingleKeyAccountContractArtifact = loadContractArtifact( SchnorrSingleKeyAccountContractJson as NoirCompiledContract, diff --git a/yarn-project/accounts/tsconfig.json b/yarn-project/accounts/tsconfig.json index 0b48acf92f7..62dae97b860 100644 --- a/yarn-project/accounts/tsconfig.json +++ b/yarn-project/accounts/tsconfig.json @@ -28,5 +28,5 @@ "path": "../types" } ], - "include": ["src", "src/**/*.json"] + "include": ["src", "artifacts/*.d.json.ts"] } diff --git a/yarn-project/noir-contracts.js/package.json b/yarn-project/noir-contracts.js/package.json index 
881226f0a93..0a86d66706b 100644 --- a/yarn-project/noir-contracts.js/package.json +++ b/yarn-project/noir-contracts.js/package.json @@ -3,14 +3,14 @@ "version": "0.1.0", "type": "module", "exports": { - ".": "./dest/src/index.js", - "./artifacts/*": "./dest/artifacts/*.json", - "./*": "./dest/src/*.js" + ".": "./dest/index.js", + "./artifacts/*": "./artifacts/*.json", + "./*": "./dest/*.js" }, "scripts": { "build": "yarn clean && yarn generate", "build:dev": "tsc -b --watch", - "clean": "rm -rf .tsbuildinfo ./artifacts ./codegenCache.json", + "clean": "rm -rf ./dest .tsbuildinfo ./artifacts ./src ./codegenCache.json", "formatting": "run -T prettier --check ./src && run -T eslint ./src", "formatting:fix": "run -T eslint --fix ./src && run -T prettier -w ./src", "test": "NODE_NO_WARNINGS=1 node --experimental-vm-modules ../node_modules/.bin/jest --passWithNoTests", @@ -73,4 +73,4 @@ "engines": { "node": ">=18" } -} +} \ No newline at end of file diff --git a/yarn-project/noir-contracts.js/package.local.json b/yarn-project/noir-contracts.js/package.local.json index 5dd3e0100f9..4cab71fc4dd 100644 --- a/yarn-project/noir-contracts.js/package.local.json +++ b/yarn-project/noir-contracts.js/package.local.json @@ -3,6 +3,6 @@ "build": "yarn clean && yarn generate", "generate": "yarn generate:noir-contracts", "generate:noir-contracts": "./scripts/generate-types.sh && run -T prettier -w ./src --loglevel warn", - "clean": "rm -rf .tsbuildinfo ./artifacts ./codegenCache.json" + "clean": "rm -rf ./dest .tsbuildinfo ./artifacts ./src ./codegenCache.json" } -} +} \ No newline at end of file diff --git a/yarn-project/noir-contracts.js/scripts/generate-types.sh b/yarn-project/noir-contracts.js/scripts/generate-types.sh index f7355440508..8c1168b24e8 100755 --- a/yarn-project/noir-contracts.js/scripts/generate-types.sh +++ b/yarn-project/noir-contracts.js/scripts/generate-types.sh @@ -19,12 +19,21 @@ echo "// Auto generated module - do not edit!" 
>"$INDEX" # Ensure the artifacts directory exists mkdir -p artifacts +decl=$(cat < "artifacts/$dts_file" done # Generate types for the contracts diff --git a/yarn-project/noir-contracts.js/tsconfig.json b/yarn-project/noir-contracts.js/tsconfig.json index caf5e40c801..92add33fb36 100644 --- a/yarn-project/noir-contracts.js/tsconfig.json +++ b/yarn-project/noir-contracts.js/tsconfig.json @@ -2,7 +2,7 @@ "extends": "..", "compilerOptions": { "outDir": "dest", - "rootDir": ".", + "rootDir": "src", "tsBuildInfoFile": ".tsbuildinfo" }, "references": [ @@ -15,8 +15,7 @@ ], "include": [ "src", - "artifacts", - "artifacts/*.json" + "artifacts/*.d.json.ts" ], "exclude": [ "dest" diff --git a/yarn-project/noir-protocol-circuits-types/.prettierignore b/yarn-project/noir-protocol-circuits-types/.prettierignore index 595a24e0611..2adf7da0bda 100644 --- a/yarn-project/noir-protocol-circuits-types/.prettierignore +++ b/yarn-project/noir-protocol-circuits-types/.prettierignore @@ -1,2 +1,2 @@ crates -src/target +artifacts diff --git a/yarn-project/noir-protocol-circuits-types/package.json b/yarn-project/noir-protocol-circuits-types/package.json index dab1d3a0e72..4f1b5e1fd38 100644 --- a/yarn-project/noir-protocol-circuits-types/package.json +++ b/yarn-project/noir-protocol-circuits-types/package.json @@ -12,12 +12,12 @@ ], "scripts": { "build": "yarn clean && yarn generate && tsc -b", - "clean": "rm -rf ./dest .tsbuildinfo src/types src/target", + "clean": "rm -rf ./dest .tsbuildinfo src/types artifacts", "formatting": "run -T prettier --check ./src && run -T eslint ./src", "formatting:fix": "run -T eslint --fix ./src && run -T prettier -w ./src", "formatting:fix:types": "NODE_OPTIONS='--max-old-space-size=8096' run -T eslint --fix ./src/types && run -T prettier -w ./src/types", "generate": "yarn generate:noir-circuits", - "generate:noir-circuits": "mkdir -p ./src/target && cp ../../noir-projects/noir-protocol-circuits/target/* ./src/target && node --no-warnings --loader ts-node/esm src/scripts/generate_ts_from_abi.ts && run -T prettier -w ./src/types", + "generate:noir-circuits": "mkdir -p ./artifacts && cp ../../noir-projects/noir-protocol-circuits/target/* ./artifacts && node --no-warnings --loader ts-node/esm src/scripts/generate_declaration_files.ts && node --no-warnings --loader ts-node/esm src/scripts/generate_ts_from_abi.ts && run -T prettier -w ./src/types", "test": "NODE_NO_WARNINGS=1 node --experimental-vm-modules ../node_modules/.bin/jest --passWithNoTests", "codegen": "yarn noir-codegen", "build:dev": "tsc -b --watch" @@ -82,7 +82,8 @@ "files": [ "dest", "src", - "!*.test.*" + "!*.test.*", + "artifacts" ], "types": "./dest/index.d.ts", "engines": { diff --git a/yarn-project/noir-protocol-circuits-types/package.local.json b/yarn-project/noir-protocol-circuits-types/package.local.json index 9ade866233e..e3deaa112a7 100644 --- a/yarn-project/noir-protocol-circuits-types/package.local.json +++ b/yarn-project/noir-protocol-circuits-types/package.local.json @@ -1,6 +1,12 @@ { "scripts": { "build": "yarn clean && yarn generate && tsc -b", - "clean": "rm -rf ./dest .tsbuildinfo src/types src/target" - } -} + "clean": "rm -rf ./dest .tsbuildinfo src/types artifacts" + }, + "files": [ + "dest", + "src", + "artifacts", + "!*.test.*" + ] +} \ No newline at end of file diff --git a/yarn-project/noir-protocol-circuits-types/src/index.ts b/yarn-project/noir-protocol-circuits-types/src/index.ts index b6ba9bd4097..789ff0f92f7 100644 --- a/yarn-project/noir-protocol-circuits-types/src/index.ts +++ 
b/yarn-project/noir-protocol-circuits-types/src/index.ts @@ -30,40 +30,40 @@ import { type Abi, abiDecode, abiEncode } from '@noir-lang/noirc_abi'; import { type WitnessMap } from '@noir-lang/types'; import { strict as assert } from 'assert'; -import EmptyNestedJson from './target/empty_nested.json' assert { type: 'json' }; -import EmptyNestedSimulatedJson from './target/empty_nested_simulated.json' assert { type: 'json' }; -import BaseParityJson from './target/parity_base.json' assert { type: 'json' }; -import RootParityJson from './target/parity_root.json' assert { type: 'json' }; -import PrivateKernelEmptyJson from './target/private_kernel_empty.json' assert { type: 'json' }; -import PrivateKernelEmptySimulatedJson from './target/private_kernel_empty_simulated.json' assert { type: 'json' }; -import PrivateKernelInitJson from './target/private_kernel_init.json' assert { type: 'json' }; -import PrivateKernelInitSimulatedJson from './target/private_kernel_init_simulated.json' assert { type: 'json' }; -import PrivateKernelInnerJson from './target/private_kernel_inner.json' assert { type: 'json' }; -import PrivateKernelInnerSimulatedJson from './target/private_kernel_inner_simulated.json' assert { type: 'json' }; -import PrivateKernelResetJson from './target/private_kernel_reset.json' assert { type: 'json' }; -import PrivateKernelResetBigJson from './target/private_kernel_reset_big.json' assert { type: 'json' }; -import PrivateKernelResetMediumJson from './target/private_kernel_reset_medium.json' assert { type: 'json' }; -import PrivateKernelResetSimulatedJson from './target/private_kernel_reset_simulated.json' assert { type: 'json' }; -import PrivateKernelResetBigSimulatedJson from './target/private_kernel_reset_simulated_big.json' assert { type: 'json' }; -import PrivateKernelResetMediumSimulatedJson from './target/private_kernel_reset_simulated_medium.json' assert { type: 'json' }; -import PrivateKernelResetSmallSimulatedJson from './target/private_kernel_reset_simulated_small.json' assert { type: 'json' }; -import PrivateKernelResetSmallJson from './target/private_kernel_reset_small.json' assert { type: 'json' }; -import PrivateKernelTailJson from './target/private_kernel_tail.json' assert { type: 'json' }; -import PrivateKernelTailSimulatedJson from './target/private_kernel_tail_simulated.json' assert { type: 'json' }; -import PrivateKernelTailToPublicJson from './target/private_kernel_tail_to_public.json' assert { type: 'json' }; -import PrivateKernelTailToPublicSimulatedJson from './target/private_kernel_tail_to_public_simulated.json' assert { type: 'json' }; -import PublicKernelAppLogicJson from './target/public_kernel_app_logic.json' assert { type: 'json' }; -import PublicKernelAppLogicSimulatedJson from './target/public_kernel_app_logic_simulated.json' assert { type: 'json' }; -import PublicKernelSetupJson from './target/public_kernel_setup.json' assert { type: 'json' }; -import PublicKernelSetupSimulatedJson from './target/public_kernel_setup_simulated.json' assert { type: 'json' }; -import PublicKernelTailJson from './target/public_kernel_tail.json' assert { type: 'json' }; -import PublicKernelTailSimulatedJson from './target/public_kernel_tail_simulated.json' assert { type: 'json' }; -import PublicKernelTeardownJson from './target/public_kernel_teardown.json' assert { type: 'json' }; -import PublicKernelTeardownSimulatedJson from './target/public_kernel_teardown_simulated.json' assert { type: 'json' }; -import BaseRollupJson from './target/rollup_base.json' assert { type: 'json' 
}; -import BaseRollupSimulatedJson from './target/rollup_base_simulated.json' assert { type: 'json' }; -import MergeRollupJson from './target/rollup_merge.json' assert { type: 'json' }; -import RootRollupJson from './target/rollup_root.json' assert { type: 'json' }; +import EmptyNestedJson from '../artifacts/empty_nested.json' assert { type: 'json' }; +import EmptyNestedSimulatedJson from '../artifacts/empty_nested_simulated.json' assert { type: 'json' }; +import BaseParityJson from '../artifacts/parity_base.json' assert { type: 'json' }; +import RootParityJson from '../artifacts/parity_root.json' assert { type: 'json' }; +import PrivateKernelEmptyJson from '../artifacts/private_kernel_empty.json' assert { type: 'json' }; +import PrivateKernelEmptySimulatedJson from '../artifacts/private_kernel_empty_simulated.json' assert { type: 'json' }; +import PrivateKernelInitJson from '../artifacts/private_kernel_init.json' assert { type: 'json' }; +import PrivateKernelInitSimulatedJson from '../artifacts/private_kernel_init_simulated.json' assert { type: 'json' }; +import PrivateKernelInnerJson from '../artifacts/private_kernel_inner.json' assert { type: 'json' }; +import PrivateKernelInnerSimulatedJson from '../artifacts/private_kernel_inner_simulated.json' assert { type: 'json' }; +import PrivateKernelResetJson from '../artifacts/private_kernel_reset.json' assert { type: 'json' }; +import PrivateKernelResetBigJson from '../artifacts/private_kernel_reset_big.json' assert { type: 'json' }; +import PrivateKernelResetMediumJson from '../artifacts/private_kernel_reset_medium.json' assert { type: 'json' }; +import PrivateKernelResetSimulatedJson from '../artifacts/private_kernel_reset_simulated.json' assert { type: 'json' }; +import PrivateKernelResetBigSimulatedJson from '../artifacts/private_kernel_reset_simulated_big.json' assert { type: 'json' }; +import PrivateKernelResetMediumSimulatedJson from '../artifacts/private_kernel_reset_simulated_medium.json' assert { type: 'json' }; +import PrivateKernelResetSmallSimulatedJson from '../artifacts/private_kernel_reset_simulated_small.json' assert { type: 'json' }; +import PrivateKernelResetSmallJson from '../artifacts/private_kernel_reset_small.json' assert { type: 'json' }; +import PrivateKernelTailJson from '../artifacts/private_kernel_tail.json' assert { type: 'json' }; +import PrivateKernelTailSimulatedJson from '../artifacts/private_kernel_tail_simulated.json' assert { type: 'json' }; +import PrivateKernelTailToPublicJson from '../artifacts/private_kernel_tail_to_public.json' assert { type: 'json' }; +import PrivateKernelTailToPublicSimulatedJson from '../artifacts/private_kernel_tail_to_public_simulated.json' assert { type: 'json' }; +import PublicKernelAppLogicJson from '../artifacts/public_kernel_app_logic.json' assert { type: 'json' }; +import PublicKernelAppLogicSimulatedJson from '../artifacts/public_kernel_app_logic_simulated.json' assert { type: 'json' }; +import PublicKernelSetupJson from '../artifacts/public_kernel_setup.json' assert { type: 'json' }; +import PublicKernelSetupSimulatedJson from '../artifacts/public_kernel_setup_simulated.json' assert { type: 'json' }; +import PublicKernelTailJson from '../artifacts/public_kernel_tail.json' assert { type: 'json' }; +import PublicKernelTailSimulatedJson from '../artifacts/public_kernel_tail_simulated.json' assert { type: 'json' }; +import PublicKernelTeardownJson from '../artifacts/public_kernel_teardown.json' assert { type: 'json' }; +import PublicKernelTeardownSimulatedJson from 
'../artifacts/public_kernel_teardown_simulated.json' assert { type: 'json' }; +import BaseRollupJson from '../artifacts/rollup_base.json' assert { type: 'json' }; +import BaseRollupSimulatedJson from '../artifacts/rollup_base_simulated.json' assert { type: 'json' }; +import MergeRollupJson from '../artifacts/rollup_merge.json' assert { type: 'json' }; +import RootRollupJson from '../artifacts/rollup_root.json' assert { type: 'json' }; import { mapBaseOrMergeRollupPublicInputsFromNoir, mapBaseParityInputsToNoir, diff --git a/yarn-project/noir-protocol-circuits-types/src/scripts/generate_declaration_files.ts b/yarn-project/noir-protocol-circuits-types/src/scripts/generate_declaration_files.ts new file mode 100644 index 00000000000..98a5f2f06b4 --- /dev/null +++ b/yarn-project/noir-protocol-circuits-types/src/scripts/generate_declaration_files.ts @@ -0,0 +1,17 @@ +import { fileURLToPath } from '@aztec/foundation/url'; + +import { readdir, writeFile } from 'fs/promises'; +import { join } from 'path'; + +const content = `\ +import { type NoirCompiledCircuit } from '@aztec/types/noir'; +const circuit: NoirCompiledCircuit; +export = circuit; +`; + +const target = fileURLToPath(new URL('../../artifacts', import.meta.url).href); +const files = await readdir(target); +for (const file of files) { + const name = file.replace('.json', ''); + await writeFile(join(target, `${name}.d.json.ts`), content); +} diff --git a/yarn-project/noir-protocol-circuits-types/src/scripts/generate_ts_from_abi.ts b/yarn-project/noir-protocol-circuits-types/src/scripts/generate_ts_from_abi.ts index 4c3734e42e6..acb52af5f5b 100644 --- a/yarn-project/noir-protocol-circuits-types/src/scripts/generate_ts_from_abi.ts +++ b/yarn-project/noir-protocol-circuits-types/src/scripts/generate_ts_from_abi.ts @@ -45,7 +45,7 @@ const main = async () => { const programs: [string, CompiledCircuit][] = []; // Collect all circuits for (const circuit of circuits) { - const rawData = await fs.readFile(`./src/target/${circuit}.json`, 'utf-8'); + const rawData = await fs.readFile(`./artifacts/${circuit}.json`, 'utf-8'); const abiObj: CompiledCircuit = JSON.parse(rawData); programs.push([pascalCase(circuit), abiObj]); } diff --git a/yarn-project/noir-protocol-circuits-types/tsconfig.json b/yarn-project/noir-protocol-circuits-types/tsconfig.json index 632b9eed778..390735ab54c 100644 --- a/yarn-project/noir-protocol-circuits-types/tsconfig.json +++ b/yarn-project/noir-protocol-circuits-types/tsconfig.json @@ -3,7 +3,8 @@ "compilerOptions": { "outDir": "dest", "rootDir": "src", - "tsBuildInfoFile": ".tsbuildinfo" + "tsBuildInfoFile": ".tsbuildinfo", + "resolveJsonModule": true }, "references": [ { @@ -28,6 +29,5 @@ "path": "../merkle-tree" } ], - "include": ["src", "src/**/*.json"], - "exclude": ["src/contracts"] + "include": ["src", "artifacts/*.d.json.ts"] } diff --git a/yarn-project/protocol-contracts/.prettierignore b/yarn-project/protocol-contracts/.prettierignore index 2ade63ee6f9..eb6b23ceb90 100644 --- a/yarn-project/protocol-contracts/.prettierignore +++ b/yarn-project/protocol-contracts/.prettierignore @@ -1 +1 @@ -src/artifacts/*.json \ No newline at end of file +artifacts/*.json \ No newline at end of file diff --git a/yarn-project/protocol-contracts/package.json b/yarn-project/protocol-contracts/package.json index 48007fea086..c7bff393480 100644 --- a/yarn-project/protocol-contracts/package.json +++ b/yarn-project/protocol-contracts/package.json @@ -23,7 +23,7 @@ "generate:noir-contracts": "./scripts/copy-contracts.sh", "build:dev": "tsc 
-b --watch", "build:ts": "tsc -b", - "clean": "rm -rf ./dest .tsbuildinfo ./src/artifacts", + "clean": "rm -rf ./dest .tsbuildinfo ./artifacts", "formatting": "run -T prettier --check ./src && run -T eslint ./src", "formatting:fix": "run -T eslint --fix ./src && run -T prettier -w ./src", "test": "NODE_NO_WARNINGS=1 node --experimental-vm-modules ../node_modules/.bin/jest --passWithNoTests" @@ -84,7 +84,8 @@ "files": [ "dest", "src", - "!*.test.*" + "!*.test.*", + "artifacts" ], "engines": { "node": ">=18" diff --git a/yarn-project/protocol-contracts/package.local.json b/yarn-project/protocol-contracts/package.local.json index c5987104cfc..6e3a34a9358 100644 --- a/yarn-project/protocol-contracts/package.local.json +++ b/yarn-project/protocol-contracts/package.local.json @@ -5,6 +5,12 @@ "generate:noir-contracts": "./scripts/copy-contracts.sh", "build:dev": "tsc -b --watch", "build:ts": "tsc -b", - "clean": "rm -rf ./dest .tsbuildinfo ./src/artifacts" - } -} + "clean": "rm -rf ./dest .tsbuildinfo ./artifacts" + }, + "files": [ + "dest", + "src", + "artifacts", + "!*.test.*" + ] +} \ No newline at end of file diff --git a/yarn-project/protocol-contracts/scripts/copy-contracts.sh b/yarn-project/protocol-contracts/scripts/copy-contracts.sh index 239445f9bbc..5001bf8c254 100755 --- a/yarn-project/protocol-contracts/scripts/copy-contracts.sh +++ b/yarn-project/protocol-contracts/scripts/copy-contracts.sh @@ -1,6 +1,6 @@ #! /bin/bash set -euo pipefail -mkdir -p ./src/artifacts +mkdir -p ./artifacts contracts=( contract_class_registerer_contract-ContractClassRegisterer @@ -11,6 +11,15 @@ contracts=( multi_call_entrypoint_contract-MultiCallEntrypoint ) + +decl=$(cat < ./artifacts/${contract#*-}.d.json.ts done diff --git a/yarn-project/protocol-contracts/src/auth-registry/artifact.ts b/yarn-project/protocol-contracts/src/auth-registry/artifact.ts index d33c2a9820f..030414f5d45 100644 --- a/yarn-project/protocol-contracts/src/auth-registry/artifact.ts +++ b/yarn-project/protocol-contracts/src/auth-registry/artifact.ts @@ -1,6 +1,6 @@ import { loadContractArtifact } from '@aztec/types/abi'; import { type NoirCompiledContract } from '@aztec/types/noir'; -import AuthRegistryJson from '../artifacts/AuthRegistry.json' assert { type: 'json' }; +import AuthRegistryJson from '../../artifacts/AuthRegistry.json' assert { type: 'json' }; export const AuthRegistryArtifact = loadContractArtifact(AuthRegistryJson as NoirCompiledContract); diff --git a/yarn-project/protocol-contracts/src/class-registerer/artifact.ts b/yarn-project/protocol-contracts/src/class-registerer/artifact.ts index 433bf7b269c..9f1f360b4c5 100644 --- a/yarn-project/protocol-contracts/src/class-registerer/artifact.ts +++ b/yarn-project/protocol-contracts/src/class-registerer/artifact.ts @@ -1,7 +1,7 @@ import { loadContractArtifact } from '@aztec/types/abi'; import { type NoirCompiledContract } from '@aztec/types/noir'; -import ContractClassRegistererJson from '../artifacts/ContractClassRegisterer.json' assert { type: 'json' }; +import ContractClassRegistererJson from '../../artifacts/ContractClassRegisterer.json' assert { type: 'json' }; export const ContractClassRegistererArtifact = loadContractArtifact( ContractClassRegistererJson as NoirCompiledContract, diff --git a/yarn-project/protocol-contracts/src/gas-token/artifact.ts b/yarn-project/protocol-contracts/src/gas-token/artifact.ts index 0dbaf8c2d74..9d83dd2d21b 100644 --- a/yarn-project/protocol-contracts/src/gas-token/artifact.ts +++ 
b/yarn-project/protocol-contracts/src/gas-token/artifact.ts @@ -1,6 +1,6 @@ import { loadContractArtifact } from '@aztec/types/abi'; import { type NoirCompiledContract } from '@aztec/types/noir'; -import GasTokenJson from '../artifacts/GasToken.json' assert { type: 'json' }; +import GasTokenJson from '../../artifacts/GasToken.json' assert { type: 'json' }; export const GasTokenArtifact = loadContractArtifact(GasTokenJson as NoirCompiledContract); diff --git a/yarn-project/protocol-contracts/src/instance-deployer/artifact.ts b/yarn-project/protocol-contracts/src/instance-deployer/artifact.ts index 809e35873ac..03dffc51462 100644 --- a/yarn-project/protocol-contracts/src/instance-deployer/artifact.ts +++ b/yarn-project/protocol-contracts/src/instance-deployer/artifact.ts @@ -1,7 +1,7 @@ import { loadContractArtifact } from '@aztec/types/abi'; import { type NoirCompiledContract } from '@aztec/types/noir'; -import ContractInstanceDeployerJson from '../artifacts/ContractInstanceDeployer.json' assert { type: 'json' }; +import ContractInstanceDeployerJson from '../../artifacts/ContractInstanceDeployer.json' assert { type: 'json' }; export const ContractInstanceDeployerArtifact = loadContractArtifact( ContractInstanceDeployerJson as NoirCompiledContract, diff --git a/yarn-project/protocol-contracts/src/key-registry/artifact.ts b/yarn-project/protocol-contracts/src/key-registry/artifact.ts index 89436d313e6..5feb280a624 100644 --- a/yarn-project/protocol-contracts/src/key-registry/artifact.ts +++ b/yarn-project/protocol-contracts/src/key-registry/artifact.ts @@ -1,6 +1,6 @@ import { loadContractArtifact } from '@aztec/types/abi'; import { type NoirCompiledContract } from '@aztec/types/noir'; -import KeyRegistryJson from '../artifacts/KeyRegistry.json' assert { type: 'json' }; +import KeyRegistryJson from '../../artifacts/KeyRegistry.json' assert { type: 'json' }; export const KeyRegistryArtifact = loadContractArtifact(KeyRegistryJson as NoirCompiledContract); diff --git a/yarn-project/protocol-contracts/src/multi-call-entrypoint/artifact.ts b/yarn-project/protocol-contracts/src/multi-call-entrypoint/artifact.ts index b3cf23f9f41..9f259a2d824 100644 --- a/yarn-project/protocol-contracts/src/multi-call-entrypoint/artifact.ts +++ b/yarn-project/protocol-contracts/src/multi-call-entrypoint/artifact.ts @@ -1,6 +1,6 @@ import { loadContractArtifact } from '@aztec/types/abi'; import { type NoirCompiledContract } from '@aztec/types/noir'; -import MultiCallEntrypoint from '../artifacts/MultiCallEntrypoint.json' assert { type: 'json' }; +import MultiCallEntrypoint from '../../artifacts/MultiCallEntrypoint.json' assert { type: 'json' }; export const MultiCallEntrypointArtifact = loadContractArtifact(MultiCallEntrypoint as NoirCompiledContract); diff --git a/yarn-project/protocol-contracts/tsconfig.json b/yarn-project/protocol-contracts/tsconfig.json index 01c876235ce..96ab4e32557 100644 --- a/yarn-project/protocol-contracts/tsconfig.json +++ b/yarn-project/protocol-contracts/tsconfig.json @@ -16,5 +16,5 @@ "path": "../types" } ], - "include": ["src", "src/**/*.json"] + "include": ["src", "artifacts/*.d.json.ts"] } diff --git a/yarn-project/simulator/src/public/public_kernel.ts b/yarn-project/simulator/src/public/public_kernel.ts index c48c9ed4512..6910123b54e 100644 --- a/yarn-project/simulator/src/public/public_kernel.ts +++ b/yarn-project/simulator/src/public/public_kernel.ts @@ -21,8 +21,9 @@ import { convertSimulatedPublicTeardownInputsToWitnessMap, convertSimulatedPublicTeardownOutputFromWitnessMap, } 
from '@aztec/noir-protocol-circuits-types'; -import { type SimulationProvider, WASMSimulator } from '@aztec/simulator'; +import { WASMSimulator } from '../providers/acvm_wasm.js'; +import { type SimulationProvider } from '../providers/simulation_provider.js'; import { type PublicKernelCircuitSimulator } from './public_kernel_circuit_simulator.js'; /** diff --git a/yarn-project/simulator/src/rollup/rollup.ts b/yarn-project/simulator/src/rollup/rollup.ts index 114873499a7..58c72653d8f 100644 --- a/yarn-project/simulator/src/rollup/rollup.ts +++ b/yarn-project/simulator/src/rollup/rollup.ts @@ -28,7 +28,9 @@ import { convertSimulatedBaseRollupInputsToWitnessMap, convertSimulatedBaseRollupOutputsFromWitnessMap, } from '@aztec/noir-protocol-circuits-types'; -import { type SimulationProvider, WASMSimulator } from '@aztec/simulator'; + +import { WASMSimulator } from '../providers/acvm_wasm.js'; +import { type SimulationProvider } from '../providers/simulation_provider.js'; /** * Circuit simulator for the rollup circuits. diff --git a/yarn-project/tsconfig.json b/yarn-project/tsconfig.json index 52b02c625b2..68777079e4a 100644 --- a/yarn-project/tsconfig.json +++ b/yarn-project/tsconfig.json @@ -16,7 +16,8 @@ "resolveJsonModule": true, "composite": true, "skipLibCheck": true, - "noImplicitOverride": true + "noImplicitOverride": true, + "allowArbitraryExtensions": true }, "references": [ { "path": "accounts/tsconfig.json" }, @@ -50,5 +51,5 @@ { "path": "cli/tsconfig.json" } ], "files": ["./@types/jest/index.d.ts"], - "exclude": ["node_modules"] + "exclude": ["node_modules", "**/node_modules", "**/.*/"] } diff --git a/yarn-project/types/src/noir/index.ts b/yarn-project/types/src/noir/index.ts index 7361d6687db..41b860a3719 100644 --- a/yarn-project/types/src/noir/index.ts +++ b/yarn-project/types/src/noir/index.ts @@ -18,6 +18,8 @@ export const AZTEC_VIEW_ATTRIBUTE = 'aztec(view)'; export interface NoirFunctionAbi { /** The parameters of the function. */ parameters: ABIParameter[]; + /** TODO */ + error_types: Record; /** The return type of the function. */ return_type: { /** From 2740d600c0d4a18ce90df24e334e572a80233832 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Nicol=C3=A1s=20Venturo?= Date: Thu, 27 Jun 2024 20:02:49 -0300 Subject: [PATCH 02/29] chore: remove commented code (#7231) Even though https://github.com/noir-lang/noir/issues/4633 is now closed, we no longer need this function due to how github.com/AztecProtocol/aztec-packages/pull/7169 (the sole user of the API) does historical proofs (i.e. it reads a single slot). 
--- .../aztec-nr/aztec/src/public_storage.nr | 16 ---------------- 1 file changed, 16 deletions(-) diff --git a/noir-projects/aztec-nr/aztec/src/public_storage.nr b/noir-projects/aztec-nr/aztec/src/public_storage.nr index 59bc25b5197..b9a34811bab 100644 --- a/noir-projects/aztec-nr/aztec/src/public_storage.nr +++ b/noir-projects/aztec-nr/aztec/src/public_storage.nr @@ -9,22 +9,6 @@ pub fn write(storage_slot: Field, value: T) where T: Serialize { storage_write(storage_slot, value.serialize()); } -// Ideally we'd do the following, but we cannot because of https://github.com/noir-lang/noir/issues/4633 -// pub fn read_historical( -// storage_slot: Field, -// context: PrivateContext -// ) -> T where T: Deserialize { -// let mut fields = [0; N]; -// for i in 0..N { -// fields[i] = public_storage_historical_read( -// context, -// storage_slot + i as Field, -// context.this_address() -// ); -// } -// T::deserialize(fields) -// } - mod tests { use std::test::OracleMock; use dep::protocol_types::traits::{Deserialize, Serialize}; From d7c975d1a4ff68387d5443ef07dad45bfe6ef14c Mon Sep 17 00:00:00 2001 From: AztecBot Date: Fri, 28 Jun 2024 02:15:04 +0000 Subject: [PATCH 03/29] git subrepo push --branch=master barretenberg subrepo: subdir: "barretenberg" merged: "696e195e46" upstream: origin: "https://github.com/AztecProtocol/barretenberg" branch: "master" commit: "696e195e46" git-subrepo: version: "0.4.6" origin: "???" commit: "???" [skip ci] --- barretenberg/.gitrepo | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/barretenberg/.gitrepo b/barretenberg/.gitrepo index dd00b977b67..1d7dc1ca5e2 100644 --- a/barretenberg/.gitrepo +++ b/barretenberg/.gitrepo @@ -6,7 +6,7 @@ [subrepo] remote = https://github.com/AztecProtocol/barretenberg branch = master - commit = 44c9c7e368c0b33a4096a359e7b2b0d45f8bfea1 - parent = c5dc0946f4d300df5c6a70026e102de8e69f020b + commit = 696e195e46d64aeb8fdc0cb1f73aa336dd208b2a + parent = 2740d600c0d4a18ce90df24e334e572a80233832 method = merge cmdver = 0.4.6 From 6f817e86b61aea78d9f4132ecf4c3ed2f96b4e5c Mon Sep 17 00:00:00 2001 From: AztecBot Date: Fri, 28 Jun 2024 02:15:36 +0000 Subject: [PATCH 04/29] chore: replace relative paths to noir-protocol-circuits --- noir-projects/aztec-nr/aztec/Nargo.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/noir-projects/aztec-nr/aztec/Nargo.toml b/noir-projects/aztec-nr/aztec/Nargo.toml index 7a1f1af5863..3e6f8c8bf4c 100644 --- a/noir-projects/aztec-nr/aztec/Nargo.toml +++ b/noir-projects/aztec-nr/aztec/Nargo.toml @@ -5,4 +5,4 @@ compiler_version = ">=0.18.0" type = "lib" [dependencies] -protocol_types = { path = "../../noir-protocol-circuits/crates/types" } +protocol_types = { git="https://github.com/AztecProtocol/aztec-packages", tag="aztec-packages-v0.44.0", directory="noir-projects/noir-protocol-circuits/crates/types" } From 1d4ca2b77dac07448b06e9023fa9b91c745d24e7 Mon Sep 17 00:00:00 2001 From: AztecBot Date: Fri, 28 Jun 2024 02:15:36 +0000 Subject: [PATCH 05/29] git_subrepo.sh: Fix parent in .gitrepo file. 
[skip ci] --- noir-projects/aztec-nr/.gitrepo | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/noir-projects/aztec-nr/.gitrepo b/noir-projects/aztec-nr/.gitrepo index 62e0a6d03de..3f2a2e7e3a2 100644 --- a/noir-projects/aztec-nr/.gitrepo +++ b/noir-projects/aztec-nr/.gitrepo @@ -9,4 +9,4 @@ commit = 03b6171663564ad6313ad8ae466a2707b06eb05c method = merge cmdver = 0.4.6 - parent = aeb5908a8ff4631a7c0985272fbd8b0d16b0d472 + parent = 5f9fdc32f3e37f7cdf7e364f982b406838262e45 From 3cd4d60be9525ead3cbaeb10e2ae3c1d4d1648d6 Mon Sep 17 00:00:00 2001 From: AztecBot Date: Fri, 28 Jun 2024 02:15:39 +0000 Subject: [PATCH 06/29] git subrepo push --branch=master noir-projects/aztec-nr subrepo: subdir: "noir-projects/aztec-nr" merged: "497d868c8a" upstream: origin: "https://github.com/AztecProtocol/aztec-nr" branch: "master" commit: "497d868c8a" git-subrepo: version: "0.4.6" origin: "???" commit: "???" [skip ci] --- noir-projects/aztec-nr/.gitrepo | 4 ++-- noir-projects/aztec-nr/aztec/Nargo.toml | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/noir-projects/aztec-nr/.gitrepo b/noir-projects/aztec-nr/.gitrepo index 3f2a2e7e3a2..0266e13fd62 100644 --- a/noir-projects/aztec-nr/.gitrepo +++ b/noir-projects/aztec-nr/.gitrepo @@ -6,7 +6,7 @@ [subrepo] remote = https://github.com/AztecProtocol/aztec-nr branch = master - commit = 03b6171663564ad6313ad8ae466a2707b06eb05c + commit = 497d868c8a62f7792cebf999e9e0dea2be3b0c81 method = merge cmdver = 0.4.6 - parent = 5f9fdc32f3e37f7cdf7e364f982b406838262e45 + parent = 81cfdc9971d0b750aa9ea40327c5c6ce5aa4508b diff --git a/noir-projects/aztec-nr/aztec/Nargo.toml b/noir-projects/aztec-nr/aztec/Nargo.toml index 3e6f8c8bf4c..7a1f1af5863 100644 --- a/noir-projects/aztec-nr/aztec/Nargo.toml +++ b/noir-projects/aztec-nr/aztec/Nargo.toml @@ -5,4 +5,4 @@ compiler_version = ">=0.18.0" type = "lib" [dependencies] -protocol_types = { git="https://github.com/AztecProtocol/aztec-packages", tag="aztec-packages-v0.44.0", directory="noir-projects/noir-protocol-circuits/crates/types" } +protocol_types = { path = "../../noir-protocol-circuits/crates/types" } From af592474c1d57c9d7886763d04afeb793f98efe3 Mon Sep 17 00:00:00 2001 From: Lucas Xia Date: Fri, 28 Jun 2024 08:54:58 -0400 Subject: [PATCH 07/29] feat: update rebuild script (#7225) Updates the ACIR artifacts rebuild.sh script to output which test programs failed to rebuild. Also updates the reset_acir_tests.sh script to use relative paths. --- barretenberg/acir_tests/reset_acir_tests.sh | 5 ++- noir/noir-repo/test_programs/rebuild.sh | 47 +++++++++++++++++++-- 2 files changed, 47 insertions(+), 5 deletions(-) diff --git a/barretenberg/acir_tests/reset_acir_tests.sh b/barretenberg/acir_tests/reset_acir_tests.sh index e83bea9189e..dffb4d43837 100755 --- a/barretenberg/acir_tests/reset_acir_tests.sh +++ b/barretenberg/acir_tests/reset_acir_tests.sh @@ -1,7 +1,8 @@ -cd ~/aztec-packages/noir/noir-repo +# Run from barretenberg/acir_tests +cd ../../noir/noir-repo cargo clean noirup -p . 
cd test_programs && ./rebuild.sh -cd ~/aztec-packages/barretenberg/acir_tests +cd ../../../barretenberg/acir_tests rm -rf acir_tests diff --git a/noir/noir-repo/test_programs/rebuild.sh b/noir/noir-repo/test_programs/rebuild.sh index 094c3902583..a8175d05066 100755 --- a/noir/noir-repo/test_programs/rebuild.sh +++ b/noir/noir-repo/test_programs/rebuild.sh @@ -1,6 +1,8 @@ #!/usr/bin/env bash set -e +NO_PARALLEL=${1:-} + process_dir() { local dir=$1 local current_dir=$2 @@ -60,7 +62,46 @@ for dir in $current_dir/benchmarks/*; do dirs_to_process+=("$dir") done +pids=() # Array to hold PIDs of background processes +dirs_map=() # Array to map PIDs to directories + +if [ -z $NO_PARALLEL ]; then + # Process directories in parallel + for dir in "${dirs_to_process[@]}"; do + process_dir "$dir" "$current_dir" & # Run process_dir in the background + pid=$! # Get PID of the last background command + pids+=($pid) # Add PID to the pids array + dirs_map[$pid]=$dir # Map PID to the directory being processed + done +else + # Process directories sequentially + for dir in "${dirs_to_process[@]}"; do + process_dir "$dir" "$current_dir" # Run process_dir in the foreground + pid=$! # Get PID of the last command + pids+=($pid) # Add PID to the pids array + dirs_map[$pid]=$dir # Map PID to the directory being processed + done +fi + +# Store the failed processes +failed_pids=() +# Check the exit status of each background job. +for pid in "${pids[@]}"; do + if ! wait $pid; then # Wait for the process to complete, check if it failed + exit_status=$? # Capture the failed exit status + failed_pids+=($pid) + fi +done -parallel -j0 process_dir {} "$current_dir" ::: ${dirs_to_process[@]} - -echo "Rebuild Succeeded!" +echo "" + +# Exit with a failure status if any job failed. +if [ ! -z "$exit_status" ]; then + echo "Rebuild failed for directories:" + # Print the failed directories after waiting for each process to complete + for pid in "${failed_pids[@]}"; do + echo "${dirs_map[$pid]}" + done + exit $exit_status +fi +echo "Rebuild Succeeded!" 
\ No newline at end of file From 9cf49048eefd1f02d22c6b4a8db100b863f39f84 Mon Sep 17 00:00:00 2001 From: spypsy Date: Fri, 28 Jun 2024 14:53:52 +0100 Subject: [PATCH 08/29] fix: devnet deployment issues (#7197) - Fix issues with devnet deployments - fixes #6992 --- .github/workflows/devnet-deploys.yml | 16 +- docs/docs/migration_notes.md | 2 +- yarn-project/aztec/terraform/node/main.tf | 323 +----------------- .../aztec/terraform/node/variables.tf | 20 +- yarn-project/aztec/terraform/prover/main.tf | 270 +++++++++++++++ .../aztec/terraform/prover/variables.tf | 17 + yarn-project/p2p-bootstrap/terraform/main.tf | 9 +- .../p2p-bootstrap/terraform/variables.tf | 5 - 8 files changed, 316 insertions(+), 346 deletions(-) create mode 100644 yarn-project/aztec/terraform/prover/main.tf create mode 100644 yarn-project/aztec/terraform/prover/variables.tf diff --git a/.github/workflows/devnet-deploys.yml b/.github/workflows/devnet-deploys.yml index a2b09291c1e..1120fef4628 100644 --- a/.github/workflows/devnet-deploys.yml +++ b/.github/workflows/devnet-deploys.yml @@ -3,6 +3,10 @@ on: push: branches: [devnet] +concurrency: + group: ${{ github.workflow }}-${{ github.ref }} + cancel-in-progress: true + env: DOCKERHUB_PASSWORD: ${{ secrets.DOCKERHUB_PASSWORD }} GIT_COMMIT: ${{ github.sha }} @@ -56,10 +60,10 @@ jobs: aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }} aws-region: us-west-2 - - name: Deploy Bootstrap Nodes - working-directory: ./yarn-project/aztec/terraform/node + - name: Deploy P2P Bootstrap Nodes + working-directory: ./yarn-project/p2p-bootstrap/terraform run: | - terraform init -input=false -backend-config="key=devnet/aztec-node" + terraform init -input=false -backend-config="key=devnet/p2p-bootstrap" terraform apply -input=false -auto-approve - name: Deploy Aztec Nodes @@ -67,3 +71,9 @@ jobs: run: | terraform init -input=false -backend-config="key=devnet/aztec-node" terraform apply -input=false -auto-approve + + - name: Deploy Provers + working-directory: ./yarn-project/aztec/terraform/prover + run: | + terraform init -input=false -backend-config="key=devnet/prover" + terraform apply -input=false -auto-approve diff --git a/docs/docs/migration_notes.md b/docs/docs/migration_notes.md index 8dce681e600..79c62834ee3 100644 --- a/docs/docs/migration_notes.md +++ b/docs/docs/migration_notes.md @@ -1665,4 +1665,4 @@ Now, just remove the `src` folder,: ```rust easy_private_token_contract = {git = "https://github.com/AztecProtocol/aztec-packages/", tag ="v0.17.0", directory = "noir-projects/noir-contracts/contracts/easy_private_token_contract"} -``` +``` \ No newline at end of file diff --git a/yarn-project/aztec/terraform/node/main.tf b/yarn-project/aztec/terraform/node/main.tf index d627d416f0a..d446d334c89 100644 --- a/yarn-project/aztec/terraform/node/main.tf +++ b/yarn-project/aztec/terraform/node/main.tf @@ -57,9 +57,11 @@ locals { publisher_private_keys = [var.SEQ_1_PUBLISHER_PRIVATE_KEY, var.SEQ_2_PUBLISHER_PRIVATE_KEY] node_p2p_private_keys = [var.NODE_1_PRIVATE_KEY, var.NODE_2_PRIVATE_KEY] node_count = length(local.publisher_private_keys) - #node_count = 1 - data_dir = "/usr/src/yarn-project/aztec/data" - agents_per_sequencer = var.AGENTS_PER_SEQUENCER + data_dir = "/usr/src/yarn-project/aztec/data" +} + +output "node_count" { + value = local.node_count } resource "aws_cloudwatch_log_group" "aztec-node-log-group" { @@ -115,20 +117,6 @@ resource "aws_efs_file_system" "node_data_store" { } } -# resource "aws_efs_mount_target" "private_az1" { -# count = local.node_count -# 
file_system_id = aws_efs_file_system.node_data_store[count.index].id -# subnet_id = data.terraform_remote_state.setup_iac.outputs.subnet_az1_private_id -# security_groups = [data.terraform_remote_state.setup_iac.outputs.security_group_private_id] -# } - -# resource "aws_efs_mount_target" "private_az2" { -# count = local.node_count -# file_system_id = aws_efs_file_system.node_data_store[count.index].id -# subnet_id = data.terraform_remote_state.setup_iac.outputs.subnet_az2_private_id -# security_groups = [data.terraform_remote_state.setup_iac.outputs.security_group_private_id] -# } - resource "aws_efs_mount_target" "public_az1" { count = local.node_count file_system_id = aws_efs_file_system.node_data_store[count.index].id @@ -165,7 +153,7 @@ resource "aws_ecs_task_definition" "aztec-node" { [ { "name": "${var.DEPLOY_TAG}-aztec-node-${count.index + 1}", - "image": "${var.FULL_IMAGE}", + "image": "${var.DOCKERHUB_ACCOUNT}/aztec:${var.DEPLOY_TAG}", "command": ["start", "--node", "--archiver", "--sequencer", "--prover"], "essential": true, "memoryReservation": 3776, @@ -205,7 +193,7 @@ resource "aws_ecs_task_definition" "aztec-node" { }, { "name": "ETHEREUM_HOST", - "value": "https://${var.DEPLOY_TAG}-mainnet-fork.aztec.network:8545/${var.API_KEY}" + "value": "https://aztec-dev-mainnet-fork.aztec.network:8545/${var.API_KEY}" }, { "name": "DATA_DIRECTORY", @@ -357,6 +345,7 @@ resource "aws_ecs_service" "aztec-node" { deployment_maximum_percent = 100 deployment_minimum_healthy_percent = 0 platform_version = "1.4.0" + force_new_deployment = true network_configuration { @@ -373,19 +362,6 @@ resource "aws_ecs_service" "aztec-node" { container_port = 80 } - - # load_balancer { - # target_group_arn = aws_lb_target_group.aztec-node-tcp[count.index].arn - # container_name = "${var.DEPLOY_TAG}-aztec-node-${count.index + 1}" - # container_port = var.NODE_P2P_TCP_PORT + count.index - # } - - # load_balancer { - # target_group_arn = aws_lb_target_group.aztec-node-udp[count.index].arn - # container_name = "${var.DEPLOY_TAG}-aztec-node-${count.index + 1}" - # container_port = var.NODE_P2P_UDP_PORT + count.index - # } - service_registries { registry_arn = aws_service_discovery_service.aztec-node[count.index].arn container_name = "${var.DEPLOY_TAG}-aztec-node-${count.index + 1}" @@ -436,23 +412,6 @@ resource "aws_lb_listener_rule" "api" { } } -# resource "aws_lb_target_group" "aztec-node-tcp" { -# count = local.node_count -# name = "${var.DEPLOY_TAG}-node-${count.index + 1}-p2p-tcp-target" -# port = var.NODE_P2P_TCP_PORT + count.index -# protocol = "TCP" -# target_type = "ip" -# vpc_id = data.terraform_remote_state.setup_iac.outputs.vpc_id - -# health_check { -# protocol = "TCP" -# interval = 10 -# healthy_threshold = 2 -# unhealthy_threshold = 2 -# port = var.NODE_P2P_TCP_PORT + count.index -# } -# } - resource "aws_security_group_rule" "allow-node-tcp-in" { count = local.node_count type = "ingress" @@ -473,40 +432,6 @@ resource "aws_security_group_rule" "allow-node-tcp-out" { security_group_id = data.terraform_remote_state.aztec-network_iac.outputs.p2p_security_group_id } -# resource "aws_lb_listener" "aztec-node-tcp-listener" { -# count = local.node_count -# load_balancer_arn = data.terraform_remote_state.aztec-network_iac.outputs.nlb_arn -# port = var.NODE_P2P_TCP_PORT + count.index -# protocol = "TCP" - -# tags = { -# name = "aztec-node-${count.index}-tcp-listener" -# } - -# default_action { -# type = "forward" -# target_group_arn = aws_lb_target_group.aztec-node-tcp[count.index].arn -# } -# } - - -# 
resource "aws_lb_target_group" "aztec-node-udp" { -# count = local.node_count -# name = "${var.DEPLOY_TAG}-node-${count.index + 1}-p2p-udp-target" -# port = var.NODE_P2P_UDP_PORT + count.index -# protocol = "UDP" -# target_type = "ip" -# vpc_id = data.terraform_remote_state.setup_iac.outputs.vpc_id - -# health_check { -# protocol = "TCP" -# interval = 10 -# healthy_threshold = 2 -# unhealthy_threshold = 2 -# port = var.NODE_P2P_TCP_PORT + count.index -# } -# } - resource "aws_security_group_rule" "allow-node-udp-in" { type = "ingress" from_port = var.NODE_P2P_UDP_PORT @@ -524,235 +449,3 @@ resource "aws_security_group_rule" "allow-node-udp-out" { cidr_blocks = ["0.0.0.0/0"] security_group_id = data.terraform_remote_state.aztec-network_iac.outputs.p2p_security_group_id } - -# resource "aws_lb_listener" "aztec-node-udp-listener" { -# count = local.node_count -# load_balancer_arn = data.terraform_remote_state.aztec-network_iac.outputs.nlb_arn -# port = var.NODE_P2P_UDP_PORT + count.index -# protocol = "UDP" - -# tags = { -# name = "aztec-node-${count.index}-udp-listener" -# } - -# default_action { -# type = "forward" -# target_group_arn = aws_lb_target_group.aztec-node-udp[count.index].arn -# } -# } - - - -// Configuration for proving agents - -resource "aws_cloudwatch_log_group" "aztec-proving-agent-log-group" { - count = local.node_count - name = "/fargate/service/${var.DEPLOY_TAG}/aztec-proving-agent-group-${count.index + 1}" - retention_in_days = 14 -} - -resource "aws_service_discovery_service" "aztec-proving-agent" { - count = local.node_count - name = "${var.DEPLOY_TAG}-aztec-proving-agent-group-${count.index + 1}" - - health_check_custom_config { - failure_threshold = 1 - } - dns_config { - namespace_id = data.terraform_remote_state.setup_iac.outputs.local_service_discovery_id - dns_records { - ttl = 60 - type = "A" - } - dns_records { - ttl = 60 - type = "SRV" - } - routing_policy = "MULTIVALUE" - } - # Terraform just fails if this resource changes and you have registered instances. - provisioner "local-exec" { - when = destroy - command = "${path.module}/servicediscovery-drain.sh ${self.id}" - } -} - -# Define task definitions for each node. -resource "aws_ecs_task_definition" "aztec-proving-agent" { - count = local.node_count - family = "${var.DEPLOY_TAG}-aztec-proving-agent-group-${count.index + 1}" - requires_compatibilities = ["FARGATE"] - network_mode = "awsvpc" - cpu = "16384" - memory = "98304" - execution_role_arn = data.terraform_remote_state.setup_iac.outputs.ecs_task_execution_role_arn - task_role_arn = data.terraform_remote_state.aztec2_iac.outputs.cloudwatch_logging_ecs_role_arn - container_definitions = < Date: Fri, 28 Jun 2024 17:25:38 +0200 Subject: [PATCH 09/29] feat: Unify unencrypted log emission and decoding (#7232) Currently event_selectors for unencrypted_l2_logs emitted from public default to 0. This will be further investigated and addressed with #7198. 
--------- Co-authored-by: Lasse Herskind <16536249+LHerskind@users.noreply.github.com> --- .../aztec/src/context/public_context.nr | 9 +-- noir-projects/aztec-nr/aztec/src/lib.nr | 1 + .../aztec-nr/aztec/src/unencrypted_logs.nr | 1 + .../unencrypted_event_emission.nr | 58 ++++++++++++++ .../contracts/test_log_contract/src/main.nr | 12 +++ yarn-project/aztec.js/src/index.ts | 1 + .../aztec.js/src/wallet/base_wallet.ts | 8 +- .../circuit-types/src/interfaces/pxe.ts | 23 +++++- .../end-to-end/src/e2e_event_logs.test.ts | 55 +++++++++++++- .../pxe/src/pxe_service/pxe_service.ts | 76 ++++++++++++++++++- 10 files changed, 225 insertions(+), 19 deletions(-) create mode 100644 noir-projects/aztec-nr/aztec/src/unencrypted_logs.nr create mode 100644 noir-projects/aztec-nr/aztec/src/unencrypted_logs/unencrypted_event_emission.nr diff --git a/noir-projects/aztec-nr/aztec/src/context/public_context.nr b/noir-projects/aztec-nr/aztec/src/context/public_context.nr index 87b17aa92d2..57d2380e6cf 100644 --- a/noir-projects/aztec-nr/aztec/src/context/public_context.nr +++ b/noir-projects/aztec-nr/aztec/src/context/public_context.nr @@ -17,17 +17,15 @@ impl PublicContext { pub fn storage_address(self) -> AztecAddress { storage_address() } + pub fn fee_per_l2_gas(self) -> Field { fee_per_l2_gas() } + pub fn fee_per_da_gas(self) -> Field { fee_per_da_gas() } - /** - * Emit a log with the given event selector and message. - * @param event_selector The event selector for the log. - * @param message The message to emit in the log. - */ + pub fn emit_unencrypted_log(&mut self, log: T) where T: Serialize { emit_unencrypted_log(Serialize::serialize(log).as_slice()); } @@ -35,6 +33,7 @@ impl PublicContext { pub fn note_hash_exists(self, note_hash: Field, leaf_index: Field) -> bool { note_hash_exists(note_hash, leaf_index) == 1 } + pub fn l1_to_l2_msg_exists(self, msg_hash: Field, msg_leaf_index: Field) -> bool { l1_to_l2_msg_exists(msg_hash, msg_leaf_index) == 1 } diff --git a/noir-projects/aztec-nr/aztec/src/lib.nr b/noir-projects/aztec-nr/aztec/src/lib.nr index 7e7b1af2a8a..e569b892e1a 100644 --- a/noir-projects/aztec-nr/aztec/src/lib.nr +++ b/noir-projects/aztec-nr/aztec/src/lib.nr @@ -12,6 +12,7 @@ mod state_vars; mod prelude; mod public_storage; mod encrypted_logs; +mod unencrypted_logs; use dep::protocol_types; mod utils; diff --git a/noir-projects/aztec-nr/aztec/src/unencrypted_logs.nr b/noir-projects/aztec-nr/aztec/src/unencrypted_logs.nr new file mode 100644 index 00000000000..3eae1f8dc8e --- /dev/null +++ b/noir-projects/aztec-nr/aztec/src/unencrypted_logs.nr @@ -0,0 +1 @@ +mod unencrypted_event_emission; diff --git a/noir-projects/aztec-nr/aztec/src/unencrypted_logs/unencrypted_event_emission.nr b/noir-projects/aztec-nr/aztec/src/unencrypted_logs/unencrypted_event_emission.nr new file mode 100644 index 00000000000..f374a2a1195 --- /dev/null +++ b/noir-projects/aztec-nr/aztec/src/unencrypted_logs/unencrypted_event_emission.nr @@ -0,0 +1,58 @@ +use crate::{ + context::{PrivateContext, PublicContext}, event::event_interface::EventInterface, + encrypted_logs::payload::compute_encrypted_event_log, oracle::logs_traits::LensForEncryptedEvent +}; +use dep::protocol_types::{address::AztecAddress, grumpkin_point::GrumpkinPoint, traits::Serialize}; + +fn emit( + context: &mut PublicContext, + event: Event +) where Event: EventInterface, Event: Serialize, [Field; N]: LensForEventSelector { + let selector = Event::get_event_type_id(); + + let serialized_event = event.serialize(); + let mut emitted_log = [0; M]; + + // 
We put the selector in the "last" place, to avoid reading or assigning to an expression in an index + for i in 0..serialized_event.len() { + emitted_log[i] = serialized_event[i]; + } + + emitted_log[serialized_event.len()] = selector.to_field(); + + context.emit_unencrypted_log(emitted_log); +} + +pub fn encode_event(context: &mut PublicContext) -> fn[(&mut PublicContext,)](Event) -> () where Event: EventInterface, Event: Serialize, [Field; N]: LensForEventSelector { + | e: Event | { + emit( + context, + e, + ); + } +} + +trait LensForEventSelector { + // N = event preimage input in fields + // M = event preimage input in fields + event selector as field + fn output(self: [Field; N]) -> [Field; M]; +} + +impl LensForEventSelector<1, 2> for [Field; 1] { + fn output(self) -> [Field; 2] {[self[0] as Field; 2]} +} +impl LensForEventSelector<2, 3> for [Field; 2] { + fn output(self) -> [Field; 3] {[self[0] as Field; 3]} +} +impl LensForEventSelector<3, 4> for [Field; 3] { + fn output(self) -> [Field; 4] {[self[0] as Field; 4]} +} +impl LensForEventSelector<4, 5> for [Field; 4] { + fn output(self) -> [Field; 5] {[self[0] as Field; 5]} +} +impl LensForEventSelector<5, 6> for [Field; 5] { + fn output(self) -> [Field; 6] {[self[0] as Field; 6]} +} +impl LensForEventSelector<6, 7> for [Field; 6] { + fn output(self) -> [Field; 7] {[self[0] as Field; 7]} +} diff --git a/noir-projects/noir-contracts/contracts/test_log_contract/src/main.nr b/noir-projects/noir-contracts/contracts/test_log_contract/src/main.nr index f42cb2ffd7a..ef63b330603 100644 --- a/noir-projects/noir-contracts/contracts/test_log_contract/src/main.nr +++ b/noir-projects/noir-contracts/contracts/test_log_contract/src/main.nr @@ -5,6 +5,7 @@ contract TestLog { use dep::aztec::encrypted_logs::incoming_body::EncryptedLogIncomingBody; use dep::aztec::event::event_interface::EventInterface; use dep::aztec::encrypted_logs::encrypted_event_emission::{encode_and_encrypt_event, encode_and_encrypt_event_with_keys}; + use dep::aztec::unencrypted_logs::unencrypted_event_emission::encode_event; #[aztec(event)] struct ExampleEvent0 { @@ -64,4 +65,15 @@ contract TestLog { ) ); } + + #[aztec(public)] + fn emit_unencrypted_events(preimages: [Field; 4]) { + let event0 = ExampleEvent0 { value0: preimages[0], value1: preimages[1] }; + + event0.emit(encode_event(&mut context)); + + let event1 = ExampleEvent1 { value2: preimages[2], value3: preimages[3] }; + + event1.emit(encode_event(&mut context)); + } } diff --git a/yarn-project/aztec.js/src/index.ts b/yarn-project/aztec.js/src/index.ts index 4e3d71e4d8f..38cf7e986bc 100644 --- a/yarn-project/aztec.js/src/index.ts +++ b/yarn-project/aztec.js/src/index.ts @@ -99,6 +99,7 @@ export { EncryptedLogHeader, EncryptedNoteLogIncomingBody, EncryptedLogOutgoingBody, + EventType, ExtendedNote, FunctionCall, GrumpkinPrivateKey, diff --git a/yarn-project/aztec.js/src/wallet/base_wallet.ts b/yarn-project/aztec.js/src/wallet/base_wallet.ts index 247b509fbaf..737247c2270 100644 --- a/yarn-project/aztec.js/src/wallet/base_wallet.ts +++ b/yarn-project/aztec.js/src/wallet/base_wallet.ts @@ -1,6 +1,7 @@ import { type AuthWitness, type EventMetadata, + type EventType, type ExtendedNote, type GetUnencryptedLogsResponse, type IncomingNotesFilter, @@ -182,11 +183,12 @@ export abstract class BaseWallet implements Wallet { return this.pxe.getPXEInfo(); } getEvents( + type: EventType, + eventMetadata: EventMetadata, from: number, limit: number, - eventMetadata: EventMetadata, ivpk: Point = 
this.getCompleteAddress().publicKeys.masterIncomingViewingPublicKey, - ): Promise { - return this.pxe.getEvents(from, limit, eventMetadata, ivpk); + ) { + return this.pxe.getEvents(type, eventMetadata, from, limit, ivpk); } } diff --git a/yarn-project/circuit-types/src/interfaces/pxe.ts b/yarn-project/circuit-types/src/interfaces/pxe.ts index 6092eda2780..1cdc9d9f6fe 100644 --- a/yarn-project/circuit-types/src/interfaces/pxe.ts +++ b/yarn-project/circuit-types/src/interfaces/pxe.ts @@ -381,14 +381,21 @@ export interface PXE { isContractPubliclyDeployed(address: AztecAddress): Promise; /** - * Returns the events of a specified type. + * Returns the events of a specified type given search parameters. + * @param type - The type of the event to search for—Encrypted, or Unencrypted. + * @param eventMetadata - Identifier of the event. This should be the class generated from the contract. e.g. Contract.events.Event * @param from - The block number to search from. * @param limit - The amount of blocks to search. - * @param eventMetadata - Identifier of the event. This should be the class generated from the contract. e.g. Contract.events.Event - * @param ivpk - The incoming viewing public key that corresponds to the incoming viewing secret key that can decrypt the log. + * @param ivpk - (Used for encrypted logs only) The incoming viewing public key that corresponds to the incoming viewing secret key that can decrypt the log. * @returns - The deserialized events. */ - getEvents(from: number, limit: number, eventMetadata: EventMetadata, ivpk: Point): Promise; + getEvents( + type: EventType, + eventMetadata: EventMetadata, + from: number, + limit: number, + ivpk: Point, + ): Promise; } // docs:end:pxe-interface @@ -401,6 +408,14 @@ export interface EventMetadata { fieldNames: string[]; } +/** + * This is used in getting events via the filter + */ +export enum EventType { + Encrypted = 'Encrypted', + Unencrypted = 'Unencrypted', +} + /** * Provides basic information about the running PXE. */ diff --git a/yarn-project/end-to-end/src/e2e_event_logs.test.ts b/yarn-project/end-to-end/src/e2e_event_logs.test.ts index 02122e165d1..8421e87b8dc 100644 --- a/yarn-project/end-to-end/src/e2e_event_logs.test.ts +++ b/yarn-project/end-to-end/src/e2e_event_logs.test.ts @@ -1,4 +1,11 @@ -import { type AccountWalletWithSecretKey, type AztecNode, Fr, L1EventPayload, TaggedLog } from '@aztec/aztec.js'; +import { + type AccountWalletWithSecretKey, + type AztecNode, + EventType, + Fr, + L1EventPayload, + TaggedLog, +} from '@aztec/aztec.js'; import { deriveMasterIncomingViewingSecretKey } from '@aztec/circuits.js'; import { EventSelector } from '@aztec/foundation/abi'; import { makeTuple } from '@aztec/foundation/array'; @@ -31,7 +38,7 @@ describe('Logs', () => { afterAll(() => teardown()); describe('functionality around emitting an encrypted log', () => { - it('emits multiple events as encrypted logs and decodes them', async () => { + it('emits multiple events as encrypted logs and decodes a single one manually', async () => { const randomness = makeTuple(2, Fr.random); const preimage = makeTuple(4, Fr.random); @@ -103,15 +110,17 @@ describe('Logs', () => { const lastTx = await testLogContract.methods.emit_encrypted_events(randomness[++i], preimage[i]).send().wait(); const collectedEvent0s = await wallets[0].getEvents( + EventType.Encrypted, + TestLogContract.events.ExampleEvent0, firstTx.blockNumber!, lastTx.blockNumber! - firstTx.blockNumber! 
+ 1, - TestLogContract.events.ExampleEvent0, ); const collectedEvent1s = await wallets[0].getEvents( + EventType.Encrypted, + TestLogContract.events.ExampleEvent1, firstTx.blockNumber!, lastTx.blockNumber! - firstTx.blockNumber! + 1, - TestLogContract.events.ExampleEvent1, // This function can also be called specifying the incoming viewing public key associated with the encrypted event. wallets[0].getCompleteAddress().publicKeys.masterIncomingViewingPublicKey, ); @@ -129,5 +138,43 @@ describe('Logs', () => { preimage.map(preimage => ({ value2: preimage[2], value3: preimage[3] })).sort(exampleEvent1Sort), ); }); + + it('emits multiple events as unencrypted logs and decodes them', async () => { + const preimage = makeTuple(5, makeTuple.bind(undefined, 4, Fr.random)) as Tuple, 5>; + + let i = 0; + const firstTx = await testLogContract.methods.emit_unencrypted_events(preimage[i]).send().wait(); + await Promise.all( + [...new Array(3)].map(() => testLogContract.methods.emit_unencrypted_events(preimage[++i]).send().wait()), + ); + const lastTx = await testLogContract.methods.emit_unencrypted_events(preimage[++i]).send().wait(); + + const collectedEvent0s = await wallets[0].getEvents( + EventType.Unencrypted, + TestLogContract.events.ExampleEvent0, + firstTx.blockNumber!, + lastTx.blockNumber! - firstTx.blockNumber! + 1, + ); + + const collectedEvent1s = await wallets[0].getEvents( + EventType.Unencrypted, + TestLogContract.events.ExampleEvent1, + firstTx.blockNumber!, + lastTx.blockNumber! - firstTx.blockNumber! + 1, + ); + + expect(collectedEvent0s.length).toBe(5); + expect(collectedEvent1s.length).toBe(5); + + const exampleEvent0Sort = (a: ExampleEvent0, b: ExampleEvent0) => (a.value0 > b.value0 ? 1 : -1); + expect(collectedEvent0s.sort(exampleEvent0Sort)).toStrictEqual( + preimage.map(preimage => ({ value0: preimage[0], value1: preimage[1] })).sort(exampleEvent0Sort), + ); + + const exampleEvent1Sort = (a: ExampleEvent1, b: ExampleEvent1) => (a.value2 > b.value2 ? 
1 : -1); + expect(collectedEvent1s.sort(exampleEvent1Sort)).toStrictEqual( + preimage.map(preimage => ({ value2: preimage[2], value3: preimage[3] })).sort(exampleEvent1Sort), + ); + }); }); }); diff --git a/yarn-project/pxe/src/pxe_service/pxe_service.ts b/yarn-project/pxe/src/pxe_service/pxe_service.ts index cf351c5bf9b..e9b76b6d07c 100644 --- a/yarn-project/pxe/src/pxe_service/pxe_service.ts +++ b/yarn-project/pxe/src/pxe_service/pxe_service.ts @@ -4,6 +4,7 @@ import { EncryptedNoteTxL2Logs, EncryptedTxL2Logs, type EventMetadata, + EventType, ExtendedNote, type FunctionCall, type GetUnencryptedLogsResponse, @@ -35,8 +36,14 @@ import { getContractClassFromArtifact, } from '@aztec/circuits.js'; import { computeNoteHashNonce, siloNullifier } from '@aztec/circuits.js/hash'; -import { type ContractArtifact, type DecodedReturn, FunctionSelector, encodeArguments } from '@aztec/foundation/abi'; -import { type Fq, Fr, type Point } from '@aztec/foundation/fields'; +import { + type ContractArtifact, + type DecodedReturn, + EventSelector, + FunctionSelector, + encodeArguments, +} from '@aztec/foundation/abi'; +import { type Fq, Fr, Point } from '@aztec/foundation/fields'; import { SerialQueue } from '@aztec/foundation/fifo'; import { type DebugLogger, createDebugLogger } from '@aztec/foundation/log'; import { type KeyStore } from '@aztec/key-store'; @@ -834,7 +841,34 @@ export class PXEService implements PXE { return !!(await this.node.getContract(address)); } - public async getEvents(from: number, limit: number, eventMetadata: EventMetadata, ivpk: Point): Promise { + public getEvents( + type: EventType.Encrypted, + eventMetadata: EventMetadata, + from: number, + limit: number, + ivpk: Point, + ): Promise; + public getEvents( + type: EventType.Unencrypted, + eventMetadata: EventMetadata, + from: number, + limit: number, + ): Promise; + public getEvents( + type: EventType, + eventMetadata: EventMetadata, + from: number, + limit: number, + ivpk: Point = Point.ZERO, + ): Promise { + if (type.includes(EventType.Encrypted)) { + return this.getEncryptedEvents(from, limit, eventMetadata, ivpk); + } + + return this.getUnencryptedEvents(from, limit, eventMetadata); + } + + async getEncryptedEvents(from: number, limit: number, eventMetadata: EventMetadata, ivpk: Point): Promise { const blocks = await this.node.getBlocks(from, limit); const txEffects = blocks.flatMap(block => block.body.txEffects); @@ -874,4 +908,40 @@ export class PXEService implements PXE { return decodedEvents; } + + async getUnencryptedEvents(from: number, limit: number, eventMetadata: EventMetadata): Promise { + const { logs: unencryptedLogs } = await this.node.getUnencryptedLogs({ + fromBlock: from, + toBlock: from + limit, + }); + + const decodedEvents = unencryptedLogs + .map(unencryptedLog => { + const unencryptedLogBuf = unencryptedLog.log.data; + if ( + !EventSelector.fromBuffer(unencryptedLogBuf.subarray(unencryptedLogBuf.byteLength - 4)).equals( + eventMetadata.eventSelector, + ) + ) { + return undefined; + } + + if (unencryptedLogBuf.byteLength !== eventMetadata.fieldNames.length * 32 + 32) { + throw new Error( + 'Something is weird here, we have matching FunctionSelectors, but the actual payload has mismatched length', + ); + } + + return eventMetadata.fieldNames.reduce( + (acc, curr, i) => ({ + ...acc, + [curr]: new Fr(unencryptedLogBuf.subarray(i * 32, i * 32 + 32)), + }), + {} as T, + ); + }) + .filter(unencryptedLog => unencryptedLog !== undefined) as T[]; + + return decodedEvents; + } } From 
25507e63e6a629a8a16ad47434141a95bbb0e102 Mon Sep 17 00:00:00 2001 From: Tom French <15848336+TomAFrench@users.noreply.github.com> Date: Fri, 28 Jun 2024 16:44:42 +0100 Subject: [PATCH 10/29] chore: avoid building contracts when producing gates report (#7136) This build target can be built from `+build-protocol-circuits` rather than `+build` as it doesn't need access to the contracts in order to do its job. This will make it complete much faster. --- noir-projects/Earthfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/noir-projects/Earthfile b/noir-projects/Earthfile index 13f21ffed8d..50267fa3084 100644 --- a/noir-projects/Earthfile +++ b/noir-projects/Earthfile @@ -77,7 +77,7 @@ format: RUN nargo fmt --check gates-report: - FROM +build + FROM +build-protocol-circuits WORKDIR /usr/src/noir-projects COPY ./gates_report.sh ./gates_report.sh From b3409c48b5d116698a67b4ceb52bd2fb4ee3c8ad Mon Sep 17 00:00:00 2001 From: josh crites Date: Fri, 28 Jun 2024 12:32:30 -0400 Subject: [PATCH 11/29] fix(docs): Historical reference library updates (#7166) This PR updates the references and adds links to examples and implementations closes: https://github.com/AztecProtocol/dev-rel/issues/291 --- .../history_lib_reference.md | 164 ++++++++++-------- .../aztec/src/history/contract_inclusion.nr | 2 + .../aztec/src/history/nullifier_inclusion.nr | 2 + .../src/history/nullifier_non_inclusion.nr | 2 + .../inclusion_proofs_contract/src/main.nr | 10 ++ 5 files changed, 105 insertions(+), 75 deletions(-) diff --git a/docs/docs/reference/smart_contract_reference/history_lib_reference.md b/docs/docs/reference/smart_contract_reference/history_lib_reference.md index 825d2ddb8cc..a717c3fb9e6 100644 --- a/docs/docs/reference/smart_contract_reference/history_lib_reference.md +++ b/docs/docs/reference/smart_contract_reference/history_lib_reference.md @@ -7,43 +7,26 @@ sidebar_position: 3 ## Note inclusion -Note inclusion proves that a note existed (its hash was included in a note hash tree) at a specific block number. There exists a version that tests for note inclusion at current block number. It is recommended to use this version whenever possible to reduce cost. +Note inclusion proves that a note existed (its hash was included in a note hash tree) in a block header. ### prove_note_inclusion -`prove_note_inclusion_at` takes 3 parameters: +`prove_note_inclusion` takes 1 parameter: -| Name | Type | Description | -| ---------------- | -------------- | ----------------------------------------- | -| note_with_header | Note | The note you are proving inclusion for | -| block_number | u32 | Block number for proving note's existence | -| context | PrivateContext | Private context | +| Name | Type | Description | +| ---- | ---- | -------------------------------------- | +| note | Note | The note you are proving inclusion for | -## prove_note_commitment_inclusion +#### Example -A **commitment**, also referred to as a **note hash** is a public acknowledgment of the existence of a note without revealing the content of the note. You can learn more about how to compress a note to a note hash [here](../../aztec/concepts/storage/trees/index.md#example-note). 
- -`prove_note_commitment_inclusion` takes 2 parameters: - -| Name | Type | Description | -| ---------------- | -------------- | -------------------------------------- | -| note_with_header | Note | The note you are proving inclusion for | -| context | PrivateContext | Private context | +#include_code prove_note_inclusion noir-projects/noir-contracts/contracts/inclusion_proofs_contract/src/main.nr rust ## Note validity -This proves that a note exists and has not been nullified at a specified block. Again as above, there exists a version that tests for validity at current block. It is recommended to use this version whenever possible to reduce cost. +This proves that a note exists and has not been nullified in a specific block header. ### prove_note_validity -`prove_note_validity_at` takes 3 parameters: - -| Name | Type | Description | -| ---------------- | -------------- | ----------------------------------------- | -| note_with_header | Note | The note you are proving inclusion for | -| block_number | u32 | Block number for proving note's existence | -| context | PrivateContext | Private context | - `prove_note_validity` takes 2 parameters: | Name | Type | Description | @@ -51,56 +34,58 @@ This proves that a note exists and has not been nullified at a specified block. | note_with_header | Note | The note you are proving inclusion for | | context | PrivateContext | Private context | +#### Example + +#include_code prove_note_validity noir-projects/noir-contracts/contracts/inclusion_proofs_contract/src/main.nr rust + ## Nullifier inclusion -This proves that a nullifier was included in a certain block (can be used to prove that a note had been nullified). The same disclaimer above holds true for this, and subsequent functions that specify another version without a block_number argument. +This proves that a nullifier exists in a given block header (can be used to prove that a note had been nullified). ### prove_nullifier_inclusion -`prove_nullifier_inclusion_at` takes 3 parameters: +`prove_nullifier_inclusion` takes 1 parameter: -| Name | Type | Description | -| ------------ | -------------- | ------------------------------------------- | -| nullifier | Field | The nullifier you are proving inclusion for | -| block_number | u32 | Block number for proving note's existence | -| context | PrivateContext | Private context | +| Name | Type | Description | +| --------- | ----- | ------------------------------------------- | +| nullifier | Field | The nullifier you are proving inclusion for | -`prove_nullifier_inclusion` takes 2 parameters: +#### Example -| Name | Type | Description | -| --------- | -------------- | ------------------------------------------- | -| nullifier | Field | The nullifier you are proving inclusion for | -| context | PrivateContext | Private context | +#include_code prove_nullifier_inclusion noir-projects/noir-contracts/contracts/inclusion_proofs_contract/src/main.nr rust -### prove_note_is_nullified_at / prove_note_is_nullified +### prove_note_is_nullified Instead of passing the nullifier, you can check that a note has been nullified by passing the note. +#### Implementation + +#include_code prove_note_is_nullified noir-projects/aztec-nr/aztec/src/history/nullifier_inclusion.nr rust + ## Nullifier non inclusion -This proves that a nullifier was not included in a certain block (can be used to prove that a note had not yet been nullified in a given block). 
+This proves that a nullifier was not included in a certain block, given the block header (can be used to prove that a note had not yet been nullified in a given block). -### prove_nullifier_not_included +### prove_nullifier_non_inclusion -`prove_nullifier_not_included_at` takes 3 parameters: +`prove_nullifier_non_inclusion` takes 1 parameters: -| Name | Type | Description | -| ------------ | -------------- | ------------------------------------------- | -| nullifier | Field | The nullifier you are proving inclusion for | -| block_number | u32 | Block number for proving note's existence | -| context | PrivateContext | Private context | +| Name | Type | Description | +| --------- | ----- | ------------------------------------------- | +| nullifier | Field | The nullifier you are proving inclusion for | -`prove_nullifier_not_included` takes 2 parameters: +#### Example -| Name | Type | Description | -| --------- | -------------- | ------------------------------------------- | -| nullifier | Field | The nullifier you are proving inclusion for | -| context | PrivateContext | Private context | +#include_code prove_nullifier_non_inclusion noir-projects/aztec-nr/aztec/src/history/contract_inclusion.nr rust -### prove_note_not_nullified_at / prove_note_not_nullified +### prove_note_not_nullified Instead of passing the nullifier, you can check that a note has not been nullified by passing the note. +#### Implementation + +#include_code prove_note_not_nullified noir-projects/aztec-nr/aztec/src/history/nullifier_non_inclusion.nr rust + ## Public storage historical reads These return the value stored in a public storage slot of a given contract at the end of the execution of a certain block (the latest one if using `public_storage_historical_read`). @@ -109,36 +94,65 @@ Note that it is never possible to read the _current_ value in a public storage s ### public_storage_historical_read -`public_storage_historical_read_at` takes 4 parameters: +`public_storage_historical_read` takes 2 parameters: -| Name | Type | Description | -| ---------------- | -------------- | ---------------------------------------- | -| context | PrivateContext | Private context | -| storage_slot | Field | Storage slot | -| contract_address | AztecAddress | The contract that owns the storage slot | -| block_number | u32 | Historical block number in which to read | +| Name | Type | Description | +| ---------------- | ------------ | --------------------------------------- | +| storage_slot | Field | Storage slot | +| contract_address | AztecAddress | The contract that owns the storage slot | -`public_storage_historical_read` takes 3 parameters. `block_number` is implicitly the historical block number from the context: +#### Example -| Name | Type | Description | -| ---------------- | -------------- | --------------------------------------- | -| context | PrivateContext | Private context | -| storage_slot | Field | Storage slot | -| contract_address | AztecAddress | The contract that owns the storage slot | +#include_code public_storage_historical_read noir-projects/noir-contracts/contracts/inclusion_proofs_contract/src/main.nr rust ## Contract inclusion This proves that a contract exists in, ie had been deployed before or in, a certain block. 
-### prove_contract_inclusion +### prove_contract_deployment + +`prove_contract_deployment` takes 1 parameter: + +| Name | Type | Description | +| ---------------- | ------------ | ------------------------------------------- | +| contract_address | AztecAddress | The contract address to prove deployment of | + +#### Example + +#include_code prove_contract_deployment noir-projects/noir-contracts/contracts/inclusion_proofs_contract/src/main.nr rust + +### prove_contract_non_deployment + +`prove_contract_non_deployment` takes 1 parameter: + +| Name | Type | Description | +| ---------------- | ------------ | ----------------------------------------------- | +| contract_address | AztecAddress | The contract address to prove non-deployment of | + +#### Example + +#include_code prove_contract_non_deployment noir-projects/noir-contracts/contracts/inclusion_proofs_contract/src/main.nr rust + +### prove_contract_initialization + +`prove_contract_initialization` takes 1 parameter: + +| Name | Type | Description | +| ---------------- | ------------ | ----------------------------------------------- | +| contract_address | AztecAddress | The contract address to prove initialization of | + +#### Example + +#include_code prove_contract_initialization noir-projects/noir-contracts/contracts/inclusion_proofs_contract/src/main.nr rust + +### prove_contract_non_initialization + +`prove_contract_non_initialization` takes 1 parameter: + +| Name | Type | Description | +| ---------------- | ------------ | --------------------------------------------------- | +| contract_address | AztecAddress | The contract address to prove non-initialization of | -`prove_contract_inclusion_at` takes 7 parameters: +#### Example -| Name | Type | Description | -| --------------------- | -------------- | -------------------------------------------- | -| deployer_public_key | GrumpkinPoint | Public key of the contract deployer | -| contract_address_salt | Field | Unique identifier for the contract's address | -| function_tree_root | Field | Root of the contract's function tree | -| constructor_hash | Field | Hash of the contract's constructor | -| block_number | u32 | Block number for proof verification | -| context | PrivateContext | Private context | +#include_code prove_contract_non_initialization noir-projects/noir-contracts/contracts/inclusion_proofs_contract/src/main.nr rust diff --git a/noir-projects/aztec-nr/aztec/src/history/contract_inclusion.nr b/noir-projects/aztec-nr/aztec/src/history/contract_inclusion.nr index 986ccc39e2f..78ce23575e4 100644 --- a/noir-projects/aztec-nr/aztec/src/history/contract_inclusion.nr +++ b/noir-projects/aztec-nr/aztec/src/history/contract_inclusion.nr @@ -31,7 +31,9 @@ impl ProveContractNonDeployment for Header { contract_address.to_field() ); + // docs:start:prove_nullifier_non_inclusion self.prove_nullifier_non_inclusion(nullifier); + // docs:end:prove_nullifier_non_inclusion } } diff --git a/noir-projects/aztec-nr/aztec/src/history/nullifier_inclusion.nr b/noir-projects/aztec-nr/aztec/src/history/nullifier_inclusion.nr index 69d571e9623..ea2a199d75c 100644 --- a/noir-projects/aztec-nr/aztec/src/history/nullifier_inclusion.nr +++ b/noir-projects/aztec-nr/aztec/src/history/nullifier_inclusion.nr @@ -36,9 +36,11 @@ trait ProveNoteIsNullified { } impl ProveNoteIsNullified for Header { + // docs:start:prove_note_is_nullified fn prove_note_is_nullified(self, note: Note, context: &mut PrivateContext) where Note: NoteInterface { let nullifier = compute_siloed_nullifier(note, context); 
self.prove_nullifier_inclusion(nullifier); } + // docs:end:prove_note_is_nullified } diff --git a/noir-projects/aztec-nr/aztec/src/history/nullifier_non_inclusion.nr b/noir-projects/aztec-nr/aztec/src/history/nullifier_non_inclusion.nr index eb67b1bb23d..d43b7c3e1cd 100644 --- a/noir-projects/aztec-nr/aztec/src/history/nullifier_non_inclusion.nr +++ b/noir-projects/aztec-nr/aztec/src/history/nullifier_non_inclusion.nr @@ -44,9 +44,11 @@ trait ProveNoteNotNullified { } impl ProveNoteNotNullified for Header { + // docs:start:prove_note_not_nullified fn prove_note_not_nullified(self, note: Note, context: &mut PrivateContext) where Note: NoteInterface { let nullifier = compute_siloed_nullifier(note, context); self.prove_nullifier_non_inclusion(nullifier); } + // docs:end:prove_note_not_nullified } diff --git a/noir-projects/noir-contracts/contracts/inclusion_proofs_contract/src/main.nr b/noir-projects/noir-contracts/contracts/inclusion_proofs_contract/src/main.nr index 71cc8d803f7..875686868c4 100644 --- a/noir-projects/noir-contracts/contracts/inclusion_proofs_contract/src/main.nr +++ b/noir-projects/noir-contracts/contracts/inclusion_proofs_contract/src/main.nr @@ -194,9 +194,11 @@ contract InclusionProofs { let header = context.get_header_at(block_number); assert_eq( + // docs:start:public_storage_historical_read header.public_storage_historical_read( storage.public_unused_value.storage_slot, context.this_address() + // docs:end:public_storage_historical_read ), 0 ); } @@ -229,10 +231,14 @@ contract InclusionProofs { let header = context.get_header_at(block_number); if test_deployment { + // docs:start:prove_contract_deployment header.prove_contract_deployment(contract_address); + // docs:end:prove_contract_deployment } if test_initialization { + // docs:start:prove_contract_initialization header.prove_contract_initialization(contract_address); + // docs:end:prove_contract_initialization } } @@ -247,10 +253,14 @@ contract InclusionProofs { let header = context.get_header_at(block_number); if test_deployment { + // docs:start:prove_contract_non_deployment header.prove_contract_non_deployment(contract_address); + // docs:end:prove_contract_non_deployment } if test_initialization { + // docs:start:prove_contract_non_initialization header.prove_contract_non_initialization(contract_address); + // docs:end:prove_contract_non_initialization } } } From 79e85883c90465cf2ff6e1a2d7af0e5d4d3e111c Mon Sep 17 00:00:00 2001 From: Jean M <132435771+jeanmon@users.noreply.github.com> Date: Fri, 28 Jun 2024 19:01:44 +0200 Subject: [PATCH 12/29] feat(avm): calldata gadget preliminaries (#7227) First preliminary work for issue #7211. Added a new public calldata column and passes calldata file to the avm verifier. 
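[Editor's note] For readers skimming the C++ changes below: after this patch the buffer handed to the AVM verifier carries calldata as an extra public column, serialized between the kernel public inputs and the raw proof. The following is a minimal, hypothetical TypeScript sketch of that split (the real parsing lives in `Execution::verify` in avm_execution.cpp); all names and the parameterized length are illustrative, not the actual bb-prover API.

```typescript
// Assumed field layout on the verifier side, per this patch:
//   [ public inputs | calldata size (1 field) | calldata | raw proof ]
interface SplitAvmProof {
  publicInputs: bigint[];
  calldata: bigint[];
  rawProof: bigint[];
}

function splitAvmProof(fields: bigint[], publicInputsLength: number): SplitAvmProof {
  const publicInputs = fields.slice(0, publicInputsLength);
  // One field directly after the public inputs encodes how many calldata fields follow.
  const calldataSize = Number(fields[publicInputsLength]);
  const calldataStart = publicInputsLength + 1;
  const calldata = fields.slice(calldataStart, calldataStart + calldataSize);
  const rawProof = fields.slice(calldataStart + calldataSize);
  return { publicInputs, calldata, rawProof };
}
```

In the patch itself, the recovered calldata is then appended as a fifth column to the public-inputs vectors returned by `copy_public_inputs_columns` before `verifier.verify_proof` is called.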
--- barretenberg/cpp/pil/avm/main.pil | 3 + .../relations/generated/avm/declare_views.hpp | 1 + .../vm/avm_trace/avm_execution.cpp | 28 +++-- .../barretenberg/vm/avm_trace/avm_helper.cpp | 6 +- .../barretenberg/vm/avm_trace/avm_helper.hpp | 3 +- .../barretenberg/vm/avm_trace/avm_trace.cpp | 51 +++++++-- .../barretenberg/vm/avm_trace/avm_trace.hpp | 11 +- .../vm/generated/avm_circuit_builder.cpp | 26 +++-- .../vm/generated/avm_circuit_builder.hpp | 6 +- .../barretenberg/vm/generated/avm_flavor.hpp | 13 ++- .../barretenberg/vm/generated/avm_prover.cpp | 2 + .../vm/generated/avm_verifier.cpp | 6 + .../vm/tests/avm_arithmetic.test.cpp | 103 +++++++++++++----- .../vm/tests/avm_bitwise.test.cpp | 2 +- .../barretenberg/vm/tests/avm_cast.test.cpp | 29 +++-- .../vm/tests/avm_comparison.test.cpp | 23 +++- .../vm/tests/avm_control_flow.test.cpp | 2 +- .../vm/tests/avm_execution.test.cpp | 30 ++--- .../vm/tests/avm_indirect_mem.test.cpp | 2 +- .../barretenberg/vm/tests/avm_kernel.test.cpp | 2 +- .../vm/tests/avm_mem_opcodes.test.cpp | 2 +- .../barretenberg/vm/tests/avm_memory.test.cpp | 17 +-- .../barretenberg/vm/tests/helpers.test.cpp | 12 +- .../barretenberg/vm/tests/helpers.test.hpp | 3 +- yarn-project/bb-prover/src/bb/execute.ts | 12 +- 25 files changed, 269 insertions(+), 126 deletions(-) diff --git a/barretenberg/cpp/pil/avm/main.pil b/barretenberg/cpp/pil/avm/main.pil index 19e445d6290..8ea4377a538 100644 --- a/barretenberg/cpp/pil/avm/main.pil +++ b/barretenberg/cpp/pil/avm/main.pil @@ -17,6 +17,9 @@ namespace main(256); pol constant sel_first = [1] + [0]*; // Used mostly to toggle off the first row consisting // only in first element of shifted polynomials. + //===== PUBLIC COLUMNS========================================================= + pol public calldata; + //===== KERNEL INPUTS ========================================================= // Kernel lookup selector opcodes pol commit sel_q_kernel_lookup; diff --git a/barretenberg/cpp/src/barretenberg/relations/generated/avm/declare_views.hpp b/barretenberg/cpp/src/barretenberg/relations/generated/avm/declare_views.hpp index 2cdb82e6d1e..9fa9e19c6c4 100644 --- a/barretenberg/cpp/src/barretenberg/relations/generated/avm/declare_views.hpp +++ b/barretenberg/cpp/src/barretenberg/relations/generated/avm/declare_views.hpp @@ -8,6 +8,7 @@ [[maybe_unused]] auto kernel_kernel_value_out = View(new_term.kernel_kernel_value_out); \ [[maybe_unused]] auto kernel_kernel_side_effect_out = View(new_term.kernel_kernel_side_effect_out); \ [[maybe_unused]] auto kernel_kernel_metadata_out = View(new_term.kernel_kernel_metadata_out); \ + [[maybe_unused]] auto main_calldata = View(new_term.main_calldata); \ [[maybe_unused]] auto alu_a_hi = View(new_term.alu_a_hi); \ [[maybe_unused]] auto alu_a_lo = View(new_term.alu_a_lo); \ [[maybe_unused]] auto alu_b_hi = View(new_term.alu_b_hi); \ diff --git a/barretenberg/cpp/src/barretenberg/vm/avm_trace/avm_execution.cpp b/barretenberg/cpp/src/barretenberg/vm/avm_trace/avm_execution.cpp index f055af7abe1..79bc084ac06 100644 --- a/barretenberg/cpp/src/barretenberg/vm/avm_trace/avm_execution.cpp +++ b/barretenberg/cpp/src/barretenberg/vm/avm_trace/avm_execution.cpp @@ -1,6 +1,7 @@ #include "barretenberg/vm/avm_trace/avm_execution.hpp" #include "barretenberg/bb/log.hpp" #include "barretenberg/common/serialize.hpp" +#include "barretenberg/numeric/uint256/uint256.hpp" #include "barretenberg/vm/avm_trace/avm_common.hpp" #include "barretenberg/vm/avm_trace/avm_deserialization.hpp" #include "barretenberg/vm/avm_trace/avm_helper.hpp" 
@@ -78,10 +79,11 @@ std::tuple Execution::prove(std::vector public_inputs_vec; + std::vector calldata; std::vector raw_proof; - std::copy( - proof.begin(), proof.begin() + PUBLIC_CIRCUIT_PUBLIC_INPUTS_LENGTH, std::back_inserter(public_inputs_vec)); - std::copy(proof.begin() + PUBLIC_CIRCUIT_PUBLIC_INPUTS_LENGTH, proof.end(), std::back_inserter(raw_proof)); + + // This can be made nicer using BB's serialize::read, probably. + const auto public_inputs_offset = proof.begin(); + const auto calldata_size_offset = public_inputs_offset + PUBLIC_CIRCUIT_PUBLIC_INPUTS_LENGTH; + const auto calldata_offset = calldata_size_offset + 1; + const auto raw_proof_offset = calldata_offset + static_cast(uint64_t(*calldata_size_offset)); + + std::copy(public_inputs_offset, calldata_size_offset, std::back_inserter(public_inputs_vec)); + std::copy(calldata_offset, raw_proof_offset, std::back_inserter(calldata)); + std::copy(raw_proof_offset, proof.end(), std::back_inserter(raw_proof)); VmPublicInputs public_inputs = convert_public_inputs(public_inputs_vec); - std::vector> public_inputs_columns = copy_public_inputs_columns(public_inputs); + std::vector> public_inputs_columns = copy_public_inputs_columns(public_inputs, calldata); return verifier.verify_proof(raw_proof, public_inputs_columns); } @@ -309,7 +320,7 @@ std::vector Execution::gen_trace(std::vector const& instructio uint32_t start_side_effect_counter = !public_inputs_vec.empty() ? static_cast(public_inputs_vec[PCPI_START_SIDE_EFFECT_COUNTER_OFFSET]) : 0; - AvmTraceBuilder trace_builder(public_inputs, execution_hints, start_side_effect_counter); + AvmTraceBuilder trace_builder(public_inputs, execution_hints, start_side_effect_counter, calldata); // Copied version of pc maintained in trace builder. The value of pc is evolving based // on opcode logic and therefore is not maintained here. 
However, the next opcode in the execution @@ -436,8 +447,7 @@ std::vector Execution::gen_trace(std::vector const& instructio trace_builder.op_calldata_copy(std::get(inst.operands.at(0)), std::get(inst.operands.at(1)), std::get(inst.operands.at(2)), - std::get(inst.operands.at(3)), - calldata); + std::get(inst.operands.at(3))); break; // Machine State - Gas case OpCode::L2GASLEFT: diff --git a/barretenberg/cpp/src/barretenberg/vm/avm_trace/avm_helper.cpp b/barretenberg/cpp/src/barretenberg/vm/avm_trace/avm_helper.cpp index 8a508442aa5..231b62278a9 100644 --- a/barretenberg/cpp/src/barretenberg/vm/avm_trace/avm_helper.cpp +++ b/barretenberg/cpp/src/barretenberg/vm/avm_trace/avm_helper.cpp @@ -136,7 +136,8 @@ bool is_operand_indirect(uint8_t ind_value, uint8_t operand_idx) return static_cast((ind_value & (1 << operand_idx)) >> operand_idx); } -std::vector> copy_public_inputs_columns(VmPublicInputs const& public_inputs) +std::vector> copy_public_inputs_columns(VmPublicInputs const& public_inputs, + std::vector const& calldata) { // We convert to a vector as the pil generated verifier is generic and unaware of the KERNEL_INPUTS_LENGTH // For each of the public input vectors @@ -158,7 +159,8 @@ std::vector> copy_public_inputs_columns(VmPublicInputs const& pu return { std::move(public_inputs_kernel_inputs), std::move(public_inputs_kernel_value_outputs), std::move(public_inputs_kernel_side_effect_outputs), - std::move(public_inputs_kernel_metadata_outputs) }; + std::move(public_inputs_kernel_metadata_outputs), + calldata }; } } // namespace bb::avm_trace diff --git a/barretenberg/cpp/src/barretenberg/vm/avm_trace/avm_helper.hpp b/barretenberg/cpp/src/barretenberg/vm/avm_trace/avm_helper.hpp index 105e1c529f7..d982ee258a9 100644 --- a/barretenberg/cpp/src/barretenberg/vm/avm_trace/avm_helper.hpp +++ b/barretenberg/cpp/src/barretenberg/vm/avm_trace/avm_helper.hpp @@ -16,6 +16,7 @@ bool is_operand_indirect(uint8_t ind_value, uint8_t operand_idx); // There are 4 public input columns, one for inputs, and 3 for the kernel outputs {value, side effect counter, metadata} // The verifier is generic, and so accepts vectors of these values rather than the fixed length arrays that are used // during circuit building. This method copies each array into a vector to be used by the verifier. -std::vector> copy_public_inputs_columns(VmPublicInputs const& public_inputs); +std::vector> copy_public_inputs_columns(VmPublicInputs const& public_inputs, + std::vector const& calldata); } // namespace bb::avm_trace \ No newline at end of file diff --git a/barretenberg/cpp/src/barretenberg/vm/avm_trace/avm_trace.cpp b/barretenberg/cpp/src/barretenberg/vm/avm_trace/avm_trace.cpp index b1f7109c6d5..477775e22b4 100644 --- a/barretenberg/cpp/src/barretenberg/vm/avm_trace/avm_trace.cpp +++ b/barretenberg/cpp/src/barretenberg/vm/avm_trace/avm_trace.cpp @@ -32,9 +32,11 @@ namespace bb::avm_trace { */ AvmTraceBuilder::AvmTraceBuilder(VmPublicInputs public_inputs, ExecutionHints execution_hints, - uint32_t side_effect_counter) + uint32_t side_effect_counter, + std::vector calldata) // NOTE: we initialise the environment builder here as it requires public inputs : kernel_trace_builder(std::move(public_inputs)) + , calldata(std::move(calldata)) , side_effect_counter(side_effect_counter) , initial_side_effect_counter(side_effect_counter) , execution_hints(std::move(execution_hints)) @@ -1886,10 +1888,8 @@ void AvmTraceBuilder::op_div( * @param cd_offset The starting index of the region in calldata to be copied. 
* @param copy_size The number of finite field elements to be copied into memory. * @param dst_offset The starting index of memory where calldata will be copied to. - * @param call_data_mem The vector containing calldata. */ -void AvmTraceBuilder::op_calldata_copy( - uint8_t indirect, uint32_t cd_offset, uint32_t copy_size, uint32_t dst_offset, std::vector const& call_data_mem) +void AvmTraceBuilder::op_calldata_copy(uint8_t indirect, uint32_t cd_offset, uint32_t copy_size, uint32_t dst_offset) { // We parallelize storing memory operations in chunk of 3, i.e., 1 per intermediate register. // The variable pos is an index pointing to the first storing operation (pertaining to intermediate @@ -1912,7 +1912,7 @@ void AvmTraceBuilder::op_calldata_copy( uint32_t rwc(0); auto clk = static_cast(main_trace.size()) + 1; - FF ia = call_data_mem.at(cd_offset + pos); + FF ia = calldata.at(cd_offset + pos); uint32_t mem_op_a(1); uint32_t rwa = 1; @@ -1934,7 +1934,7 @@ void AvmTraceBuilder::op_calldata_copy( call_ptr, clk, IntermRegister::IA, mem_addr_a, ia, AvmMemoryTag::U0, AvmMemoryTag::FF); if (copy_size - pos > 1) { - ib = call_data_mem.at(cd_offset + pos + 1); + ib = calldata.at(cd_offset + pos + 1); mem_op_b = 1; mem_addr_b = direct_dst_offset + pos + 1; rwb = 1; @@ -1945,7 +1945,7 @@ void AvmTraceBuilder::op_calldata_copy( } if (copy_size - pos > 2) { - ic = call_data_mem.at(cd_offset + pos + 2); + ic = calldata.at(cd_offset + pos + 2); mem_op_c = 1; mem_addr_c = direct_dst_offset + pos + 2; rwc = 1; @@ -3762,7 +3762,9 @@ std::vector AvmTraceBuilder::finalize(uint32_t min_trace_size, bool range_c main_trace.at(*trace_size - 1).main_sel_last = FF(1); - // Memory trace inclusion + /********************************************************************************************** + * MEMORY TRACE INCLUSION + **********************************************************************************************/ // We compute in the main loop the timestamp and global address for next row. // Perform initialization for index 0 outside of the loop provided that mem trace exists. 
@@ -3866,7 +3868,10 @@ std::vector AvmTraceBuilder::finalize(uint32_t min_trace_size, bool range_c } } - // Alu trace inclusion + /********************************************************************************************** + * ALU TRACE INCLUSION + **********************************************************************************************/ + for (size_t i = 0; i < alu_trace_size; i++) { auto const& src = alu_trace.at(i); auto& dest = main_trace.at(i); @@ -4013,6 +4018,10 @@ std::vector AvmTraceBuilder::finalize(uint32_t min_trace_size, bool range_c } } + /********************************************************************************************** + * GADGET TABLES INCLUSION + **********************************************************************************************/ + // Add Conversion Gadget table for (size_t i = 0; i < conv_trace_size; i++) { auto const& src = conv_trace.at(i); @@ -4067,6 +4076,10 @@ std::vector AvmTraceBuilder::finalize(uint32_t min_trace_size, bool range_c dest.pedersen_sel_pedersen = FF(1); } + /********************************************************************************************** + * BINARY TRACE INCLUSION + **********************************************************************************************/ + // Add Binary Trace table for (size_t i = 0; i < bin_trace_size; i++) { auto const& src = bin_trace.at(i); @@ -4132,7 +4145,9 @@ std::vector AvmTraceBuilder::finalize(uint32_t min_trace_size, bool range_c } } - /////////// GAS ACCOUNTING ////////////////////////// + /********************************************************************************************** + * GAS TRACE INCLUSION + **********************************************************************************************/ // Add the gas cost table to the main trace // TODO: do i need a way to produce an interupt that will stop the execution of the trace when the gas left @@ -4222,11 +4237,14 @@ std::vector AvmTraceBuilder::finalize(uint32_t min_trace_size, bool range_c dest.main_da_gas_remaining = current_da_gas_remaining; } - /////////// END OF GAS ACCOUNTING ////////////////////////// - // Adding extra row for the shifted values at the top of the execution trace. Row first_row = Row{ .main_sel_first = FF(1), .mem_lastAccess = FF(1) }; main_trace.insert(main_trace.begin(), first_row); + + /********************************************************************************************** + * RANGE CHECKS AND SELECTORS INCLUSION + **********************************************************************************************/ + auto const old_trace_size = main_trace.size(); auto new_trace_size = range_check_required ? old_trace_size @@ -4316,6 +4334,10 @@ std::vector AvmTraceBuilder::finalize(uint32_t min_trace_size, bool range_c } } + /********************************************************************************************** + * KERNEL TRACE INCLUSION + **********************************************************************************************/ + // Write the kernel trace into the main trace // 1. 
The write offsets are constrained to be non changing over the entire trace, so we fill in the values // until we @@ -4494,6 +4516,11 @@ std::vector AvmTraceBuilder::finalize(uint32_t min_trace_size, bool range_c std::get(kernel_trace_builder.public_inputs).at(i); } + // calldata column inclusion + for (size_t i = 0; i < calldata.size(); i++) { + main_trace.at(i).main_calldata = calldata.at(i); + } + // Get tag_err counts from the mem_trace_builder if (range_check_required) { finalise_mem_trace_lookup_counts(); diff --git a/barretenberg/cpp/src/barretenberg/vm/avm_trace/avm_trace.hpp b/barretenberg/cpp/src/barretenberg/vm/avm_trace/avm_trace.hpp index 951dd1eb6d8..3baf3ce50d2 100644 --- a/barretenberg/cpp/src/barretenberg/vm/avm_trace/avm_trace.hpp +++ b/barretenberg/cpp/src/barretenberg/vm/avm_trace/avm_trace.hpp @@ -39,7 +39,8 @@ class AvmTraceBuilder { public: AvmTraceBuilder(VmPublicInputs public_inputs = {}, ExecutionHints execution_hints = {}, - uint32_t side_effect_counter = 0); + uint32_t side_effect_counter = 0, + std::vector calldata = {}); std::vector finalize(uint32_t min_trace_size = 0, bool range_check_required = ENABLE_PROVING); void reset(); @@ -158,11 +159,7 @@ class AvmTraceBuilder { // CALLDATACOPY opcode with direct/indirect memory access, i.e., // direct: M[dst_offset:dst_offset+copy_size] = calldata[cd_offset:cd_offset+copy_size] // indirect: M[M[dst_offset]:M[dst_offset]+copy_size] = calldata[cd_offset:cd_offset+copy_size] - void op_calldata_copy(uint8_t indirect, - uint32_t cd_offset, - uint32_t copy_size, - uint32_t dst_offset, - std::vector const& call_data_mem); + void op_calldata_copy(uint8_t indirect, uint32_t cd_offset, uint32_t copy_size, uint32_t dst_offset); // REVERT Opcode (that just call return under the hood for now) std::vector op_revert(uint8_t indirect, uint32_t ret_offset, uint32_t ret_size); @@ -241,6 +238,8 @@ class AvmTraceBuilder { AvmPedersenTraceBuilder pedersen_trace_builder; AvmEccTraceBuilder ecc_trace_builder; + std::vector calldata{}; + /** * @brief Create a kernel lookup opcode object * diff --git a/barretenberg/cpp/src/barretenberg/vm/generated/avm_circuit_builder.cpp b/barretenberg/cpp/src/barretenberg/vm/generated/avm_circuit_builder.cpp index 448bf350a0e..5ccb624b528 100644 --- a/barretenberg/cpp/src/barretenberg/vm/generated/avm_circuit_builder.cpp +++ b/barretenberg/cpp/src/barretenberg/vm/generated/avm_circuit_builder.cpp @@ -24,6 +24,7 @@ template std::vector AvmFullRow::names() "kernel_kernel_value_out", "kernel_kernel_side_effect_out", "kernel_kernel_metadata_out", + "main_calldata", "alu_a_hi", "alu_a_lo", "alu_b_hi", @@ -412,18 +413,19 @@ template std::ostream& operator<<(std::ostream& os, AvmFullRow << field_to_string(row.main_clk) << "," << field_to_string(row.main_sel_first) << "," << field_to_string(row.kernel_kernel_inputs) << "," << field_to_string(row.kernel_kernel_value_out) << "," << field_to_string(row.kernel_kernel_side_effect_out) << "," - << field_to_string(row.kernel_kernel_metadata_out) << "," << field_to_string(row.alu_a_hi) << "," - << field_to_string(row.alu_a_lo) << "," << field_to_string(row.alu_b_hi) << "," - << field_to_string(row.alu_b_lo) << "," << field_to_string(row.alu_borrow) << "," - << field_to_string(row.alu_cf) << "," << field_to_string(row.alu_clk) << "," - << field_to_string(row.alu_cmp_rng_ctr) << "," << field_to_string(row.alu_div_u16_r0) << "," - << field_to_string(row.alu_div_u16_r1) << "," << field_to_string(row.alu_div_u16_r2) << "," - << field_to_string(row.alu_div_u16_r3) << "," << 
field_to_string(row.alu_div_u16_r4) << "," - << field_to_string(row.alu_div_u16_r5) << "," << field_to_string(row.alu_div_u16_r6) << "," - << field_to_string(row.alu_div_u16_r7) << "," << field_to_string(row.alu_divisor_hi) << "," - << field_to_string(row.alu_divisor_lo) << "," << field_to_string(row.alu_ff_tag) << "," - << field_to_string(row.alu_ia) << "," << field_to_string(row.alu_ib) << "," << field_to_string(row.alu_ic) - << "," << field_to_string(row.alu_in_tag) << "," << field_to_string(row.alu_op_add) << "," + << field_to_string(row.kernel_kernel_metadata_out) << "," << field_to_string(row.main_calldata) << "," + << field_to_string(row.alu_a_hi) << "," << field_to_string(row.alu_a_lo) << "," + << field_to_string(row.alu_b_hi) << "," << field_to_string(row.alu_b_lo) << "," + << field_to_string(row.alu_borrow) << "," << field_to_string(row.alu_cf) << "," + << field_to_string(row.alu_clk) << "," << field_to_string(row.alu_cmp_rng_ctr) << "," + << field_to_string(row.alu_div_u16_r0) << "," << field_to_string(row.alu_div_u16_r1) << "," + << field_to_string(row.alu_div_u16_r2) << "," << field_to_string(row.alu_div_u16_r3) << "," + << field_to_string(row.alu_div_u16_r4) << "," << field_to_string(row.alu_div_u16_r5) << "," + << field_to_string(row.alu_div_u16_r6) << "," << field_to_string(row.alu_div_u16_r7) << "," + << field_to_string(row.alu_divisor_hi) << "," << field_to_string(row.alu_divisor_lo) << "," + << field_to_string(row.alu_ff_tag) << "," << field_to_string(row.alu_ia) << "," + << field_to_string(row.alu_ib) << "," << field_to_string(row.alu_ic) << "," + << field_to_string(row.alu_in_tag) << "," << field_to_string(row.alu_op_add) << "," << field_to_string(row.alu_op_cast) << "," << field_to_string(row.alu_op_cast_prev) << "," << field_to_string(row.alu_op_div) << "," << field_to_string(row.alu_op_div_a_lt_b) << "," << field_to_string(row.alu_op_div_std) << "," << field_to_string(row.alu_op_eq) << "," diff --git a/barretenberg/cpp/src/barretenberg/vm/generated/avm_circuit_builder.hpp b/barretenberg/cpp/src/barretenberg/vm/generated/avm_circuit_builder.hpp index 9bbe9334c85..b0acd7a8bac 100644 --- a/barretenberg/cpp/src/barretenberg/vm/generated/avm_circuit_builder.hpp +++ b/barretenberg/cpp/src/barretenberg/vm/generated/avm_circuit_builder.hpp @@ -93,6 +93,7 @@ template struct AvmFullRow { FF kernel_kernel_value_out{}; FF kernel_kernel_side_effect_out{}; FF kernel_kernel_metadata_out{}; + FF main_calldata{}; FF alu_a_hi{}; FF alu_a_lo{}; FF alu_b_hi{}; @@ -553,8 +554,8 @@ class AvmCircuitBuilder { using Polynomial = Flavor::Polynomial; using ProverPolynomials = Flavor::ProverPolynomials; - static constexpr size_t num_fixed_columns = 450; - static constexpr size_t num_polys = 385; + static constexpr size_t num_fixed_columns = 451; + static constexpr size_t num_polys = 386; std::vector rows; void set_trace(std::vector&& trace) { rows = std::move(trace); } @@ -576,6 +577,7 @@ class AvmCircuitBuilder { polys.kernel_kernel_value_out[i] = rows[i].kernel_kernel_value_out; polys.kernel_kernel_side_effect_out[i] = rows[i].kernel_kernel_side_effect_out; polys.kernel_kernel_metadata_out[i] = rows[i].kernel_kernel_metadata_out; + polys.main_calldata[i] = rows[i].main_calldata; polys.alu_a_hi[i] = rows[i].alu_a_hi; polys.alu_a_lo[i] = rows[i].alu_a_lo; polys.alu_b_hi[i] = rows[i].alu_b_hi; diff --git a/barretenberg/cpp/src/barretenberg/vm/generated/avm_flavor.hpp b/barretenberg/cpp/src/barretenberg/vm/generated/avm_flavor.hpp index e5729066c00..0d33acf8c1d 100644 --- 
a/barretenberg/cpp/src/barretenberg/vm/generated/avm_flavor.hpp +++ b/barretenberg/cpp/src/barretenberg/vm/generated/avm_flavor.hpp @@ -100,11 +100,11 @@ class AvmFlavor { using RelationSeparator = FF; static constexpr size_t NUM_PRECOMPUTED_ENTITIES = 2; - static constexpr size_t NUM_WITNESS_ENTITIES = 383; + static constexpr size_t NUM_WITNESS_ENTITIES = 384; static constexpr size_t NUM_WIRES = NUM_WITNESS_ENTITIES + NUM_PRECOMPUTED_ENTITIES; // We have two copies of the witness entities, so we subtract the number of fixed ones (they have no shift), one for // the unshifted and one for the shifted - static constexpr size_t NUM_ALL_ENTITIES = 450; + static constexpr size_t NUM_ALL_ENTITIES = 451; using GrandProductRelations = std::tuple, perm_main_bin_relation, @@ -264,6 +264,7 @@ class AvmFlavor { kernel_kernel_value_out, kernel_kernel_side_effect_out, kernel_kernel_metadata_out, + main_calldata, alu_a_hi, alu_a_lo, alu_b_hi, @@ -650,6 +651,7 @@ class AvmFlavor { kernel_kernel_value_out, kernel_kernel_side_effect_out, kernel_kernel_metadata_out, + main_calldata, alu_a_hi, alu_a_lo, alu_b_hi, @@ -1041,6 +1043,7 @@ class AvmFlavor { kernel_kernel_value_out, kernel_kernel_side_effect_out, kernel_kernel_metadata_out, + main_calldata, alu_a_hi, alu_a_lo, alu_b_hi, @@ -1494,6 +1497,7 @@ class AvmFlavor { kernel_kernel_value_out, kernel_kernel_side_effect_out, kernel_kernel_metadata_out, + main_calldata, alu_a_hi, alu_a_lo, alu_b_hi, @@ -1947,6 +1951,7 @@ class AvmFlavor { kernel_kernel_value_out, kernel_kernel_side_effect_out, kernel_kernel_metadata_out, + main_calldata, alu_a_hi, alu_a_lo, alu_b_hi, @@ -2756,6 +2761,7 @@ class AvmFlavor { Base::kernel_kernel_value_out = "KERNEL_KERNEL_VALUE_OUT"; Base::kernel_kernel_side_effect_out = "KERNEL_KERNEL_SIDE_EFFECT_OUT"; Base::kernel_kernel_metadata_out = "KERNEL_KERNEL_METADATA_OUT"; + Base::main_calldata = "MAIN_CALLDATA"; Base::alu_a_hi = "ALU_A_HI"; Base::alu_a_lo = "ALU_A_LO"; Base::alu_b_hi = "ALU_B_HI"; @@ -3158,6 +3164,7 @@ class AvmFlavor { Commitment kernel_kernel_value_out; Commitment kernel_kernel_side_effect_out; Commitment kernel_kernel_metadata_out; + Commitment main_calldata; Commitment alu_a_hi; Commitment alu_a_lo; Commitment alu_b_hi; @@ -3560,6 +3567,7 @@ class AvmFlavor { kernel_kernel_value_out = deserialize_from_buffer(Transcript::proof_data, num_frs_read); kernel_kernel_side_effect_out = deserialize_from_buffer(Transcript::proof_data, num_frs_read); kernel_kernel_metadata_out = deserialize_from_buffer(Transcript::proof_data, num_frs_read); + main_calldata = deserialize_from_buffer(Transcript::proof_data, num_frs_read); alu_a_hi = deserialize_from_buffer(Transcript::proof_data, num_frs_read); alu_a_lo = deserialize_from_buffer(Transcript::proof_data, num_frs_read); alu_b_hi = deserialize_from_buffer(Transcript::proof_data, num_frs_read); @@ -3978,6 +3986,7 @@ class AvmFlavor { serialize_to_buffer(kernel_kernel_value_out, Transcript::proof_data); serialize_to_buffer(kernel_kernel_side_effect_out, Transcript::proof_data); serialize_to_buffer(kernel_kernel_metadata_out, Transcript::proof_data); + serialize_to_buffer(main_calldata, Transcript::proof_data); serialize_to_buffer(alu_a_hi, Transcript::proof_data); serialize_to_buffer(alu_a_lo, Transcript::proof_data); serialize_to_buffer(alu_b_hi, Transcript::proof_data); diff --git a/barretenberg/cpp/src/barretenberg/vm/generated/avm_prover.cpp b/barretenberg/cpp/src/barretenberg/vm/generated/avm_prover.cpp index a10bc668d02..b35428e51f1 100644 --- 
a/barretenberg/cpp/src/barretenberg/vm/generated/avm_prover.cpp +++ b/barretenberg/cpp/src/barretenberg/vm/generated/avm_prover.cpp @@ -62,6 +62,7 @@ void AvmProver::execute_wire_commitments_round() witness_commitments.kernel_kernel_value_out = commitment_key->commit(key->kernel_kernel_value_out); witness_commitments.kernel_kernel_side_effect_out = commitment_key->commit(key->kernel_kernel_side_effect_out); witness_commitments.kernel_kernel_metadata_out = commitment_key->commit(key->kernel_kernel_metadata_out); + witness_commitments.main_calldata = commitment_key->commit(key->main_calldata); witness_commitments.alu_a_hi = commitment_key->commit(key->alu_a_hi); witness_commitments.alu_a_lo = commitment_key->commit(key->alu_a_lo); witness_commitments.alu_b_hi = commitment_key->commit(key->alu_b_hi); @@ -408,6 +409,7 @@ void AvmProver::execute_wire_commitments_round() witness_commitments.kernel_kernel_side_effect_out); transcript->send_to_verifier(commitment_labels.kernel_kernel_metadata_out, witness_commitments.kernel_kernel_metadata_out); + transcript->send_to_verifier(commitment_labels.main_calldata, witness_commitments.main_calldata); transcript->send_to_verifier(commitment_labels.alu_a_hi, witness_commitments.alu_a_hi); transcript->send_to_verifier(commitment_labels.alu_a_lo, witness_commitments.alu_a_lo); transcript->send_to_verifier(commitment_labels.alu_b_hi, witness_commitments.alu_b_hi); diff --git a/barretenberg/cpp/src/barretenberg/vm/generated/avm_verifier.cpp b/barretenberg/cpp/src/barretenberg/vm/generated/avm_verifier.cpp index 0a863144aad..49302f921bb 100644 --- a/barretenberg/cpp/src/barretenberg/vm/generated/avm_verifier.cpp +++ b/barretenberg/cpp/src/barretenberg/vm/generated/avm_verifier.cpp @@ -79,6 +79,7 @@ bool AvmVerifier::verify_proof(const HonkProof& proof, const std::vectortemplate receive_from_prover(commitment_labels.kernel_kernel_side_effect_out); commitments.kernel_kernel_metadata_out = transcript->template receive_from_prover(commitment_labels.kernel_kernel_metadata_out); + commitments.main_calldata = transcript->template receive_from_prover(commitment_labels.main_calldata); commitments.alu_a_hi = transcript->template receive_from_prover(commitment_labels.alu_a_hi); commitments.alu_a_lo = transcript->template receive_from_prover(commitment_labels.alu_a_lo); commitments.alu_b_hi = transcript->template receive_from_prover(commitment_labels.alu_b_hi); @@ -737,6 +738,11 @@ bool AvmVerifier::verify_proof(const HonkProof& proof, const std::vector const& calldata) + { + trace_builder = AvmTraceBuilder(public_inputs, {}, 0, calldata); + } + // Generate a trace with an EQ opcode operation. std::vector gen_trace_eq(uint128_t const& a, uint128_t const& b, @@ -368,9 +373,11 @@ std::vector> positive_op_div_test_values = { { // Test on basic addition over finite field type. TEST_F(AvmArithmeticTestsFF, addition) { - trace_builder.op_calldata_copy(0, 0, 3, 0, std::vector{ 37, 4, 11 }); + std::vector const calldata = { 37, 4, 11 }; + gen_trace_builder(calldata); + trace_builder.op_calldata_copy(0, 0, 3, 0); - // Memory layout: [37,4,11,0,0,0,....] + // Memory layout: [37,4,11,0,0,0,....] trace_builder.op_add(0, 0, 1, 4, AvmMemoryTag::FF); // [37,4,11,0,41,0,....] 
trace_builder.op_return(0, 0, 5); auto trace = trace_builder.finalize(); @@ -381,13 +388,15 @@ TEST_F(AvmArithmeticTestsFF, addition) EXPECT_EQ(alu_row.alu_cf, FF(0)); EXPECT_EQ(alu_row.alu_u8_r0, FF(0)); - validate_trace(std::move(trace), public_inputs, true); + validate_trace(std::move(trace), public_inputs, calldata, true); } // Test on basic subtraction over finite field type. TEST_F(AvmArithmeticTestsFF, subtraction) { - trace_builder.op_calldata_copy(0, 0, 3, 0, std::vector{ 8, 4, 17 }); + std::vector const calldata = { 8, 4, 17 }; + gen_trace_builder(calldata); + trace_builder.op_calldata_copy(0, 0, 3, 0); // Memory layout: [8,4,17,0,0,0,....] trace_builder.op_sub(0, 2, 0, 1, AvmMemoryTag::FF); // [8,9,17,0,0,0....] @@ -400,13 +409,15 @@ TEST_F(AvmArithmeticTestsFF, subtraction) EXPECT_EQ(alu_row.alu_cf, FF(0)); EXPECT_EQ(alu_row.alu_u8_r0, FF(0)); - validate_trace(std::move(trace), public_inputs); + validate_trace(std::move(trace), public_inputs, calldata); } // Test on basic multiplication over finite field type. TEST_F(AvmArithmeticTestsFF, multiplication) { - trace_builder.op_calldata_copy(0, 0, 3, 0, std::vector{ 5, 0, 20 }); + std::vector const calldata = { 5, 0, 20 }; + gen_trace_builder(calldata); + trace_builder.op_calldata_copy(0, 0, 3, 0); // Memory layout: [5,0,20,0,0,0,....] trace_builder.op_mul(0, 2, 0, 1, AvmMemoryTag::FF); // [5,100,20,0,0,0....] @@ -420,13 +431,15 @@ TEST_F(AvmArithmeticTestsFF, multiplication) EXPECT_EQ(alu_row.alu_cf, FF(0)); EXPECT_EQ(alu_row.alu_u8_r0, FF(0)); - validate_trace(std::move(trace), public_inputs); + validate_trace(std::move(trace), public_inputs, calldata); } // Test on multiplication by zero over finite field type. TEST_F(AvmArithmeticTestsFF, multiplicationByZero) { - trace_builder.op_calldata_copy(0, 0, 1, 0, std::vector{ 127 }); + std::vector const calldata = { 127 }; + gen_trace_builder(calldata); + trace_builder.op_calldata_copy(0, 0, 1, 0); // Memory layout: [127,0,0,0,0,0,....] trace_builder.op_mul(0, 0, 1, 2, AvmMemoryTag::FF); // [127,0,0,0,0,0....] @@ -440,13 +453,15 @@ TEST_F(AvmArithmeticTestsFF, multiplicationByZero) EXPECT_EQ(alu_row.alu_cf, FF(0)); EXPECT_EQ(alu_row.alu_u8_r0, FF(0)); - validate_trace(std::move(trace), public_inputs); + validate_trace(std::move(trace), public_inputs, calldata); } // Test on basic division over finite field type. TEST_F(AvmArithmeticTestsFF, fDivision) { - trace_builder.op_calldata_copy(0, 0, 2, 0, std::vector{ 15, 315 }); + std::vector const calldata = { 15, 315 }; + gen_trace_builder(calldata); + trace_builder.op_calldata_copy(0, 0, 2, 0); // Memory layout: [15,315,0,0,0,0,....] trace_builder.op_fdiv(0, 1, 0, 2); // [15,315,21,0,0,0....] @@ -463,13 +478,15 @@ TEST_F(AvmArithmeticTestsFF, fDivision) EXPECT_EQ(row->main_sel_mem_op_c, FF(1)); EXPECT_EQ(row->main_rwc, FF(1)); - validate_trace(std::move(trace), public_inputs); + validate_trace(std::move(trace), public_inputs, calldata); } // Test on division with zero numerator over finite field type. TEST_F(AvmArithmeticTestsFF, fDivisionNumeratorZero) { - trace_builder.op_calldata_copy(0, 0, 1, 0, std::vector{ 15 }); + std::vector const calldata = { 15 }; + gen_trace_builder(calldata); + trace_builder.op_calldata_copy(0, 0, 1, 0); // Memory layout: [15,0,0,0,0,0,....] trace_builder.op_fdiv(0, 1, 0, 0); // [0,0,0,0,0,0....] 
@@ -486,14 +503,16 @@ TEST_F(AvmArithmeticTestsFF, fDivisionNumeratorZero) EXPECT_EQ(row->main_sel_mem_op_c, FF(1)); EXPECT_EQ(row->main_rwc, FF(1)); - validate_trace(std::move(trace), public_inputs); + validate_trace(std::move(trace), public_inputs, calldata); } // Test on division by zero over finite field type. // We check that the operator error flag is raised. TEST_F(AvmArithmeticTestsFF, fDivisionByZeroError) { - trace_builder.op_calldata_copy(0, 0, 1, 0, std::vector{ 15 }); + std::vector const calldata = { 15 }; + gen_trace_builder(calldata); + trace_builder.op_calldata_copy(0, 0, 1, 0); // Memory layout: [15,0,0,0,0,0,....] trace_builder.op_fdiv(0, 0, 1, 2); // [15,0,0,0,0,0....] @@ -511,7 +530,7 @@ TEST_F(AvmArithmeticTestsFF, fDivisionByZeroError) EXPECT_EQ(row->main_rwc, FF(1)); EXPECT_EQ(row->main_op_err, FF(1)); - validate_trace(std::move(trace), public_inputs); + validate_trace(std::move(trace), public_inputs, calldata); } // Test on division of zero by zero over finite field type. @@ -543,7 +562,9 @@ TEST_F(AvmArithmeticTestsFF, fDivisionZeroByZeroError) // No check on the evaluation is performed here. TEST_F(AvmArithmeticTestsFF, mixedOperationsWithError) { - trace_builder.op_calldata_copy(0, 0, 3, 2, std::vector{ 45, 23, 12 }); + std::vector const calldata = { 45, 23, 12 }; + gen_trace_builder(calldata); + trace_builder.op_calldata_copy(0, 0, 3, 2); // Memory layout: [0,0,45,23,12,0,0,0,....] trace_builder.op_add(0, 2, 3, 4, AvmMemoryTag::FF); // [0,0,45,23,68,0,0,0,....] @@ -558,7 +579,7 @@ TEST_F(AvmArithmeticTestsFF, mixedOperationsWithError) trace_builder.halt(); auto trace = trace_builder.finalize(); - validate_trace(std::move(trace), public_inputs, true); + validate_trace(std::move(trace), public_inputs, calldata, true); } // Test of equality on FF elements @@ -566,7 +587,9 @@ TEST_F(AvmArithmeticTestsFF, equality) { // Pick a field-sized number FF elem = FF::modulus - FF(1); - trace_builder.op_calldata_copy(0, 0, 3, 0, std::vector{ elem, elem, 1 }); + std::vector const calldata = { elem, elem, 1 }; + gen_trace_builder(calldata); + trace_builder.op_calldata_copy(0, 0, 3, 0); trace_builder.op_eq(0, 0, 1, 2, AvmMemoryTag::FF); // Memory Layout [q - 1, q -1, 1,0..] trace_builder.op_return(0, 0, 3); auto trace = trace_builder.finalize(); @@ -576,14 +599,16 @@ TEST_F(AvmArithmeticTestsFF, equality) EXPECT_EQ(alu_row.alu_ff_tag, FF(1)); EXPECT_EQ(alu_row.alu_op_eq_diff_inv, FF(0)); // Expect 0 as inv of (q-1) - (q-1) - validate_trace(std::move(trace), public_inputs); + validate_trace(std::move(trace), public_inputs, calldata); } // Test correct non-equality of FF elements TEST_F(AvmArithmeticTestsFF, nonEquality) { FF elem = FF::modulus - FF(1); - trace_builder.op_calldata_copy(0, 0, 3, 0, std::vector{ elem, elem + FF(1), 0 }); + std::vector const calldata = { elem, elem + FF(1), 0 }; + gen_trace_builder(calldata); + trace_builder.op_calldata_copy(0, 0, 3, 0); trace_builder.op_eq(0, 0, 1, 2, AvmMemoryTag::FF); // Memory Layout [q - 1, q, 1,0..] trace_builder.op_return(0, 0, 0); auto trace = trace_builder.finalize(); @@ -593,7 +618,7 @@ TEST_F(AvmArithmeticTestsFF, nonEquality) EXPECT_EQ(alu_row.alu_ff_tag, FF(1)); EXPECT_EQ(alu_row.alu_op_eq_diff_inv, FF(-1).invert()); - validate_trace(std::move(trace), public_inputs); + validate_trace(std::move(trace), public_inputs, calldata); } TEST_P(AvmArithmeticTestsDiv, division) @@ -1758,7 +1783,9 @@ TEST_F(AvmArithmeticNegativeTestsFF, multiplication) // Test on basic incorrect division over finite field type. 
TEST_F(AvmArithmeticNegativeTestsFF, fDivision) { - trace_builder.op_calldata_copy(0, 0, 2, 0, std::vector{ 15, 315 }); + std::vector const calldata = { 15, 315 }; + gen_trace_builder(calldata); + trace_builder.op_calldata_copy(0, 0, 2, 0); // Memory layout: [15,315,0,0,0,0,....] trace_builder.op_fdiv(0, 1, 0, 2); // [15,315,21,0,0,0....] @@ -1775,7 +1802,9 @@ TEST_F(AvmArithmeticNegativeTestsFF, fDivision) // in the trace. TEST_F(AvmArithmeticNegativeTestsFF, fDivisionNoZeroButError) { - trace_builder.op_calldata_copy(0, 0, 2, 0, std::vector{ 15, 315 }); + std::vector const calldata = { 15, 315 }; + gen_trace_builder(calldata); + trace_builder.op_calldata_copy(0, 0, 2, 0); // Memory layout: [15,315,0,0,0,0,....] trace_builder.op_fdiv(0, 1, 0, 2); // [15,315,21,0,0,0....] @@ -1801,7 +1830,9 @@ TEST_F(AvmArithmeticNegativeTestsFF, fDivisionNoZeroButError) // Test with finite field division by zero occurs and no error is raised (remove error flag) TEST_F(AvmArithmeticNegativeTestsFF, fDivisionByZeroNoError) { - trace_builder.op_calldata_copy(0, 0, 1, 0, std::vector{ 15 }); + std::vector const calldata = { 15 }; + gen_trace_builder(calldata); + trace_builder.op_calldata_copy(0, 0, 1, 0); // Memory layout: [15,0,0,0,0,0,....] trace_builder.op_fdiv(0, 0, 1, 2); // [15,0,0,0,0,0....] @@ -1837,7 +1868,9 @@ TEST_F(AvmArithmeticNegativeTestsFF, fDivisionZeroByZeroNoError) // Test with finite field division using a wrong read instruction tag TEST_F(AvmArithmeticNegativeTestsFF, fDivisionWrongRInTag) { - trace_builder.op_calldata_copy(0, 0, 1, 0, std::vector{ 18, 6 }); + std::vector const calldata = { 18, 6 }; + gen_trace_builder(calldata); + trace_builder.op_calldata_copy(0, 0, 1, 0); // Memory layout: [18,6,0,0,0,0,....] trace_builder.op_fdiv(0, 0, 1, 2); // [18,6,3,0,0,0....] trace_builder.halt(); @@ -1855,7 +1888,9 @@ TEST_F(AvmArithmeticNegativeTestsFF, fDivisionWrongRInTag) // Test with finite field division using a wrong write instruction tag TEST_F(AvmArithmeticNegativeTestsFF, fDivisionWrongWInTag) { - trace_builder.op_calldata_copy(0, 0, 1, 0, std::vector{ 18, 6 }); + std::vector const calldata = { 18, 6 }; + gen_trace_builder(calldata); + trace_builder.op_calldata_copy(0, 0, 1, 0); // Memory layout: [18,6,0,0,0,0,....] trace_builder.op_fdiv(0, 0, 1, 2); // [18,6,3,0,0,0....] trace_builder.halt(); @@ -1874,7 +1909,9 @@ TEST_F(AvmArithmeticNegativeTestsFF, fDivisionWrongWInTag) // the addition, subtraction, multiplication. TEST_F(AvmArithmeticNegativeTestsFF, operationWithErrorFlag1) { - trace_builder.op_calldata_copy(0, 0, 3, 0, std::vector{ 37, 4, 11 }); + std::vector const calldata = { 37, 4, 11 }; + gen_trace_builder(calldata); + trace_builder.op_calldata_copy(0, 0, 3, 0); // Memory layout: [37,4,11,0,0,0,....] trace_builder.op_add(0, 0, 1, 4, AvmMemoryTag::FF); // [37,4,11,0,41,0,....] @@ -1893,7 +1930,9 @@ TEST_F(AvmArithmeticNegativeTestsFF, operationWithErrorFlag1) TEST_F(AvmArithmeticNegativeTestsFF, operationWithErrorFlag2) { - trace_builder.op_calldata_copy(0, 0, 3, 0, std::vector{ 8, 4, 17 }); + std::vector const calldata = { 8, 4, 17 }; + gen_trace_builder(calldata); + trace_builder.op_calldata_copy(0, 0, 3, 0); // Memory layout: [8,4,17,0,0,0,....] trace_builder.op_sub(0, 2, 0, 1, AvmMemoryTag::FF); // [8,9,17,0,0,0....] 
@@ -1911,7 +1950,9 @@ TEST_F(AvmArithmeticNegativeTestsFF, operationWithErrorFlag2) TEST_F(AvmArithmeticNegativeTestsFF, operationWithErrorFlag3) { - trace_builder.op_calldata_copy(0, 0, 3, 0, std::vector{ 5, 0, 20 }); + std::vector const calldata = { 5, 0, 20 }; + gen_trace_builder(calldata); + trace_builder.op_calldata_copy(0, 0, 3, 0); // Memory layout: [5,0,20,0,0,0,....] trace_builder.op_mul(0, 2, 0, 1, AvmMemoryTag::FF); // [5,100,20,0,0,0....] @@ -1954,7 +1995,9 @@ TEST_F(AvmArithmeticNegativeTestsFF, nonBooleanEq) TEST_F(AvmArithmeticNegativeTestsFF, eqOutputWrongTag) { FF elem = FF::modulus - FF(15); - trace_builder.op_calldata_copy(0, 0, 2, 0, std::vector{ elem, elem }); + std::vector const calldata = { elem, elem }; + gen_trace_builder(calldata); + trace_builder.op_calldata_copy(0, 0, 2, 0); trace_builder.op_eq(0, 0, 1, 2, AvmMemoryTag::FF); // Memory Layout [elem, elem, 1, 0..] trace_builder.op_return(0, 0, 0); auto trace = trace_builder.finalize(); diff --git a/barretenberg/cpp/src/barretenberg/vm/tests/avm_bitwise.test.cpp b/barretenberg/cpp/src/barretenberg/vm/tests/avm_bitwise.test.cpp index 387da11f11b..e70b50106af 100644 --- a/barretenberg/cpp/src/barretenberg/vm/tests/avm_bitwise.test.cpp +++ b/barretenberg/cpp/src/barretenberg/vm/tests/avm_bitwise.test.cpp @@ -492,7 +492,7 @@ TEST_P(AvmBitwiseTestsAnd, AllAndTest) auto trace = trace_builder.finalize(); common_validate_bit_op(trace, 0, a, b, output, FF(0), FF(1), FF(2), mem_tag); - validate_trace(std::move(trace), public_inputs, true); + validate_trace(std::move(trace), public_inputs, {}, true); } INSTANTIATE_TEST_SUITE_P(AvmBitwiseTests, AvmBitwiseTestsAnd, diff --git a/barretenberg/cpp/src/barretenberg/vm/tests/avm_cast.test.cpp b/barretenberg/cpp/src/barretenberg/vm/tests/avm_cast.test.cpp index 80629cd9e57..11d9768300d 100644 --- a/barretenberg/cpp/src/barretenberg/vm/tests/avm_cast.test.cpp +++ b/barretenberg/cpp/src/barretenberg/vm/tests/avm_cast.test.cpp @@ -23,6 +23,7 @@ class AvmCastTests : public ::testing::Test { VmPublicInputs public_inputs; AvmTraceBuilder trace_builder; + std::vector calldata; std::vector trace; size_t main_addr; @@ -113,9 +114,9 @@ class AvmCastTests : public ::testing::Test { // We still want the ability to enable proving through the environment variable and therefore we do not pass // the boolean variable force_proof to validate_trace second argument. 
if (force_proof) { - validate_trace(std::move(trace), public_inputs, true); + validate_trace(std::move(trace), public_inputs, calldata, true); } else { - validate_trace(std::move(trace), public_inputs); + validate_trace(std::move(trace), public_inputs, calldata); } } }; @@ -171,7 +172,9 @@ TEST_F(AvmCastTests, noTruncationFFToU32) TEST_F(AvmCastTests, truncationFFToU16ModMinus1) { - trace_builder.op_calldata_copy(0, 0, 1, 0, { FF(FF::modulus - 1) }); + calldata = { FF::modulus - 1 }; + trace_builder = AvmTraceBuilder(public_inputs, {}, 0, calldata); + trace_builder.op_calldata_copy(0, 0, 1, 0); trace_builder.op_cast(0, 0, 1, AvmMemoryTag::U16); trace_builder.op_return(0, 0, 0); trace = trace_builder.finalize(); @@ -182,7 +185,9 @@ TEST_F(AvmCastTests, truncationFFToU16ModMinus1) TEST_F(AvmCastTests, truncationFFToU16ModMinus2) { - trace_builder.op_calldata_copy(0, 0, 1, 0, { FF(FF::modulus_minus_two) }); + calldata = { FF::modulus_minus_two }; + trace_builder = AvmTraceBuilder(public_inputs, {}, 0, calldata); + trace_builder.op_calldata_copy(0, 0, 1, 0); trace_builder.op_cast(0, 0, 1, AvmMemoryTag::U16); trace_builder.op_return(0, 0, 0); trace = trace_builder.finalize(); @@ -291,7 +296,9 @@ TEST_F(AvmCastNegativeTests, wrongOutputAluIc) TEST_F(AvmCastNegativeTests, wrongLimbDecompositionInput) { - trace_builder.op_calldata_copy(0, 0, 1, 0, { FF(FF::modulus_minus_two) }); + calldata = { FF::modulus_minus_two }; + trace_builder = AvmTraceBuilder(public_inputs, {}, 0, calldata); + trace_builder.op_calldata_copy(0, 0, 1, 0); trace_builder.op_cast(0, 0, 1, AvmMemoryTag::U16); trace_builder.op_return(0, 0, 0); trace = trace_builder.finalize(); @@ -314,7 +321,9 @@ TEST_F(AvmCastNegativeTests, wrongPSubALo) TEST_F(AvmCastNegativeTests, wrongPSubAHi) { - trace_builder.op_calldata_copy(0, 0, 1, 0, { FF(FF::modulus_minus_two - 987) }); + calldata = { FF::modulus_minus_two - 987 }; + trace_builder = AvmTraceBuilder(public_inputs, {}, 0, calldata); + trace_builder.op_calldata_copy(0, 0, 1, 0); trace_builder.op_cast(0, 0, 1, AvmMemoryTag::U16); trace_builder.op_return(0, 0, 0); trace = trace_builder.finalize(); @@ -352,7 +361,9 @@ TEST_F(AvmCastNegativeTests, wrongRangeCheckDecompositionLo) TEST_F(AvmCastNegativeTests, wrongRangeCheckDecompositionHi) { - trace_builder.op_calldata_copy(0, 0, 1, 0, { FF(FF::modulus_minus_two - 987) }); + calldata = { FF::modulus_minus_two - 987 }; + trace_builder = AvmTraceBuilder(public_inputs, {}, 0, calldata); + trace_builder.op_calldata_copy(0, 0, 1, 0); trace_builder.op_cast(0, 0, 1, AvmMemoryTag::U16); trace_builder.op_return(0, 0, 0); trace = trace_builder.finalize(); @@ -396,7 +407,9 @@ TEST_F(AvmCastNegativeTests, wrongCopySubLoForRangeCheck) TEST_F(AvmCastNegativeTests, wrongCopySubHiForRangeCheck) { - trace_builder.op_calldata_copy(0, 0, 1, 0, { FF(FF::modulus_minus_two - 972836) }); + std::vector const calldata = { FF::modulus_minus_two - 972836 }; + trace_builder = AvmTraceBuilder(public_inputs, {}, 0, calldata); + trace_builder.op_calldata_copy(0, 0, 1, 0); trace_builder.op_cast(0, 0, 1, AvmMemoryTag::U128); trace_builder.op_return(0, 0, 0); trace = trace_builder.finalize(); diff --git a/barretenberg/cpp/src/barretenberg/vm/tests/avm_comparison.test.cpp b/barretenberg/cpp/src/barretenberg/vm/tests/avm_comparison.test.cpp index 0ed161a8afd..c3e65de1242 100644 --- a/barretenberg/cpp/src/barretenberg/vm/tests/avm_comparison.test.cpp +++ b/barretenberg/cpp/src/barretenberg/vm/tests/avm_comparison.test.cpp @@ -106,8 +106,12 @@ TEST_P(AvmCmpTestsLT, ParamTest) { 
const auto [params, mem_tag] = GetParam(); const auto [a, b, c] = params; + std::vector calldata{}; + if (mem_tag == AvmMemoryTag::FF) { - trace_builder.op_calldata_copy(0, 0, 2, 0, std::vector{ a, b }); + calldata = { a, b }; + trace_builder = AvmTraceBuilder(public_inputs, {}, 0, calldata); + trace_builder.op_calldata_copy(0, 0, 2, 0); } else { trace_builder.op_set(0, uint128_t(a), 0, mem_tag); trace_builder.op_set(0, uint128_t(b), 1, mem_tag); @@ -128,7 +132,7 @@ TEST_P(AvmCmpTestsLT, ParamTest) ASSERT_TRUE(alu_row != trace.end()); common_validate_cmp(*row, *alu_row, a, b, c, FF(0), FF(1), FF(2), mem_tag); - validate_trace(std::move(trace), public_inputs); + validate_trace(std::move(trace), public_inputs, calldata); } INSTANTIATE_TEST_SUITE_P(AvmCmpTests, AvmCmpTestsLT, @@ -138,8 +142,12 @@ TEST_P(AvmCmpTestsLTE, ParamTest) { const auto [params, mem_tag] = GetParam(); const auto [a, b, c] = params; + std::vector calldata{}; + if (mem_tag == AvmMemoryTag::FF) { - trace_builder.op_calldata_copy(0, 0, 2, 0, std::vector{ a, b }); + calldata = { a, b }; + trace_builder = AvmTraceBuilder(public_inputs, {}, 0, calldata); + trace_builder.op_calldata_copy(0, 0, 2, 0); } else { trace_builder.op_set(0, uint128_t(a), 0, mem_tag); trace_builder.op_set(0, uint128_t(b), 1, mem_tag); @@ -157,7 +165,7 @@ TEST_P(AvmCmpTestsLTE, ParamTest) ASSERT_TRUE(row != trace.end()); ASSERT_TRUE(alu_row != trace.end()); common_validate_cmp(*row, *alu_row, a, b, c, FF(0), FF(1), FF(2), mem_tag); - validate_trace(std::move(trace), public_inputs); + validate_trace(std::move(trace), public_inputs, calldata); } INSTANTIATE_TEST_SUITE_P(AvmCmpTests, AvmCmpTestsLTE, @@ -309,7 +317,9 @@ TEST_P(AvmCmpNegativeTestsLT, ParamTest) const auto [failure, params] = GetParam(); const auto [failure_string, failure_mode] = failure; const auto [a, b, output] = params; - trace_builder.op_calldata_copy(0, 0, 3, 0, std::vector{ a, b, output }); + + trace_builder = AvmTraceBuilder(public_inputs, {}, 0, std::vector{ a, b, output }); + trace_builder.op_calldata_copy(0, 0, 3, 0); trace_builder.op_lt(0, 0, 1, 2, AvmMemoryTag::FF); trace_builder.op_return(0, 0, 0); auto trace = trace_builder.finalize(); @@ -327,7 +337,8 @@ TEST_P(AvmCmpNegativeTestsLTE, ParamTest) const auto [failure, params] = GetParam(); const auto [failure_string, failure_mode] = failure; const auto [a, b, output] = params; - trace_builder.op_calldata_copy(0, 0, 3, 0, std::vector{ a, b, output }); + trace_builder = AvmTraceBuilder(public_inputs, {}, 0, std::vector{ a, b, output }); + trace_builder.op_calldata_copy(0, 0, 3, 0); trace_builder.op_lte(0, 0, 1, 2, AvmMemoryTag::FF); trace_builder.op_return(0, 0, 0); auto trace = trace_builder.finalize(); diff --git a/barretenberg/cpp/src/barretenberg/vm/tests/avm_control_flow.test.cpp b/barretenberg/cpp/src/barretenberg/vm/tests/avm_control_flow.test.cpp index 31d4facca5e..f961e542cb4 100644 --- a/barretenberg/cpp/src/barretenberg/vm/tests/avm_control_flow.test.cpp +++ b/barretenberg/cpp/src/barretenberg/vm/tests/avm_control_flow.test.cpp @@ -87,7 +87,7 @@ TEST_F(AvmControlFlowTests, simpleCall) EXPECT_EQ(halt_row->main_pc, FF(CALL_PC)); EXPECT_EQ(halt_row->main_internal_return_ptr, FF(1)); } - validate_trace(std::move(trace), public_inputs, true); + validate_trace(std::move(trace), public_inputs, {}, true); } TEST_F(AvmControlFlowTests, simpleJump) diff --git a/barretenberg/cpp/src/barretenberg/vm/tests/avm_execution.test.cpp b/barretenberg/cpp/src/barretenberg/vm/tests/avm_execution.test.cpp index c7af3789b56..6e9d749e6d4 100644 
--- a/barretenberg/cpp/src/barretenberg/vm/tests/avm_execution.test.cpp +++ b/barretenberg/cpp/src/barretenberg/vm/tests/avm_execution.test.cpp @@ -100,7 +100,7 @@ TEST_F(AvmExecutionTests, basicAddReturn) ElementsAre(VariantWith(0), VariantWith(0), VariantWith(0))))); auto trace = gen_trace_from_instr(instructions); - validate_trace(std::move(trace), public_inputs, true); + validate_trace(std::move(trace), public_inputs, {}, true); } // Positive test for SET and SUB opcodes @@ -165,7 +165,7 @@ TEST_F(AvmExecutionTests, setAndSubOpcodes) // Find the first row enabling the subtraction selector auto row = std::ranges::find_if(trace.begin(), trace.end(), [](Row r) { return r.main_sel_op_sub == 1; }); EXPECT_EQ(row->main_ic, 10000); // 47123 - 37123 = 10000 - validate_trace(std::move(trace), public_inputs, true); + validate_trace(std::move(trace), public_inputs, {}, true); } // Positive test for multiple MUL opcodes @@ -467,7 +467,7 @@ TEST_F(AvmExecutionTests, jumpAndCalldatacopy) // It must have failed as subtraction was "jumped over". EXPECT_EQ(row, trace.end()); - validate_trace(std::move(trace), public_inputs); + validate_trace(std::move(trace), public_inputs, { 13, 156 }); } // Positive test for JUMPI. @@ -561,8 +561,8 @@ TEST_F(AvmExecutionTests, jumpiAndCalldatacopy) EXPECT_EQ(row->main_ic, 1600); // 800 = (20 + 20) * (20 + 20) // traces validation - validate_trace(std::move(trace_jump), public_inputs); - validate_trace(std::move(trace_no_jump), public_inputs); + validate_trace(std::move(trace_jump), public_inputs, { 9873123 }); + validate_trace(std::move(trace_no_jump), public_inputs, { 0 }); } // Positive test with MOV. @@ -805,7 +805,7 @@ TEST_F(AvmExecutionTests, toRadixLeOpcode) } EXPECT_EQ(returndata, expected_output); - validate_trace(std::move(trace), public_inputs); + validate_trace(std::move(trace), public_inputs, { FF::modulus - FF(1) }); } // // Positive test with SHA256COMPRESSION. @@ -873,7 +873,7 @@ TEST_F(AvmExecutionTests, sha256CompressionOpcode) EXPECT_EQ(returndata, expected_output); - validate_trace(std::move(trace), public_inputs); + validate_trace(std::move(trace), public_inputs, calldata); } // Positive test with SHA256 @@ -992,7 +992,7 @@ TEST_F(AvmExecutionTests, poseidon2PermutationOpCode) EXPECT_EQ(returndata, expected_output); - validate_trace(std::move(trace), public_inputs); + validate_trace(std::move(trace), public_inputs, calldata); } // Positive test with Keccakf1600. 
@@ -1177,7 +1177,7 @@ TEST_F(AvmExecutionTests, pedersenHashOpCode) EXPECT_EQ(returndata[0], expected_output); - validate_trace(std::move(trace), public_inputs); + validate_trace(std::move(trace), public_inputs, calldata); } // // Positive test with EmbeddedCurveAdd @@ -1239,7 +1239,7 @@ TEST_F(AvmExecutionTests, embeddedCurveAddOpCode) EXPECT_EQ(returndata, expected_output); - validate_trace(std::move(trace), public_inputs); + validate_trace(std::move(trace), public_inputs, calldata); } // Positive test with MSM @@ -1316,7 +1316,7 @@ TEST_F(AvmExecutionTests, msmOpCode) EXPECT_EQ(returndata, expected_output); - validate_trace(std::move(trace), public_inputs); + validate_trace(std::move(trace), public_inputs, calldata); } // Positive test for Kernel Input opcodes @@ -1899,7 +1899,7 @@ TEST_F(AvmExecutionTests, kernelOutputStorageStoreOpcodeSimple) EXPECT_EQ(metadata_out, 9); // slot feed_output(sstore_out_offset, value_out, side_effect_out, metadata_out); - validate_trace(std::move(trace), public_inputs); + validate_trace(std::move(trace), public_inputs, calldata); } // SSTORE @@ -1961,7 +1961,7 @@ TEST_F(AvmExecutionTests, kernelOutputStorageStoreOpcodeComplex) feed_output(sstore_out_offset, 42, 0, 9); feed_output(sstore_out_offset + 1, 123, 1, 10); - validate_trace(std::move(trace), public_inputs); + validate_trace(std::move(trace), public_inputs, calldata); } // SLOAD and SSTORE @@ -2217,7 +2217,7 @@ TEST_F(AvmExecutionTests, opCallOpcodes) auto trace = Execution::gen_trace(instructions, returndata, calldata, public_inputs_vec, execution_hints); EXPECT_EQ(returndata, std::vector({ 9, 8, 1 })); // The 1 represents the success - validate_trace(std::move(trace), public_inputs); + validate_trace(std::move(trace), public_inputs, calldata); } TEST_F(AvmExecutionTests, opGetContractInstanceOpcodes) @@ -2256,7 +2256,7 @@ TEST_F(AvmExecutionTests, opGetContractInstanceOpcodes) auto trace = Execution::gen_trace(instructions, returndata, calldata, public_inputs_vec, execution_hints); EXPECT_EQ(returndata, std::vector({ 1, 2, 3, 4, 5, 6 })); // The first one represents true - validate_trace(std::move(trace), public_inputs); + validate_trace(std::move(trace), public_inputs, calldata); } // Negative test detecting an invalid opcode byte. 
TEST_F(AvmExecutionTests, invalidOpcode) diff --git a/barretenberg/cpp/src/barretenberg/vm/tests/avm_indirect_mem.test.cpp b/barretenberg/cpp/src/barretenberg/vm/tests/avm_indirect_mem.test.cpp index ccead55c438..47e82b75102 100644 --- a/barretenberg/cpp/src/barretenberg/vm/tests/avm_indirect_mem.test.cpp +++ b/barretenberg/cpp/src/barretenberg/vm/tests/avm_indirect_mem.test.cpp @@ -68,7 +68,7 @@ TEST_F(AvmIndirectMemTests, allIndirectAdd) EXPECT_EQ(row->main_sel_mem_op_b, FF(1)); EXPECT_EQ(row->main_sel_mem_op_c, FF(1)); - validate_trace(std::move(trace), public_inputs, true); + validate_trace(std::move(trace), public_inputs, {}, true); } // Testing a subtraction operation with direct input operands a, b, and an indirect diff --git a/barretenberg/cpp/src/barretenberg/vm/tests/avm_kernel.test.cpp b/barretenberg/cpp/src/barretenberg/vm/tests/avm_kernel.test.cpp index f4076677049..dcfb3ab71a1 100644 --- a/barretenberg/cpp/src/barretenberg/vm/tests/avm_kernel.test.cpp +++ b/barretenberg/cpp/src/barretenberg/vm/tests/avm_kernel.test.cpp @@ -581,7 +581,7 @@ void negative_test_incorrect_ia_kernel_lookup(OpcodesFunc apply_opcodes, check_trace(/*indirect*/ false, trace); - EXPECT_THROW_WITH_MESSAGE(validate_trace_check_circuit(std::move(trace), public_inputs), expected_message); + EXPECT_THROW_WITH_MESSAGE(validate_trace_check_circuit(std::move(trace)), expected_message); } TEST_F(AvmKernelNegativeTests, incorrectIaSender) diff --git a/barretenberg/cpp/src/barretenberg/vm/tests/avm_mem_opcodes.test.cpp b/barretenberg/cpp/src/barretenberg/vm/tests/avm_mem_opcodes.test.cpp index 7114919ee69..94157443a75 100644 --- a/barretenberg/cpp/src/barretenberg/vm/tests/avm_mem_opcodes.test.cpp +++ b/barretenberg/cpp/src/barretenberg/vm/tests/avm_mem_opcodes.test.cpp @@ -402,7 +402,7 @@ TEST_F(AvmMemOpcodeTests, indirectMovInvalidAddressTag) MEM_ROW_FIELD_EQ(r_in_tag, static_cast(AvmMemoryTag::U32)), MEM_ROW_FIELD_EQ(sel_resolve_ind_addr_c, 1))); - validate_trace(std::move(trace), public_inputs, true); + validate_trace(std::move(trace), public_inputs, {}, true); } /****************************************************************************** diff --git a/barretenberg/cpp/src/barretenberg/vm/tests/avm_memory.test.cpp b/barretenberg/cpp/src/barretenberg/vm/tests/avm_memory.test.cpp index 4d3b6fd18a9..05a02bb3862 100644 --- a/barretenberg/cpp/src/barretenberg/vm/tests/avm_memory.test.cpp +++ b/barretenberg/cpp/src/barretenberg/vm/tests/avm_memory.test.cpp @@ -36,7 +36,9 @@ class AvmMemoryTests : public ::testing::Test { // The proof must pass and we check that the AVM error is raised. TEST_F(AvmMemoryTests, mismatchedTagAddOperation) { - trace_builder.op_calldata_copy(0, 0, 2, 0, std::vector{ 98, 12 }); + std::vector const calldata = { 98, 12 }; + trace_builder = AvmTraceBuilder(public_inputs, {}, 0, calldata); + trace_builder.op_calldata_copy(0, 0, 2, 0); trace_builder.op_add(0, 0, 1, 4, AvmMemoryTag::U8); trace_builder.halt(); @@ -75,7 +77,7 @@ TEST_F(AvmMemoryTests, mismatchedTagAddOperation) EXPECT_EQ(row->mem_r_in_tag, FF(static_cast(AvmMemoryTag::U8))); EXPECT_EQ(row->mem_tag, FF(static_cast(AvmMemoryTag::FF))); - validate_trace(std::move(trace), public_inputs, true); + validate_trace(std::move(trace), public_inputs, calldata, true); } // Testing an equality operation with a mismatched memory tag. @@ -228,7 +230,8 @@ TEST_F(AvmMemoryTests, readUninitializedMemoryViolation) // must raise a VM error. 
TEST_F(AvmMemoryTests, mismatchedTagErrorViolation) { - trace_builder.op_calldata_copy(0, 0, 2, 0, std::vector{ 98, 12 }); + trace_builder = AvmTraceBuilder(public_inputs, {}, 0, { 98, 12 }); + trace_builder.op_calldata_copy(0, 0, 2, 0); trace_builder.op_sub(0, 0, 1, 4, AvmMemoryTag::U8); trace_builder.halt(); @@ -262,8 +265,8 @@ TEST_F(AvmMemoryTests, mismatchedTagErrorViolation) // must not set a VM error. TEST_F(AvmMemoryTests, consistentTagNoErrorViolation) { - trace_builder.op_calldata_copy(0, 0, 2, 0, std::vector{ 84, 7 }); - + trace_builder = AvmTraceBuilder(public_inputs, {}, 0, std::vector{ 84, 7 }); + trace_builder.op_calldata_copy(0, 0, 2, 0); trace_builder.op_fdiv(0, 0, 1, 4); trace_builder.halt(); auto trace = trace_builder.finalize(); @@ -288,8 +291,8 @@ TEST_F(AvmMemoryTests, consistentTagNoErrorViolation) // Testing violation that a write operation must not set a VM error. TEST_F(AvmMemoryTests, noErrorTagWriteViolation) { - trace_builder.op_calldata_copy(0, 0, 2, 0, std::vector{ 84, 7 }); - + trace_builder = AvmTraceBuilder(public_inputs, {}, 0, { 84, 7 }); + trace_builder.op_calldata_copy(0, 0, 2, 0); trace_builder.op_fdiv(0, 0, 1, 4); trace_builder.halt(); auto trace = trace_builder.finalize(); diff --git a/barretenberg/cpp/src/barretenberg/vm/tests/helpers.test.cpp b/barretenberg/cpp/src/barretenberg/vm/tests/helpers.test.cpp index 1657ba8ce0d..d84f11226de 100644 --- a/barretenberg/cpp/src/barretenberg/vm/tests/helpers.test.cpp +++ b/barretenberg/cpp/src/barretenberg/vm/tests/helpers.test.cpp @@ -23,9 +23,9 @@ std::vector gen_three_op_params(std::vector opera * * @param trace The execution trace */ -void validate_trace_check_circuit(std::vector&& trace, VmPublicInputs public_inputs) +void validate_trace_check_circuit(std::vector&& trace) { - validate_trace(std::move(trace), public_inputs, false); + validate_trace(std::move(trace), {}, {}, false); }; /** @@ -34,7 +34,10 @@ void validate_trace_check_circuit(std::vector&& trace, VmPublicInputs publi * * @param trace The execution trace */ -void validate_trace(std::vector&& trace, VmPublicInputs const& public_inputs, bool with_proof) +void validate_trace(std::vector&& trace, + VmPublicInputs const& public_inputs, + std::vector const& calldata, + bool with_proof) { auto circuit_builder = AvmCircuitBuilder(); circuit_builder.set_trace(std::move(trace)); @@ -47,7 +50,8 @@ void validate_trace(std::vector&& trace, VmPublicInputs const& public_input AvmVerifier verifier = composer.create_verifier(circuit_builder); - std::vector> public_inputs_as_vec = bb::avm_trace::copy_public_inputs_columns(public_inputs); + std::vector> public_inputs_as_vec = + bb::avm_trace::copy_public_inputs_columns(public_inputs, calldata); bool verified = verifier.verify_proof(proof, { public_inputs_as_vec }); diff --git a/barretenberg/cpp/src/barretenberg/vm/tests/helpers.test.hpp b/barretenberg/cpp/src/barretenberg/vm/tests/helpers.test.hpp index 0dcf6381502..0640121b319 100644 --- a/barretenberg/cpp/src/barretenberg/vm/tests/helpers.test.hpp +++ b/barretenberg/cpp/src/barretenberg/vm/tests/helpers.test.hpp @@ -28,9 +28,10 @@ using VmPublicInputs = bb::avm_trace::VmPublicInputs; // If the test is expecting a relation to fail, then use validate_trace_check_circuit. // Otherwise, use validate_trace with a single argument. If the proving needs to be // enabled all the time in a given test, use validate_trace with setting with_proof = true. 
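For orientation, a minimal sketch of how a test calls these helpers after this change (the trace, public_inputs and calldata fixtures are assumed to be set up as in the tests above):

    // Relation check only, used when a constraint is expected to fail:
    validate_trace_check_circuit(std::move(trace));

    // Full validation, now also passing the calldata column; the last argument forces proving:
    std::vector<FF> const calldata = { 37, 4, 11 };
    validate_trace(std::move(trace), public_inputs, calldata, /*with_proof=*/true);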
-void validate_trace_check_circuit(std::vector&& trace, VmPublicInputs public_inputs = {}); +void validate_trace_check_circuit(std::vector&& trace); void validate_trace(std::vector&& trace, VmPublicInputs const& public_inputs = {}, + std::vector const& calldata = {}, bool with_proof = bb::avm_trace::ENABLE_PROVING); void mutate_ic_in_trace(std::vector& trace, std::function&& selectRow, diff --git a/yarn-project/bb-prover/src/bb/execute.ts b/yarn-project/bb-prover/src/bb/execute.ts index 9a5753a68ad..7c97fcede72 100644 --- a/yarn-project/bb-prover/src/bb/execute.ts +++ b/yarn-project/bb-prover/src/bb/execute.ts @@ -12,6 +12,10 @@ export const VK_FILENAME = 'vk'; export const VK_FIELDS_FILENAME = 'vk_fields.json'; export const PROOF_FILENAME = 'proof'; export const PROOF_FIELDS_FILENAME = 'proof_fields.json'; +export const AVM_BYTECODE_FILENAME = 'avm_bytecode.bin'; +export const AVM_CALLDATA_FILENAME = 'avm_calldata.bin'; +export const AVM_PUBLIC_INPUTS_FILENAME = 'avm_public_inputs.bin'; +export const AVM_HINTS_FILENAME = 'avm_hints.bin'; export enum BB_RESULT { SUCCESS, @@ -276,10 +280,10 @@ export async function generateAvmProof( } // Paths for the inputs - const bytecodePath = join(workingDirectory, 'avm_bytecode.bin'); - const calldataPath = join(workingDirectory, 'avm_calldata.bin'); - const publicInputsPath = join(workingDirectory, 'avm_public_inputs.bin'); - const avmHintsPath = join(workingDirectory, 'avm_hints.bin'); + const bytecodePath = join(workingDirectory, AVM_BYTECODE_FILENAME); + const calldataPath = join(workingDirectory, AVM_CALLDATA_FILENAME); + const publicInputsPath = join(workingDirectory, AVM_PUBLIC_INPUTS_FILENAME); + const avmHintsPath = join(workingDirectory, AVM_HINTS_FILENAME); // The proof is written to e.g. /workingDirectory/proof const outputPath = workingDirectory; From c07cf2cf2b004dba46a3138a1f64f207b6ee537f Mon Sep 17 00:00:00 2001 From: esau <152162806+sklppy88@users.noreply.github.com> Date: Fri, 28 Jun 2024 19:58:20 +0200 Subject: [PATCH 13/29] feat: add new lenses for encryted notes (#7238) A very quick fix to just give a bit more options to the length of notes --- .../aztec-nr/aztec/src/oracle/logs_traits.nr | 24 +++++++++++++++++++ 1 file changed, 24 insertions(+) diff --git a/noir-projects/aztec-nr/aztec/src/oracle/logs_traits.nr b/noir-projects/aztec-nr/aztec/src/oracle/logs_traits.nr index 899f7f0d0d1..c3d2cfb83c7 100644 --- a/noir-projects/aztec-nr/aztec/src/oracle/logs_traits.nr +++ b/noir-projects/aztec-nr/aztec/src/oracle/logs_traits.nr @@ -40,6 +40,30 @@ impl LensForEncryptedLog<6, 672> for [Field; 6] { fn output_fields(self) -> [Field; 6] {[self[0]; 6]} fn output_bytes(self) -> [u8; 672] {[self[0] as u8; 672]} } +impl LensForEncryptedLog<7, 704> for [Field; 7] { + fn output_fields(self) -> [Field; 7] {[self[0]; 7]} + fn output_bytes(self) -> [u8; 704] {[self[0] as u8; 704]} +} +impl LensForEncryptedLog<8, 736> for [Field; 8] { + fn output_fields(self) -> [Field; 8] {[self[0]; 8]} + fn output_bytes(self) -> [u8; 736] {[self[0] as u8; 736]} +} +impl LensForEncryptedLog<9, 768> for [Field; 9] { + fn output_fields(self) -> [Field; 9] {[self[0]; 9]} + fn output_bytes(self) -> [u8; 768] {[self[0] as u8; 768]} +} +impl LensForEncryptedLog<10, 800> for [Field; 10] { + fn output_fields(self) -> [Field; 10] {[self[0]; 10]} + fn output_bytes(self) -> [u8; 800] {[self[0] as u8; 800]} +} +impl LensForEncryptedLog<11, 832> for [Field; 11] { + fn output_fields(self) -> [Field; 11] {[self[0]; 11]} + fn output_bytes(self) -> [u8; 832] {[self[0] as u8; 
832]} +} +impl LensForEncryptedLog<12, 864> for [Field; 12] { + fn output_fields(self) -> [Field; 12] {[self[0]; 12]} + fn output_bytes(self) -> [u8; 864] {[self[0] as u8; 864]} +} trait LensForEncryptedEvent { // N = event preimage input in bytes From 51f7d65d69eede9508f44224db554d5185298509 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Nicol=C3=A1s=20Venturo?= Date: Fri, 28 Jun 2024 16:01:27 -0300 Subject: [PATCH 14/29] feat!: split storage access oracles (#7237) Part of https://github.com/AztecProtocol/aztec-packages/issues/7230. This splits the storage access oracles that transpile into opcodes, and the ones that are resolved as actual oracle calls by PXE. I took the liberty to introduce some slight improvements, such as removing unused `length` params, write return values, and making the default fns call serialize and deserialize, mirroring the actual usage they had throughout our codebase and cleaning up the callsites. The oracle improvements proposed in #7230 (taking arbitrary addresses and block number) will come in a separate PR, as those require further changes to PXE etc. --- avm-transpiler/src/transpile.rs | 10 ++-- .../aztec/src/context/public_context.nr | 30 ++++++++++ noir-projects/aztec-nr/aztec/src/lib.nr | 1 - .../aztec-nr/aztec/src/oracle/storage.nr | 44 +++++++++++---- .../aztec-nr/aztec/src/public_storage.nr | 56 ------------------- .../aztec/src/state_vars/public_immutable.nr | 23 +++----- .../aztec/src/state_vars/public_mutable.nr | 15 ++--- .../aztec/src/state_vars/shared_immutable.nr | 22 +++----- .../shared_mutable/shared_mutable.nr | 9 ++- .../shared_mutable_private_getter.nr | 1 - .../token_contract/src/test/utils.nr | 4 +- .../end-to-end/src/e2e_state_vars.test.ts | 4 +- 12 files changed, 98 insertions(+), 121 deletions(-) delete mode 100644 noir-projects/aztec-nr/aztec/src/public_storage.nr diff --git a/avm-transpiler/src/transpile.rs b/avm-transpiler/src/transpile.rs index 22e5cbcd8ca..80610006ec0 100644 --- a/avm-transpiler/src/transpile.rs +++ b/avm-transpiler/src/transpile.rs @@ -263,8 +263,8 @@ fn handle_foreign_call( "avmOpcodeGetContractInstance" => { handle_get_contract_instance(avm_instrs, destinations, inputs); } - "storageRead" => handle_storage_read(avm_instrs, destinations, inputs), - "storageWrite" => handle_storage_write(avm_instrs, destinations, inputs), + "avmOpcodeStorageRead" => handle_storage_read(avm_instrs, destinations, inputs), + "avmOpcodeStorageWrite" => handle_storage_write(avm_instrs, destinations, inputs), "debugLog" => handle_debug_log(avm_instrs, destinations, inputs), // Getters. 
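At the call sites the split looks roughly like this (a minimal sketch; the storage slot and the U128 value type are assumed): reads and writes that go through the public context lower to the avmOpcodeStorageRead/avmOpcodeStorageWrite foreign calls and are transpiled to AVM storage opcodes, while the plain storage_read oracle remains for unconstrained execution, where the PXE answers from historical state:

    // Public execution (transpiled to AVM storage opcodes):
    let current: U128 = context.storage_read(storage_slot);
    context.storage_write(storage_slot, current);

    // Unconstrained execution (resolved by the PXE storageRead oracle):
    let amount: U128 = storage_read(storage_slot);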
_ if inputs.is_empty() && destinations.len() == 1 => { @@ -926,7 +926,7 @@ fn handle_storage_write( inputs: &Vec, ) { assert!(inputs.len() == 2); - assert!(destinations.len() == 1); + assert!(destinations.len() == 0); let slot_offset_maybe = inputs[0]; let slot_offset = match slot_offset_maybe { @@ -992,8 +992,8 @@ fn handle_storage_read( inputs: &Vec, ) { // For the foreign calls we want to handle, we do not want inputs, as they are getters - assert!(inputs.len() == 2); // output, len - but we dont use this len - its for the oracle - assert!(destinations.len() == 1); + assert!(inputs.len() == 1); // storage_slot + assert!(destinations.len() == 1); // return values let slot_offset_maybe = inputs[0]; let slot_offset = match slot_offset_maybe { diff --git a/noir-projects/aztec-nr/aztec/src/context/public_context.nr b/noir-projects/aztec-nr/aztec/src/context/public_context.nr index 57d2380e6cf..7c056a43506 100644 --- a/noir-projects/aztec-nr/aztec/src/context/public_context.nr +++ b/noir-projects/aztec-nr/aztec/src/context/public_context.nr @@ -166,6 +166,22 @@ impl PublicContext { fn da_gas_left(self) -> Field { da_gas_left() } + + fn raw_storage_read(_self: Self, storage_slot: Field) -> [Field; N] { + storage_read(storage_slot) + } + + fn storage_read(self, storage_slot: Field) -> T where T: Deserialize { + T::deserialize(self.raw_storage_read(storage_slot)) + } + + fn raw_storage_write(_self: Self, storage_slot: Field, values: [Field; N]) { + storage_write(storage_slot, values); + } + + fn storage_write(self, storage_slot: Field, value: T) where T: Serialize { + self.raw_storage_write(storage_slot, value.serialize()); + } } // Helper functions @@ -258,6 +274,14 @@ unconstrained fn call_static( call_static_opcode(gas, address, args, function_selector) } +unconstrained fn storage_read(storage_slot: Field) -> [Field; N] { + storage_read_opcode(storage_slot) +} + +unconstrained fn storage_write(storage_slot: Field, values: [Field; N]) { + storage_write_opcode(storage_slot, values); +} + impl Empty for PublicContext { fn empty() -> Self { PublicContext::new(PublicContextInputs::empty()) @@ -345,6 +369,12 @@ unconstrained fn call_static_opcode( ) -> ([Field; RET_SIZE], u8) {} // ^ return data ^ success +#[oracle(avmOpcodeStorageRead)] +unconstrained fn storage_read_opcode(storage_slot: Field) -> [Field; N] {} + +#[oracle(avmOpcodeStorageWrite)] +unconstrained fn storage_write_opcode(storage_slot: Field, values: [Field; N]) {} + struct FunctionReturns { values: [Field; N] } diff --git a/noir-projects/aztec-nr/aztec/src/lib.nr b/noir-projects/aztec-nr/aztec/src/lib.nr index e569b892e1a..d4b1abf35e7 100644 --- a/noir-projects/aztec-nr/aztec/src/lib.nr +++ b/noir-projects/aztec-nr/aztec/src/lib.nr @@ -10,7 +10,6 @@ mod event; mod oracle; mod state_vars; mod prelude; -mod public_storage; mod encrypted_logs; mod unencrypted_logs; use dep::protocol_types; diff --git a/noir-projects/aztec-nr/aztec/src/oracle/storage.nr b/noir-projects/aztec-nr/aztec/src/oracle/storage.nr index b25e4a3b55c..92925f889fa 100644 --- a/noir-projects/aztec-nr/aztec/src/oracle/storage.nr +++ b/noir-projects/aztec-nr/aztec/src/oracle/storage.nr @@ -1,19 +1,43 @@ -use dep::protocol_types::traits::{Deserialize, Serialize}; +use dep::protocol_types::traits::Deserialize; #[oracle(storageRead)] -unconstrained fn storage_read_oracle(_storage_slot: Field, _number_of_elements: Field) -> [Field; N] {} +unconstrained fn storage_read_oracle(storage_slot: Field, length: Field) -> [Field; N] {} -unconstrained fn 
storage_read_oracle_wrapper(_storage_slot: Field) -> [Field; N] { - storage_read_oracle(_storage_slot, N) +unconstrained pub fn raw_storage_read(storage_slot: Field) -> [Field; N] { + storage_read_oracle(storage_slot, N) } -pub fn storage_read(storage_slot: Field) -> [Field; N] { - storage_read_oracle_wrapper(storage_slot) +unconstrained pub fn storage_read(storage_slot: Field) -> T where T: Deserialize { + T::deserialize(raw_storage_read(storage_slot)) } -#[oracle(storageWrite)] -unconstrained fn storage_write_oracle(_storage_slot: Field, _values: [Field; N]) -> [Field; N] {} +mod tests { + use crate::oracle::storage::{raw_storage_read, storage_read}; -unconstrained pub fn storage_write(storage_slot: Field, fields: [Field; N]) { - let _hash = storage_write_oracle(storage_slot, fields); + use std::test::OracleMock; + use crate::test::mocks::mock_struct::MockStruct; + + #[test] + fn test_raw_storage_read() { + let slot = 7; + let written = MockStruct { a: 13, b: 42 }; + + let _ = OracleMock::mock("storageRead").with_params((slot, 2)).returns(written.serialize()); + + let read: [Field; 2] = raw_storage_read(slot); + assert_eq(read[0], 13); + assert_eq(read[1], 42); + } + + #[test] + fn test_storage_read() { + let slot = 7; + let written = MockStruct { a: 13, b: 42 }; + + let _ = OracleMock::mock("storageRead").with_params((slot, 2)).returns(written.serialize()); + + let read: MockStruct = storage_read(slot); + assert_eq(read.a, 13); + assert_eq(read.b, 42); + } } diff --git a/noir-projects/aztec-nr/aztec/src/public_storage.nr b/noir-projects/aztec-nr/aztec/src/public_storage.nr deleted file mode 100644 index b9a34811bab..00000000000 --- a/noir-projects/aztec-nr/aztec/src/public_storage.nr +++ /dev/null @@ -1,56 +0,0 @@ -use dep::protocol_types::traits::{Deserialize, Serialize}; -use crate::oracle::storage::{storage_read, storage_write}; - -pub fn read(storage_slot: Field) -> T where T: Deserialize { - T::deserialize(storage_read(storage_slot)) -} - -pub fn write(storage_slot: Field, value: T) where T: Serialize { - storage_write(storage_slot, value.serialize()); -} - -mod tests { - use std::test::OracleMock; - use dep::protocol_types::traits::{Deserialize, Serialize}; - use crate::public_storage; - - struct TestStruct { - a: Field, - b: Field, - } - - impl Deserialize<2> for TestStruct { - fn deserialize(fields: [Field; 2]) -> TestStruct { - TestStruct { a: fields[0], b: fields[1] } - } - } - - impl Serialize<2> for TestStruct { - fn serialize(self) -> [Field; 2] { - [self.a, self.b] - } - } - - #[test] - fn test_read() { - let slot = 7; - let written = TestStruct { a: 13, b: 42 }; - - OracleMock::mock("storageRead").with_params((slot, 2)).returns(written.serialize()); - - let read: TestStruct = public_storage::read(slot); - assert_eq(read.a, 13); - assert_eq(read.b, 42); - } - - #[test] - fn test_write() { - let slot = 7; - let to_write = TestStruct { a: 13, b: 42 }; - - let mock = OracleMock::mock("storageWrite").returns([0; 2]); // The return value is unused - - public_storage::write(slot, to_write); - assert_eq(mock.get_last_params(), (slot, to_write.serialize())); - } -} diff --git a/noir-projects/aztec-nr/aztec/src/state_vars/public_immutable.nr b/noir-projects/aztec-nr/aztec/src/state_vars/public_immutable.nr index b8f2c1d2dde..32955df7593 100644 --- a/noir-projects/aztec-nr/aztec/src/state_vars/public_immutable.nr +++ b/noir-projects/aztec-nr/aztec/src/state_vars/public_immutable.nr @@ -1,5 +1,5 @@ use crate::{ - context::{PublicContext, UnconstrainedContext}, 
oracle::{storage::{storage_read, storage_write}}, + context::{PublicContext, UnconstrainedContext}, oracle::storage::storage_read, state_vars::storage::Storage }; use dep::protocol_types::{constants::INITIALIZATION_SLOT_SEPARATOR, traits::{Deserialize, Serialize}}; @@ -37,32 +37,25 @@ impl PublicImmutable { // We check that the struct is not yet initialized by checking if the initialization slot is 0 let initialization_slot = INITIALIZATION_SLOT_SEPARATOR + self.storage_slot; - let fields_read: [Field; 1] = storage_read(initialization_slot); - assert(fields_read[0] == 0, "PublicImmutable already initialized"); + let init_field: Field = self.context.storage_read(initialization_slot); + assert(init_field == 0, "PublicImmutable already initialized"); // We populate the initialization slot with a non-zero value to indicate that the struct is initialized - storage_write(initialization_slot, [0xdead]); - - let fields_write = T::serialize(value); - storage_write(self.storage_slot, fields_write); + self.context.storage_write(initialization_slot, 0xdead); + self.context.storage_write(self.storage_slot, value); } // docs:end:public_immutable_struct_write // Note that we don't access the context, but we do call oracles that are only available in public // docs:start:public_immutable_struct_read pub fn read(self) -> T where T: Deserialize { - let fields = storage_read(self.storage_slot); - T::deserialize(fields) + self.context.storage_read(self.storage_slot) } // docs:end:public_immutable_struct_read } impl PublicImmutable { - pub fn read(self) -> T where T: Deserialize { - // This looks the same as the &mut PublicContext impl, but is actually very different. In public execution the - // storage read oracle gets transpiled to SLOAD opcodes, whereas in unconstrained execution the PXE returns - // historical data. - let fields = storage_read(self.storage_slot); - T::deserialize(fields) + unconstrained pub fn read(self) -> T where T: Deserialize { + storage_read(self.storage_slot) } } diff --git a/noir-projects/aztec-nr/aztec/src/state_vars/public_mutable.nr b/noir-projects/aztec-nr/aztec/src/state_vars/public_mutable.nr index 69a6a0f8a48..0d463051717 100644 --- a/noir-projects/aztec-nr/aztec/src/state_vars/public_mutable.nr +++ b/noir-projects/aztec-nr/aztec/src/state_vars/public_mutable.nr @@ -1,6 +1,5 @@ use crate::context::{PublicContext, UnconstrainedContext}; use crate::oracle::storage::storage_read; -use crate::oracle::storage::storage_write; use dep::protocol_types::traits::{Deserialize, Serialize}; use crate::state_vars::storage::Storage; @@ -29,25 +28,19 @@ impl PublicMutable { impl PublicMutable { // docs:start:public_mutable_struct_read pub fn read(self) -> T where T: Deserialize { - let fields = storage_read(self.storage_slot); - T::deserialize(fields) + self.context.storage_read(self.storage_slot) } // docs:end:public_mutable_struct_read // docs:start:public_mutable_struct_write pub fn write(self, value: T) where T: Serialize { - let fields = T::serialize(value); - storage_write(self.storage_slot, fields); + self.context.storage_write(self.storage_slot, value); } // docs:end:public_mutable_struct_write } impl PublicMutable { - pub fn read(self) -> T where T: Deserialize { - // This looks the same as the &mut PublicContext impl, but is actually very different. In public execution the - // storage read oracle gets transpiled to SLOAD opcodes, whereas in unconstrained execution the PXE returns - // historical data. 
- let fields = storage_read(self.storage_slot); - T::deserialize(fields) + unconstrained pub fn read(self) -> T where T: Deserialize { + storage_read(self.storage_slot) } } diff --git a/noir-projects/aztec-nr/aztec/src/state_vars/shared_immutable.nr b/noir-projects/aztec-nr/aztec/src/state_vars/shared_immutable.nr index 086c47aca56..f71448809d4 100644 --- a/noir-projects/aztec-nr/aztec/src/state_vars/shared_immutable.nr +++ b/noir-projects/aztec-nr/aztec/src/state_vars/shared_immutable.nr @@ -1,6 +1,6 @@ use crate::{ - context::{PrivateContext, PublicContext, UnconstrainedContext}, - oracle::{storage::{storage_read, storage_write}}, state_vars::storage::Storage + context::{PrivateContext, PublicContext, UnconstrainedContext}, oracle::storage::storage_read, + state_vars::storage::Storage }; use dep::protocol_types::{constants::INITIALIZATION_SLOT_SEPARATOR, traits::{Deserialize, Serialize}}; @@ -33,26 +33,22 @@ impl SharedImmutable { // We check that the struct is not yet initialized by checking if the initialization slot is 0 let initialization_slot = INITIALIZATION_SLOT_SEPARATOR + self.storage_slot; - let fields_read: [Field; 1] = storage_read(initialization_slot); - assert(fields_read[0] == 0, "SharedImmutable already initialized"); + let init_field: Field = self.context.storage_read(initialization_slot); + assert(init_field == 0, "SharedImmutable already initialized"); // We populate the initialization slot with a non-zero value to indicate that the struct is initialized - storage_write(initialization_slot, [0xdead]); - - let fields_write = T::serialize(value); - storage_write(self.storage_slot, fields_write); + self.context.storage_write(initialization_slot, 0xdead); + self.context.storage_write(self.storage_slot, value); } pub fn read_public(self) -> T where T: Deserialize { - let fields = storage_read(self.storage_slot); - T::deserialize(fields) + self.context.storage_read(self.storage_slot) } } impl SharedImmutable { - pub fn read_public(self) -> T where T: Deserialize { - let fields = storage_read(self.storage_slot); - T::deserialize(fields) + unconstrained pub fn read_public(self) -> T where T: Deserialize { + storage_read(self.storage_slot) } } diff --git a/noir-projects/aztec-nr/aztec/src/state_vars/shared_mutable/shared_mutable.nr b/noir-projects/aztec-nr/aztec/src/state_vars/shared_mutable/shared_mutable.nr index 91c864a03b2..197d1402986 100644 --- a/noir-projects/aztec-nr/aztec/src/state_vars/shared_mutable/shared_mutable.nr +++ b/noir-projects/aztec-nr/aztec/src/state_vars/shared_mutable/shared_mutable.nr @@ -1,7 +1,6 @@ use dep::protocol_types::{hash::pedersen_hash, traits::FromField}; use crate::context::{PrivateContext, PublicContext}; -use crate::public_storage; use crate::state_vars::{ storage::Storage, shared_mutable::{scheduled_value_change::ScheduledValueChange, scheduled_delay_change::ScheduledDelayChange} @@ -98,19 +97,19 @@ impl SharedMutable { } fn read_value_change(self) -> ScheduledValueChange { - public_storage::read(self.get_value_change_storage_slot()) + self.context.storage_read(self.get_value_change_storage_slot()) } fn read_delay_change(self) -> ScheduledDelayChange { - public_storage::read(self.get_delay_change_storage_slot()) + self.context.storage_read(self.get_delay_change_storage_slot()) } fn write_value_change(self, value_change: ScheduledValueChange) { - public_storage::write(self.get_value_change_storage_slot(), value_change); + self.context.storage_write(self.get_value_change_storage_slot(), value_change); } fn write_delay_change(self, 
delay_change: ScheduledDelayChange) { - public_storage::write(self.get_delay_change_storage_slot(), delay_change); + self.context.storage_write(self.get_delay_change_storage_slot(), delay_change); } } diff --git a/noir-projects/aztec-nr/aztec/src/state_vars/shared_mutable/shared_mutable_private_getter.nr b/noir-projects/aztec-nr/aztec/src/state_vars/shared_mutable/shared_mutable_private_getter.nr index 5a6ad9ad6fd..29ef525192c 100644 --- a/noir-projects/aztec-nr/aztec/src/state_vars/shared_mutable/shared_mutable_private_getter.nr +++ b/noir-projects/aztec-nr/aztec/src/state_vars/shared_mutable/shared_mutable_private_getter.nr @@ -1,7 +1,6 @@ use dep::protocol_types::{hash::pedersen_hash, traits::FromField, address::AztecAddress, header::Header}; use crate::context::PrivateContext; -use crate::public_storage; use crate::state_vars::{ storage::Storage, shared_mutable::{scheduled_delay_change::ScheduledDelayChange, scheduled_value_change::ScheduledValueChange} diff --git a/noir-projects/noir-contracts/contracts/token_contract/src/test/utils.nr b/noir-projects/noir-contracts/contracts/token_contract/src/test/utils.nr index 1801ddd7213..cb5b51339d6 100644 --- a/noir-projects/noir-contracts/contracts/token_contract/src/test/utils.nr +++ b/noir-projects/noir-contracts/contracts/token_contract/src/test/utils.nr @@ -75,8 +75,8 @@ pub fn check_public_balance(token_contract_address: AztecAddress, address: Aztec let balances_slot = Token::storage().public_balances.slot; let address_slot = derive_storage_slot_in_map(balances_slot, address); - let fields = storage_read(address_slot); - assert(U128::deserialize(fields).to_field() == address_amount, "Public balance is not correct"); + let amount: U128 = storage_read(address_slot); + assert(amount.to_field() == address_amount, "Public balance is not correct"); cheatcodes::set_contract_address(current_contract_address); } diff --git a/yarn-project/end-to-end/src/e2e_state_vars.test.ts b/yarn-project/end-to-end/src/e2e_state_vars.test.ts index d306d976ffb..400a25e9f36 100644 --- a/yarn-project/end-to-end/src/e2e_state_vars.test.ts +++ b/yarn-project/end-to-end/src/e2e_state_vars.test.ts @@ -94,7 +94,7 @@ describe('e2e_state_vars', () => { // Jest executes the tests sequentially and the first call to initialize_shared_immutable was executed // in the previous test, so the call below should fail. await expect(contract.methods.initialize_shared_immutable(1).prove()).rejects.toThrow( - "Assertion failed: SharedImmutable already initialized 'fields_read[0] == 0'", + 'Assertion failed: SharedImmutable already initialized', ); }); }); @@ -114,7 +114,7 @@ describe('e2e_state_vars', () => { // Jest executes the tests sequentially and the first call to initialize_public_immutable was executed // in the previous test, so the call below should fail. await expect(contract.methods.initialize_public_immutable(1).prove()).rejects.toThrow( - "Assertion failed: PublicImmutable already initialized 'fields_read[0] == 0'", + 'Assertion failed: PublicImmutable already initialized', ); }); }); From f2abb4e9deb05437666db9c27cd0d49c2ec9ac3d Mon Sep 17 00:00:00 2001 From: Aztec Bot <49558828+AztecBot@users.noreply.github.com> Date: Fri, 28 Jun 2024 15:40:28 -0400 Subject: [PATCH 15/29] feat: Sync from noir (#7223) Automated pull of development from the [noir](https://github.com/noir-lang/noir) programming language, a dependency of Aztec. 
BEGIN_COMMIT_OVERRIDE fix: Implement generic functions in the interpreter (https://github.com/noir-lang/noir/pull/5330) feat: Insert trait impls into the program from type annotations (https://github.com/noir-lang/noir/pull/5327) feat: Sync from aztec-packages (https://github.com/noir-lang/noir/pull/5340) chore: address TODO in `compat.nr` (https://github.com/noir-lang/noir/pull/5339) chore: use `push_err` more in elaborator (https://github.com/noir-lang/noir/pull/5336) chore: remove `is_unconstrained_fn` field from elaborator (https://github.com/noir-lang/noir/pull/5335) fix: Error when a local function is called in a comptime context (https://github.com/noir-lang/noir/pull/5334) feat: Add `map`, `fold`, `reduce`, `any`, and `all` for slices (https://github.com/noir-lang/noir/pull/5331) fix: Avoid panic in type system (https://github.com/noir-lang/noir/pull/5332) chore: remove panic for unimplemented trait dispatch (https://github.com/noir-lang/noir/pull/5329) feat: Use runtime loops for brillig array initialization (https://github.com/noir-lang/noir/pull/5243) fix: Fix tokenization of unquoted types in macros (https://github.com/noir-lang/noir/pull/5326) chore: replace cached `in_contract` with `in_contract()` method (https://github.com/noir-lang/noir/pull/5324) fix: fix usage of `#[abi(tag)]` attribute with elaborator (https://github.com/noir-lang/noir/pull/5298) fix: don't benchmark the "prove" command as it doesn't exist anymore (https://github.com/noir-lang/noir/pull/5323) feat(stdlib): Update stdlib to use explicit numeric generics (https://github.com/noir-lang/noir/pull/5306) feat: let `should_fail_with` check that the failure reason contains the expected message (https://github.com/noir-lang/noir/pull/5319) chore: bump `bb` to 0.43.0 (https://github.com/noir-lang/noir/pull/5321) feat(frontend): Explicit numeric generics and type kinds (https://github.com/noir-lang/noir/pull/5155) feat(frontend): Where clause on impl (https://github.com/noir-lang/noir/pull/5320) END_COMMIT_OVERRIDE --------- Co-authored-by: TomAFrench Co-authored-by: Tom French <15848336+TomAFrench@users.noreply.github.com> Co-authored-by: Maxim Vezenov --- .noir-sync-commit | 2 +- noir/bb-version | 2 +- ...te_note_hash_and_optionally_a_nullifier.rs | 8 +- .../src/transforms/contract_interface.rs | 14 +- .../src/transforms/note_interface.rs | 21 +- .../aztec_macros/src/transforms/storage.rs | 6 +- .../aztec_macros/src/utils/ast_utils.rs | 2 +- .../noirc_driver/tests/stdlib_warnings.rs | 2 +- .../src/brillig/brillig_gen/brillig_block.rs | 140 +++++- .../brillig_ir/codegen_control_flow.rs | 32 +- .../noirc_frontend/src/ast/expression.rs | 87 +++- .../compiler/noirc_frontend/src/ast/mod.rs | 12 +- .../noirc_frontend/src/ast/structure.rs | 2 +- .../compiler/noirc_frontend/src/ast/traits.rs | 5 +- .../src/elaborator/expressions.rs | 26 +- .../noirc_frontend/src/elaborator/mod.rs | 415 ++++++++++++---- .../noirc_frontend/src/elaborator/patterns.rs | 14 +- .../src/elaborator/statements.rs | 8 +- .../noirc_frontend/src/elaborator/traits.rs | 30 +- .../noirc_frontend/src/elaborator/types.rs | 190 ++++++-- .../noirc_frontend/src/hir/comptime/errors.rs | 21 +- .../src/hir/comptime/interpreter.rs | 106 ++--- .../src/hir/comptime/interpreter/builtin.rs | 36 +- .../noirc_frontend/src/hir/comptime/tests.rs | 50 +- .../noirc_frontend/src/hir/comptime/value.rs | 40 +- .../src/hir/def_collector/dc_crate.rs | 25 +- .../src/hir/def_collector/dc_mod.rs | 161 ++++--- .../src/hir/def_collector/errors.rs | 13 +- 
.../compiler/noirc_frontend/src/hir/mod.rs | 40 +- .../src/hir/resolution/errors.rs | 31 ++ .../src/hir/resolution/functions.rs | 9 +- .../src/hir/resolution/import.rs | 7 - .../src/hir/resolution/resolver.rs | 141 ++++-- .../src/hir/resolution/traits.rs | 41 +- .../src/hir/type_check/errors.rs | 9 + .../noirc_frontend/src/hir/type_check/expr.rs | 6 +- .../noirc_frontend/src/hir/type_check/mod.rs | 23 +- .../noirc_frontend/src/hir_def/function.rs | 8 +- .../noirc_frontend/src/hir_def/types.rs | 241 ++++++++-- .../noirc_frontend/src/lexer/token.rs | 19 +- .../src/monomorphization/mod.rs | 6 +- .../noirc_frontend/src/node_interner.rs | 69 +-- .../noirc_frontend/src/parser/errors.rs | 2 +- .../compiler/noirc_frontend/src/parser/mod.rs | 6 +- .../noirc_frontend/src/parser/parser.rs | 18 +- .../src/parser/parser/function.rs | 34 +- .../noirc_frontend/src/parser/parser/types.rs | 14 +- .../compiler/noirc_frontend/src/tests.rs | 442 +++++++++++++++++- noir/noir-repo/deny.toml | 2 +- .../docs/docs/how_to/how-to-oracles.md | 8 +- .../docs/noir/concepts/data_types/arrays.md | 2 +- .../docs/noir/concepts/data_types/slices.mdx | 105 +++++ .../docs/docs/noir/concepts/traits.md | 18 +- noir/noir-repo/docs/docs/tooling/testing.md | 21 +- noir/noir-repo/noir_stdlib/src/aes128.nr | 2 +- noir/noir-repo/noir_stdlib/src/array.nr | 4 +- noir/noir-repo/noir_stdlib/src/cmp.nr | 6 +- .../src/collections/bounded_vec.nr | 14 +- .../noir_stdlib/src/collections/map.nr | 8 +- noir/noir-repo/noir_stdlib/src/compat.nr | 15 +- noir/noir-repo/noir_stdlib/src/default.nr | 2 +- .../noir-repo/noir_stdlib/src/ec/montcurve.nr | 8 +- noir/noir-repo/noir_stdlib/src/ec/swcurve.nr | 8 +- noir/noir-repo/noir_stdlib/src/ec/tecurve.nr | 8 +- .../noir_stdlib/src/ecdsa_secp256k1.nr | 2 +- .../noir_stdlib/src/ecdsa_secp256r1.nr | 2 +- .../noir_stdlib/src/embedded_curve_ops.nr | 2 +- noir/noir-repo/noir_stdlib/src/hash.nr | 33 +- noir/noir-repo/noir_stdlib/src/hash/mimc.nr | 4 +- .../noir_stdlib/src/hash/poseidon.nr | 19 +- .../noir_stdlib/src/hash/poseidon/bn254.nr | 2 +- .../noir_stdlib/src/hash/poseidon2.nr | 4 +- noir/noir-repo/noir_stdlib/src/merkle.nr | 2 +- noir/noir-repo/noir_stdlib/src/option.nr | 2 +- noir/noir-repo/noir_stdlib/src/schnorr.nr | 2 +- noir/noir-repo/noir_stdlib/src/sha256.nr | 8 +- noir/noir-repo/noir_stdlib/src/sha512.nr | 2 +- noir/noir-repo/noir_stdlib/src/slice.nr | 51 +- noir/noir-repo/noir_stdlib/src/string.nr | 2 +- noir/noir-repo/noir_stdlib/src/test.nr | 4 +- noir/noir-repo/noir_stdlib/src/uint128.nr | 2 +- noir/noir-repo/scripts/install_bb.sh | 2 +- .../bench_eddsa_poseidon/src/main.nr | 4 +- .../bench_poseidon_hash_100/src/main.nr | 2 +- .../bench_poseidon_hash_30/src/main.nr | 2 +- .../benchmarks/bench_sha256_100/src/main.nr | 3 +- .../benchmarks/bench_sha256_30/src/main.nr | 3 +- .../negate_unsigned/src/main.nr | 1 - .../non_comptime_local_fn_call/Nargo.toml | 7 + .../non_comptime_local_fn_call/src/main.nr | 9 + .../orphaned_trait_impl/src/main.nr | 2 +- .../restricted_bit_sizes/src/main.nr | 2 + .../turbofish_generic_count/src/main.nr | 1 - .../abi_attribute/Nargo.toml | 6 + .../abi_attribute/src/main.nr | 9 + .../derive_impl/src/main.nr | 9 +- .../impl_where_clause/Nargo.toml | 7 + .../impl_where_clause/src/main.nr | 34 ++ .../intrinsic_die/src/main.nr | 1 - .../numeric_generics/src/main.nr | 1 - .../numeric_generics_explicit/Nargo.toml | 7 + .../numeric_generics_explicit/src/main.nr | 111 +++++ .../reexports/src/main.nr | 2 +- .../regression_4635/src/main.nr | 2 +- .../trait_generics/src/main.nr | 
2 +- .../Nargo.toml | 2 +- .../src/main.nr | 0 .../workspace_reexport_bug/binary/src/main.nr | 2 +- .../workspace_reexport_bug/library/src/lib.nr | 2 +- .../div_by_zero_numerator_witness/src/main.nr | 1 - .../diamond_deps_0/src/main.nr | 6 +- .../execution_success/hashmap/src/main.nr | 1 + .../regression_4088/src/main.nr | 4 +- .../regression_4124/src/main.nr | 4 +- .../execution_success/slices/src/main.nr | 8 + .../traits_in_crates_1/crate1/src/lib.nr | 2 +- .../traits_in_crates_1/src/main.nr | 2 +- .../traits_in_crates_2/crate2/src/lib.nr | 2 +- .../traits_in_crates_2/src/main.nr | 2 +- .../verify_honk_proof/Nargo.toml | 3 +- .../should_fail_mismatch/src/main.nr | 15 +- .../comptime_globals/src/main.nr | 2 +- .../should_fail_with_matches/src/main.nr | 10 + .../test_libraries/diamond_deps_1/src/lib.nr | 2 +- .../test_libraries/reexporting_lib/src/lib.nr | 4 +- noir/noir-repo/tooling/acvm_cli/Cargo.toml | 2 +- noir/noir-repo/tooling/nargo/src/ops/test.rs | 2 +- noir/noir-repo/tooling/nargo_cli/Cargo.toml | 9 +- .../tooling/nargo_cli/benches/criterion.rs | 8 +- noir/noir-repo/tooling/nargo_cli/build.rs | 9 +- .../tooling/nargo_cli/tests/stdlib-tests.rs | 2 +- noir/noir-repo/tooling/nargo_fmt/src/items.rs | 3 +- .../tooling/nargo_fmt/src/rewrite/typ.rs | 4 + noir/noir-repo/tooling/nargo_fmt/src/utils.rs | 23 +- .../tooling/nargo_fmt/src/visitor/item.rs | 4 +- .../nargo_fmt/tests/expected/contract.nr | 4 +- .../tooling/nargo_fmt/tests/expected/fn.nr | 4 + .../tooling/nargo_fmt/tests/expected/impl.nr | 6 + .../tooling/nargo_fmt/tests/expected/let.nr | 8 +- .../tooling/nargo_fmt/tests/input/contract.nr | 4 +- .../tooling/nargo_fmt/tests/input/fn.nr | 5 + .../tooling/nargo_fmt/tests/input/impl.nr | 6 + .../tooling/nargo_fmt/tests/input/let.nr | 8 +- .../src/backend.ts | 3 +- 144 files changed, 2667 insertions(+), 760 deletions(-) create mode 100644 noir/noir-repo/test_programs/compile_failure/non_comptime_local_fn_call/Nargo.toml create mode 100644 noir/noir-repo/test_programs/compile_failure/non_comptime_local_fn_call/src/main.nr create mode 100644 noir/noir-repo/test_programs/compile_success_contract/abi_attribute/Nargo.toml create mode 100644 noir/noir-repo/test_programs/compile_success_contract/abi_attribute/src/main.nr create mode 100644 noir/noir-repo/test_programs/compile_success_empty/impl_where_clause/Nargo.toml create mode 100644 noir/noir-repo/test_programs/compile_success_empty/impl_where_clause/src/main.nr create mode 100644 noir/noir-repo/test_programs/compile_success_empty/numeric_generics_explicit/Nargo.toml create mode 100644 noir/noir-repo/test_programs/compile_success_empty/numeric_generics_explicit/src/main.nr rename noir/noir-repo/test_programs/compile_success_empty/{impl_with_where_clause => trait_impl_with_where_clause}/Nargo.toml (58%) rename noir/noir-repo/test_programs/compile_success_empty/{impl_with_where_clause => trait_impl_with_where_clause}/src/main.nr (100%) diff --git a/.noir-sync-commit b/.noir-sync-commit index 2df547e43b7..d185957d716 100644 --- a/.noir-sync-commit +++ b/.noir-sync-commit @@ -1 +1 @@ -f2f8ecc833d4725d0829f9c339389c90d1a4fbcd +d8b9870a991b724ec337b58380b50464ba274d8a diff --git a/noir/bb-version b/noir/bb-version index 72a8a6313bb..8298bb08b2d 100644 --- a/noir/bb-version +++ b/noir/bb-version @@ -1 +1 @@ -0.41.0 +0.43.0 diff --git a/noir/noir-repo/aztec_macros/src/transforms/compute_note_hash_and_optionally_a_nullifier.rs b/noir/noir-repo/aztec_macros/src/transforms/compute_note_hash_and_optionally_a_nullifier.rs index 
30c0f63a2d4..40fde39a06f 100644 --- a/noir/noir-repo/aztec_macros/src/transforms/compute_note_hash_and_optionally_a_nullifier.rs +++ b/noir/noir-repo/aztec_macros/src/transforms/compute_note_hash_and_optionally_a_nullifier.rs @@ -176,7 +176,7 @@ fn generate_compute_note_hash_and_optionally_a_nullifier_source( format!( " unconstrained fn compute_note_hash_and_optionally_a_nullifier( - contract_address: dep::aztec::protocol_types::address::AztecAddress, + contract_address: aztec::protocol_types::address::AztecAddress, nonce: Field, storage_slot: Field, note_type_id: Field, @@ -194,7 +194,7 @@ fn generate_compute_note_hash_and_optionally_a_nullifier_source( let if_statements: Vec = note_types.iter().map(|note_type| format!( "if (note_type_id == {0}::get_note_type_id()) {{ - dep::aztec::note::utils::compute_note_hash_and_optionally_a_nullifier({0}::deserialize_content, note_header, compute_nullifier, serialized_note) + aztec::note::utils::compute_note_hash_and_optionally_a_nullifier({0}::deserialize_content, note_header, compute_nullifier, serialized_note) }}" , note_type)).collect(); @@ -208,14 +208,14 @@ fn generate_compute_note_hash_and_optionally_a_nullifier_source( format!( " unconstrained fn compute_note_hash_and_optionally_a_nullifier( - contract_address: dep::aztec::protocol_types::address::AztecAddress, + contract_address: aztec::protocol_types::address::AztecAddress, nonce: Field, storage_slot: Field, note_type_id: Field, compute_nullifier: bool, serialized_note: [Field; {}], ) -> pub [Field; 4] {{ - let note_header = dep::aztec::prelude::NoteHeader::new(contract_address, nonce, storage_slot); + let note_header = aztec::prelude::NoteHeader::new(contract_address, nonce, storage_slot); {} }}", diff --git a/noir/noir-repo/aztec_macros/src/transforms/contract_interface.rs b/noir/noir-repo/aztec_macros/src/transforms/contract_interface.rs index ed451fdd998..b9323644379 100644 --- a/noir/noir-repo/aztec_macros/src/transforms/contract_interface.rs +++ b/noir/noir-repo/aztec_macros/src/transforms/contract_interface.rs @@ -30,8 +30,8 @@ use crate::utils::{ // for i in 0..third_arg.len() { // args_acc = args_acc.append(third_arg[i].serialize().as_slice()); // } -// let args_hash = dep::aztec::hash::hash_args(args_acc); -// assert(args_hash == dep::aztec::oracle::arguments::pack_arguments(args_acc)); +// let args_hash = aztec::hash::hash_args(args_acc); +// assert(args_hash == aztec::oracle::arguments::pack_arguments(args_acc)); // PublicCallInterface { // target_contract: self.target_contract, // selector: FunctionSelector::from_signature("SELECTOR_PLACEHOLDER"), @@ -137,8 +137,8 @@ pub fn stub_function(aztec_visibility: &str, func: &NoirFunction, is_static_call format!( "let mut args_acc: [Field] = &[]; {} - let args_hash = dep::aztec::hash::hash_args(args_acc); - assert(args_hash == dep::aztec::oracle::arguments::pack_arguments(args_acc));", + let args_hash = aztec::hash::hash_args(args_acc); + assert(args_hash == aztec::oracle::arguments::pack_arguments(args_acc));", call_args ) } else { @@ -234,14 +234,14 @@ pub fn generate_contract_interface( let contract_interface = format!( " struct {0} {{ - target_contract: dep::aztec::protocol_types::address::AztecAddress + target_contract: aztec::protocol_types::address::AztecAddress }} impl {0} {{ {1} pub fn at( - target_contract: dep::aztec::protocol_types::address::AztecAddress + target_contract: aztec::protocol_types::address::AztecAddress ) -> Self {{ Self {{ target_contract }} }} @@ -255,7 +255,7 @@ pub fn generate_contract_interface( 
#[contract_library_method] pub fn at( - target_contract: dep::aztec::protocol_types::address::AztecAddress + target_contract: aztec::protocol_types::address::AztecAddress ) -> {0} {{ {0} {{ target_contract }} }} diff --git a/noir/noir-repo/aztec_macros/src/transforms/note_interface.rs b/noir/noir-repo/aztec_macros/src/transforms/note_interface.rs index 3ace22a89c3..b6d837d9384 100644 --- a/noir/noir-repo/aztec_macros/src/transforms/note_interface.rs +++ b/noir/noir-repo/aztec_macros/src/transforms/note_interface.rs @@ -72,6 +72,7 @@ pub fn generate_note_interface_impl(module: &mut SortedModule) -> Result<(), Azt type_span: note_struct.name.span(), generics: vec![], methods: vec![], + where_clause: vec![], }; module.impls.push(default_impl.clone()); module.impls.last_mut().unwrap() @@ -271,7 +272,7 @@ fn generate_note_get_header( ) -> Result { let function_source = format!( " - fn get_header(note: {}) -> dep::aztec::note::note_header::NoteHeader {{ + fn get_header(note: {}) -> aztec::note::note_header::NoteHeader {{ note.{} }} ", @@ -302,7 +303,7 @@ fn generate_note_set_header( ) -> Result { let function_source = format!( " - fn set_header(self: &mut {}, header: dep::aztec::note::note_header::NoteHeader) {{ + fn set_header(self: &mut {}, header: aztec::note::note_header::NoteHeader) {{ self.{} = header; }} ", @@ -492,7 +493,7 @@ fn generate_note_properties_fn( // Automatically generate the method to compute the note's content hash as: // fn compute_note_content_hash(self: NoteType) -> Field { -// dep::aztec::hash::pedersen_hash(self.serialize_content(), dep::aztec::protocol_types::constants::GENERATOR_INDEX__NOTE_CONTENT_HASH) +// aztec::hash::pedersen_hash(self.serialize_content(), aztec::protocol_types::constants::GENERATOR_INDEX__NOTE_CONTENT_HASH) // } // fn generate_compute_note_content_hash( @@ -502,7 +503,7 @@ fn generate_compute_note_content_hash( let function_source = format!( " fn compute_note_content_hash(self: {}) -> Field {{ - dep::aztec::hash::pedersen_hash(self.serialize_content(), dep::aztec::protocol_types::constants::GENERATOR_INDEX__NOTE_CONTENT_HASH) + aztec::hash::pedersen_hash(self.serialize_content(), aztec::protocol_types::constants::GENERATOR_INDEX__NOTE_CONTENT_HASH) }} ", note_type @@ -561,10 +562,7 @@ fn generate_note_properties_struct_source( .iter() .filter_map(|(field_name, _)| { if field_name != note_header_field_name { - Some(format!( - "{}: dep::aztec::note::note_getter_options::PropertySelector", - field_name - )) + Some(format!("{}: aztec::note::note_getter_options::PropertySelector", field_name)) } else { None } @@ -592,7 +590,7 @@ fn generate_note_properties_fn_source( .filter_map(|(index, (field_name, _))| { if field_name != note_header_field_name { Some(format!( - "{}: dep::aztec::note::note_getter_options::PropertySelector {{ index: {}, offset: 0, length: 32 }}", + "{}: aztec::note::note_getter_options::PropertySelector {{ index: {}, offset: 0, length: 32 }}", field_name, index )) @@ -669,10 +667,7 @@ fn generate_note_deserialize_content_source( ) } } else { - format!( - "{}: dep::aztec::note::note_header::NoteHeader::empty()", - note_header_field_name - ) + format!("{}: aztec::note::note_header::NoteHeader::empty()", note_header_field_name) } }) .collect::>() diff --git a/noir/noir-repo/aztec_macros/src/transforms/storage.rs b/noir/noir-repo/aztec_macros/src/transforms/storage.rs index bac87502c7d..c302dd87aa5 100644 --- a/noir/noir-repo/aztec_macros/src/transforms/storage.rs +++ b/noir/noir-repo/aztec_macros/src/transforms/storage.rs @@ -91,7 +91,7 
@@ pub fn inject_context_in_storage(module: &mut SortedModule) -> Result<(), AztecM r#struct.attributes.iter().any(|attr| is_custom_attribute(attr, "aztec(storage)")) }) .unwrap(); - storage_struct.generics.push(ident("Context")); + storage_struct.generics.push(ident("Context").into()); storage_struct .fields .iter_mut() @@ -243,9 +243,11 @@ pub fn generate_storage_implementation( span: Some(Span::default()), }, type_span: Span::default(), - generics: vec![generic_context_ident], + generics: vec![generic_context_ident.into()], methods: vec![(init, Span::default())], + + where_clause: vec![], }; module.impls.push(storage_impl); diff --git a/noir/noir-repo/aztec_macros/src/utils/ast_utils.rs b/noir/noir-repo/aztec_macros/src/utils/ast_utils.rs index 48b3b25747b..4467c4bca4b 100644 --- a/noir/noir-repo/aztec_macros/src/utils/ast_utils.rs +++ b/noir/noir-repo/aztec_macros/src/utils/ast_utils.rs @@ -161,7 +161,7 @@ macro_rules! chained_dep { ( $base:expr $(, $tail:expr)* ) => { { let mut base_path = ident_path($base); - base_path.kind = PathKind::Dep; + base_path.kind = PathKind::Plain; $( base_path.segments.push(ident($tail)); )* diff --git a/noir/noir-repo/compiler/noirc_driver/tests/stdlib_warnings.rs b/noir/noir-repo/compiler/noirc_driver/tests/stdlib_warnings.rs index 327c8daad06..47ce893d202 100644 --- a/noir/noir-repo/compiler/noirc_driver/tests/stdlib_warnings.rs +++ b/noir/noir-repo/compiler/noirc_driver/tests/stdlib_warnings.rs @@ -27,7 +27,7 @@ fn stdlib_does_not_produce_constant_warnings() -> Result<(), ErrorsAndWarnings> let ((), warnings) = noirc_driver::check_crate(&mut context, root_crate_id, false, false, false)?; - assert_eq!(warnings, Vec::new(), "stdlib is producing warnings"); + assert_eq!(warnings, Vec::new(), "stdlib is producing {} warnings", warnings.len()); Ok(()) } diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_block.rs b/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_block.rs index b441e8be3eb..f10ff834f6c 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_block.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_block.rs @@ -22,6 +22,7 @@ use acvm::{acir::AcirField, FieldElement}; use fxhash::{FxHashMap as HashMap, FxHashSet as HashSet}; use iter_extended::vecmap; use num_bigint::BigUint; +use std::rc::Rc; use super::brillig_black_box::convert_black_box_call; use super::brillig_block_variables::BlockVariables; @@ -1629,7 +1630,7 @@ impl<'block> BrilligBlock<'block> { new_variable } } - Value::Array { array, .. 
} => { + Value::Array { array, typ } => { if let Some(variable) = self.variables.get_constant(value_id, dfg) { variable } else { @@ -1664,23 +1665,7 @@ impl<'block> BrilligBlock<'block> { // Write the items - // Allocate a register for the iterator - let iterator_register = - self.brillig_context.make_usize_constant_instruction(0_usize.into()); - - for element_id in array.iter() { - let element_variable = self.convert_ssa_value(*element_id, dfg); - // Store the item in memory - self.store_variable_in_array(pointer, iterator_register, element_variable); - // Increment the iterator - self.brillig_context.codegen_usize_op_in_place( - iterator_register.address, - BrilligBinaryOp::Add, - 1, - ); - } - - self.brillig_context.deallocate_single_addr(iterator_register); + self.initialize_constant_array(array, typ, dfg, pointer); new_variable } @@ -1705,6 +1690,125 @@ impl<'block> BrilligBlock<'block> { } } + fn initialize_constant_array( + &mut self, + data: &im::Vector, + typ: &Type, + dfg: &DataFlowGraph, + pointer: MemoryAddress, + ) { + if data.is_empty() { + return; + } + let item_types = typ.clone().element_types(); + + // Find out if we are repeating the same item over and over + let first_item = data.iter().take(item_types.len()).copied().collect(); + let mut is_repeating = true; + + for item_index in (item_types.len()..data.len()).step_by(item_types.len()) { + let item: Vec<_> = (0..item_types.len()).map(|i| data[item_index + i]).collect(); + if first_item != item { + is_repeating = false; + break; + } + } + + // If all the items are single address, and all have the same initial value, we can initialize the array in a runtime loop. + // Since the cost in instructions for a runtime loop is in the order of magnitude of 10, we only do this if the item_count is bigger than that. + let item_count = data.len() / item_types.len(); + + if item_count > 10 + && is_repeating + && item_types.iter().all(|typ| matches!(typ, Type::Numeric(_))) + { + self.initialize_constant_array_runtime( + item_types, first_item, item_count, pointer, dfg, + ); + } else { + self.initialize_constant_array_comptime(data, dfg, pointer); + } + } + + fn initialize_constant_array_runtime( + &mut self, + item_types: Rc>, + item_to_repeat: Vec, + item_count: usize, + pointer: MemoryAddress, + dfg: &DataFlowGraph, + ) { + let mut subitem_to_repeat_variables = Vec::with_capacity(item_types.len()); + for subitem_id in item_to_repeat.into_iter() { + subitem_to_repeat_variables.push(self.convert_ssa_value(subitem_id, dfg)); + } + + let data_length_variable = self + .brillig_context + .make_usize_constant_instruction((item_count * item_types.len()).into()); + + // If this is an array with complex subitems, we need a custom step in the loop to write all the subitems while iterating. 
+ if item_types.len() > 1 { + let step_variable = + self.brillig_context.make_usize_constant_instruction(item_types.len().into()); + + let subitem_pointer = + SingleAddrVariable::new_usize(self.brillig_context.allocate_register()); + + let initializer_fn = |ctx: &mut BrilligContext<_>, iterator: SingleAddrVariable| { + ctx.mov_instruction(subitem_pointer.address, iterator.address); + for subitem in subitem_to_repeat_variables.into_iter() { + Self::store_variable_in_array_with_ctx(ctx, pointer, subitem_pointer, subitem); + ctx.codegen_usize_op_in_place(subitem_pointer.address, BrilligBinaryOp::Add, 1); + } + }; + + self.brillig_context.codegen_loop_with_bound_and_step( + data_length_variable.address, + step_variable.address, + initializer_fn, + ); + + self.brillig_context.deallocate_single_addr(step_variable); + self.brillig_context.deallocate_single_addr(subitem_pointer); + } else { + let subitem = subitem_to_repeat_variables.into_iter().next().unwrap(); + + let initializer_fn = |ctx: &mut _, iterator_register| { + Self::store_variable_in_array_with_ctx(ctx, pointer, iterator_register, subitem); + }; + + self.brillig_context.codegen_loop(data_length_variable.address, initializer_fn); + } + + self.brillig_context.deallocate_single_addr(data_length_variable); + } + + fn initialize_constant_array_comptime( + &mut self, + data: &im::Vector>, + dfg: &DataFlowGraph, + pointer: MemoryAddress, + ) { + // Allocate a register for the iterator + let iterator_register = + self.brillig_context.make_usize_constant_instruction(0_usize.into()); + + for element_id in data.iter() { + let element_variable = self.convert_ssa_value(*element_id, dfg); + // Store the item in memory + self.store_variable_in_array(pointer, iterator_register, element_variable); + // Increment the iterator + self.brillig_context.codegen_usize_op_in_place( + iterator_register.address, + BrilligBinaryOp::Add, + 1, + ); + } + + self.brillig_context.deallocate_single_addr(iterator_register); + } + /// Converts an SSA `ValueId` into a `MemoryAddress`. Initializes if necessary. fn convert_ssa_single_addr_value( &mut self, diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_ir/codegen_control_flow.rs b/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_ir/codegen_control_flow.rs index 10badcd7308..5741089a497 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_ir/codegen_control_flow.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_ir/codegen_control_flow.rs @@ -38,11 +38,12 @@ impl BrilligContext { self.stop_instruction(); } - /// This codegen will issue a loop that will iterate iteration_count times + /// This codegen will issue a loop do for (let iterator_register = 0; i < loop_bound; i += step) /// The body of the loop should be issued by the caller in the on_iteration closure. 
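For context on the constant-array initialization above: the new runtime path only applies when a literal consists of more than 10 repetitions of the same numeric item, in which case the array is filled by a single loop instead of one store per element. A rough Noir sketch of a literal that would take this path (the function name is illustrative):

    // Compiled to Brillig, this repeat literal can now be initialized by a runtime loop
    // rather than 64 individual store instructions.
    unconstrained fn zeroes() -> [Field; 64] {
        [0; 64]
    }
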
- pub(crate) fn codegen_loop( + pub(crate) fn codegen_loop_with_bound_and_step( &mut self, - iteration_count: MemoryAddress, + loop_bound: MemoryAddress, + step: MemoryAddress, on_iteration: impl FnOnce(&mut BrilligContext, SingleAddrVariable), ) { let iterator_register = self.make_usize_constant_instruction(0_u128.into()); @@ -52,13 +53,13 @@ impl BrilligContext { // Loop body - // Check if iterator < iteration_count + // Check if iterator < loop_bound let iterator_less_than_iterations = SingleAddrVariable { address: self.allocate_register(), bit_size: 1 }; self.memory_op_instruction( iterator_register.address, - iteration_count, + loop_bound, iterator_less_than_iterations.address, BrilligBinaryOp::LessThan, ); @@ -72,8 +73,13 @@ impl BrilligContext { // Call the on iteration function on_iteration(self, iterator_register); - // Increment the iterator register - self.codegen_usize_op_in_place(iterator_register.address, BrilligBinaryOp::Add, 1); + // Add step to the iterator register + self.memory_op_instruction( + iterator_register.address, + step, + iterator_register.address, + BrilligBinaryOp::Add, + ); self.jump_instruction(loop_label); @@ -85,6 +91,18 @@ impl BrilligContext { self.deallocate_single_addr(iterator_register); } + /// This codegen will issue a loop that will iterate iteration_count times + /// The body of the loop should be issued by the caller in the on_iteration closure. + pub(crate) fn codegen_loop( + &mut self, + iteration_count: MemoryAddress, + on_iteration: impl FnOnce(&mut BrilligContext, SingleAddrVariable), + ) { + let step = self.make_usize_constant_instruction(1_u128.into()); + self.codegen_loop_with_bound_and_step(iteration_count, step.address, on_iteration); + self.deallocate_single_addr(step); + } + /// This codegen will issue an if-then branch that will check if the condition is true /// and if so, perform the instructions given in `f(self, true)` and otherwise perform the /// instructions given in `f(self, false)`. A boolean is passed instead of two separate diff --git a/noir/noir-repo/compiler/noirc_frontend/src/ast/expression.rs b/noir/noir-repo/compiler/noirc_frontend/src/ast/expression.rs index 2657869a9d7..ae973385182 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/ast/expression.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/ast/expression.rs @@ -5,9 +5,11 @@ use crate::ast::{ Ident, ItemVisibility, Path, Pattern, Recoverable, Statement, StatementKind, UnresolvedTraitConstraint, UnresolvedType, UnresolvedTypeData, Visibility, }; +use crate::hir::def_collector::errors::DefCollectorErrorKind; use crate::macros_api::StructId; use crate::node_interner::ExprId; use crate::token::{Attributes, Token, Tokens}; +use crate::{Kind, Type}; use acvm::{acir::AcirField, FieldElement}; use iter_extended::vecmap; use noirc_errors::{Span, Spanned}; @@ -46,7 +48,72 @@ pub enum ExpressionKind { /// A Vec of unresolved names for type variables. /// For `fn foo(...)` this corresponds to vec!["A", "B"]. 
-pub type UnresolvedGenerics = Vec; +pub type UnresolvedGenerics = Vec; + +#[derive(Debug, PartialEq, Eq, Clone, Hash)] +pub enum UnresolvedGeneric { + Variable(Ident), + Numeric { ident: Ident, typ: UnresolvedType }, +} + +impl UnresolvedGeneric { + pub fn span(&self) -> Span { + match self { + UnresolvedGeneric::Variable(ident) => ident.0.span(), + UnresolvedGeneric::Numeric { ident, typ } => { + ident.0.span().merge(typ.span.unwrap_or_default()) + } + } + } + + pub fn kind(&self) -> Result { + match self { + UnresolvedGeneric::Variable(_) => Ok(Kind::Normal), + UnresolvedGeneric::Numeric { typ, .. } => { + let typ = self.resolve_numeric_kind_type(typ)?; + Ok(Kind::Numeric(Box::new(typ))) + } + } + } + + fn resolve_numeric_kind_type( + &self, + typ: &UnresolvedType, + ) -> Result { + use crate::ast::UnresolvedTypeData::{FieldElement, Integer}; + + match typ.typ { + FieldElement => Ok(Type::FieldElement), + Integer(sign, bits) => Ok(Type::Integer(sign, bits)), + // Only fields and integers are supported for numeric kinds + _ => Err(DefCollectorErrorKind::UnsupportedNumericGenericType { + ident: self.ident().clone(), + typ: typ.typ.clone(), + }), + } + } + + pub(crate) fn ident(&self) -> &Ident { + match self { + UnresolvedGeneric::Variable(ident) | UnresolvedGeneric::Numeric { ident, .. } => ident, + } + } +} + +impl Display for UnresolvedGeneric { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + UnresolvedGeneric::Variable(ident) => write!(f, "{ident}"), + UnresolvedGeneric::Numeric { ident, typ } => write!(f, "let {ident}: {typ}"), + } + } +} + +impl From for UnresolvedGeneric { + fn from(value: Ident) -> Self { + UnresolvedGeneric::Variable(value) + } +} impl ExpressionKind { pub fn into_path(self) -> Option { @@ -757,22 +824,32 @@ impl Display for FunctionDefinition { writeln!(f, "{:?}", self.attributes)?; let parameters = vecmap(&self.parameters, |Param { visibility, pattern, typ, span: _ }| { - format!("{pattern}: {visibility} {typ}") + if *visibility == Visibility::Public { + format!("{pattern}: {visibility} {typ}") + } else { + format!("{pattern}: {typ}") + } }); let where_clause = vecmap(&self.where_clause, ToString::to_string); let where_clause_str = if !where_clause.is_empty() { - format!("where {}", where_clause.join(", ")) + format!(" where {}", where_clause.join(", ")) } else { "".to_string() }; + let return_type = if matches!(&self.return_type, FunctionReturnType::Default(_)) { + String::new() + } else { + format!(" -> {}", self.return_type) + }; + write!( f, - "fn {}({}) -> {} {} {}", + "fn {}({}){}{} {}", self.name, parameters.join(", "), - self.return_type, + return_type, where_clause_str, self.body ) diff --git a/noir/noir-repo/compiler/noirc_frontend/src/ast/mod.rs b/noir/noir-repo/compiler/noirc_frontend/src/ast/mod.rs index a1ae349b537..dfe4258744a 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/ast/mod.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/ast/mod.rs @@ -22,6 +22,7 @@ pub use traits::*; pub use type_alias::*; use crate::{ + node_interner::QuotedTypeId, parser::{ParserError, ParserErrorReason}, token::IntType, BinaryTypeOperator, @@ -119,6 +120,10 @@ pub enum UnresolvedTypeData { // The type of quoted code for metaprogramming Quoted(crate::QuotedType), + /// An already resolved type. These can only be parsed if they were present in the token stream + /// as a result of being spliced into a macro's token stream input. 
+ Resolved(QuotedTypeId), + Unspecified, // This is for when the user declares a variable without specifying it's type Error, } @@ -146,7 +151,7 @@ pub struct UnaryRhsMethodCall { } /// The precursor to TypeExpression, this is the type that the parser allows -/// to be used in the length position of an array type. Only constants, variables, +/// to be used in the length position of an array type. Only constant integers, variables, /// and numeric binary operators are allowed here. #[derive(Debug, PartialEq, Eq, Clone, Hash)] pub enum UnresolvedTypeExpression { @@ -221,6 +226,7 @@ impl std::fmt::Display for UnresolvedTypeData { Error => write!(f, "error"), Unspecified => write!(f, "unspecified"), Parenthesized(typ) => write!(f, "({typ})"), + Resolved(_) => write!(f, "(resolved type)"), } } } @@ -259,6 +265,10 @@ impl UnresolvedType { pub fn unspecified() -> UnresolvedType { UnresolvedType { typ: UnresolvedTypeData::Unspecified, span: None } } + + pub(crate) fn is_type_expression(&self) -> bool { + matches!(&self.typ, UnresolvedTypeData::Expression(_)) + } } impl UnresolvedTypeData { diff --git a/noir/noir-repo/compiler/noirc_frontend/src/ast/structure.rs b/noir/noir-repo/compiler/noirc_frontend/src/ast/structure.rs index bda6b8c0b11..bb2d89841b9 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/ast/structure.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/ast/structure.rs @@ -20,7 +20,7 @@ impl NoirStruct { pub fn new( name: Ident, attributes: Vec, - generics: Vec, + generics: UnresolvedGenerics, fields: Vec<(Ident, UnresolvedType)>, span: Span, ) -> NoirStruct { diff --git a/noir/noir-repo/compiler/noirc_frontend/src/ast/traits.rs b/noir/noir-repo/compiler/noirc_frontend/src/ast/traits.rs index 772675723b5..b1b14e3f657 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/ast/traits.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/ast/traits.rs @@ -14,7 +14,7 @@ use crate::node_interner::TraitId; #[derive(Clone, Debug)] pub struct NoirTrait { pub name: Ident, - pub generics: Vec, + pub generics: UnresolvedGenerics, pub where_clause: Vec, pub span: Span, pub items: Vec, @@ -26,7 +26,7 @@ pub struct NoirTrait { pub enum TraitItem { Function { name: Ident, - generics: Vec, + generics: UnresolvedGenerics, parameters: Vec<(Ident, UnresolvedType)>, return_type: FunctionReturnType, where_clause: Vec, @@ -49,6 +49,7 @@ pub struct TypeImpl { pub object_type: UnresolvedType, pub type_span: Span, pub generics: UnresolvedGenerics, + pub where_clause: Vec, pub methods: Vec<(NoirFunction, Span)>, } diff --git a/noir/noir-repo/compiler/noirc_frontend/src/elaborator/expressions.rs b/noir/noir-repo/compiler/noirc_frontend/src/elaborator/expressions.rs index 9d864a0de91..7d304990dd8 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/elaborator/expressions.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/elaborator/expressions.rs @@ -29,7 +29,7 @@ use crate::{ }, node_interner::{DefinitionKind, ExprId, FuncId}, token::Tokens, - QuotedType, Shared, StructType, Type, + Kind, QuotedType, Shared, StructType, Type, }; use super::Elaborator; @@ -52,7 +52,20 @@ impl<'context> Elaborator<'context> { ExpressionKind::If(if_) => self.elaborate_if(*if_), ExpressionKind::Variable(variable, generics) => { let generics = generics.map(|option_inner| { - option_inner.into_iter().map(|generic| self.resolve_type(generic)).collect() + option_inner + .into_iter() + .map(|generic| { + // All type expressions should resolve to a `Type::Constant` + if generic.is_type_expression() { + self.resolve_type_inner( 
+ generic, + &Kind::Numeric(Box::new(Type::default_int_type())), + ) + } else { + self.resolve_type(generic) + } + }) + .collect() }); return self.elaborate_variable(variable, generics); } @@ -651,7 +664,8 @@ impl<'context> Elaborator<'context> { fn elaborate_comptime_block(&mut self, block: BlockExpression, span: Span) -> (ExprId, Type) { let (block, _typ) = self.elaborate_block_expression(block); - let mut interpreter = Interpreter::new(self.interner, &mut self.comptime_scopes); + let mut interpreter = + Interpreter::new(self.interner, &mut self.comptime_scopes, self.crate_id); let value = interpreter.evaluate_block(block); self.inline_comptime_value(value, span) } @@ -724,7 +738,8 @@ impl<'context> Elaborator<'context> { } }; - let mut interpreter = Interpreter::new(self.interner, &mut self.comptime_scopes); + let mut interpreter = + Interpreter::new(self.interner, &mut self.comptime_scopes, self.crate_id); let mut comptime_args = Vec::new(); let mut errors = Vec::new(); @@ -744,7 +759,8 @@ impl<'context> Elaborator<'context> { return None; } - let result = interpreter.call_function(function, comptime_args, location); + let bindings = interpreter.interner.get_instantiation_bindings(func).clone(); + let result = interpreter.call_function(function, comptime_args, bindings, location); let (expr_id, typ) = self.inline_comptime_value(result, location.span); Some((self.interner.expression(&expr_id), typ)) } diff --git a/noir/noir-repo/compiler/noirc_frontend/src/elaborator/mod.rs b/noir/noir-repo/compiler/noirc_frontend/src/elaborator/mod.rs index dc99ceae2f0..ae8237706cc 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/elaborator/mod.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/elaborator/mod.rs @@ -6,12 +6,13 @@ use std::{ use crate::{ ast::{FunctionKind, UnresolvedTraitConstraint}, hir::{ - comptime::{self, Interpreter, Value}, + comptime::{self, Interpreter, InterpreterError, Value}, def_collector::{ dc_crate::{ filter_literal_globals, CompilationError, ImplMap, UnresolvedGlobal, UnresolvedStruct, UnresolvedTypeAlias, }, + dc_mod, errors::DuplicateType, }, resolution::{errors::ResolverError, path_resolver::PathResolver, resolver::LambdaContext}, @@ -22,6 +23,7 @@ use crate::{ expr::HirIdent, function::{FunctionBody, Parameters}, traits::TraitConstraint, + types::{Generics, Kind, ResolvedGeneric}, }, macros_api::{ BlockExpression, Ident, NodeInterner, NoirFunction, NoirStruct, Pattern, @@ -30,10 +32,11 @@ use crate::{ node_interner::{ DefinitionId, DefinitionKind, DependencyId, ExprId, FuncId, GlobalId, TraitId, TypeAliasId, }, - Shared, Type, TypeVariable, + parser::TopLevelStatement, + Shared, Type, TypeBindings, TypeVariable, }; use crate::{ - ast::{TraitBound, UnresolvedGenerics}, + ast::{TraitBound, UnresolvedGeneric, UnresolvedGenerics}, graph::CrateId, hir::{ def_collector::{dc_crate::CollectedItems, errors::DefCollectorErrorKind}, @@ -44,7 +47,6 @@ use crate::{ hir_def::function::{FuncMeta, HirFunction}, macros_api::{Param, Path, UnresolvedType, UnresolvedTypeData}, node_interner::TraitImplId, - Generics, }; use crate::{ hir::{ @@ -90,22 +92,13 @@ pub struct Elaborator<'context> { file: FileId, - in_unconstrained_fn: bool, nested_loops: usize, - /// True if the current module is a contract. - /// This is usually determined by self.path_resolver.module_id(), but it can - /// be overridden for impls. Impls are an odd case since the methods within resolve - /// as if they're in the parent module, but should be placed in a child module. 
- /// Since they should be within a child module, in_contract is manually set to false - /// for these so we can still resolve them in the parent module without them being in a contract. - in_contract: bool, - /// Contains a mapping of the current struct or functions's generics to /// unique type variables if we're resolving a struct. Empty otherwise. /// This is a Vec rather than a map to preserve the order a functions generics /// were declared in. - generics: Vec<(Rc, TypeVariable, Span)>, + generics: Vec, /// When resolving lambda expressions, we need to keep track of the variables /// that are captured. We do this in order to create the hidden environment @@ -181,9 +174,7 @@ impl<'context> Elaborator<'context> { interner: &mut context.def_interner, def_maps: &mut context.def_maps, file: FileId::dummy(), - in_unconstrained_fn: false, nested_loops: 0, - in_contract: false, generics: Vec::new(), lambda_stack: Vec::new(), self_type: None, @@ -239,11 +230,12 @@ impl<'context> Elaborator<'context> { self.define_type_alias(alias_id, alias); } + // Must resolve structs before we resolve globals. + let generated_items = self.collect_struct_definitions(items.types); + self.define_function_metas(&mut items.functions, &mut items.impls, &mut items.trait_impls); - self.collect_traits(items.traits); - // Must resolve structs before we resolve globals. - self.collect_struct_definitions(items.types); + self.collect_traits(items.traits); // Before we resolve any function symbols we must go through our impls and // re-collect the methods within into their proper module. This cannot be @@ -265,6 +257,16 @@ impl<'context> Elaborator<'context> { self.elaborate_global(global); } + // After everything is collected, we can elaborate our generated items. + // It may be better to inline these within `items` entirely since elaborating them + // all here means any globals will not see these. Inlining them completely within `items` + // means we must be more careful about missing any additional items that need to be already + // elaborated. E.g. if a new struct is created, we've already passed the code path to + // elaborate them. + if !generated_items.is_empty() { + self.elaborate_items(generated_items); + } + for functions in items.functions { self.elaborate_functions(functions); } @@ -318,12 +320,6 @@ impl<'context> Elaborator<'context> { let old_function = std::mem::replace(&mut self.current_function, Some(id)); - // Without this, impl methods can accidentally be placed in contracts. See #3254 - let was_in_contract = self.in_contract; - if self.self_type.is_some() { - self.in_contract = false; - } - self.scopes.start_function(); let old_item = std::mem::replace(&mut self.current_item, Some(DependencyId::Function(id))); @@ -331,18 +327,26 @@ impl<'context> Elaborator<'context> { self.trait_bounds = func_meta.trait_constraints.clone(); - if self.interner.function_modifiers(&id).is_unconstrained { - self.in_unconstrained_fn = true; + // Introduce all numeric generics into scope + for generic in &func_meta.all_generics { + if let Kind::Numeric(typ) = &generic.kind { + let definition = DefinitionKind::GenericType(generic.type_var.clone()); + let ident = Ident::new(generic.name.to_string(), generic.span); + let hir_ident = + self.add_variable_decl_inner(ident, false, false, false, definition); + self.interner.push_definition_type(hir_ident.id, *typ.clone()); + } } // The DefinitionIds for each parameter were already created in define_function_meta // so we need to reintroduce the same IDs into scope here. 
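The elaborator change above brings every numeric generic into scope as an ordinary value while its function body is elaborated. A rough Noir sketch of what that looks like from the language side, using the explicit numeric generic syntax introduced by these changes (identifiers illustrative):

    // N is both the array length and a plain u32 value inside the body.
    fn first<let N: u32>(xs: [Field; N]) -> Field {
        assert(N > 0);
        xs[0]
    }
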
for parameter in &func_meta.parameter_idents { let name = self.interner.definition_name(parameter.id).to_owned(); - self.add_existing_variable_to_scope(name, parameter.clone()); + self.add_existing_variable_to_scope(name, parameter.clone(), true); } self.generics = func_meta.all_generics.clone(); + self.declare_numeric_generics(&func_meta.parameters, func_meta.return_type()); self.add_trait_constraints_to_scope(&func_meta); @@ -414,9 +418,7 @@ impl<'context> Elaborator<'context> { self.trait_bounds.clear(); self.type_variables.clear(); - self.in_unconstrained_fn = false; self.interner.update_fn(id, hir_func); - self.in_contract = was_in_contract; self.current_function = old_function; self.current_item = old_item; } @@ -439,7 +441,7 @@ impl<'context> Elaborator<'context> { generics.push(new_generic.clone()); let name = format!("impl {trait_path}"); - let generic_type = Type::NamedGeneric(new_generic, Rc::new(name)); + let generic_type = Type::NamedGeneric(new_generic, Rc::new(name), Kind::Normal); let trait_bound = TraitBound { trait_path, trait_id: None, trait_generics }; if let Some(new_constraint) = self.resolve_trait_bound(&trait_bound, generic_type.clone()) { @@ -456,25 +458,56 @@ impl<'context> Elaborator<'context> { // Map the generic to a fresh type variable let id = self.interner.next_type_variable_id(); let typevar = TypeVariable::unbound(id); - let span = generic.0.span(); + let ident = generic.ident(); + let span = ident.0.span(); + + // Resolve the generic's kind + let kind = self.resolve_generic_kind(generic); // Check for name collisions of this generic - let name = Rc::new(generic.0.contents.clone()); + let name = Rc::new(ident.0.contents.clone()); + + let resolved_generic = + ResolvedGeneric { name: name.clone(), type_var: typevar.clone(), kind, span }; - if let Some((_, _, first_span)) = self.find_generic(&name) { + if let Some(generic) = self.find_generic(&name) { self.push_err(ResolverError::DuplicateDefinition { - name: generic.0.contents.clone(), - first_span: *first_span, + name: ident.0.contents.clone(), + first_span: generic.span, second_span: span, }); } else { - self.generics.push((name, typevar.clone(), span)); + self.generics.push(resolved_generic.clone()); } - typevar + resolved_generic }) } + /// Return the kind of an unresolved generic. + /// If a numeric generic has been specified, resolve the annotated type to make + /// sure only primitive numeric types are being used. 
+ pub(super) fn resolve_generic_kind(&mut self, generic: &UnresolvedGeneric) -> Kind { + if let UnresolvedGeneric::Numeric { ident, typ } = generic { + let typ = typ.clone(); + let typ = if typ.is_type_expression() { + self.resolve_type_inner(typ, &Kind::Numeric(Box::new(Type::default_int_type()))) + } else { + self.resolve_type(typ.clone()) + }; + if !matches!(typ, Type::FieldElement | Type::Integer(_, _)) { + let unsupported_typ_err = ResolverError::UnsupportedNumericGenericType { + ident: ident.clone(), + typ: typ.clone(), + }; + self.push_err(unsupported_typ_err); + } + Kind::Numeric(Box::new(typ)) + } else { + Kind::Normal + } + } + fn push_err(&mut self, error: impl Into) { self.errors.push((error.into(), self.file)); } @@ -523,12 +556,20 @@ impl<'context> Elaborator<'context> { } fn resolve_trait_bound(&mut self, bound: &TraitBound, typ: Type) -> Option { - let trait_generics = vecmap(&bound.trait_generics, |typ| self.resolve_type(typ.clone())); + let the_trait = self.lookup_trait_or_error(bound.trait_path.clone())?; + + let resolved_generics = &the_trait.generics.clone(); + assert_eq!(resolved_generics.len(), bound.trait_generics.len()); + let generics_with_types = resolved_generics.iter().zip(&bound.trait_generics); + let trait_generics = vecmap(generics_with_types, |(generic, typ)| { + self.resolve_type_inner(typ.clone(), &generic.kind) + }); - let span = bound.trait_path.span(); let the_trait = self.lookup_trait_or_error(bound.trait_path.clone())?; let trait_id = the_trait.id; + let span = bound.trait_path.span(); + let expected_generics = the_trait.generics.len(); let actual_generics = trait_generics.len(); @@ -557,11 +598,13 @@ impl<'context> Elaborator<'context> { ) { self.current_function = Some(func_id); - // Without this, impl methods can accidentally be placed in contracts. See #3254 - let was_in_contract = self.in_contract; - if self.self_type.is_some() { - self.in_contract = false; - } + let in_contract = if self.self_type.is_some() { + // Without this, impl methods can accidentally be placed in contracts. 
+ // See: https://github.com/noir-lang/noir/issues/3254 + false + } else { + self.in_contract() + }; self.scopes.start_function(); self.current_item = Some(DependencyId::Function(func_id)); @@ -570,12 +613,13 @@ impl<'context> Elaborator<'context> { let id = self.interner.function_definition_id(func_id); let name_ident = HirIdent::non_trait_method(id, location); - let is_entry_point = self.is_entry_point_function(func); + let is_entry_point = self.is_entry_point_function(func, in_contract); self.run_lint(|_| lints::inlining_attributes(func).map(Into::into)); self.run_lint(|_| lints::missing_pub(func, is_entry_point).map(Into::into)); self.run_lint(|elaborator| { - lints::unnecessary_pub_return(func, elaborator.pub_allowed(func)).map(Into::into) + lints::unnecessary_pub_return(func, elaborator.pub_allowed(func, in_contract)) + .map(Into::into) }); self.run_lint(|_| lints::oracle_not_marked_unconstrained(func).map(Into::into)); self.run_lint(|elaborator| { @@ -591,12 +635,12 @@ impl<'context> Elaborator<'context> { let has_no_predicates_attribute = func.attributes().is_no_predicates(); let should_fold = func.attributes().is_foldable(); let has_inline_attribute = has_no_predicates_attribute || should_fold; - let is_pub_allowed = self.pub_allowed(func); + let is_pub_allowed = self.pub_allowed(func, in_contract); self.add_generics(&func.def.generics); let mut trait_constraints = self.resolve_trait_constraints(&func.def.where_clause); - let mut generics = vecmap(&self.generics, |(_, typevar, _)| typevar.clone()); + let mut generics = vecmap(&self.generics, |generic| generic.type_var.clone()); let mut parameters = Vec::new(); let mut parameter_types = Vec::new(); let mut parameter_idents = Vec::new(); @@ -612,7 +656,7 @@ impl<'context> Elaborator<'context> { UnresolvedTypeData::TraitAsType(path, args) => { self.desugar_impl_trait_arg(path, args, &mut generics, &mut trait_constraints) } - _ => self.resolve_type_inner(typ), + _ => self.resolve_type_inner(typ, &Kind::Normal), }; self.check_if_type_is_valid_for_program_input( @@ -621,6 +665,7 @@ impl<'context> Elaborator<'context> { has_inline_attribute, type_span, ); + let pattern = self.elaborate_pattern_and_store_ids( pattern, typ.clone(), @@ -645,8 +690,8 @@ impl<'context> Elaborator<'context> { let direct_generics = func.def.generics.iter(); let direct_generics = direct_generics - .filter_map(|generic| self.find_generic(&generic.0.contents)) - .map(|(name, typevar, _span)| (name.clone(), typevar.clone())) + .filter_map(|generic| self.find_generic(&generic.ident().0.contents)) + .map(|ResolvedGeneric { name, type_var, .. 
}| (name.clone(), type_var.clone())) .collect(); let statements = std::mem::take(&mut func.def.body.statements); @@ -669,11 +714,11 @@ impl<'context> Elaborator<'context> { is_entry_point, is_trait_function, has_inline_attribute, + source_crate: self.crate_id, function_body: FunctionBody::Unresolved(func.kind, body, func.def.span), }; self.interner.push_fn_meta(meta, func_id); - self.in_contract = was_in_contract; self.current_function = None; self.scopes.end_function(); self.current_item = None; @@ -698,18 +743,30 @@ impl<'context> Elaborator<'context> { /// True if the `pub` keyword is allowed on parameters in this function /// `pub` on function parameters is only allowed for entry point functions - fn pub_allowed(&self, func: &NoirFunction) -> bool { - self.is_entry_point_function(func) || func.attributes().is_foldable() + fn pub_allowed(&self, func: &NoirFunction, in_contract: bool) -> bool { + self.is_entry_point_function(func, in_contract) || func.attributes().is_foldable() + } + + /// Returns `true` if the current module is a contract. + /// + /// This is usually determined by `self.module_id()`, but it can + /// be overridden for impls. Impls are an odd case since the methods within resolve + /// as if they're in the parent module, but should be placed in a child module. + /// Since they should be within a child module, they should be elaborated as if + /// `in_contract` is `false` so we can still resolve them in the parent module without them being in a contract. + fn in_contract(&self) -> bool { + self.module_id().module(self.def_maps).is_contract } - fn is_entry_point_function(&self, func: &NoirFunction) -> bool { - if self.in_contract { + fn is_entry_point_function(&self, func: &NoirFunction, in_contract: bool) -> bool { + if in_contract { func.attributes().is_contract_entry_point() } else { func.name() == MAIN_FUNCTION } } + // TODO(https://github.com/noir-lang/noir/issues/5156): Remove implicit numeric generics fn declare_numeric_generics(&mut self, params: &Parameters, return_type: &Type) { if self.generics.is_empty() { return; @@ -722,12 +779,27 @@ impl<'context> Elaborator<'context> { // We can fail to find the generic in self.generics if it is an implicit one created // by the compiler. This can happen when, e.g. eliding array lengths using the slice // syntax [T]. - if let Some((name, _, span)) = - self.generics.iter().find(|(name, _, _)| name.as_ref() == &name_to_find) + if let Some(ResolvedGeneric { name, span, kind, .. }) = + self.generics.iter_mut().find(|generic| generic.name.as_ref() == &name_to_find) { + let scope = self.scopes.get_mut_scope(); + let value = scope.find(&name_to_find); + if value.is_some() { + // With the addition of explicit numeric generics we do not want to introduce numeric generics in this manner + // However, this is going to be a big breaking change so for now we simply issue a warning while users have time + // to transition to the new syntax + // e.g. 
this code would break with a duplicate definition error: + // ``` + // fn foo(arr: [Field; N]) { } + // ``` + continue; + } + *kind = Kind::Numeric(Box::new(Type::default_int_type())); let ident = Ident::new(name.to_string(), *span); let definition = DefinitionKind::GenericType(type_variable); - self.add_variable_decl_inner(ident, false, false, false, definition); + self.add_variable_decl_inner(ident.clone(), false, false, false, definition); + + self.push_err(ResolverError::UseExplicitNumericGeneric { ident }); } } } @@ -753,7 +825,7 @@ impl<'context> Elaborator<'context> { } } - fn elaborate_impls(&mut self, impls: Vec<(Vec, Span, UnresolvedFunctions)>) { + fn elaborate_impls(&mut self, impls: Vec<(UnresolvedGenerics, Span, UnresolvedFunctions)>) { for (_, _, functions) in impls { self.file = functions.file_id; self.recover_generics(|this| this.elaborate_functions(functions)); @@ -783,7 +855,7 @@ impl<'context> Elaborator<'context> { fn collect_impls( &mut self, module: LocalModuleId, - impls: &mut [(Vec, Span, UnresolvedFunctions)], + impls: &mut [(UnresolvedGenerics, Span, UnresolvedFunctions)], ) { self.local_module = module; @@ -800,7 +872,6 @@ impl<'context> Elaborator<'context> { self.local_module = trait_impl.module_id; self.file = trait_impl.file_id; self.current_trait_impl = trait_impl.impl_id; - trait_impl.trait_id = self.resolve_trait_by_path(trait_impl.trait_path.clone()); let self_type = trait_impl.methods.self_type.clone(); let self_type = @@ -844,7 +915,7 @@ impl<'context> Elaborator<'context> { methods, }); - let generics = vecmap(&self.generics, |(_, type_variable, _)| type_variable.clone()); + let generics = vecmap(&self.generics, |generic| generic.type_var.clone()); if let Err((prev_span, prev_file)) = self.interner.add_trait_implementation( self_type.clone(), @@ -868,6 +939,7 @@ impl<'context> Elaborator<'context> { } self.generics.clear(); + self.current_trait_impl = None; self.self_type = None; } @@ -1087,11 +1159,18 @@ impl<'context> Elaborator<'context> { self.generics.clear(); } - fn collect_struct_definitions(&mut self, structs: BTreeMap) { + fn collect_struct_definitions( + &mut self, + structs: BTreeMap, + ) -> CollectedItems { // This is necessary to avoid cloning the entire struct map // when adding checks after each struct field is resolved. let struct_ids = structs.keys().copied().collect::>(); + // This will contain any additional top-level items that are generated at compile-time + // via macros. This often includes derived trait impls. + let mut generated_items = CollectedItems::default(); + // Resolve each field in each struct. // Each struct should already be present in the NodeInterner after def collection. for (type_id, mut typ) in structs { @@ -1100,14 +1179,35 @@ impl<'context> Elaborator<'context> { let attributes = std::mem::take(&mut typ.struct_def.attributes); let span = typ.struct_def.span; - let (generics, fields) = self.resolve_struct_fields(typ.struct_def, type_id); + let fields = self.resolve_struct_fields(typ.struct_def, type_id); self.interner.update_struct(type_id, |struct_def| { struct_def.set_fields(fields); - struct_def.generics = generics; + + // TODO(https://github.com/noir-lang/noir/issues/5156): Remove this with implicit numeric generics + // This is only necessary for resolving named types when implicit numeric generics are used. 
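The UseExplicitNumericGeneric warning issued above steers code that still declares array-length generics implicitly toward the new explicit form. A rough before/after Noir sketch (identifiers illustrative):

    // Implicit numeric generic: still accepted for now, but reported with a warning.
    fn sum_implicit<N>(xs: [Field; N]) -> Field {
        let mut total = 0;
        for i in 0..xs.len() {
            total += xs[i];
        }
        total
    }

    // Explicit form that the warning suggests.
    fn sum_explicit<let N: u32>(xs: [Field; N]) -> Field {
        let mut total = 0;
        for i in 0..N {
            total += xs[i];
        }
        total
    }
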
+ let mut found_names = Vec::new(); + struct_def.find_numeric_generics_in_fields(&mut found_names); + for generic in struct_def.generics.iter_mut() { + for found_generic in found_names.iter() { + if found_generic == generic.name.as_str() { + if matches!(generic.kind, Kind::Normal) { + let ident = Ident::new(generic.name.to_string(), generic.span); + self.errors.push(( + CompilationError::ResolverError( + ResolverError::UseExplicitNumericGeneric { ident }, + ), + self.file, + )); + generic.kind = Kind::Numeric(Box::new(Type::default_int_type())); + } + break; + } + } + } }); - self.run_comptime_attributes_on_struct(attributes, type_id, span); + self.run_comptime_attributes_on_struct(attributes, type_id, span, &mut generated_items); } // Check whether the struct fields have nested slices @@ -1129,6 +1229,8 @@ impl<'context> Elaborator<'context> { } } } + + generated_items } fn run_comptime_attributes_on_struct( @@ -1136,48 +1238,73 @@ impl<'context> Elaborator<'context> { attributes: Vec, struct_id: StructId, span: Span, + generated_items: &mut CollectedItems, ) { for attribute in attributes { if let SecondaryAttribute::Custom(name) = attribute { - match self.lookup_global(Path::from_single(name, span)) { - Ok(id) => { - let definition = self.interner.definition(id); - if let DefinitionKind::Function(function) = &definition.kind { - let function = *function; - let mut interpreter = - Interpreter::new(self.interner, &mut self.comptime_scopes); - - let location = Location::new(span, self.file); - let arguments = vec![(Value::TypeDefinition(struct_id), location)]; - let result = interpreter.call_function(function, arguments, location); - if let Err(error) = result { - self.errors.push(error.into_compilation_error_pair()); - } - } else { - self.push_err(ResolverError::NonFunctionInAnnotation { span }); - } - } - Err(_) => self.push_err(ResolverError::UnknownAnnotation { span }), + if let Err(error) = + self.run_comptime_attribute_on_struct(name, struct_id, span, generated_items) + { + self.errors.push(error); } } } } + fn run_comptime_attribute_on_struct( + &mut self, + attribute: String, + struct_id: StructId, + span: Span, + generated_items: &mut CollectedItems, + ) -> Result<(), (CompilationError, FileId)> { + let id = self + .lookup_global(Path::from_single(attribute, span)) + .map_err(|_| (ResolverError::UnknownAnnotation { span }.into(), self.file))?; + + let definition = self.interner.definition(id); + let DefinitionKind::Function(function) = definition.kind else { + return Err((ResolverError::NonFunctionInAnnotation { span }.into(), self.file)); + }; + let mut interpreter = + Interpreter::new(self.interner, &mut self.comptime_scopes, self.crate_id); + + let location = Location::new(span, self.file); + let arguments = vec![(Value::TypeDefinition(struct_id), location)]; + + let value = interpreter + .call_function(function, arguments, TypeBindings::new(), location) + .map_err(|error| error.into_compilation_error_pair())?; + + if value != Value::Unit { + let item = value + .into_top_level_item(location) + .map_err(|error| error.into_compilation_error_pair())?; + + self.add_item(item, generated_items, location); + } + + Ok(()) + } + pub fn resolve_struct_fields( &mut self, unresolved: NoirStruct, struct_id: StructId, - ) -> (Generics, Vec<(Ident, Type)>) { + ) -> Vec<(Ident, Type)> { self.recover_generics(|this| { - let generics = this.add_generics(&unresolved.generics); - this.current_item = Some(DependencyId::Struct(struct_id)); this.resolving_ids.insert(struct_id); + + let struct_def = 
this.interner.get_struct(struct_id); + this.add_existing_generics(&unresolved.generics, &struct_def.borrow().generics); + let fields = vecmap(unresolved.fields, |(ident, typ)| (ident, this.resolve_type(typ))); + this.resolving_ids.remove(&struct_id); - (generics, fields) + fields }) } @@ -1190,7 +1317,7 @@ impl<'context> Elaborator<'context> { self.current_item = Some(DependencyId::Global(global_id)); let let_stmt = global.stmt_def; - if !self.in_contract + if !self.in_contract() && let_stmt.attributes.iter().any(|attr| matches!(attr, SecondaryAttribute::Abi(_))) { let span = let_stmt.pattern.span(); @@ -1231,7 +1358,8 @@ impl<'context> Elaborator<'context> { let definition_id = global.definition_id; let location = global.location; - let mut interpreter = Interpreter::new(self.interner, &mut self.comptime_scopes); + let mut interpreter = + Interpreter::new(self.interner, &mut self.comptime_scopes, self.crate_id); if let Err(error) = interpreter.evaluate_let(let_statement) { self.errors.push(error.into_compilation_error_pair()); @@ -1273,12 +1401,29 @@ impl<'context> Elaborator<'context> { self.file = trait_impl.file_id; self.local_module = trait_impl.module_id; + trait_impl.trait_id = self.resolve_trait_by_path(trait_impl.trait_path.clone()); let unresolved_type = &trait_impl.object_type; + self.add_generics(&trait_impl.generics); trait_impl.resolved_generics = self.generics.clone(); - let trait_generics = - vecmap(&trait_impl.trait_generics, |generic| self.resolve_type(generic.clone())); + // Fetch trait constraints here + let trait_generics = if let Some(trait_id) = trait_impl.trait_id { + let trait_def = self.interner.get_trait(trait_id); + let resolved_generics = trait_def.generics.clone(); + assert_eq!(resolved_generics.len(), trait_impl.trait_generics.len()); + trait_impl + .trait_generics + .iter() + .enumerate() + .map(|(i, generic)| { + self.resolve_type_inner(generic.clone(), &resolved_generics[i].kind) + }) + .collect() + } else { + // We still resolve as to continue type checking + vecmap(&trait_impl.trait_generics, |generic| self.resolve_type(generic.clone())) + }; trait_impl.resolved_trait_generics = trait_generics; @@ -1302,12 +1447,9 @@ impl<'context> Elaborator<'context> { for (local_module, id, func) in &mut function_set.functions { self.local_module = *local_module; - let was_in_contract = self.in_contract; - self.in_contract = self.module_id().module(self.def_maps).is_contract; self.recover_generics(|this| { this.define_function_meta(func, *id, false); }); - self.in_contract = was_in_contract; } } @@ -1358,4 +1500,81 @@ impl<'context> Elaborator<'context> { items.functions = function_sets; (comptime, items) } + + fn add_item( + &mut self, + item: TopLevelStatement, + generated_items: &mut CollectedItems, + location: Location, + ) { + match item { + TopLevelStatement::Function(function) => { + let id = self.interner.push_empty_fn(); + let module = self.module_id(); + self.interner.push_function(id, &function.def, module, location); + let functions = vec![(self.local_module, id, function)]; + generated_items.functions.push(UnresolvedFunctions { + file_id: self.file, + functions, + trait_id: None, + self_type: None, + }); + } + TopLevelStatement::TraitImpl(mut trait_impl) => { + let methods = dc_mod::collect_trait_impl_functions( + self.interner, + &mut trait_impl, + self.crate_id, + self.file, + self.local_module, + ); + + generated_items.trait_impls.push(UnresolvedTraitImpl { + file_id: self.file, + module_id: self.local_module, + trait_generics: 
trait_impl.trait_generics, + trait_path: trait_impl.trait_name, + object_type: trait_impl.object_type, + methods, + generics: trait_impl.impl_generics, + where_clause: trait_impl.where_clause, + + // These last fields are filled in later + trait_id: None, + impl_id: None, + resolved_object_type: None, + resolved_generics: Vec::new(), + resolved_trait_generics: Vec::new(), + }); + } + TopLevelStatement::Global(global) => { + let (global, error) = dc_mod::collect_global( + self.interner, + self.def_maps.get_mut(&self.crate_id).unwrap(), + global, + self.file, + self.local_module, + ); + + generated_items.globals.push(global); + if let Some(error) = error { + self.errors.push(error); + } + } + // Assume that an error has already been issued + TopLevelStatement::Error => (), + + TopLevelStatement::Module(_) + | TopLevelStatement::Import(_) + | TopLevelStatement::Struct(_) + | TopLevelStatement::Trait(_) + | TopLevelStatement::Impl(_) + | TopLevelStatement::TypeAlias(_) + | TopLevelStatement::SubModule(_) => { + let item = item.to_string(); + let error = InterpreterError::UnsupportedTopLevelItemUnquote { item, location }; + self.errors.push(error.into_compilation_error_pair()); + } + } + } } diff --git a/noir/noir-repo/compiler/noirc_frontend/src/elaborator/patterns.rs b/noir/noir-repo/compiler/noirc_frontend/src/elaborator/patterns.rs index 4d07009e064..4f04f5c523c 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/elaborator/patterns.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/elaborator/patterns.rs @@ -301,9 +301,14 @@ impl<'context> Elaborator<'context> { ident } - pub fn add_existing_variable_to_scope(&mut self, name: String, ident: HirIdent) { + pub fn add_existing_variable_to_scope( + &mut self, + name: String, + ident: HirIdent, + warn_if_unused: bool, + ) { let second_span = ident.location.span; - let resolver_meta = ResolverMeta { num_times_used: 0, ident, warn_if_unused: true }; + let resolver_meta = ResolverMeta { num_times_used: 0, ident, warn_if_unused }; let old_value = self.scopes.get_mut_scope().add_key_value(name.clone(), resolver_meta); @@ -389,6 +394,7 @@ impl<'context> Elaborator<'context> { ) -> (ExprId, Type) { let span = variable.span; let expr = self.resolve_variable(variable); + let id = self.interner.push_expr(HirExpression::Ident(expr.clone(), generics.clone())); self.interner.push_expr_location(id, span, self.file); let typ = self.type_check_variable(expr, id, generics); @@ -464,8 +470,8 @@ impl<'context> Elaborator<'context> { for (param, arg) in the_trait.generics.iter().zip(&constraint.trait_generics) { // Avoid binding t = t - if !arg.occurs(param.id()) { - bindings.insert(param.id(), (param.clone(), arg.clone())); + if !arg.occurs(param.type_var.id()) { + bindings.insert(param.type_var.id(), (param.type_var.clone(), arg.clone())); } } diff --git a/noir/noir-repo/compiler/noirc_frontend/src/elaborator/statements.rs b/noir/noir-repo/compiler/noirc_frontend/src/elaborator/statements.rs index dd3e2778726..0d67c9ed3e3 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/elaborator/statements.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/elaborator/statements.rs @@ -206,7 +206,10 @@ impl<'context> Elaborator<'context> { } fn elaborate_jump(&mut self, is_break: bool, span: noirc_errors::Span) -> (HirStatement, Type) { - if !self.in_unconstrained_fn { + let in_constrained_function = self + .current_function + .map_or(true, |func_id| !self.interner.function_modifiers(&func_id).is_unconstrained); + if in_constrained_function { 
self.push_err(ResolverError::JumpInConstrainedFn { is_break, span }); } if self.nested_loops == 0 { @@ -432,7 +435,8 @@ impl<'context> Elaborator<'context> { fn elaborate_comptime_statement(&mut self, statement: Statement) -> (HirStatement, Type) { let span = statement.span; let (hir_statement, _typ) = self.elaborate_statement(statement); - let mut interpreter = Interpreter::new(self.interner, &mut self.comptime_scopes); + let mut interpreter = + Interpreter::new(self.interner, &mut self.comptime_scopes, self.crate_id); let value = interpreter.evaluate_statement(hir_statement); let (expr, typ) = self.inline_comptime_value(value, span); (HirStatement::Expression(expr), typ) diff --git a/noir/noir-repo/compiler/noirc_frontend/src/elaborator/traits.rs b/noir/noir-repo/compiler/noirc_frontend/src/elaborator/traits.rs index 3e04dbc784a..77ac8e476f8 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/elaborator/traits.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/elaborator/traits.rs @@ -1,10 +1,12 @@ -use std::collections::BTreeMap; +use std::{collections::BTreeMap, rc::Rc}; use iter_extended::vecmap; use noirc_errors::Location; use crate::{ - ast::{FunctionKind, TraitItem, UnresolvedGenerics, UnresolvedTraitConstraint}, + ast::{ + FunctionKind, TraitItem, UnresolvedGeneric, UnresolvedGenerics, UnresolvedTraitConstraint, + }, hir::def_collector::dc_crate::UnresolvedTrait, hir_def::traits::{TraitConstant, TraitFunction, TraitType}, macros_api::{ @@ -13,7 +15,7 @@ use crate::{ }, node_interner::{FuncId, TraitId}, token::Attributes, - Type, TypeVariableKind, + Kind, ResolvedGeneric, Type, TypeVariableKind, }; use super::Elaborator; @@ -22,7 +24,11 @@ impl<'context> Elaborator<'context> { pub fn collect_traits(&mut self, traits: BTreeMap) { for (trait_id, unresolved_trait) in traits { self.recover_generics(|this| { - this.add_generics(&unresolved_trait.trait_def.generics); + let resolved_generics = this.interner.get_trait(trait_id).generics.clone(); + this.add_existing_generics( + &unresolved_trait.trait_def.generics, + &resolved_generics, + ); // Resolve order // 1. 
Trait Types ( Trait constants can have a trait type, therefore types before constants) @@ -34,7 +40,6 @@ impl<'context> Elaborator<'context> { this.interner.update_trait(trait_id, |trait_def| { trait_def.set_methods(methods); - trait_def.generics = vecmap(&this.generics, |(_, generic, _)| generic.clone()); }); }); @@ -87,10 +92,20 @@ impl<'context> Elaborator<'context> { Type::TypeVariable(self_typevar.clone(), TypeVariableKind::Normal); let name_span = the_trait.name.span(); - this.add_existing_generic("Self", name_span, self_typevar); + this.add_existing_generic( + &UnresolvedGeneric::Variable(Ident::from("Self")), + name_span, + &ResolvedGeneric { + name: Rc::new("Self".to_owned()), + type_var: self_typevar, + span: name_span, + kind: Kind::Normal, + }, + ); this.self_type = Some(self_type.clone()); let func_id = unresolved_trait.method_ids[&name.0.contents]; + this.resolve_trait_function( name, generics, @@ -105,7 +120,7 @@ impl<'context> Elaborator<'context> { let arguments = vecmap(&func_meta.parameters.0, |(_, typ, _)| typ.clone()); let return_type = func_meta.return_type().clone(); - let generics = vecmap(&this.generics, |(_, type_var, _)| type_var.clone()); + let generics = vecmap(&this.generics, |generic| generic.type_var.clone()); let default_impl_list: Vec<_> = unresolved_trait .fns_with_default_impl @@ -147,6 +162,7 @@ impl<'context> Elaborator<'context> { func_id: FuncId, ) { let old_generic_count = self.generics.len(); + self.scopes.start_function(); let kind = FunctionKind::Normal; diff --git a/noir/noir-repo/compiler/noirc_frontend/src/elaborator/types.rs b/noir/noir-repo/compiler/noirc_frontend/src/elaborator/types.rs index fcb7ac94c26..63cab40f9d3 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/elaborator/types.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/elaborator/types.rs @@ -5,7 +5,10 @@ use iter_extended::vecmap; use noirc_errors::{Location, Span}; use crate::{ - ast::{BinaryOpKind, IntegerBitSize, UnresolvedGenerics, UnresolvedTypeExpression}, + ast::{ + BinaryOpKind, IntegerBitSize, UnresolvedGeneric, UnresolvedGenerics, + UnresolvedTypeExpression, + }, hir::{ comptime::{Interpreter, Value}, def_map::ModuleDefId, @@ -28,48 +31,75 @@ use crate::{ UnaryOp, UnresolvedType, UnresolvedTypeData, }, node_interner::{DefinitionKind, ExprId, GlobalId, TraitId, TraitImplKind, TraitMethodId}, - Generics, Type, TypeBinding, TypeVariable, TypeVariableKind, + Generics, Kind, ResolvedGeneric, Type, TypeBinding, TypeVariable, TypeVariableKind, }; use super::{lints, Elaborator}; impl<'context> Elaborator<'context> { - /// Translates an UnresolvedType to a Type + /// Translates an UnresolvedType to a Type with a `TypeKind::Normal` pub(super) fn resolve_type(&mut self, typ: UnresolvedType) -> Type { let span = typ.span; - let resolved_type = self.resolve_type_inner(typ); + let resolved_type = self.resolve_type_inner(typ, &Kind::Normal); if resolved_type.is_nested_slice() { - self.push_err(ResolverError::NestedSlices { span: span.unwrap() }); + self.push_err(ResolverError::NestedSlices { + span: span.expect("Type should have span"), + }); } resolved_type } /// Translates an UnresolvedType into a Type and appends any /// freshly created TypeVariables created to new_variables. 
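+    ///
+    /// `kind` is the kind expected of the resolved type: `Kind::Normal` for ordinary type
+    /// positions (see `resolve_type` above), or a numeric kind when the type appears in a
+    /// numeric-generic position (e.g. the generics of a struct, type alias, or trait are
+    /// resolved against each declared generic's kind). A named generic whose resolved kind is
+    /// numeric but which is used where `Kind::Normal` is expected is reported as
+    /// `NumericGenericUsedForType` at the end of this function.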
- pub fn resolve_type_inner(&mut self, typ: UnresolvedType) -> Type { + pub fn resolve_type_inner(&mut self, typ: UnresolvedType, kind: &Kind) -> Type { use crate::ast::UnresolvedTypeData::*; + let span = typ.span; + let resolved_type = match typ.typ { FieldElement => Type::FieldElement, Array(size, elem) => { - let elem = Box::new(self.resolve_type_inner(*elem)); - let size = self.convert_expression_type(size); + let elem = Box::new(self.resolve_type_inner(*elem, kind)); + let mut size = self.convert_expression_type(size); + // TODO(https://github.com/noir-lang/noir/issues/5156): Remove this once we only have explicit numeric generics + if let Type::NamedGeneric(type_var, name, _) = size { + size = Type::NamedGeneric( + type_var, + name, + Kind::Numeric(Box::new(Type::default_int_type())), + ); + } Type::Array(Box::new(size), elem) } Slice(elem) => { - let elem = Box::new(self.resolve_type_inner(*elem)); + let elem = Box::new(self.resolve_type_inner(*elem, kind)); Type::Slice(elem) } Expression(expr) => self.convert_expression_type(expr), Integer(sign, bits) => Type::Integer(sign, bits), Bool => Type::Bool, String(size) => { - let resolved_size = self.convert_expression_type(size); + let mut resolved_size = self.convert_expression_type(size); + // TODO(https://github.com/noir-lang/noir/issues/5156): Remove this once we only have explicit numeric generics + if let Type::NamedGeneric(type_var, name, _) = resolved_size { + resolved_size = Type::NamedGeneric( + type_var, + name, + Kind::Numeric(Box::new(Type::default_int_type())), + ); + } Type::String(Box::new(resolved_size)) } FormatString(size, fields) => { - let resolved_size = self.convert_expression_type(size); - let fields = self.resolve_type_inner(*fields); + let mut resolved_size = self.convert_expression_type(size); + if let Type::NamedGeneric(type_var, name, _) = resolved_size { + resolved_size = Type::NamedGeneric( + type_var, + name, + Kind::Numeric(Box::new(Type::default_int_type())), + ); + } + let fields = self.resolve_type_inner(*fields, kind); Type::FmtString(Box::new(resolved_size), Box::new(fields)) } Quoted(quoted) => Type::Quoted(quoted), @@ -79,10 +109,12 @@ impl<'context> Elaborator<'context> { Named(path, args, _) => self.resolve_named_type(path, args), TraitAsType(path, args) => self.resolve_trait_as_type(path, args), - Tuple(fields) => Type::Tuple(vecmap(fields, |field| self.resolve_type_inner(field))), + Tuple(fields) => { + Type::Tuple(vecmap(fields, |field| self.resolve_type_inner(field, kind))) + } Function(args, ret, env) => { - let args = vecmap(args, |arg| self.resolve_type_inner(arg)); - let ret = Box::new(self.resolve_type_inner(*ret)); + let args = vecmap(args, |arg| self.resolve_type_inner(arg, kind)); + let ret = Box::new(self.resolve_type_inner(*ret, kind)); // expect() here is valid, because the only places we don't have a span are omitted types // e.g. 
a function without return type implicitly has a spanless UnresolvedType::Unit return type @@ -90,10 +122,10 @@ impl<'context> Elaborator<'context> { let env_span = env.span.expect("Unexpected missing span for closure environment type"); - let env = Box::new(self.resolve_type_inner(*env)); + let env = Box::new(self.resolve_type_inner(*env, kind)); match *env { - Type::Unit | Type::Tuple(_) | Type::NamedGeneric(_, _) => { + Type::Unit | Type::Tuple(_) | Type::NamedGeneric(_, _, _) => { Type::Function(args, ret, env) } _ => { @@ -106,9 +138,10 @@ impl<'context> Elaborator<'context> { } } MutableReference(element) => { - Type::MutableReference(Box::new(self.resolve_type_inner(*element))) + Type::MutableReference(Box::new(self.resolve_type_inner(*element, kind))) } - Parenthesized(typ) => self.resolve_type_inner(*typ), + Parenthesized(typ) => self.resolve_type_inner(*typ, kind), + Resolved(id) => self.interner.get_quoted_type(id).clone(), }; if let Type::Struct(_, _) = resolved_type { @@ -120,11 +153,36 @@ impl<'context> Elaborator<'context> { ); } } + + // Check that any types with a type kind match the expected type kind supplied to this function + // TODO(https://github.com/noir-lang/noir/issues/5156): make this named generic check more general with `*resolved_kind != kind` + // as implicit numeric generics still existing makes this check more challenging to enforce + // An example of a more general check that we should switch to: + // if resolved_type.kind() != kind.clone() { + // let expected_typ_err = CompilationError::TypeError(TypeCheckError::TypeKindMismatch { + // expected_kind: kind.to_string(), + // expr_kind: resolved_type.kind().to_string(), + // expr_span: span.expect("Type should have span"), + // }); + // self.errors.push((expected_typ_err, self.file)); + // return Type::Error; + // } + if let Type::NamedGeneric(_, name, resolved_kind) = &resolved_type { + if matches!(resolved_kind, Kind::Numeric { .. 
}) && matches!(kind, Kind::Normal) { + let expected_typ_err = ResolverError::NumericGenericUsedForType { + name: name.to_string(), + span: span.expect("Type should have span"), + }; + self.push_err(expected_typ_err); + return Type::Error; + } + } + resolved_type } - pub fn find_generic(&self, target_name: &str) -> Option<&(Rc, TypeVariable, Span)> { - self.generics.iter().find(|(name, _, _)| name.as_ref() == target_name) + pub fn find_generic(&self, target_name: &str) -> Option<&ResolvedGeneric> { + self.generics.iter().find(|generic| generic.name.as_ref() == target_name) } fn resolve_named_type(&mut self, path: Path, args: Vec) -> Type { @@ -152,7 +210,6 @@ impl<'context> Elaborator<'context> { } let span = path.span(); - let mut args = vecmap(args, |arg| self.resolve_type_inner(arg)); if let Some(type_alias) = self.lookup_type_alias(path.clone()) { let type_alias = type_alias.borrow(); @@ -160,6 +217,10 @@ impl<'context> Elaborator<'context> { let type_alias_string = type_alias.to_string(); let id = type_alias.id; + let mut args = vecmap(type_alias.generics.iter().zip(args), |(generic, arg)| { + self.resolve_type_inner(arg, &generic.kind) + }); + self.verify_generics_count(expected_generic_count, &mut args, span, || { type_alias_string }); @@ -192,7 +253,7 @@ impl<'context> Elaborator<'context> { } let expected_generic_count = struct_type.borrow().generics.len(); - if !self.in_contract + if !self.in_contract() && self .interner .struct_attributes(&struct_type.borrow().id) @@ -203,6 +264,12 @@ impl<'context> Elaborator<'context> { span: struct_type.borrow().name.span(), }); } + + let mut args = + vecmap(struct_type.borrow().generics.iter().zip(args), |(generic, arg)| { + self.resolve_type_inner(arg, &generic.kind) + }); + self.verify_generics_count(expected_generic_count, &mut args, span, || { struct_type.borrow().to_string() }); @@ -219,10 +286,19 @@ impl<'context> Elaborator<'context> { } fn resolve_trait_as_type(&mut self, path: Path, args: Vec) -> Type { - let args = vecmap(args, |arg| self.resolve_type_inner(arg)); - - if let Some(t) = self.lookup_trait_or_error(path) { - Type::TraitAsType(t.id, Rc::new(t.name.to_string()), args) + // Fetch information needed from the trait as the closure for resolving all the `args` + // requires exclusive access to `self` + let trait_as_type_info = self + .lookup_trait_or_error(path) + .map(|t| (t.id, Rc::new(t.name.to_string()), t.generics.clone())); + + if let Some((id, name, resolved_generics)) = trait_as_type_info { + assert_eq!(resolved_generics.len(), args.len()); + let generics_with_types = resolved_generics.iter().zip(args); + let args = vecmap(generics_with_types, |(generic, typ)| { + self.resolve_type_inner(typ, &generic.kind) + }); + Type::TraitAsType(id, Rc::new(name.to_string()), args) } else { Type::Error } @@ -251,8 +327,9 @@ impl<'context> Elaborator<'context> { pub fn lookup_generic_or_global_type(&mut self, path: &Path) -> Option { if path.segments.len() == 1 { let name = &path.last_segment().0.contents; - if let Some((name, var, _)) = self.find_generic(name) { - return Some(Type::NamedGeneric(var.clone(), name.clone())); + if let Some(generic) = self.find_generic(name) { + let generic = generic.clone(); + return Some(Type::NamedGeneric(generic.type_var, generic.name, generic.kind)); } } @@ -318,9 +395,12 @@ impl<'context> Elaborator<'context> { let constraint = TraitConstraint { typ: self.self_type.clone()?, - trait_generics: Type::from_generics(&the_trait.generics), + trait_generics: 
Type::from_generics(&vecmap(&the_trait.generics, |generic| { + generic.type_var.clone() + })), trait_id, }; + return Some((method, constraint, false)); } } @@ -349,7 +429,9 @@ impl<'context> Elaborator<'context> { the_trait.self_type_typevar.clone(), TypeVariableKind::Normal, ), - trait_generics: Type::from_generics(&the_trait.generics), + trait_generics: Type::from_generics(&vecmap(&the_trait.generics, |generic| { + generic.type_var.clone() + })), trait_id, }; return Some((method, constraint, false)); @@ -371,7 +453,7 @@ impl<'context> Elaborator<'context> { } for constraint in self.trait_bounds.clone() { - if let Type::NamedGeneric(_, name) = &constraint.typ { + if let Type::NamedGeneric(_, name, _) = &constraint.typ { // if `path` is `T::method_name`, we're looking for constraint of the form `T: SomeTrait` if path.segments[0].0.contents != name.as_str() { continue; @@ -1077,7 +1159,7 @@ impl<'context> Elaborator<'context> { }); None } - Type::NamedGeneric(_, _) => { + Type::NamedGeneric(_, _, _) => { let func_meta = self.interner.function_meta( &self.current_function.expect("unexpected method outside a function"), ); @@ -1353,26 +1435,34 @@ impl<'context> Elaborator<'context> { } } - pub fn add_existing_generics(&mut self, names: &UnresolvedGenerics, generics: &Generics) { - assert_eq!(names.len(), generics.len()); + pub fn add_existing_generics( + &mut self, + unresolved_generics: &UnresolvedGenerics, + generics: &Generics, + ) { + assert_eq!(unresolved_generics.len(), generics.len()); - for (name, typevar) in names.iter().zip(generics) { - self.add_existing_generic(&name.0.contents, name.0.span(), typevar.clone()); + for (unresolved_generic, generic) in unresolved_generics.iter().zip(generics) { + self.add_existing_generic(unresolved_generic, unresolved_generic.span(), generic); } } - pub fn add_existing_generic(&mut self, name: &str, span: Span, typevar: TypeVariable) { - // Check for name collisions of this generic - let rc_name = Rc::new(name.to_owned()); + pub fn add_existing_generic( + &mut self, + unresolved_generic: &UnresolvedGeneric, + span: Span, + resolved_generic: &ResolvedGeneric, + ) { + let name = &unresolved_generic.ident().0.contents; - if let Some((_, _, first_span)) = self.find_generic(&rc_name) { + if let Some(generic) = self.find_generic(name) { self.push_err(ResolverError::DuplicateDefinition { - name: name.to_owned(), - first_span: *first_span, + name: name.clone(), + first_span: generic.span, second_span: span, }); } else { - self.generics.push((rc_name, typevar, span)); + self.generics.push(resolved_generic.clone()); } } @@ -1397,7 +1487,7 @@ impl<'context> Elaborator<'context> { | Type::Error | Type::TypeVariable(_, _) | Type::Constant(_) - | Type::NamedGeneric(_, _) + | Type::NamedGeneric(_, _, _) | Type::Quoted(_) | Type::Forall(_, _) => (), @@ -1408,7 +1498,7 @@ impl<'context> Elaborator<'context> { } Type::Array(length, element_type) => { - if let Type::NamedGeneric(type_variable, name) = length.as_ref() { + if let Type::NamedGeneric(type_variable, name, _) = length.as_ref() { found.insert(name.to_string(), type_variable.clone()); } Self::find_numeric_generics_in_type(element_type, found); @@ -1433,7 +1523,7 @@ impl<'context> Elaborator<'context> { Type::Struct(struct_type, generics) => { for (i, generic) in generics.iter().enumerate() { - if let Type::NamedGeneric(type_variable, name) = generic { + if let Type::NamedGeneric(type_variable, name, _) = generic { if struct_type.borrow().generic_is_numeric(i) { found.insert(name.to_string(), 
type_variable.clone()); } @@ -1444,7 +1534,7 @@ impl<'context> Elaborator<'context> { } Type::Alias(alias, generics) => { for (i, generic) in generics.iter().enumerate() { - if let Type::NamedGeneric(type_variable, name) = generic { + if let Type::NamedGeneric(type_variable, name, _) = generic { if alias.borrow().generic_is_numeric(i) { found.insert(name.to_string(), type_variable.clone()); } @@ -1455,12 +1545,12 @@ impl<'context> Elaborator<'context> { } Type::MutableReference(element) => Self::find_numeric_generics_in_type(element, found), Type::String(length) => { - if let Type::NamedGeneric(type_variable, name) = length.as_ref() { + if let Type::NamedGeneric(type_variable, name, _) = length.as_ref() { found.insert(name.to_string(), type_variable.clone()); } } Type::FmtString(length, fields) => { - if let Type::NamedGeneric(type_variable, name) = length.as_ref() { + if let Type::NamedGeneric(type_variable, name, _) = length.as_ref() { found.insert(name.to_string(), type_variable.clone()); } Self::find_numeric_generics_in_type(fields, found); diff --git a/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/errors.rs b/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/errors.rs index 4eab12af308..d2c7acee2a3 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/errors.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/errors.rs @@ -42,6 +42,8 @@ pub enum InterpreterError { CannotInlineMacro { value: Value, location: Location }, UnquoteFoundDuringEvaluation { location: Location }, FailedToParseMacro { error: ParserError, tokens: Rc, rule: &'static str, file: FileId }, + UnsupportedTopLevelItemUnquote { item: String, location: Location }, + NonComptimeFnCallInSameCrate { function: String, location: Location }, Unimplemented { item: String, location: Location }, @@ -101,6 +103,8 @@ impl InterpreterError { | InterpreterError::NonStructInConstructor { location, .. } | InterpreterError::CannotInlineMacro { location, .. } | InterpreterError::UnquoteFoundDuringEvaluation { location, .. } + | InterpreterError::UnsupportedTopLevelItemUnquote { location, .. } + | InterpreterError::NonComptimeFnCallInSameCrate { location, .. } | InterpreterError::Unimplemented { location, .. } | InterpreterError::BreakNotInLoop { location, .. } | InterpreterError::ContinueNotInLoop { location, .. 
} => *location, @@ -259,7 +263,8 @@ impl<'a> From<&'a InterpreterError> for CustomDiagnostic { CustomDiagnostic::simple_error(msg, String::new(), location.span) } InterpreterError::CannotInlineMacro { value, location } => { - let msg = "Cannot inline value into runtime code if it contains references".into(); + let typ = value.get_type(); + let msg = format!("Cannot inline values of type `{typ}` into this position"); let secondary = format!("Cannot inline value {value:?}"); CustomDiagnostic::simple_error(msg, secondary, location.span) } @@ -293,6 +298,20 @@ impl<'a> From<&'a InterpreterError> for CustomDiagnostic { diagnostic.add_note(push_the_problem_on_the_library_author); diagnostic } + InterpreterError::UnsupportedTopLevelItemUnquote { item, location } => { + let msg = "Unsupported statement type to unquote".into(); + let secondary = + "Only functions, globals, and trait impls can be unquoted here".into(); + let mut error = CustomDiagnostic::simple_error(msg, secondary, location.span); + error.add_note(format!("Unquoted item was:\n{item}")); + error + } + InterpreterError::NonComptimeFnCallInSameCrate { function, location } => { + let msg = format!("`{function}` cannot be called in a `comptime` context here"); + let secondary = + "This function must be `comptime` or in a separate crate to be called".into(); + CustomDiagnostic::simple_error(msg, secondary, location.span) + } InterpreterError::Unimplemented { item, location } => { let msg = format!("{item} is currently unimplemented"); CustomDiagnostic::simple_error(msg, String::new(), location.span) diff --git a/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/interpreter.rs b/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/interpreter.rs index 5e236a2b980..d2b98569bbb 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/interpreter.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/interpreter.rs @@ -7,6 +7,8 @@ use noirc_errors::Location; use rustc_hash::FxHashMap as HashMap; use crate::ast::{BinaryOpKind, FunctionKind, IntegerBitSize, Signedness}; +use crate::graph::CrateId; +use crate::monomorphization::{perform_instantiation_bindings, undo_instantiation_bindings}; use crate::token::Tokens; use crate::{ hir_def::{ @@ -27,7 +29,7 @@ use crate::{ }; use super::errors::{IResult, InterpreterError}; -use super::value::Value; +use super::value::{unwrap_rc, Value}; mod builtin; mod unquote; @@ -42,6 +44,8 @@ pub struct Interpreter<'interner> { /// up all currently visible definitions. 
scopes: &'interner mut Vec>, + crate_id: CrateId, + in_loop: bool, } @@ -50,11 +54,25 @@ impl<'a> Interpreter<'a> { pub(crate) fn new( interner: &'a mut NodeInterner, scopes: &'a mut Vec>, + crate_id: CrateId, ) -> Self { - Self { interner, scopes, in_loop: false } + Self { interner, scopes, crate_id, in_loop: false } } pub(crate) fn call_function( + &mut self, + function: FuncId, + arguments: Vec<(Value, Location)>, + instantiation_bindings: TypeBindings, + location: Location, + ) -> IResult { + perform_instantiation_bindings(&instantiation_bindings); + let result = self.call_function_inner(function, arguments, location); + undo_instantiation_bindings(instantiation_bindings); + result + } + + fn call_function_inner( &mut self, function: FuncId, arguments: Vec<(Value, Location)>, @@ -69,6 +87,14 @@ impl<'a> Interpreter<'a> { }); } + let is_comptime = self.interner.function_modifiers(&function).is_comptime; + if !is_comptime && meta.source_crate == self.crate_id { + // Calling non-comptime functions from within the current crate is restricted + // as non-comptime items will have not been elaborated yet. + let function = self.interner.function_name(&function).to_owned(); + return Err(InterpreterError::NonComptimeFnCallInSameCrate { function, location }); + } + if meta.kind != FunctionKind::Normal { return self.call_builtin(function, arguments, location); } @@ -98,7 +124,8 @@ impl<'a> Interpreter<'a> { .expect("all builtin functions must contain a function attribute which contains the opcode which it links to"); if let Some(builtin) = func_attrs.builtin() { - builtin::call_builtin(self.interner, builtin, arguments, location) + let builtin = builtin.clone(); + builtin::call_builtin(self.interner, &builtin, arguments, location) } else if let Some(foreign) = func_attrs.foreign() { let item = format!("Comptime evaluation for foreign functions like {foreign}"); Err(InterpreterError::Unimplemented { item, location }) @@ -187,7 +214,8 @@ impl<'a> Interpreter<'a> { ) -> IResult<()> { match pattern { HirPattern::Identifier(identifier) => { - self.define(identifier.id, typ, argument, location) + self.define(identifier.id, argument); + Ok(()) } HirPattern::Mutable(pattern, _) => { self.define_pattern(pattern, typ, argument, location) @@ -209,8 +237,6 @@ impl<'a> Interpreter<'a> { }, HirPattern::Struct(struct_type, pattern_fields, _) => { self.push_scope(); - self.type_check(typ, &argument, location)?; - self.type_check(struct_type, &argument, location)?; let res = match argument { Value::Struct(fields, struct_type) if fields.len() == pattern_fields.len() => { @@ -246,30 +272,8 @@ impl<'a> Interpreter<'a> { } /// Define a new variable in the current scope - fn define( - &mut self, - id: DefinitionId, - typ: &Type, - argument: Value, - location: Location, - ) -> IResult<()> { - // Temporarily disabled since this fails on generic types - // self.type_check(typ, &argument, location)?; + fn define(&mut self, id: DefinitionId, argument: Value) { self.current_scope_mut().insert(id, argument); - Ok(()) - } - - /// Mutate an existing variable, potentially from a prior scope. 
- /// Also type checks the value being assigned - fn checked_mutate( - &mut self, - id: DefinitionId, - typ: &Type, - argument: Value, - location: Location, - ) -> IResult<()> { - self.type_check(typ, &argument, location)?; - self.mutate(id, argument, location) } /// Mutate an existing variable, potentially from a prior scope @@ -308,15 +312,6 @@ impl<'a> Interpreter<'a> { Err(InterpreterError::NonComptimeVarReferenced { name, location }) } - fn type_check(&self, typ: &Type, value: &Value, location: Location) -> IResult<()> { - let typ = typ.follow_bindings(); - let value_type = value.get_type(); - - typ.try_unify(&value_type, &mut TypeBindings::new()).map_err(|_| { - InterpreterError::TypeMismatch { expected: typ, value: value.clone(), location } - }) - } - /// Evaluate an expression and return the result pub fn evaluate(&mut self, id: ExprId) -> IResult { match self.interner.expression(&id) { @@ -354,8 +349,9 @@ impl<'a> Interpreter<'a> { match &definition.kind { DefinitionKind::Function(function_id) => { - let typ = self.interner.id_type(id); - Ok(Value::Function(*function_id, typ)) + let typ = self.interner.id_type(id).follow_bindings(); + let bindings = Rc::new(self.interner.get_instantiation_bindings(id).clone()); + Ok(Value::Function(*function_id, typ, bindings)) } DefinitionKind::Local(_) => self.lookup(&ident), DefinitionKind::Global(global_id) => { @@ -526,7 +522,7 @@ impl<'a> Interpreter<'a> { } fn evaluate_array(&mut self, array: HirArrayLiteral, id: ExprId) -> IResult { - let typ = self.interner.id_type(id); + let typ = self.interner.id_type(id).follow_bindings(); match array { HirArrayLiteral::Standard(elements) => { @@ -608,10 +604,13 @@ impl<'a> Interpreter<'a> { let rhs = self.evaluate(infix.rhs)?; // TODO: Need to account for operator overloading - assert!( - self.interner.get_selected_impl_for_expression(id).is_none(), - "Operator overloading is unimplemented in the interpreter" - ); + // See https://github.com/noir-lang/noir/issues/4925 + if self.interner.get_selected_impl_for_expression(id).is_some() { + return Err(InterpreterError::Unimplemented { + item: "Operator overloading in the interpreter".to_string(), + location: infix.operator.location, + }); + } use InterpreterError::InvalidValuesForBinary; match infix.operator.kind { @@ -920,7 +919,7 @@ impl<'a> Interpreter<'a> { }) .collect::>()?; - let typ = self.interner.id_type(id); + let typ = self.interner.id_type(id).follow_bindings(); Ok(Value::Struct(fields, typ)) } @@ -961,7 +960,10 @@ impl<'a> Interpreter<'a> { let location = self.interner.expr_location(&id); match function { - Value::Function(function_id, _) => self.call_function(function_id, arguments, location), + Value::Function(function_id, _, bindings) => { + let bindings = unwrap_rc(bindings); + self.call_function(function_id, arguments, bindings, location) + } Value::Closure(closure, env, _) => self.call_closure(closure, env, arguments, location), value => Err(InterpreterError::NonFunctionCalled { value, location }), } @@ -990,7 +992,7 @@ impl<'a> Interpreter<'a> { }; if let Some(method) = method { - self.call_function(method, arguments, location) + self.call_function(method, arguments, TypeBindings::new(), location) } else { Err(InterpreterError::NoMethodFound { name: method_name.clone(), typ, location }) } @@ -1135,7 +1137,7 @@ impl<'a> Interpreter<'a> { let environment = try_vecmap(&lambda.captures, |capture| self.lookup_id(capture.ident.id, location))?; - let typ = self.interner.id_type(id); + let typ = self.interner.id_type(id).follow_bindings(); 
Ok(Value::Closure(lambda, environment, typ)) } @@ -1196,9 +1198,7 @@ impl<'a> Interpreter<'a> { fn store_lvalue(&mut self, lvalue: HirLValue, rhs: Value) -> IResult<()> { match lvalue { - HirLValue::Ident(ident, typ) => { - self.checked_mutate(ident.id, &typ, rhs, ident.location) - } + HirLValue::Ident(ident, typ) => self.mutate(ident.id, rhs, ident.location), HirLValue::Dereference { lvalue, element_type: _, location } => { match self.evaluate_lvalue(&lvalue)? { Value::Pointer(value) => { @@ -1217,7 +1217,7 @@ impl<'a> Interpreter<'a> { } Value::Struct(mut fields, typ) => { fields.insert(Rc::new(field_name.0.contents), rhs); - self.store_lvalue(*object, Value::Struct(fields, typ)) + self.store_lvalue(*object, Value::Struct(fields, typ.follow_bindings())) } value => { Err(InterpreterError::NonTupleOrStructInMemberAccess { value, location }) diff --git a/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/interpreter/builtin.rs b/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/interpreter/builtin.rs index cccc9c6d545..1c0c4e6f274 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/interpreter/builtin.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/interpreter/builtin.rs @@ -4,14 +4,13 @@ use noirc_errors::Location; use crate::{ hir::comptime::{errors::IResult, InterpreterError, Value}, - lexer::Lexer, macros_api::NodeInterner, token::{SpannedToken, Token, Tokens}, QuotedType, Type, }; pub(super) fn call_builtin( - interner: &NodeInterner, + interner: &mut NodeInterner, name: &str, arguments: Vec<(Value, Location)>, location: Location, @@ -78,7 +77,7 @@ fn type_def_as_type( let struct_def = interner.get_struct(type_def); let struct_def = struct_def.borrow(); - let make_token = |name| SpannedToken::new(Token::Str(name), span); + let make_token = |name| SpannedToken::new(Token::Ident(name), span); let mut tokens = vec![make_token(struct_def.name.to_string())]; @@ -86,7 +85,7 @@ fn type_def_as_type( if i != 0 { tokens.push(SpannedToken::new(Token::Comma, span)); } - tokens.push(make_token(generic.borrow().to_string())); + tokens.push(make_token(generic.type_var.borrow().to_string())); } Ok(Value::Code(Rc::new(Tokens(tokens)))) @@ -112,7 +111,7 @@ fn type_def_generics( .generics .iter() .map(|generic| { - let name = SpannedToken::new(Token::Str(generic.borrow().to_string()), span); + let name = SpannedToken::new(Token::Ident(generic.type_var.borrow().to_string()), span); Value::Code(Rc::new(Tokens(vec![name]))) }) .collect(); @@ -124,7 +123,7 @@ fn type_def_generics( /// fn fields(self) -> [(Quoted, Quoted)] /// Returns (name, type) pairs of each field of this TypeDefinition fn type_def_fields( - interner: &NodeInterner, + interner: &mut NodeInterner, mut arguments: Vec<(Value, Location)>, ) -> IResult { assert_eq!(arguments.len(), 1, "ICE: `generics` should only receive a single argument"); @@ -138,14 +137,16 @@ fn type_def_fields( let struct_def = interner.get_struct(type_def); let struct_def = struct_def.borrow(); - let make_token = |name| SpannedToken::new(Token::Str(name), span); + let make_token = |name| SpannedToken::new(Token::Ident(name), span); let make_quoted = |tokens| Value::Code(Rc::new(Tokens(tokens))); let mut fields = im::Vector::new(); for (name, typ) in struct_def.get_fields_as_written() { let name = make_quoted(vec![make_token(name)]); - let typ = Value::Code(Rc::new(type_to_tokens(&typ)?)); + let id = interner.push_quoted_type(typ); + let typ = SpannedToken::new(Token::QuotedType(id), span); + let typ = 
Value::Code(Rc::new(Tokens(vec![typ]))); fields.push_back(Value::Tuple(vec![name, typ])); } @@ -155,22 +156,3 @@ fn type_def_fields( ]))); Ok(Value::Slice(fields, typ)) } - -/// FIXME(https://github.com/noir-lang/noir/issues/5309): This code is temporary. -/// It will produce poor results for type variables and will result in incorrect -/// spans on the returned tokens. -fn type_to_tokens(typ: &Type) -> IResult { - let (mut tokens, mut errors) = Lexer::lex(&typ.to_string()); - - if let Some(last) = tokens.0.last() { - if matches!(last.token(), Token::EOF) { - tokens.0.pop(); - } - } - - if !errors.is_empty() { - let error = errors.swap_remove(0); - todo!("Got lexer error: {error}") - } - Ok(tokens) -} diff --git a/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/tests.rs b/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/tests.rs index 43f6e21905b..870f2bc458a 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/tests.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/tests.rs @@ -7,15 +7,16 @@ use noirc_errors::Location; use super::errors::InterpreterError; use super::interpreter::Interpreter; use super::value::Value; +use crate::graph::CrateId; use crate::hir::type_check::test::type_check_src_code; fn interpret_helper(src: &str, func_namespace: Vec) -> Result { let (mut interner, main_id) = type_check_src_code(src, func_namespace); let mut scopes = vec![HashMap::default()]; - let mut interpreter = Interpreter::new(&mut interner, &mut scopes); + let mut interpreter = Interpreter::new(&mut interner, &mut scopes, CrateId::Root(0)); let no_location = Location::dummy(); - interpreter.call_function(main_id, Vec::new(), no_location) + interpreter.call_function(main_id, Vec::new(), HashMap::new(), no_location) } fn interpret(src: &str, func_namespace: Vec) -> Value { @@ -30,14 +31,14 @@ fn interpret_expect_error(src: &str, func_namespace: Vec) -> Interpreter #[test] fn interpreter_works() { - let program = "fn main() -> pub Field { 3 }"; + let program = "comptime fn main() -> pub Field { 3 }"; let result = interpret(program, vec!["main".into()]); assert_eq!(result, Value::Field(3u128.into())); } #[test] fn mutation_works() { - let program = "fn main() -> pub i8 { + let program = "comptime fn main() -> pub i8 { let mut x = 3; x = 4; x @@ -48,7 +49,7 @@ fn mutation_works() { #[test] fn mutating_references() { - let program = "fn main() -> pub i32 { + let program = "comptime fn main() -> pub i32 { let x = &mut 3; *x = 4; *x @@ -59,7 +60,7 @@ fn mutating_references() { #[test] fn mutating_mutable_references() { - let program = "fn main() -> pub i64 { + let program = "comptime fn main() -> pub i64 { let mut x = &mut 3; *x = 4; *x @@ -70,7 +71,7 @@ fn mutating_mutable_references() { #[test] fn mutating_arrays() { - let program = "fn main() -> pub u8 { + let program = "comptime fn main() -> pub u8 { let mut a1 = [1, 2, 3, 4]; a1[1] = 22; a1[1] @@ -81,7 +82,7 @@ fn mutating_arrays() { #[test] fn mutate_in_new_scope() { - let program = "fn main() -> pub u8 { + let program = "comptime fn main() -> pub u8 { let mut x = 0; x += 1; { @@ -95,7 +96,7 @@ fn mutate_in_new_scope() { #[test] fn for_loop() { - let program = "fn main() -> pub u8 { + let program = "comptime fn main() -> pub u8 { let mut x = 0; for i in 0 .. 6 { x += i; @@ -108,7 +109,7 @@ fn for_loop() { #[test] fn for_loop_u16() { - let program = "fn main() -> pub u16 { + let program = "comptime fn main() -> pub u16 { let mut x = 0; for i in 0 .. 
6 { x += i; @@ -121,7 +122,7 @@ fn for_loop_u16() { #[test] fn for_loop_with_break() { - let program = "unconstrained fn main() -> pub u32 { + let program = "unconstrained comptime fn main() -> pub u32 { let mut x = 0; for i in 0 .. 6 { if i == 4 { @@ -137,7 +138,7 @@ fn for_loop_with_break() { #[test] fn for_loop_with_continue() { - let program = "unconstrained fn main() -> pub u64 { + let program = "unconstrained comptime fn main() -> pub u64 { let mut x = 0; for i in 0 .. 6 { if i == 4 { @@ -153,7 +154,7 @@ fn for_loop_with_continue() { #[test] fn assert() { - let program = "fn main() { + let program = "comptime fn main() { assert(1 == 1); }"; let result = interpret(program, vec!["main".into()]); @@ -162,7 +163,7 @@ fn assert() { #[test] fn assert_fail() { - let program = "fn main() { + let program = "comptime fn main() { assert(1 == 2); }"; let result = interpret_expect_error(program, vec!["main".into()]); @@ -171,7 +172,7 @@ fn assert_fail() { #[test] fn lambda() { - let program = "fn main() -> pub u8 { + let program = "comptime fn main() -> pub u8 { let f = |x: u8| x + 1; f(1) }"; @@ -182,11 +183,11 @@ fn lambda() { #[test] fn non_deterministic_recursion() { let program = " - fn main() -> pub u64 { + comptime fn main() -> pub u64 { fib(10) } - fn fib(x: u64) -> u64 { + comptime fn fib(x: u64) -> u64 { if x <= 1 { x } else { @@ -196,3 +197,18 @@ fn non_deterministic_recursion() { let result = interpret(program, vec!["main".into(), "fib".into()]); assert_eq!(result, Value::U64(55)); } + +#[test] +fn generic_functions() { + let program = " + fn main() -> pub u8 { + apply(1, |x| x + 1) + } + + fn apply(x: T, f: fn[Env](T) -> U) -> U { + f(x) + } + "; + let result = interpret(program, vec!["main".into(), "apply".into()]); + assert!(matches!(result, Value::U8(2))); +} diff --git a/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/value.rs b/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/value.rs index d51d69f9226..c956cdb5796 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/value.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/value.rs @@ -14,9 +14,9 @@ use crate::{ StructId, }, node_interner::{ExprId, FuncId}, - parser, + parser::{self, NoirParser, TopLevelStatement}, token::{SpannedToken, Token, Tokens}, - QuotedType, Shared, Type, + QuotedType, Shared, Type, TypeBindings, }; use rustc_hash::FxHashMap as HashMap; @@ -36,7 +36,7 @@ pub enum Value { U32(u32), U64(u64), String(Rc), - Function(FuncId, Type), + Function(FuncId, Type, Rc), Closure(HirLambda, Vec, Type), Tuple(Vec), Struct(HashMap, Value>, Type), @@ -65,7 +65,7 @@ impl Value { let length = Type::Constant(value.len() as u32); Type::String(Box::new(length)) } - Value::Function(_, typ) => return Cow::Borrowed(typ), + Value::Function(_, typ, _) => return Cow::Borrowed(typ), Value::Closure(_, _, typ) => return Cow::Borrowed(typ), Value::Tuple(fields) => { Type::Tuple(vecmap(fields, |field| field.get_type().into_owned())) @@ -128,13 +128,14 @@ impl Value { ExpressionKind::Literal(Literal::Integer((value as u128).into(), false)) } Value::String(value) => ExpressionKind::Literal(Literal::Str(unwrap_rc(value))), - Value::Function(id, typ) => { + Value::Function(id, typ, bindings) => { let id = interner.function_definition_id(id); let impl_kind = ImplKind::NotATraitMethod; let ident = HirIdent { location, id, impl_kind }; let expr_id = interner.push_expr(HirExpression::Ident(ident, None)); interner.push_expr_location(expr_id, location.span, location.file); 
interner.push_expr_type(expr_id, typ); + interner.store_instantiation_bindings(expr_id, unwrap_rc(bindings)); ExpressionKind::Resolved(expr_id) } Value::Closure(_lambda, _env, _typ) => { @@ -247,10 +248,15 @@ impl Value { HirExpression::Literal(HirLiteral::Integer((value as u128).into(), false)) } Value::String(value) => HirExpression::Literal(HirLiteral::Str(unwrap_rc(value))), - Value::Function(id, _typ) => { + Value::Function(id, typ, bindings) => { let id = interner.function_definition_id(id); let impl_kind = ImplKind::NotATraitMethod; - HirExpression::Ident(HirIdent { location, id, impl_kind }, None) + let ident = HirIdent { location, id, impl_kind }; + let expr_id = interner.push_expr(HirExpression::Ident(ident, None)); + interner.push_expr_location(expr_id, location.span, location.file); + interner.push_expr_type(expr_id, typ); + interner.store_instantiation_bindings(expr_id, unwrap_rc(bindings)); + return Ok(expr_id); } Value::Closure(_lambda, _env, _typ) => { // TODO: How should a closure's environment be inlined? @@ -319,6 +325,13 @@ impl Value { _ => None, } } + + pub(crate) fn into_top_level_item(self, location: Location) -> IResult { + match self { + Value::Code(tokens) => parse_tokens(tokens, parser::top_level_item(), location.file), + value => Err(InterpreterError::CannotInlineMacro { value, location }), + } + } } /// Unwraps an Rc value without cloning the inner value if the reference count is 1. Clones otherwise. @@ -326,6 +339,17 @@ pub(crate) fn unwrap_rc(rc: Rc) -> T { Rc::try_unwrap(rc).unwrap_or_else(|rc| (*rc).clone()) } +fn parse_tokens(tokens: Rc, parser: impl NoirParser, file: fm::FileId) -> IResult { + match parser.parse(tokens.as_ref().clone()) { + Ok(expr) => Ok(expr), + Err(mut errors) => { + let error = errors.swap_remove(0); + let rule = "an expression"; + Err(InterpreterError::FailedToParseMacro { error, file, tokens, rule }) + } + } +} + impl Display for Value { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { match self { @@ -344,7 +368,7 @@ impl Display for Value { Value::U32(value) => write!(f, "{value}"), Value::U64(value) => write!(f, "{value}"), Value::String(value) => write!(f, "{value}"), - Value::Function(_, _) => write!(f, "(function)"), + Value::Function(..) 
=> write!(f, "(function)"), Value::Closure(_, _, _) => write!(f, "(closure)"), Value::Tuple(fields) => { let fields = vecmap(fields, ToString::to_string); diff --git a/noir/noir-repo/compiler/noirc_frontend/src/hir/def_collector/dc_crate.rs b/noir/noir-repo/compiler/noirc_frontend/src/hir/def_collector/dc_crate.rs index 216ed5fc545..37ece01c805 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/hir/def_collector/dc_crate.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/hir/def_collector/dc_crate.rs @@ -5,7 +5,7 @@ use crate::graph::CrateId; use crate::hir::comptime::{Interpreter, InterpreterError}; use crate::hir::def_map::{CrateDefMap, LocalModuleId, ModuleId}; use crate::hir::resolution::errors::ResolverError; -use crate::{Type, TypeVariable}; +use crate::{ResolvedGeneric, Type}; use crate::hir::resolution::import::{resolve_import, ImportDirective, PathResolution}; use crate::hir::resolution::{ @@ -33,7 +33,6 @@ use iter_extended::vecmap; use noirc_errors::{CustomDiagnostic, Span}; use std::collections::{BTreeMap, HashMap}; -use std::rc::Rc; use std::vec; #[derive(Default)] @@ -125,7 +124,7 @@ pub struct UnresolvedTraitImpl { pub trait_id: Option, pub impl_id: Option, pub resolved_object_type: Option, - pub resolved_generics: Vec<(Rc, TypeVariable, Span)>, + pub resolved_generics: Vec, // The resolved generic on the trait itself. E.g. it is the `` in // `impl Foo for Bar { ... }` @@ -154,6 +153,7 @@ pub struct DefCollector { pub(crate) items: CollectedItems, } +#[derive(Default)] pub struct CollectedItems { pub(crate) functions: Vec, pub(crate) types: BTreeMap, @@ -164,6 +164,18 @@ pub struct CollectedItems { pub(crate) trait_impls: Vec, } +impl CollectedItems { + pub fn is_empty(&self) -> bool { + self.functions.is_empty() + && self.types.is_empty() + && self.type_aliases.is_empty() + && self.traits.is_empty() + && self.globals.is_empty() + && self.impls.is_empty() + && self.trait_impls.is_empty() + } +} + /// Maps the type and the module id in which the impl is defined to the functions contained in that /// impl along with the generics declared on the impl itself. This also contains the Span /// of the object_type of the impl, used to issue an error if the object type fails to resolve. @@ -379,6 +391,7 @@ impl DefCollector { def_collector.items.traits, crate_id, )); + // Must resolve structs before we resolve globals. 
resolved_module.errors.extend(resolve_structs( context, @@ -447,7 +460,7 @@ impl DefCollector { resolved_module.type_check(context); if !cycles_present { - resolved_module.evaluate_comptime(&mut context.def_interner); + resolved_module.evaluate_comptime(&mut context.def_interner, crate_id); } resolved_module.errors @@ -546,10 +559,10 @@ impl ResolvedModule { } /// Evaluate all `comptime` expressions in this module - fn evaluate_comptime(&mut self, interner: &mut NodeInterner) { + fn evaluate_comptime(&mut self, interner: &mut NodeInterner, crate_id: CrateId) { if self.count_errors() == 0 { let mut scopes = vec![HashMap::default()]; - let mut interpreter = Interpreter::new(interner, &mut scopes); + let mut interpreter = Interpreter::new(interner, &mut scopes, crate_id); for (_file, global) in &self.globals { if let Err(error) = interpreter.scan_global(*global) { diff --git a/noir/noir-repo/compiler/noirc_frontend/src/hir/def_collector/dc_mod.rs b/noir/noir-repo/compiler/noirc_frontend/src/hir/def_collector/dc_mod.rs index 5c196324b7d..ab9de6c25c4 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/hir/def_collector/dc_mod.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/hir/def_collector/dc_mod.rs @@ -11,6 +11,7 @@ use crate::ast::{ NoirStruct, NoirTrait, NoirTraitImpl, NoirTypeAlias, Pattern, TraitImplItem, TraitItem, TypeImpl, }; +use crate::macros_api::NodeInterner; use crate::{ graph::CrateId, hir::def_collector::dc_crate::{UnresolvedStruct, UnresolvedTrait}, @@ -26,7 +27,7 @@ use super::{ }, errors::{DefCollectorErrorKind, DuplicateType}, }; -use crate::hir::def_map::{LocalModuleId, ModuleData, ModuleId}; +use crate::hir::def_map::{CrateDefMap, LocalModuleId, ModuleData, ModuleId}; use crate::hir::resolution::import::ImportDirective; use crate::hir::Context; @@ -105,35 +106,19 @@ impl<'a> ModCollector<'a> { ) -> Vec<(CompilationError, fm::FileId)> { let mut errors = vec![]; for global in globals { - let name = global.pattern.name_ident().clone(); - - let global_id = context.def_interner.push_empty_global( - name.clone(), - self.module_id, + let (global, error) = collect_global( + &mut context.def_interner, + &mut self.def_collector.def_map, + global, self.file_id, - global.attributes.clone(), - matches!(global.pattern, Pattern::Mutable { .. 
}), + self.module_id, ); - // Add the statement to the scope so its path can be looked up later - let result = self.def_collector.def_map.modules[self.module_id.0] - .declare_global(name, global_id); - - if let Err((first_def, second_def)) = result { - let err = DefCollectorErrorKind::Duplicate { - typ: DuplicateType::Global, - first_def, - second_def, - }; - errors.push((err.into(), self.file_id)); + if let Some(error) = error { + errors.push(error); } - self.def_collector.items.globals.push(UnresolvedGlobal { - file_id: self.file_id, - module_id: self.module_id, - global_id, - stmt_def: global, - }); + self.def_collector.items.globals.push(global); } errors } @@ -149,8 +134,9 @@ impl<'a> ModCollector<'a> { self_type: None, }; - for (method, _) in r#impl.methods { + for (mut method, _) in r#impl.methods { let func_id = context.def_interner.push_empty_fn(); + method.def.where_clause.extend(r#impl.where_clause.clone()); let location = Location::new(method.span(), self.file_id); context.def_interner.push_function(func_id, &method.def, module_id, location); unresolved_functions.push_fn(self.module_id, func_id, method); @@ -168,11 +154,16 @@ impl<'a> ModCollector<'a> { impls: Vec, krate: CrateId, ) { - for trait_impl in impls { + for mut trait_impl in impls { let trait_name = trait_impl.trait_name.clone(); - let mut unresolved_functions = - self.collect_trait_impl_function_overrides(context, &trait_impl, krate); + let mut unresolved_functions = collect_trait_impl_functions( + &mut context.def_interner, + &mut trait_impl, + krate, + self.file_id, + self.module_id, + ); let module = ModuleId { krate, local_id: self.module_id }; @@ -204,33 +195,6 @@ impl<'a> ModCollector<'a> { } } - fn collect_trait_impl_function_overrides( - &mut self, - context: &mut Context, - trait_impl: &NoirTraitImpl, - krate: CrateId, - ) -> UnresolvedFunctions { - let mut unresolved_functions = UnresolvedFunctions { - file_id: self.file_id, - functions: Vec::new(), - trait_id: None, - self_type: None, - }; - - let module = ModuleId { krate, local_id: self.module_id }; - - for item in &trait_impl.items { - if let TraitImplItem::Function(impl_method) = item { - let func_id = context.def_interner.push_empty_fn(); - let location = Location::new(impl_method.span(), self.file_id); - context.def_interner.push_function(func_id, &impl_method.def, module, location); - unresolved_functions.push_fn(self.module_id, func_id, impl_method.clone()); - } - } - - unresolved_functions - } - fn collect_functions( &mut self, context: &mut Context, @@ -308,11 +272,21 @@ impl<'a> ModCollector<'a> { struct_def: struct_definition, }; + let resolved_generics = context.resolve_generics( + &unresolved.struct_def.generics, + &mut definition_errors, + self.file_id, + ); + // Create the corresponding module for the struct namespace let id = match self.push_child_module(&name, self.file_id, false, false) { - Ok(local_id) => { - context.def_interner.new_struct(&unresolved, krate, local_id, self.file_id) - } + Ok(local_id) => context.def_interner.new_struct( + &unresolved, + resolved_generics, + krate, + local_id, + self.file_id, + ), Err(error) => { definition_errors.push((error.into(), self.file_id)); continue; @@ -356,7 +330,14 @@ impl<'a> ModCollector<'a> { type_alias_def: type_alias, }; - let type_alias_id = context.def_interner.push_type_alias(&unresolved); + let resolved_generics = context.resolve_generics( + &unresolved.type_alias_def.generics, + &mut errors, + self.file_id, + ); + + let type_alias_id = + 
context.def_interner.push_type_alias(&unresolved, resolved_generics); // Add the type alias to scope so its path can be looked up later let result = self.def_collector.def_map.modules[self.module_id.0] @@ -516,6 +497,9 @@ impl<'a> ModCollector<'a> { } } + let resolved_generics = + context.resolve_generics(&trait_definition.generics, &mut errors, self.file_id); + // And store the TraitId -> TraitType mapping somewhere it is reachable let unresolved = UnresolvedTrait { file_id: self.file_id, @@ -525,7 +509,8 @@ impl<'a> ModCollector<'a> { method_ids, fns_with_default_impl: unresolved_functions, }; - context.def_interner.push_empty_trait(trait_id, &unresolved); + context.def_interner.push_empty_trait(trait_id, &unresolved, resolved_generics); + self.def_collector.items.traits.insert(trait_id, unresolved); } errors @@ -761,6 +746,60 @@ fn is_native_field(str: &str) -> bool { } } +pub(crate) fn collect_trait_impl_functions( + interner: &mut NodeInterner, + trait_impl: &mut NoirTraitImpl, + krate: CrateId, + file_id: FileId, + local_id: LocalModuleId, +) -> UnresolvedFunctions { + let mut unresolved_functions = + UnresolvedFunctions { file_id, functions: Vec::new(), trait_id: None, self_type: None }; + + let module = ModuleId { krate, local_id }; + + for item in std::mem::take(&mut trait_impl.items) { + if let TraitImplItem::Function(impl_method) = item { + let func_id = interner.push_empty_fn(); + let location = Location::new(impl_method.span(), file_id); + interner.push_function(func_id, &impl_method.def, module, location); + unresolved_functions.push_fn(local_id, func_id, impl_method); + } + } + + unresolved_functions +} + +pub(crate) fn collect_global( + interner: &mut NodeInterner, + def_map: &mut CrateDefMap, + global: LetStatement, + file_id: FileId, + module_id: LocalModuleId, +) -> (UnresolvedGlobal, Option<(CompilationError, FileId)>) { + let name = global.pattern.name_ident().clone(); + + let global_id = interner.push_empty_global( + name.clone(), + module_id, + file_id, + global.attributes.clone(), + matches!(global.pattern, Pattern::Mutable { .. }), + ); + + // Add the statement to the scope so its path can be looked up later + let result = def_map.modules[module_id.0].declare_global(name, global_id); + + let error = result.err().map(|(first_def, second_def)| { + let err = + DefCollectorErrorKind::Duplicate { typ: DuplicateType::Global, first_def, second_def }; + (err.into(), file_id) + }); + + let global = UnresolvedGlobal { file_id, module_id, global_id, stmt_def: global }; + (global, error) +} + #[cfg(test)] mod tests { use super::*; diff --git a/noir/noir-repo/compiler/noirc_frontend/src/hir/def_collector/errors.rs b/noir/noir-repo/compiler/noirc_frontend/src/hir/def_collector/errors.rs index edeb463e10d..af2264c3843 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/hir/def_collector/errors.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/hir/def_collector/errors.rs @@ -1,4 +1,4 @@ -use crate::ast::{Ident, Path}; +use crate::ast::{Ident, Path, UnresolvedTypeData}; use crate::hir::resolution::import::PathResolutionError; use noirc_errors::CustomDiagnostic as Diagnostic; @@ -66,6 +66,8 @@ pub enum DefCollectorErrorKind { TraitImplOrphaned { span: Span }, #[error("macro error : {0:?}")] MacroError(MacroError), + #[error("The only supported types of numeric generics are integers, fields, and booleans")] + UnsupportedNumericGenericType { ident: Ident, typ: UnresolvedTypeData }, } /// An error struct that macro processors can return. 
@@ -228,6 +230,15 @@ impl<'a> From<&'a DefCollectorErrorKind> for Diagnostic { DefCollectorErrorKind::MacroError(macro_error) => { Diagnostic::simple_error(macro_error.primary_message.clone(), macro_error.secondary_message.clone().unwrap_or_default(), macro_error.span.unwrap_or_default()) }, + DefCollectorErrorKind::UnsupportedNumericGenericType { ident, typ } => { + let name = &ident.0.contents; + + Diagnostic::simple_error( + format!("{name} has a type of {typ}. The only supported types of numeric generics are integers and fields"), + "Unsupported numeric generic type".to_string(), + ident.0.span(), + ) + } } } } diff --git a/noir/noir-repo/compiler/noirc_frontend/src/hir/mod.rs b/noir/noir-repo/compiler/noirc_frontend/src/hir/mod.rs index 55dc22d6c5d..71fdc6b30d2 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/hir/mod.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/hir/mod.rs @@ -5,17 +5,21 @@ pub mod resolution; pub mod scope; pub mod type_check; +use crate::ast::UnresolvedGenerics; use crate::debug::DebugInstrumenter; use crate::graph::{CrateGraph, CrateId}; use crate::hir_def::function::FuncMeta; use crate::node_interner::{FuncId, NodeInterner, StructId}; use crate::parser::ParserError; -use crate::ParsedModule; +use crate::{Generics, Kind, ParsedModule, ResolvedGeneric, Type, TypeVariable}; +use def_collector::dc_crate::CompilationError; use def_map::{Contract, CrateDefMap}; -use fm::FileManager; +use fm::{FileId, FileManager}; +use iter_extended::vecmap; use noirc_errors::Location; use std::borrow::Cow; use std::collections::{BTreeMap, HashMap}; +use std::rc::Rc; use self::def_map::TestFunction; @@ -80,7 +84,7 @@ impl Context<'_, '_> { } } - pub fn parsed_file_results(&self, file_id: fm::FileId) -> (ParsedModule, Vec) { + pub fn parsed_file_results(&self, file_id: FileId) -> (ParsedModule, Vec) { self.parsed_files.get(&file_id).expect("noir file wasn't parsed").clone() } @@ -256,4 +260,34 @@ impl Context<'_, '_> { pub fn module(&self, module_id: def_map::ModuleId) -> &def_map::ModuleData { module_id.module(&self.def_maps) } + + /// Generics need to be resolved before elaboration to distinguish + /// between normal and numeric generics. + /// This method is expected to be used during definition collection. + /// Each result is returned in a list rather than returned as a single result as to allow + /// definition collection to provide an error for each ill-formed numeric generic. 
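+    ///
+    /// A typical call site (as used in `dc_mod.rs` when collecting a struct definition):
+    ///
+    /// ```ignore
+    /// let resolved_generics = context.resolve_generics(
+    ///     &unresolved.struct_def.generics,
+    ///     &mut definition_errors,
+    ///     self.file_id,
+    /// );
+    /// ```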
+ pub(crate) fn resolve_generics( + &mut self, + generics: &UnresolvedGenerics, + errors: &mut Vec<(CompilationError, FileId)>, + file_id: FileId, + ) -> Generics { + vecmap(generics, |generic| { + // Map the generic to a fresh type variable + let id = self.def_interner.next_type_variable_id(); + let type_var = TypeVariable::unbound(id); + let ident = generic.ident(); + let span = ident.0.span(); + + // Check for name collisions of this generic + let name = Rc::new(ident.0.contents.clone()); + + let kind = generic.kind().unwrap_or_else(|err| { + errors.push((err.into(), file_id)); + Kind::Numeric(Box::new(Type::Error)) + }); + + ResolvedGeneric { name, type_var, kind, span } + }) + } } diff --git a/noir/noir-repo/compiler/noirc_frontend/src/hir/resolution/errors.rs b/noir/noir-repo/compiler/noirc_frontend/src/hir/resolution/errors.rs index 237c3313e16..b03c38b46cb 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/hir/resolution/errors.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/hir/resolution/errors.rs @@ -96,6 +96,12 @@ pub enum ResolverError { NoPredicatesAttributeOnUnconstrained { ident: Ident }, #[error("#[fold] attribute is only allowed on constrained functions")] FoldAttributeOnUnconstrained { ident: Ident }, + #[error("The only supported types of numeric generics are integers, fields, and booleans")] + UnsupportedNumericGenericType { ident: Ident, typ: Type }, + #[error("Numeric generics should be explicit")] + UseExplicitNumericGeneric { ident: Ident }, + #[error("expected type, found numeric generic parameter")] + NumericGenericUsedForType { name: String, span: Span }, #[error("Invalid array length construction")] ArrayLengthInterpreter { error: InterpreterError }, #[error("The unquote operator '$' can only be used within a quote expression")] @@ -393,6 +399,31 @@ impl<'a> From<&'a ResolverError> for Diagnostic { diag.add_note("The `#[fold]` attribute specifies whether a constrained function should be treated as a separate circuit rather than inlined into the program entry point".to_owned()); diag } + ResolverError::UnsupportedNumericGenericType { ident , typ } => { + let name = &ident.0.contents; + + Diagnostic::simple_error( + format!("{name} has a type of {typ}. The only supported types of numeric generics are integers, fields, and booleans."), + "Unsupported numeric generic type".to_string(), + ident.0.span(), + ) + } + ResolverError::UseExplicitNumericGeneric { ident } => { + let name = &ident.0.contents; + + Diagnostic::simple_warning( + String::from("Noir now supports explicit numeric generics. 
Support for implicit numeric generics will be removed in the following release."), + format!("Numeric generic `{name}` should now be specified with `let {name}: `"), + ident.0.span(), + ) + } + ResolverError::NumericGenericUsedForType { name, span } => { + Diagnostic::simple_error( + format!("expected type, found numeric generic parameter {name}"), + String::from("not a type"), + *span, + ) + } ResolverError::ArrayLengthInterpreter { error } => Diagnostic::from(error), ResolverError::UnquoteUsedOutsideQuote { span } => { Diagnostic::simple_error( diff --git a/noir/noir-repo/compiler/noirc_frontend/src/hir/resolution/functions.rs b/noir/noir-repo/compiler/noirc_frontend/src/hir/resolution/functions.rs index e63de9b9173..fe46796ed24 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/hir/resolution/functions.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/hir/resolution/functions.rs @@ -1,8 +1,7 @@ -use std::{collections::BTreeMap, rc::Rc}; +use std::collections::BTreeMap; use fm::FileId; use iter_extended::vecmap; -use noirc_errors::Span; use crate::{ graph::CrateId, @@ -11,10 +10,10 @@ use crate::{ def_map::{CrateDefMap, ModuleId}, }, node_interner::{FuncId, NodeInterner, TraitImplId}, - Type, TypeVariable, + ResolvedGeneric, Type, }; -use super::{path_resolver::StandardPathResolver, resolver::Resolver}; +use super::{path_resolver::StandardPathResolver, Resolver}; #[allow(clippy::too_many_arguments)] pub(crate) fn resolve_function_set( @@ -24,7 +23,7 @@ pub(crate) fn resolve_function_set( mut unresolved_functions: UnresolvedFunctions, self_type: Option, trait_impl_id: Option, - impl_generics: Vec<(Rc, TypeVariable, Span)>, + impl_generics: Vec, errors: &mut Vec<(CompilationError, FileId)>, ) -> Vec<(FileId, FuncId)> { let file_id = unresolved_functions.file_id; diff --git a/noir/noir-repo/compiler/noirc_frontend/src/hir/resolution/import.rs b/noir/noir-repo/compiler/noirc_frontend/src/hir/resolution/import.rs index 282ee8a23c2..d73130411e4 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/hir/resolution/import.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/hir/resolution/import.rs @@ -38,8 +38,6 @@ pub(crate) type PathResolutionResult = Result From<&'a PathResolutionError> for CustomDiagnostic { PathResolutionError::Unresolved(ident) => { CustomDiagnostic::simple_error(error.to_string(), String::new(), ident.span()) } - PathResolutionError::ExternalContractUsed(ident) => CustomDiagnostic::simple_error( - error.to_string(), - "Contracts may only be referenced from within a contract".to_string(), - ident.span(), - ), // This will be upgraded to an error in future versions PathResolutionError::Private(ident) => CustomDiagnostic::simple_warning( error.to_string(), diff --git a/noir/noir-repo/compiler/noirc_frontend/src/hir/resolution/resolver.rs b/noir/noir-repo/compiler/noirc_frontend/src/hir/resolution/resolver.rs index 5706e62e193..6d547aaf0b7 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/hir/resolution/resolver.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/hir/resolution/resolver.rs @@ -32,8 +32,9 @@ use crate::ast::{ ArrayLiteral, BinaryOpKind, BlockExpression, Expression, ExpressionKind, ForRange, FunctionDefinition, FunctionKind, FunctionReturnType, Ident, ItemVisibility, LValue, LetStatement, Literal, NoirFunction, NoirStruct, NoirTypeAlias, Param, Path, PathKind, Pattern, - Statement, StatementKind, TraitBound, UnaryOp, UnresolvedGenerics, UnresolvedTraitConstraint, - UnresolvedType, UnresolvedTypeData, UnresolvedTypeExpression, Visibility, 
ERROR_IDENT, + Statement, StatementKind, TraitBound, UnaryOp, UnresolvedGeneric, UnresolvedGenerics, + UnresolvedTraitConstraint, UnresolvedType, UnresolvedTypeData, UnresolvedTypeExpression, + Visibility, ERROR_IDENT, }; use crate::graph::CrateId; use crate::hir::def_map::{ModuleDefId, TryFromModuleDefId, MAIN_FUNCTION}; @@ -47,7 +48,10 @@ use crate::node_interner::{ DefinitionId, DefinitionKind, DependencyId, ExprId, FuncId, GlobalId, NodeInterner, StmtId, StructId, TraitId, TraitImplId, TraitMethodId, TypeAliasId, }; -use crate::{Generics, Shared, StructType, Type, TypeAlias, TypeVariable, TypeVariableKind}; +use crate::{ + GenericTypeVars, Generics, Kind, ResolvedGeneric, Shared, StructType, Type, TypeAlias, + TypeVariable, TypeVariableKind, +}; use fm::FileId; use iter_extended::vecmap; use noirc_errors::{Location, Span, Spanned}; @@ -131,7 +135,7 @@ pub struct Resolver<'a> { /// unique type variables if we're resolving a struct. Empty otherwise. /// This is a Vec rather than a map to preserve the order a functions generics /// were declared in. - generics: Vec<(Rc, TypeVariable, Span)>, + generics: Vec, /// When resolving lambda expressions, we need to keep track of the variables /// that are captured. We do this in order to create the hidden environment @@ -223,7 +227,8 @@ impl<'a> Resolver<'a> { let mut new_generic_ident: Ident = format!("T{}_impl_{}", func_id, path.as_string()).into(); let mut new_generic_path = Path::from_ident(new_generic_ident.clone()); - while impl_trait_generics.contains(&new_generic_ident) + let new_generic = UnresolvedGeneric::from(new_generic_ident.clone()); + while impl_trait_generics.contains(&new_generic) || self.lookup_generic_or_global_type(&new_generic_path).is_some() { new_generic_ident = @@ -231,7 +236,7 @@ impl<'a> Resolver<'a> { new_generic_path = Path::from_ident(new_generic_ident.clone()); counter += 1; } - impl_trait_generics.insert(new_generic_ident.clone()); + impl_trait_generics.insert(UnresolvedGeneric::from(new_generic_ident.clone())); let is_synthesized = true; let new_generic_type_data = @@ -249,7 +254,7 @@ impl<'a> Resolver<'a> { }; parameter.typ.typ = new_generic_type_data; - func.def.generics.push(new_generic_ident); + func.def.generics.push(new_generic_ident.into()); func.def.where_clause.push(new_trait_constraint); } } @@ -591,7 +596,7 @@ impl<'a> Resolver<'a> { let env = Box::new(self.resolve_type_inner(*env)); match *env { - Type::Unit | Type::Tuple(_) | Type::NamedGeneric(_, _) => { + Type::Unit | Type::Tuple(_) | Type::NamedGeneric(_, _, _) => { Type::Function(args, ret, env) } _ => { @@ -607,6 +612,7 @@ impl<'a> Resolver<'a> { Type::MutableReference(Box::new(self.resolve_type_inner(*element))) } Parenthesized(typ) => self.resolve_type_inner(*typ), + Resolved(id) => self.interner.get_quoted_type(id).clone(), }; if let Type::Struct(_, _) = resolved_type { @@ -621,8 +627,8 @@ impl<'a> Resolver<'a> { resolved_type } - fn find_generic(&self, target_name: &str) -> Option<&(Rc, TypeVariable, Span)> { - self.generics.iter().find(|(name, _, _)| name.as_ref() == target_name) + fn find_generic(&self, target_name: &str) -> Option<&ResolvedGeneric> { + self.generics.iter().find(|generic| generic.name.as_ref() == target_name) } fn resolve_named_type(&mut self, path: Path, args: Vec) -> Type { @@ -747,9 +753,15 @@ impl<'a> Resolver<'a> { fn lookup_generic_or_global_type(&mut self, path: &Path) -> Option { if path.segments.len() == 1 { let name = &path.last_segment().0.contents; - if let Some((name, var, _)) = self.find_generic(name) { - 
return Some(Type::NamedGeneric(var.clone(), name.clone())); - } + if let Some(generic) = self.find_generic(name) { + // We always insert a `TypeKind::Normal` as we do not support explicit numeric generics + // in the resolver + return Some(Type::NamedGeneric( + generic.type_var.clone(), + generic.name.clone(), + Kind::Normal, + )); + }; } // If we cannot find a local generic of the same name, try to look up a global @@ -848,14 +860,14 @@ impl<'a> Resolver<'a> { /// Return the current generics. /// Needed to keep referring to the same type variables across many /// methods in a single impl. - pub fn get_generics(&self) -> &[(Rc, TypeVariable, Span)] { + pub fn get_generics(&self) -> &[ResolvedGeneric] { &self.generics } /// Set the current generics that are in scope. /// Unlike add_generics, this function will not create any new type variables, /// opting to reuse the existing ones it is directly given. - pub fn set_generics(&mut self, generics: Vec<(Rc, TypeVariable, Span)>) { + pub fn set_generics(&mut self, generics: Vec) { self.generics = generics; } @@ -875,48 +887,79 @@ impl<'a> Resolver<'a> { // Map the generic to a fresh type variable let id = self.interner.next_type_variable_id(); let typevar = TypeVariable::unbound(id); - let span = generic.0.span(); + let ident = generic.ident(); + let span = ident.0.span(); // Check for name collisions of this generic - let name = Rc::new(generic.0.contents.clone()); + let name = Rc::new(ident.0.contents.clone()); - if let Some((_, _, first_span)) = self.find_generic(&name) { + let resolved_generic = ResolvedGeneric { + name: name.clone(), + type_var: typevar, + // We only support numeric generics in the elaborator + kind: Kind::Normal, + span, + }; + if let Some(generic) = self.find_generic(&name) { self.errors.push(ResolverError::DuplicateDefinition { - name: generic.0.contents.clone(), - first_span: *first_span, + name: ident.0.contents.clone(), + first_span: generic.span, second_span: span, }); } else { - self.generics.push((name, typevar.clone(), span)); + self.generics.push(resolved_generic.clone()); } - typevar + resolved_generic }) } /// Add the given existing generics to scope. /// This is useful for adding the same generics to many items. E.g. apply impl generics /// to each function in the impl or trait generics to each item in the trait. 
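    ///
    /// A hedged example (the `Wrapper` type is hypothetical): in an impl such as
    ///
    ///     impl<T> Wrapper<T> {
    ///         fn get(self) -> T { self.inner }
    ///         fn put(self, value: T) -> Self { Wrapper { inner: value } }
    ///     }
    ///
    /// both `get` and `put` should be resolved against the same type variable for `T`; re-adding
    /// the existing generics here, instead of minting fresh ones per method, is what guarantees that.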
- pub fn add_existing_generics(&mut self, names: &UnresolvedGenerics, generics: &Generics) { - assert_eq!(names.len(), generics.len()); + pub fn add_existing_generics( + &mut self, + unresolved_generics: &UnresolvedGenerics, + generics: &GenericTypeVars, + ) { + assert_eq!(unresolved_generics.len(), generics.len()); - for (name, typevar) in names.iter().zip(generics) { - self.add_existing_generic(&name.0.contents, name.0.span(), typevar.clone()); + for (unresolved_generic, typevar) in unresolved_generics.iter().zip(generics) { + self.add_existing_generic( + unresolved_generic, + unresolved_generic.span(), + typevar.clone(), + ); } } - pub fn add_existing_generic(&mut self, name: &str, span: Span, typevar: TypeVariable) { + pub fn add_existing_generic( + &mut self, + unresolved_generic: &UnresolvedGeneric, + span: Span, + typevar: TypeVariable, + ) { + let name = &unresolved_generic.ident().0.contents; + // Check for name collisions of this generic - let rc_name = Rc::new(name.to_owned()); + let rc_name = Rc::new(name.clone()); - if let Some((_, _, first_span)) = self.find_generic(&rc_name) { + if let Some(generic) = self.find_generic(&rc_name) { self.errors.push(ResolverError::DuplicateDefinition { - name: name.to_owned(), - first_span: *first_span, + name: name.clone(), + first_span: generic.span, second_span: span, }); } else { - self.generics.push((rc_name, typevar, span)); + let resolved_generic = ResolvedGeneric { + name: rc_name, + type_var: typevar.clone(), + kind: unresolved_generic + .kind() + .expect("ICE: Deprecated code should only support normal kinds"), + span, + }; + self.generics.push(resolved_generic); } } @@ -992,7 +1035,7 @@ impl<'a> Resolver<'a> { // indicate we should code generate in the same way. Thus, we unify the attributes into one flag here. let has_inline_attribute = has_no_predicates_attribute || should_fold; - let generics = vecmap(&self.generics, |(_, typevar, _)| typevar.clone()); + let generics = vecmap(&self.generics, |generic| generic.type_var.clone()); let mut parameters = vec![]; let mut parameter_types = vec![]; @@ -1053,8 +1096,8 @@ impl<'a> Resolver<'a> { let direct_generics = func.def.generics.iter(); let direct_generics = direct_generics - .filter_map(|generic| self.find_generic(&generic.0.contents)) - .map(|(name, typevar, _span)| (name.clone(), typevar.clone())) + .filter_map(|generic| self.find_generic(&generic.ident().0.contents)) + .map(|ResolvedGeneric { name, type_var, .. }| (name.clone(), type_var.clone())) .collect(); FuncMeta { @@ -1071,6 +1114,7 @@ impl<'a> Resolver<'a> { trait_constraints: self.resolve_trait_constraints(&func.def.where_clause), is_entry_point: self.is_entry_point_function(func), has_inline_attribute, + source_crate: self.path_resolver.module_id().krate, // These fields are only used by the elaborator all_generics: Vec::new(), @@ -1107,6 +1151,7 @@ impl<'a> Resolver<'a> { !func.def.is_unconstrained } + // TODO(https://github.com/noir-lang/noir/issues/5156): Remove this method in favor of explicit numeric generics fn declare_numeric_generics(&mut self, params: &[Type], return_type: &Type) { if self.generics.is_empty() { return; @@ -1119,12 +1164,12 @@ impl<'a> Resolver<'a> { // We can fail to find the generic in self.generics if it is an implicit one created // by the compiler. This can happen when, e.g. eliding array lengths using the slice // syntax [T]. - if let Some((name, _, span)) = - self.generics.iter().find(|(name, _, _)| name.as_ref() == &name_to_find) + if let Some(ResolvedGeneric { name, span, .. 
}) = + self.generics.iter().find(|generic| generic.name.as_ref() == &name_to_find) { let ident = Ident::new(name.to_string(), *span); let definition = DefinitionKind::GenericType(type_variable); - self.add_variable_decl_inner(ident, false, false, false, definition); + self.add_variable_decl_inner(ident.clone(), false, false, false, definition); } } } @@ -1150,7 +1195,7 @@ impl<'a> Resolver<'a> { | Type::Error | Type::TypeVariable(_, _) | Type::Constant(_) - | Type::NamedGeneric(_, _) + | Type::NamedGeneric(_, _, _) | Type::Quoted(_) | Type::Forall(_, _) => (), @@ -1161,7 +1206,7 @@ impl<'a> Resolver<'a> { } Type::Array(length, element_type) => { - if let Type::NamedGeneric(type_variable, name) = length.as_ref() { + if let Type::NamedGeneric(type_variable, name, _) = length.as_ref() { found.insert(name.to_string(), type_variable.clone()); } Self::find_numeric_generics_in_type(element_type, found); @@ -1186,7 +1231,7 @@ impl<'a> Resolver<'a> { Type::Struct(struct_type, generics) => { for (i, generic) in generics.iter().enumerate() { - if let Type::NamedGeneric(type_variable, name) = generic { + if let Type::NamedGeneric(type_variable, name, _) = generic { if struct_type.borrow().generic_is_numeric(i) { found.insert(name.to_string(), type_variable.clone()); } @@ -1197,7 +1242,7 @@ impl<'a> Resolver<'a> { } Type::Alias(alias, generics) => { for (i, generic) in generics.iter().enumerate() { - if let Type::NamedGeneric(type_variable, name) = generic { + if let Type::NamedGeneric(type_variable, name, _) = generic { if alias.borrow().generic_is_numeric(i) { found.insert(name.to_string(), type_variable.clone()); } @@ -1208,12 +1253,12 @@ impl<'a> Resolver<'a> { } Type::MutableReference(element) => Self::find_numeric_generics_in_type(element, found), Type::String(length) => { - if let Type::NamedGeneric(type_variable, name) = length.as_ref() { + if let Type::NamedGeneric(type_variable, name, _) = length.as_ref() { found.insert(name.to_string(), type_variable.clone()); } } Type::FmtString(length, fields) => { - if let Type::NamedGeneric(type_variable, name) = length.as_ref() { + if let Type::NamedGeneric(type_variable, name, _) = length.as_ref() { found.insert(name.to_string(), type_variable.clone()); } Self::find_numeric_generics_in_type(fields, found); @@ -1874,7 +1919,9 @@ impl<'a> Resolver<'a> { let constraint = TraitConstraint { typ: self.self_type.clone()?, - trait_generics: Type::from_generics(&the_trait.generics), + trait_generics: Type::from_generics(&vecmap(&the_trait.generics, |generic| { + generic.type_var.clone() + })), trait_id, }; return Some((method, constraint, false)); @@ -1902,7 +1949,9 @@ impl<'a> Resolver<'a> { the_trait.self_type_typevar.clone(), TypeVariableKind::Normal, ), - trait_generics: Type::from_generics(&the_trait.generics), + trait_generics: Type::from_generics(&vecmap(&the_trait.generics, |generic| { + generic.type_var.clone() + })), trait_id, }; return Some((method, constraint, false)); diff --git a/noir/noir-repo/compiler/noirc_frontend/src/hir/resolution/traits.rs b/noir/noir-repo/compiler/noirc_frontend/src/hir/resolution/traits.rs index 4c360731711..e674a48e779 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/hir/resolution/traits.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/hir/resolution/traits.rs @@ -4,7 +4,7 @@ use fm::FileId; use iter_extended::vecmap; use noirc_errors::Location; -use crate::ast::{ItemVisibility, Path, TraitItem}; +use crate::ast::{Ident, ItemVisibility, Path, TraitItem, UnresolvedGeneric}; use crate::{ graph::CrateId, hir::{ 
@@ -17,7 +17,7 @@ use crate::{ }, hir_def::traits::{TraitConstant, TraitFunction, TraitImpl, TraitType}, node_interner::{FuncId, NodeInterner, TraitId}, - Generics, Shared, Type, TypeVariable, TypeVariableKind, + GenericTypeVars, Shared, Type, TypeVariableKind, }; use super::{ @@ -35,15 +35,18 @@ pub(crate) fn resolve_traits( traits: BTreeMap, crate_id: CrateId, ) -> Vec<(CompilationError, FileId)> { - for (trait_id, unresolved_trait) in &traits { - context.def_interner.push_empty_trait(*trait_id, unresolved_trait); - } let mut all_errors = Vec::new(); for (trait_id, unresolved_trait) in traits { - let generics = vecmap(&unresolved_trait.trait_def.generics, |_| { - TypeVariable::unbound(context.def_interner.next_type_variable_id()) - }); + let file_id = context.def_maps[&crate_id].file_id(unresolved_trait.module_id); + let generics = context.resolve_generics( + &unresolved_trait.trait_def.generics, + &mut all_errors, + file_id, + ); + let generic_type_vars = generics.iter().map(|generic| generic.type_var.clone()).collect(); + + context.def_interner.push_empty_trait(trait_id, &unresolved_trait, generics); // Resolve order // 1. Trait Types ( Trait constants can have a trait type, therefore types before constants) @@ -51,14 +54,18 @@ pub(crate) fn resolve_traits( // 2. Trait Constants ( Trait's methods can use trait types & constants, therefore they should be after) let _ = resolve_trait_constants(context, crate_id, &unresolved_trait); // 3. Trait Methods - let (methods, errors) = - resolve_trait_methods(context, trait_id, crate_id, &unresolved_trait, &generics); + let (methods, errors) = resolve_trait_methods( + context, + trait_id, + crate_id, + &unresolved_trait, + &generic_type_vars, + ); all_errors.extend(errors); context.def_interner.update_trait(trait_id, |trait_def| { trait_def.set_methods(methods); - trait_def.generics = generics; }); // This check needs to be after the trait's methods are set since @@ -93,7 +100,7 @@ fn resolve_trait_methods( trait_id: TraitId, crate_id: CrateId, unresolved_trait: &UnresolvedTrait, - trait_generics: &Generics, + trait_generics: &GenericTypeVars, ) -> (Vec, Vec<(CompilationError, FileId)>) { let interner = &mut context.def_interner; let def_maps = &mut context.def_maps; @@ -126,7 +133,11 @@ fn resolve_trait_methods( resolver.add_generics(generics); resolver.add_existing_generics(&unresolved_trait.trait_def.generics, trait_generics); - resolver.add_existing_generic("Self", name_span, self_typevar); + resolver.add_existing_generic( + &UnresolvedGeneric::Variable(Ident::from("Self")), + name_span, + self_typevar, + ); resolver.set_self_type(Some(self_type.clone())); let func_id = unresolved_trait.method_ids[&name.0.contents]; @@ -143,7 +154,7 @@ fn resolve_trait_methods( let arguments = vecmap(parameters, |param| resolver.resolve_type(param.1.clone())); let return_type = resolver.resolve_type(return_type.get_type().into_owned()); - let generics = vecmap(resolver.get_generics(), |(_, type_var, _)| type_var.clone()); + let generics = vecmap(resolver.get_generics(), |generic| generic.type_var.clone()); let default_impl_list: Vec<_> = unresolved_trait .fns_with_default_impl @@ -464,7 +475,7 @@ pub(crate) fn resolve_trait_impls( methods: vecmap(&impl_methods, |(_, func_id)| *func_id), }); - let impl_generics = vecmap(impl_generics, |(_, type_variable, _)| type_variable); + let impl_generics = vecmap(impl_generics, |generic| generic.type_var); if let Err((prev_span, prev_file)) = interner.add_trait_implementation( self_type.clone(), diff --git 
a/noir/noir-repo/compiler/noirc_frontend/src/hir/type_check/errors.rs b/noir/noir-repo/compiler/noirc_frontend/src/hir/type_check/errors.rs index d9d021aee3f..f18e8a9e843 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/hir/type_check/errors.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/hir/type_check/errors.rs @@ -40,6 +40,8 @@ pub enum TypeCheckError { TypeMismatch { expected_typ: String, expr_typ: String, expr_span: Span }, #[error("Expected type {expected} is not the same as {actual}")] TypeMismatchWithSource { expected: Type, actual: Type, span: Span, source: Source }, + #[error("Expected type {expected_kind:?} is not the same as {expr_kind:?}")] + TypeKindMismatch { expected_kind: String, expr_kind: String, expr_span: Span }, #[error("Expected {expected:?} found {found:?}")] ArityMisMatch { expected: usize, found: usize, span: Span }, #[error("Return type in a function cannot be public")] @@ -178,6 +180,13 @@ impl<'a> From<&'a TypeCheckError> for Diagnostic { *expr_span, ) } + TypeCheckError::TypeKindMismatch { expected_kind, expr_kind, expr_span } => { + Diagnostic::simple_error( + format!("Expected kind {expected_kind}, found kind {expr_kind}"), + String::new(), + *expr_span, + ) + } TypeCheckError::TraitMethodParameterTypeMismatch { method_name, expected_typ, actual_typ, parameter_index, parameter_span } => { Diagnostic::simple_error( format!("Parameter #{parameter_index} of method `{method_name}` must be of type {expected_typ}, not {actual_typ}"), diff --git a/noir/noir-repo/compiler/noirc_frontend/src/hir/type_check/expr.rs b/noir/noir-repo/compiler/noirc_frontend/src/hir/type_check/expr.rs index 4ded04ec2a4..77861a6d8f8 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/hir/type_check/expr.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/hir/type_check/expr.rs @@ -404,8 +404,8 @@ impl<'interner> TypeChecker<'interner> { for (param, arg) in the_trait.generics.iter().zip(&constraint.trait_generics) { // Avoid binding t = t - if !arg.occurs(param.id()) { - bindings.insert(param.id(), (param.clone(), arg.clone())); + if !arg.occurs(param.type_var.id()) { + bindings.insert(param.type_var.id(), (param.type_var.clone(), arg.clone())); } } @@ -1025,7 +1025,7 @@ impl<'interner> TypeChecker<'interner> { }); None } - Type::NamedGeneric(_, _) => { + Type::NamedGeneric(_, _, _) => { let func_meta = self.interner.function_meta( &self.current_function.expect("unexpected method outside a function"), ); diff --git a/noir/noir-repo/compiler/noirc_frontend/src/hir/type_check/mod.rs b/noir/noir-repo/compiler/noirc_frontend/src/hir/type_check/mod.rs index 98e1cd9c72a..1d3c7fcda9b 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/hir/type_check/mod.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/hir/type_check/mod.rs @@ -22,7 +22,7 @@ use crate::{ traits::TraitConstraint, }, node_interner::{ExprId, FuncId, GlobalId, NodeInterner}, - Type, TypeBindings, + Kind, Type, TypeBindings, }; pub use self::errors::Source; @@ -263,7 +263,7 @@ pub(crate) fn check_trait_impl_method_matches_declaration( // Substitute each generic on the trait with the corresponding generic on the impl for (generic, arg) in trait_info.generics.iter().zip(&impl_.trait_generics) { - bindings.insert(generic.id(), (generic.clone(), arg.clone())); + bindings.insert(generic.type_var.id(), (generic.type_var.clone(), arg.clone())); } // If this is None, the trait does not have the corresponding function. 
@@ -284,7 +284,7 @@ pub(crate) fn check_trait_impl_method_matches_declaration( for ((_, trait_fn_generic), (name, impl_fn_generic)) in trait_fn_meta.direct_generics.iter().zip(&meta.direct_generics) { - let arg = Type::NamedGeneric(impl_fn_generic.clone(), name.clone()); + let arg = Type::NamedGeneric(impl_fn_generic.clone(), name.clone(), Kind::Normal); bindings.insert(trait_fn_generic.id(), (trait_fn_generic.clone(), arg)); } @@ -561,6 +561,7 @@ pub mod test { all_generics: Vec::new(), parameter_idents: Vec::new(), function_body: FunctionBody::Resolved, + source_crate: CrateId::dummy_id(), }; interner.push_fn_meta(func_meta, func_id); @@ -716,13 +717,15 @@ pub mod test { let mut interner = NodeInterner::default(); interner.populate_dummy_operator_traits(); - assert_eq!( - errors.len(), - 0, - "expected 0 parser errors, but got {}, errors: {:?}", - errors.len(), - errors - ); + if !errors.iter().all(|error| error.is_warning()) { + assert_eq!( + errors.len(), + 0, + "expected 0 parser errors, but got {}, errors: {:?}", + errors.len(), + errors + ); + } let func_ids = btree_map(&func_namespace, |name| { (name.to_string(), interner.push_test_function_definition(name.into())) diff --git a/noir/noir-repo/compiler/noirc_frontend/src/hir_def/function.rs b/noir/noir-repo/compiler/noirc_frontend/src/hir_def/function.rs index 53eabe21081..a4a9f855c62 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/hir_def/function.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/hir_def/function.rs @@ -7,9 +7,10 @@ use super::expr::{HirBlockExpression, HirExpression, HirIdent}; use super::stmt::HirPattern; use super::traits::TraitConstraint; use crate::ast::{FunctionKind, FunctionReturnType, Visibility}; +use crate::graph::CrateId; use crate::macros_api::BlockExpression; use crate::node_interner::{ExprId, NodeInterner, TraitImplId}; -use crate::{Type, TypeVariable}; +use crate::{ResolvedGeneric, Type, TypeVariable}; /// A Hir function is a block expression /// with a list of statements @@ -118,7 +119,7 @@ pub struct FuncMeta { /// from outer scopes, such as those introduced by an impl. /// This is stored when the FuncMeta is first created to later be used to set the current /// generics when the function's body is later resolved. - pub all_generics: Vec<(Rc, TypeVariable, Span)>, + pub all_generics: Vec, pub location: Location, @@ -145,6 +146,9 @@ pub struct FuncMeta { pub has_inline_attribute: bool, pub function_body: FunctionBody, + + /// The crate this function was defined in + pub source_crate: CrateId, } #[derive(Debug, Clone)] diff --git a/noir/noir-repo/compiler/noirc_frontend/src/hir_def/types.rs b/noir/noir-repo/compiler/noirc_frontend/src/hir_def/types.rs index 86d1fafd502..0a7797c2bfb 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/hir_def/types.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/hir_def/types.rs @@ -82,7 +82,7 @@ pub enum Type { /// NamedGenerics are the 'T' or 'U' in a user-defined generic function /// like `fn foo(...) {}`. Unlike TypeVariables, they cannot be bound over. - NamedGeneric(TypeVariable, Rc), + NamedGeneric(TypeVariable, Rc, Kind), /// A functions with arguments, a return type and environment. /// the environment should be `Unit` by default, @@ -98,7 +98,7 @@ pub enum Type { /// but it makes handling them both easier. The TypeVariableId should /// never be bound over during type checking, but during monomorphization it /// will be and thus needs the full TypeVariable link. 
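    ///
    /// As a rough illustration, a generic function such as `fn id<T>(x: T) -> T { x }` is stored
    /// with a scheme along the lines of `Forall([T], fn(T) -> T)`; each call site instantiates
    /// the quantified `T` with a fresh type variable, which is why the variable listed here is
    /// never bound directly during type checking.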
- Forall(Generics, Box), + Forall(GenericTypeVars, Box), /// A type-level integer. Included to let an Array's size type variable /// bind to an integer without special checks to bind it to a non-type. @@ -142,7 +142,7 @@ impl Type { | Type::Unit | Type::TypeVariable(_, _) | Type::TraitAsType(..) - | Type::NamedGeneric(_, _) + | Type::NamedGeneric(_, _, _) | Type::Function(_, _, _) | Type::MutableReference(_) | Type::Forall(_, _) @@ -187,6 +187,27 @@ impl Type { } } +/// A Kind is the type of a Type. These are used since only certain kinds of types are allowed in +/// certain positions. +/// +/// For example, the type of a struct field or a function parameter is expected to be +/// a type of kind * (represented here as `Normal`). Types used in positions where a number +/// is expected (such as in an array length position) are expected to be of kind `Kind::Numeric`. +#[derive(PartialEq, Eq, Clone, Hash, Debug)] +pub enum Kind { + Normal, + Numeric(Box), +} + +impl std::fmt::Display for Kind { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + Kind::Normal => write!(f, "normal"), + Kind::Numeric(typ) => write!(f, "numeric {}", typ), + } + } +} + #[derive(Debug, PartialEq, Eq, Copy, Clone, Hash)] pub enum QuotedType { Expr, @@ -222,7 +243,22 @@ pub struct StructType { } /// Corresponds to generic lists such as `` in the source program. -pub type Generics = Vec; +/// Used mainly for resolved types which no longer need information such +/// as names or kinds. +pub type GenericTypeVars = Vec; + +/// Corresponds to generic lists such as `` with additional +/// information gathered during name resolution that is necessary +/// correctly resolving types. +pub type Generics = Vec; + +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct ResolvedGeneric { + pub name: Rc, + pub type_var: TypeVariable, + pub kind: Kind, + pub span: Span, +} impl std::hash::Hash for StructType { fn hash(&self, state: &mut H) { @@ -271,7 +307,7 @@ impl StructType { .generics .iter() .zip(generic_args) - .map(|(old, new)| (old.id(), (old.clone(), new.clone()))) + .map(|(old, new)| (old.type_var.id(), (old.type_var.clone(), new.clone()))) .collect(); (typ.substitute(&substitutions), i) @@ -287,7 +323,7 @@ impl StructType { .generics .iter() .zip(generic_args) - .map(|(old, new)| (old.id(), (old.clone(), new.clone()))) + .map(|(old, new)| (old.type_var.id(), (old.type_var.clone(), new.clone()))) .collect(); vecmap(&self.fields, |(name, typ)| { @@ -310,11 +346,19 @@ impl StructType { self.fields.iter().map(|(name, _)| name.clone()).collect() } + /// Search the fields of a struct for any types with a `TypeKind::Numeric` + pub fn find_numeric_generics_in_fields(&self, found_names: &mut Vec) { + for (_, field) in self.fields.iter() { + field.find_numeric_type_vars(found_names); + } + } + /// True if the given index is the same index as a generic type of this struct /// which is expected to be a numeric generic. /// This is needed because we infer type kinds in Noir and don't have extensive kind checking. 
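    ///
    /// For example, under the implicit scheme this method supports, a struct such as
    ///
    ///     struct Foo<N> {
    ///         a: Field,
    ///         b: [Field; N],
    ///     }
    ///
    /// uses its only generic `N` in an array-length position, so `generic_is_numeric(0)` is
    /// expected to return true. (Illustrative example, mirroring the tests added further below.)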
+ /// TODO(https://github.com/noir-lang/noir/issues/5156): This is outdated and we should remove this implicit searching for numeric generics pub fn generic_is_numeric(&self, index_of_generic: usize) -> bool { - let target_id = self.generics[index_of_generic].0; + let target_id = self.generics[index_of_generic].type_var.id(); self.fields.iter().any(|(_, field)| field.contains_numeric_typevar(target_id)) } @@ -383,7 +427,7 @@ impl TypeAlias { .generics .iter() .zip(generic_args) - .map(|(old, new)| (old.id(), (old.clone(), new.clone()))) + .map(|(old, new)| (old.type_var.id(), (old.type_var.clone(), new.clone()))) .collect(); self.typ.substitute(&substitutions) @@ -393,7 +437,7 @@ impl TypeAlias { /// which is expected to be a numeric generic. /// This is needed because we infer type kinds in Noir and don't have extensive kind checking. pub fn generic_is_numeric(&self, index_of_generic: usize) -> bool { - let target_id = self.generics[index_of_generic].0; + let target_id = self.generics[index_of_generic].type_var.id(); self.typ.contains_numeric_typevar(target_id) } } @@ -503,7 +547,7 @@ impl TypeVariable { TypeBinding::Unbound(id) => *id, }; - assert!(!typ.occurs(id)); + assert!(!typ.occurs(id), "{self:?} occurs within {typ:?}"); *self.1.borrow_mut() = TypeBinding::Bound(typ); } @@ -641,7 +685,7 @@ impl Type { fn contains_numeric_typevar(&self, target_id: TypeVariableId) -> bool { // True if the given type is a NamedGeneric with the target_id let named_generic_id_matches_target = |typ: &Type| { - if let Type::NamedGeneric(type_variable, _) = typ { + if let Type::NamedGeneric(type_variable, _, _) = typ { match &*type_variable.borrow() { TypeBinding::Bound(_) => { unreachable!("Named generics should not be bound until monomorphization") @@ -661,7 +705,7 @@ impl Type { | Type::Error | Type::TypeVariable(_, _) | Type::Constant(_) - | Type::NamedGeneric(_, _) + | Type::NamedGeneric(_, _, _) | Type::Forall(_, _) | Type::Quoted(_) => false, @@ -705,6 +749,85 @@ impl Type { } } + /// TODO(https://github.com/noir-lang/noir/issues/5156): Remove with explicit numeric generics + pub fn find_numeric_type_vars(&self, found_names: &mut Vec) { + // Return whether the named generic has a TypeKind::Numeric and save its name + let named_generic_is_numeric = |typ: &Type, found_names: &mut Vec| { + if let Type::NamedGeneric(_, name, Kind::Numeric { .. 
}) = typ { + found_names.push(name.to_string()); + true + } else { + false + } + }; + + match self { + Type::FieldElement + | Type::Integer(_, _) + | Type::Bool + | Type::Unit + | Type::Error + | Type::Constant(_) + | Type::Forall(_, _) + | Type::Quoted(_) => {} + + Type::TypeVariable(type_var, _) => { + if let TypeBinding::Bound(typ) = &*type_var.borrow() { + named_generic_is_numeric(typ, found_names); + } + } + + Type::NamedGeneric(_, _, _) => { + named_generic_is_numeric(self, found_names); + } + + Type::TraitAsType(_, _, args) => { + for arg in args.iter() { + arg.find_numeric_type_vars(found_names); + } + } + Type::Array(length, elem) => { + elem.find_numeric_type_vars(found_names); + named_generic_is_numeric(length, found_names); + } + Type::Slice(elem) => elem.find_numeric_type_vars(found_names), + Type::Tuple(fields) => { + for field in fields.iter() { + field.find_numeric_type_vars(found_names); + } + } + Type::Function(parameters, return_type, env) => { + for parameter in parameters.iter() { + parameter.find_numeric_type_vars(found_names); + } + return_type.find_numeric_type_vars(found_names); + env.find_numeric_type_vars(found_names); + } + Type::Struct(_, generics) => { + for generic in generics.iter() { + if !named_generic_is_numeric(generic, found_names) { + generic.find_numeric_type_vars(found_names); + } + } + } + Type::Alias(_, generics) => { + for generic in generics.iter() { + if !named_generic_is_numeric(generic, found_names) { + generic.find_numeric_type_vars(found_names); + } + } + } + Type::MutableReference(element) => element.find_numeric_type_vars(found_names), + Type::String(length) => { + named_generic_is_numeric(length, found_names); + } + Type::FmtString(length, elements) => { + elements.find_numeric_type_vars(found_names); + named_generic_is_numeric(length, found_names); + } + } + } + /// True if this type can be used as a parameter to `main` or a contract function. /// This is only false for unsized types like slices or slices that do not make sense /// as a program input such as named generics or mutable references. @@ -725,7 +848,7 @@ impl Type { Type::FmtString(_, _) | Type::TypeVariable(_, _) - | Type::NamedGeneric(_, _) + | Type::NamedGeneric(_, _, _) | Type::Function(_, _, _) | Type::MutableReference(_) | Type::Forall(_, _) @@ -767,7 +890,7 @@ impl Type { | Type::Unit | Type::Constant(_) | Type::TypeVariable(_, _) - | Type::NamedGeneric(_, _) + | Type::NamedGeneric(_, _, _) | Type::Error => true, Type::FmtString(_, _) @@ -810,7 +933,7 @@ impl Type { | Type::Constant(_) | Type::Slice(_) | Type::TypeVariable(_, _) - | Type::NamedGeneric(_, _) + | Type::NamedGeneric(_, _, _) | Type::Function(_, _, _) | Type::FmtString(_, _) | Type::Error => true, @@ -847,7 +970,7 @@ impl Type { pub fn generic_count(&self) -> usize { match self { Type::Forall(generics, _) => generics.len(), - Type::TypeVariable(type_variable, _) | Type::NamedGeneric(type_variable, _) => { + Type::TypeVariable(type_variable, _) | Type::NamedGeneric(type_variable, _, _) => { match &*type_variable.borrow() { TypeBinding::Bound(binding) => binding.generic_count(), TypeBinding::Unbound(_) => 0, @@ -876,12 +999,42 @@ impl Type { /// Return the generics and type within this `Type::Forall`. 
/// Panics if `self` is not `Type::Forall` - pub fn unwrap_forall(&self) -> (Cow, &Type) { + pub fn unwrap_forall(&self) -> (Cow, &Type) { match self { Type::Forall(generics, typ) => (Cow::Borrowed(generics), typ.as_ref()), - other => (Cow::Owned(Generics::new()), other), + other => (Cow::Owned(GenericTypeVars::new()), other), } } + + // TODO(https://github.com/noir-lang/noir/issues/5156): Bring back this method when we remove implicit numeric generics + // It has been commented out as to not trigger clippy for an unused method + // pub(crate) fn kind(&self) -> Kind { + // match self { + // Type::NamedGeneric(_, _, kind) => kind.clone(), + // Type::Constant(_) => Kind::Numeric(Box::new(Type::Integer( + // Signedness::Unsigned, + // IntegerBitSize::ThirtyTwo, + // ))), + // Type::FieldElement + // | Type::Array(_, _) + // | Type::Slice(_) + // | Type::Integer(_, _) + // | Type::Bool + // | Type::String(_) + // | Type::FmtString(_, _) + // | Type::Unit + // | Type::Tuple(_) + // | Type::Struct(_, _) + // | Type::Alias(_, _) + // | Type::TypeVariable(_, _) + // | Type::TraitAsType(_, _, _) + // | Type::Function(_, _, _) + // | Type::MutableReference(_) + // | Type::Forall(_, _) + // | Type::Quoted(_) + // | Type::Error => Kind::Normal, + // } + // } } impl std::fmt::Display for Type { @@ -961,7 +1114,7 @@ impl std::fmt::Display for Type { } Type::Unit => write!(f, "()"), Type::Error => write!(f, "error"), - Type::NamedGeneric(binding, name) => match &*binding.borrow() { + Type::NamedGeneric(binding, name, _) => match &*binding.borrow() { TypeBinding::Bound(binding) => binding.fmt(f), TypeBinding::Unbound(_) if name.is_empty() => write!(f, "_"), TypeBinding::Unbound(_) => write!(f, "{name}"), @@ -1190,8 +1343,7 @@ impl Type { TypeBinding::Unbound(id) => *id, }; - let this = self.substitute(bindings); - + let this = self.substitute(bindings).follow_bindings(); if let Some(binding) = this.get_inner_type_variable() { match &*binding.borrow() { TypeBinding::Bound(typ) => return typ.try_bind_to(var, bindings), @@ -1213,7 +1365,7 @@ impl Type { fn get_inner_type_variable(&self) -> Option> { match self { - Type::TypeVariable(var, _) | Type::NamedGeneric(var, _) => Some(var.1.clone()), + Type::TypeVariable(var, _) | Type::NamedGeneric(var, _, _) => Some(var.1.clone()), _ => None, } } @@ -1324,7 +1476,7 @@ impl Type { } } - (NamedGeneric(binding, _), other) | (other, NamedGeneric(binding, _)) + (NamedGeneric(binding, _, _), other) | (other, NamedGeneric(binding, _, _)) if !binding.borrow().is_unbound() => { if let TypeBinding::Bound(link) = &*binding.borrow() { @@ -1334,7 +1486,7 @@ impl Type { } } - (NamedGeneric(binding_a, name_a), NamedGeneric(binding_b, name_b)) => { + (NamedGeneric(binding_a, name_a, _), NamedGeneric(binding_b, name_b, _)) => { // Bound NamedGenerics are caught by the check above assert!(binding_a.borrow().is_unbound()); assert!(binding_b.borrow().is_unbound()); @@ -1590,6 +1742,15 @@ impl Type { } } + fn type_variable_id(&self) -> Option { + match self { + Type::TypeVariable(variable, _) | Type::NamedGeneric(variable, _, _) => { + Some(variable.0) + } + _ => None, + } + } + /// Substitute any type variables found within this type with the /// given bindings if found. If a type variable is not found within /// the given TypeBindings, it is unchanged. 
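    ///
    /// A small worked example (types chosen for illustration): applying the binding `N := 3`
    /// to the type `[Field; N]` yields `[Field; 3]`, while applying it to `fn(T) -> T` returns
    /// the type unchanged, since `N` does not occur in it.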
@@ -1624,18 +1785,29 @@ impl Type { return self.clone(); } + let recur_on_binding = |id, replacement: &Type| { + // Prevent recuring forever if there's a `T := T` binding + if replacement.type_variable_id() == Some(id) { + replacement.clone() + } else { + replacement.substitute_helper(type_bindings, substitute_bound_typevars) + } + }; + let substitute_binding = |binding: &TypeVariable| { // Check the id first to allow substituting to // type variables that have already been bound over. // This is needed for monomorphizing trait impl methods. match type_bindings.get(&binding.0) { - Some((_, binding)) if substitute_bound_typevars => binding.clone(), + Some((_, replacement)) if substitute_bound_typevars => { + recur_on_binding(binding.0, replacement) + } _ => match &*binding.borrow() { TypeBinding::Bound(binding) => { binding.substitute_helper(type_bindings, substitute_bound_typevars) } TypeBinding::Unbound(id) => match type_bindings.get(id) { - Some((_, binding)) => binding.clone(), + Some((_, replacement)) => recur_on_binding(binding.0, replacement), None => self.clone(), }, }, @@ -1661,7 +1833,7 @@ impl Type { let fields = fields.substitute_helper(type_bindings, substitute_bound_typevars); Type::FmtString(Box::new(size), Box::new(fields)) } - Type::NamedGeneric(binding, _) | Type::TypeVariable(binding, _) => { + Type::NamedGeneric(binding, _, _) | Type::TypeVariable(binding, _) => { substitute_binding(binding) } // Do not substitute_helper fields, it can lead to infinite recursion @@ -1739,7 +1911,7 @@ impl Type { generic_args.iter().any(|arg| arg.occurs(target_id)) } Type::Tuple(fields) => fields.iter().any(|field| field.occurs(target_id)), - Type::NamedGeneric(binding, _) | Type::TypeVariable(binding, _) => { + Type::NamedGeneric(binding, _, _) | Type::TypeVariable(binding, _) => { match &*binding.borrow() { TypeBinding::Bound(binding) => binding.occurs(target_id), TypeBinding::Unbound(id) => *id == target_id, @@ -1794,7 +1966,7 @@ impl Type { def.borrow().get_type(args).follow_bindings() } Tuple(args) => Tuple(vecmap(args, |arg| arg.follow_bindings())), - TypeVariable(var, _) | NamedGeneric(var, _) => { + TypeVariable(var, _) | NamedGeneric(var, _, _) => { if let TypeBinding::Bound(typ) = &*var.borrow() { return typ.follow_bindings(); } @@ -1823,7 +1995,7 @@ impl Type { } } - pub fn from_generics(generics: &Generics) -> Vec { + pub fn from_generics(generics: &GenericTypeVars) -> Vec { vecmap(generics, |var| Type::TypeVariable(var.clone(), TypeVariableKind::Normal)) } } @@ -2020,7 +2192,14 @@ impl std::fmt::Debug for Type { } Type::Unit => write!(f, "()"), Type::Error => write!(f, "error"), - Type::NamedGeneric(binding, name) => write!(f, "{}{:?}", name, binding), + Type::NamedGeneric(binding, name, kind) => match kind { + Kind::Normal => { + write!(f, "{} -> {:?}", name, binding) + } + Kind::Numeric(typ) => { + write!(f, "({} : {}) -> {:?}", name, typ, binding) + } + }, Type::Constant(x) => x.fmt(f), Type::Forall(typevars, typ) => { let typevars = vecmap(typevars, |var| format!("{:?}", var)); diff --git a/noir/noir-repo/compiler/noirc_frontend/src/lexer/token.rs b/noir/noir-repo/compiler/noirc_frontend/src/lexer/token.rs index 6830ee528d6..b97677d9431 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/lexer/token.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/lexer/token.rs @@ -2,7 +2,10 @@ use acvm::{acir::AcirField, FieldElement}; use noirc_errors::{Position, Span, Spanned}; use std::{fmt, iter::Map, vec::IntoIter}; -use crate::{lexer::errors::LexerErrorKind, 
node_interner::ExprId}; +use crate::{ + lexer::errors::LexerErrorKind, + node_interner::{ExprId, QuotedTypeId}, +}; /// Represents a token in noir's grammar - a word, number, /// or symbol that can be used in noir's syntax. This is the @@ -24,9 +27,8 @@ pub enum BorrowedToken<'input> { LineComment(&'input str, Option), BlockComment(&'input str, Option), Quote(&'input Tokens), - /// < + QuotedType(QuotedTypeId), Less, - /// <= LessEqual, /// > Greater, @@ -125,6 +127,11 @@ pub enum Token { BlockComment(String, Option), // A `quote { ... }` along with the tokens in its token stream. Quote(Tokens), + /// A quoted type resulting from a `Type` object in noir code being + /// spliced into a macro's token stream. We preserve the original type + /// to avoid having to tokenize it, re-parse it, and re-resolve it which + /// may change the underlying type. + QuotedType(QuotedTypeId), /// < Less, /// <= @@ -223,6 +230,7 @@ pub fn token_to_borrowed_token(token: &Token) -> BorrowedToken<'_> { Token::LineComment(ref s, _style) => BorrowedToken::LineComment(s, *_style), Token::BlockComment(ref s, _style) => BorrowedToken::BlockComment(s, *_style), Token::Quote(stream) => BorrowedToken::Quote(stream), + Token::QuotedType(id) => BorrowedToken::QuotedType(*id), Token::IntType(ref i) => BorrowedToken::IntType(i.clone()), Token::Less => BorrowedToken::Less, Token::LessEqual => BorrowedToken::LessEqual, @@ -343,6 +351,8 @@ impl fmt::Display for Token { } write!(f, "}}") } + // Quoted types only have an ID so there is nothing to display + Token::QuotedType(_) => write!(f, "(type)"), Token::IntType(ref i) => write!(f, "{i}"), Token::Less => write!(f, "<"), Token::LessEqual => write!(f, "<="), @@ -394,6 +404,7 @@ pub enum TokenKind { Keyword, Attribute, Quote, + QuotedType, UnquoteMarker, } @@ -406,6 +417,7 @@ impl fmt::Display for TokenKind { TokenKind::Keyword => write!(f, "keyword"), TokenKind::Attribute => write!(f, "attribute"), TokenKind::Quote => write!(f, "quote"), + TokenKind::QuotedType => write!(f, "quoted type"), TokenKind::UnquoteMarker => write!(f, "macro result"), } } @@ -424,6 +436,7 @@ impl Token { Token::Attribute(_) => TokenKind::Attribute, Token::UnquoteMarker(_) => TokenKind::UnquoteMarker, Token::Quote(_) => TokenKind::Quote, + Token::QuotedType(_) => TokenKind::QuotedType, tok => TokenKind::Token(tok.clone()), } } diff --git a/noir/noir-repo/compiler/noirc_frontend/src/monomorphization/mod.rs b/noir/noir-repo/compiler/noirc_frontend/src/monomorphization/mod.rs index 6b8981a4d8f..b64f0dbe286 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/monomorphization/mod.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/monomorphization/mod.rs @@ -947,7 +947,7 @@ impl<'interner> Monomorphizer<'interner> { HirType::TraitAsType(..) 
=> { unreachable!("All TraitAsType should be replaced before calling convert_type"); } - HirType::NamedGeneric(binding, _) => { + HirType::NamedGeneric(binding, _, _) => { if let TypeBinding::Bound(binding) = &*binding.borrow() { return Self::convert_type(binding, location); } @@ -1818,13 +1818,13 @@ fn unwrap_struct_type(typ: &HirType) -> Vec<(String, HirType)> { } } -fn perform_instantiation_bindings(bindings: &TypeBindings) { +pub fn perform_instantiation_bindings(bindings: &TypeBindings) { for (var, binding) in bindings.values() { var.force_bind(binding.clone()); } } -fn undo_instantiation_bindings(bindings: TypeBindings) { +pub fn undo_instantiation_bindings(bindings: TypeBindings) { for (id, (var, _)) in bindings { var.unbind(id); } diff --git a/noir/noir-repo/compiler/noirc_frontend/src/node_interner.rs b/noir/noir-repo/compiler/noirc_frontend/src/node_interner.rs index 13ccb9b6500..17531d09eac 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/node_interner.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/node_interner.rs @@ -32,9 +32,9 @@ use crate::hir_def::{ stmt::HirStatement, }; use crate::token::{Attributes, SecondaryAttribute}; -use crate::{ - Generics, Shared, TypeAlias, TypeBindings, TypeVariable, TypeVariableId, TypeVariableKind, -}; +use crate::GenericTypeVars; +use crate::Generics; +use crate::{Shared, TypeAlias, TypeBindings, TypeVariable, TypeVariableId, TypeVariableKind}; /// An arbitrary number to limit the recursion depth when searching for trait impls. /// This is needed to stop recursing for cases such as `impl Foo for T where T: Eq` @@ -176,6 +176,12 @@ pub struct NodeInterner { /// Stores the [Location] of a [Type] reference pub(crate) type_ref_locations: Vec<(Type, Location)>, + + /// In Noir's metaprogramming, a noir type has the type `Type`. When these are spliced + /// into `quoted` expressions, we preserve the original type by assigning it a unique id + /// and creating a `Token::QuotedType(id)` from this id. We cannot create a token holding + /// the actual type since types do not implement Send or Sync. + quoted_types: noirc_arena::Arena, } /// A dependency in the dependency graph may be a type or a definition. @@ -472,6 +478,9 @@ pub struct GlobalInfo { pub value: Option, } +#[derive(Debug, Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)] +pub struct QuotedTypeId(noirc_arena::Index); + impl Default for NodeInterner { fn default() -> Self { let mut interner = NodeInterner { @@ -506,6 +515,7 @@ impl Default for NodeInterner { primitive_methods: HashMap::new(), type_alias_ref: Vec::new(), type_ref_locations: Vec::new(), + quoted_types: Default::default(), }; // An empty block expression is used often, we add this into the `node` on startup @@ -547,7 +557,12 @@ impl NodeInterner { self.definition_to_type.insert(definition_id, typ); } - pub fn push_empty_trait(&mut self, type_id: TraitId, unresolved_trait: &UnresolvedTrait) { + pub fn push_empty_trait( + &mut self, + type_id: TraitId, + unresolved_trait: &UnresolvedTrait, + generics: Generics, + ) { let self_type_typevar_id = self.next_type_variable_id(); let new_trait = Trait { @@ -555,13 +570,7 @@ impl NodeInterner { name: unresolved_trait.trait_def.name.clone(), crate_id: unresolved_trait.crate_id, location: Location::new(unresolved_trait.trait_def.span, unresolved_trait.file_id), - generics: vecmap(&unresolved_trait.trait_def.generics, |_| { - // Temporary type variable ids before the trait is resolved to its actual ids. 
- // This lets us record how many arguments the type expects so that other types - // can refer to it with generic arguments before the generic parameters themselves - // are resolved. - TypeVariable::unbound(TypeVariableId(0)) - }), + generics, self_type_typevar_id, self_type_typevar: TypeVariable::unbound(self_type_typevar_id), methods: Vec::new(), @@ -576,6 +585,7 @@ impl NodeInterner { pub fn new_struct( &mut self, typ: &UnresolvedStruct, + generics: Generics, krate: CrateId, local_id: LocalModuleId, file_id: FileId, @@ -585,13 +595,6 @@ impl NodeInterner { // Fields will be filled in later let no_fields = Vec::new(); - let generics = vecmap(&typ.struct_def.generics, |_| { - // Temporary type variable ids before the struct is resolved to its actual ids. - // This lets us record how many arguments the type expects so that other types - // can refer to it with generic arguments before the generic parameters themselves - // are resolved. - TypeVariable::unbound(TypeVariableId(0)) - }); let location = Location::new(typ.struct_def.span, file_id); let new_struct = StructType::new(struct_id, name, location, no_fields, generics); @@ -600,7 +603,11 @@ impl NodeInterner { struct_id } - pub fn push_type_alias(&mut self, typ: &UnresolvedTypeAlias) -> TypeAliasId { + pub fn push_type_alias( + &mut self, + typ: &UnresolvedTypeAlias, + generics: Generics, + ) -> TypeAliasId { let type_id = TypeAliasId(self.type_aliases.len()); self.type_aliases.push(Shared::new(TypeAlias::new( @@ -608,7 +615,7 @@ impl NodeInterner { typ.type_alias_def.name.clone(), Location::new(typ.type_alias_def.span, typ.file_id), Type::Error, - vecmap(&typ.type_alias_def.generics, |_| TypeVariable::unbound(TypeVariableId(0))), + generics, ))); type_id @@ -624,6 +631,11 @@ impl NodeInterner { f(&mut value); } + pub fn update_trait(&mut self, trait_id: TraitId, f: impl FnOnce(&mut Trait)) { + let value = self.traits.get_mut(&trait_id).unwrap(); + f(value); + } + pub fn update_struct_attributes( &mut self, type_id: StructId, @@ -633,11 +645,6 @@ impl NodeInterner { f(value); } - pub fn update_trait(&mut self, trait_id: TraitId, f: impl FnOnce(&mut Trait)) { - let value = self.traits.get_mut(&trait_id).unwrap(); - f(value); - } - pub fn set_type_alias(&mut self, type_id: TypeAliasId, typ: Type, generics: Generics) { let type_alias_type = &mut self.type_aliases[type_id.0]; type_alias_type.borrow_mut().set_type_and_generics(typ, generics); @@ -1416,7 +1423,7 @@ impl NodeInterner { trait_id: TraitId, trait_generics: Vec, impl_id: TraitImplId, - impl_generics: Generics, + impl_generics: GenericTypeVars, trait_impl: Shared, ) -> Result<(), (Span, FileId)> { self.trait_implementations.insert(impl_id, trait_impl.clone()); @@ -1747,6 +1754,14 @@ impl NodeInterner { cycle } + + pub fn push_quoted_type(&mut self, typ: Type) -> QuotedTypeId { + QuotedTypeId(self.quoted_types.insert(typ)) + } + + pub fn get_quoted_type(&self, id: QuotedTypeId) -> &Type { + &self.quoted_types[id.0] + } } impl Methods { @@ -1834,7 +1849,7 @@ fn get_type_method_key(typ: &Type) -> Option { Type::Unit => Some(Unit), Type::Tuple(_) => Some(Tuple), Type::Function(_, _, _) => Some(Function), - Type::NamedGeneric(_, _) => Some(Generic), + Type::NamedGeneric(_, _, _) => Some(Generic), Type::Quoted(quoted) => Some(Quoted(*quoted)), Type::MutableReference(element) => get_type_method_key(element), Type::Alias(alias, _) => get_type_method_key(&alias.borrow().typ), diff --git a/noir/noir-repo/compiler/noirc_frontend/src/parser/errors.rs 
b/noir/noir-repo/compiler/noirc_frontend/src/parser/errors.rs index af3d4caa145..41ea9f88c19 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/parser/errors.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/parser/errors.rs @@ -133,7 +133,7 @@ impl std::fmt::Display for ParserError { } else { let expected = expected.iter().map(ToString::to_string).collect::>().join(", "); - write!(f, "Unexpected {}, expected one of {}{}", self.found, expected, reason_str) + write!(f, "Unexpected {:?}, expected one of {}{}", self.found, expected, reason_str) } } } diff --git a/noir/noir-repo/compiler/noirc_frontend/src/parser/mod.rs b/noir/noir-repo/compiler/noirc_frontend/src/parser/mod.rs index 72b1ea05ec2..d7a282dbfc7 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/parser/mod.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/parser/mod.rs @@ -22,10 +22,10 @@ use chumsky::primitive::Container; pub use errors::ParserError; pub use errors::ParserErrorReason; use noirc_errors::Span; -pub use parser::{expression, parse_program}; +pub use parser::{expression, parse_program, top_level_item}; #[derive(Debug, Clone)] -pub(crate) enum TopLevelStatement { +pub enum TopLevelStatement { Function(NoirFunction), Module(ModuleDeclaration), Import(UseTree), @@ -197,7 +197,7 @@ fn parameter_name_recovery() -> impl NoirParser { } fn top_level_statement_recovery() -> impl NoirParser { - none_of([Token::Semicolon, Token::RightBrace, Token::EOF]) + none_of([Token::RightBrace, Token::EOF]) .repeated() .ignore_then(one_of([Token::Semicolon])) .map(|_| TopLevelStatement::Error) diff --git a/noir/noir-repo/compiler/noirc_frontend/src/parser/parser.rs b/noir/noir-repo/compiler/noirc_frontend/src/parser/parser.rs index 0ae810fe4d9..afeee889ede 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/parser/parser.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/parser/parser.rs @@ -191,6 +191,11 @@ fn module() -> impl NoirParser { }) } +/// This parser is used for parsing top level statements in macros +pub fn top_level_item() -> impl NoirParser { + top_level_statement(module()) +} + /// top_level_statement: function_definition /// | struct_definition /// | trait_definition @@ -225,11 +230,20 @@ fn implementation() -> impl NoirParser { keyword(Keyword::Impl) .ignore_then(function::generics()) .then(parse_type().map_with_span(|typ, span| (typ, span))) + .then(where_clause()) .then_ignore(just(Token::LeftBrace)) .then(spanned(function::function_definition(true)).repeated()) .then_ignore(just(Token::RightBrace)) - .map(|((generics, (object_type, type_span)), methods)| { - TopLevelStatement::Impl(TypeImpl { generics, object_type, type_span, methods }) + .map(|args| { + let ((other_args, where_clause), methods) = args; + let (generics, (object_type, type_span)) = other_args; + TopLevelStatement::Impl(TypeImpl { + generics, + object_type, + type_span, + where_clause, + methods, + }) }) } diff --git a/noir/noir-repo/compiler/noirc_frontend/src/parser/parser/function.rs b/noir/noir-repo/compiler/noirc_frontend/src/parser/parser/function.rs index 4db5637f6a7..3e686ee4c85 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/parser/parser/function.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/parser/parser/function.rs @@ -5,11 +5,14 @@ use super::{ self_parameter, where_clause, NoirParser, }; use crate::ast::{ - FunctionDefinition, FunctionReturnType, Ident, ItemVisibility, NoirFunction, Param, Visibility, + FunctionDefinition, FunctionReturnType, ItemVisibility, NoirFunction, Param, Visibility, }; -use 
crate::parser::labels::ParsingRuleLabel; use crate::parser::spanned; use crate::token::{Keyword, Token}; +use crate::{ + ast::{UnresolvedGeneric, UnresolvedGenerics}, + parser::labels::ParsingRuleLabel, +}; use chumsky::prelude::*; @@ -76,16 +79,31 @@ fn function_modifiers() -> impl NoirParser<(bool, ItemVisibility, bool)> { }) } +pub(super) fn numeric_generic() -> impl NoirParser { + keyword(Keyword::Let) + .ignore_then(ident()) + .then_ignore(just(Token::Colon)) + .then(parse_type()) + .map(|(ident, typ)| UnresolvedGeneric::Numeric { ident, typ }) +} + +pub(super) fn generic_type() -> impl NoirParser { + ident().map(UnresolvedGeneric::Variable) +} + +pub(super) fn generic() -> impl NoirParser { + generic_type().or(numeric_generic()) +} + /// non_empty_ident_list: ident ',' non_empty_ident_list /// | ident /// /// generics: '<' non_empty_ident_list '>' /// | %empty -pub(super) fn generics() -> impl NoirParser> { - ident() +pub(super) fn generics() -> impl NoirParser { + generic() .separated_by(just(Token::Comma)) .allow_trailing() - .at_least(1) .delimited_by(just(Token::Less), just(Token::Greater)) .or_not() .map(|opt| opt.unwrap_or_default()) @@ -193,6 +211,7 @@ mod test { // fn func_name(x: impl Eq) {} with error Expected an end of input but found end of input // "fn func_name(x: impl Eq) {}", "fn func_name(x: impl Eq, y : T) where T: SomeTrait + Eq {}", + "fn func_name(x: [Field; N]) {}", ], ); @@ -209,6 +228,11 @@ mod test { // A leading plus is not allowed. "fn func_name(f: Field, y : T) where T: + SomeTrait {}", "fn func_name(f: Field, y : T) where T: TraitX + {}", + // Test ill-formed numeric generics + "fn func_name(y: T) {}", + "fn func_name(y: T) {}", + "fn func_name(y: T) {}", + "fn func_name(y: T) {}", ], ); } diff --git a/noir/noir-repo/compiler/noirc_frontend/src/parser/parser/types.rs b/noir/noir-repo/compiler/noirc_frontend/src/parser/parser/types.rs index 14840bafa04..493ebd1fb2f 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/parser/parser/types.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/parser/parser/types.rs @@ -1,3 +1,4 @@ +use super::primitives::token_kind; use super::{ expression_with_precedence, keyword, nothing, parenthesized, path, NoirParser, ParserError, ParserErrorReason, Precedence, @@ -6,7 +7,7 @@ use crate::ast::{Recoverable, UnresolvedType, UnresolvedTypeData, UnresolvedType use crate::QuotedType; use crate::parser::labels::ParsingRuleLabel; -use crate::token::{Keyword, Token}; +use crate::token::{Keyword, Token, TokenKind}; use chumsky::prelude::*; use noirc_errors::Span; @@ -28,6 +29,7 @@ pub(super) fn parse_type_inner<'a>( top_level_item_type(), type_of_quoted_types(), quoted_type(), + resolved_type(), format_string_type(recursive_type_parser.clone()), named_type(recursive_type_parser.clone()), named_trait(recursive_type_parser.clone()), @@ -105,6 +107,16 @@ fn quoted_type() -> impl NoirParser { .map_with_span(|_, span| UnresolvedTypeData::Quoted(QuotedType::Quoted).with_span(span)) } +/// This is the type of an already resolved type. +/// The only way this can appear in the token input is if an already resolved `Type` object +/// was spliced into a macro's token stream via the `$` operator. 
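+///
+/// For example (sketch only; the surrounding macro code is hypothetical), a macro that receives
+/// a resolved `Type` value `t` and emits tokens for `let x: $t = value;` will surface `$t` here
+/// as a single `Token::QuotedType(id)` rather than as re-lexed identifier tokens, so the
+/// original, already-resolved type is preserved exactly.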
+fn resolved_type() -> impl NoirParser { + token_kind(TokenKind::QuotedType).map_with_span(|token, span| match token { + Token::QuotedType(id) => UnresolvedTypeData::Resolved(id).with_span(span), + _ => unreachable!("token_kind(QuotedType) guarantees we parse a quoted type"), + }) +} + pub(super) fn string_type() -> impl NoirParser { keyword(Keyword::String) .ignore_then(type_expression().delimited_by(just(Token::Less), just(Token::Greater))) diff --git a/noir/noir-repo/compiler/noirc_frontend/src/tests.rs b/noir/noir-repo/compiler/noirc_frontend/src/tests.rs index f4845625b87..9251eb3db6b 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/tests.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/tests.rs @@ -50,7 +50,10 @@ pub(crate) fn remove_experimental_warnings(errors: &mut Vec<(CompilationError, F }); } -pub(crate) fn get_program(src: &str) -> (ParsedModule, Context, Vec<(CompilationError, FileId)>) { +pub(crate) fn get_program( + src: &str, + use_legacy: bool, +) -> (ParsedModule, Context, Vec<(CompilationError, FileId)>) { let root = std::path::Path::new("/"); let fm = FileManager::new(root); @@ -82,7 +85,7 @@ pub(crate) fn get_program(src: &str) -> (ParsedModule, Context, Vec<(Compilation &mut context, program.clone().into_sorted(), root_file_id, - false, + use_legacy, &[], // No macro processors )); } @@ -90,7 +93,7 @@ pub(crate) fn get_program(src: &str) -> (ParsedModule, Context, Vec<(Compilation } pub(crate) fn get_program_errors(src: &str) -> Vec<(CompilationError, FileId)> { - get_program(src).2 + get_program(src, false).2 } #[test] @@ -833,7 +836,7 @@ fn check_trait_as_type_as_two_fn_parameters() { } fn get_program_captures(src: &str) -> Vec> { - let (program, context, _errors) = get_program(src); + let (program, context, _errors) = get_program(src, false); let interner = context.def_interner; let mut all_captures: Vec> = Vec::new(); for func in program.into_sorted().functions { @@ -1195,7 +1198,7 @@ fn resolve_fmt_strings() { } fn check_rewrite(src: &str, expected: &str) { - let (_program, mut context, _errors) = get_program(src); + let (_program, mut context, _errors) = get_program(src, false); let main_func_id = context.def_interner.find_function("main").unwrap(); let program = monomorphize(main_func_id, &mut context.def_interner).unwrap(); assert!(format!("{}", program) == expected); @@ -1326,14 +1329,20 @@ fn for_loop_over_array() { hello(array); } "#; - assert_eq!(get_program_errors(src).len(), 0); + let errors = get_program_errors(src); + assert_eq!(get_program_errors(src).len(), 1); + + assert!(matches!( + errors[0].0, + CompilationError::ResolverError(ResolverError::UseExplicitNumericGeneric { .. }) + )); } // Regression for #4545 #[test] fn type_aliases_in_main() { let src = r#" - type Outer = [u8; N]; + type Outer = [u8; N]; fn main(_arg: Outer<1>) {} "#; assert_eq!(get_program_errors(src).len(), 0); @@ -1446,6 +1455,425 @@ fn specify_method_types_with_turbofish() { assert_eq!(errors.len(), 0); } +#[test] +fn struct_numeric_generic_in_function() { + let src = r#" + struct Foo { + inner: u64 + } + + fn bar() { } + "#; + let errors = get_program_errors(src); + assert_eq!(errors.len(), 1); + assert!(matches!( + errors[0].0, + CompilationError::ResolverError(ResolverError::UnsupportedNumericGenericType { .. 
}), + )); +} + +#[test] +fn struct_numeric_generic_in_struct() { + let src = r#" + struct Foo { + inner: u64 + } + + struct Bar { } + "#; + let errors = get_program_errors(src); + assert_eq!(errors.len(), 1); + assert!(matches!( + errors[0].0, + CompilationError::DefinitionError( + DefCollectorErrorKind::UnsupportedNumericGenericType { .. } + ), + )); +} + +#[test] +fn bool_numeric_generic() { + let src = r#" + fn read() -> Field { + if N { + 0 + } else { + 1 + } + } + "#; + let errors = get_program_errors(src); + assert_eq!(errors.len(), 1); + assert!(matches!( + errors[0].0, + CompilationError::ResolverError(ResolverError::UnsupportedNumericGenericType { .. }), + )); +} + +#[test] +fn numeric_generic_binary_operation_type_mismatch() { + let src = r#" + fn foo() -> bool { + let mut check: bool = true; + check = N; + check + } + "#; + let errors = get_program_errors(src); + assert_eq!(errors.len(), 1); + assert!(matches!( + errors[0].0, + CompilationError::TypeError(TypeCheckError::TypeMismatchWithSource { .. }), + )); +} + +#[test] +fn bool_generic_as_loop_bound() { + let src = r#" + fn read() { + let mut fields = [0; N]; + for i in 0..N { + fields[i] = i + 1; + } + assert(fields[0] == 1); + } + "#; + let errors = get_program_errors(src); + assert_eq!(errors.len(), 2); + + assert!(matches!( + errors[0].0, + CompilationError::ResolverError(ResolverError::UnsupportedNumericGenericType { .. }), + )); + + let CompilationError::TypeError(TypeCheckError::TypeMismatch { + expected_typ, expr_typ, .. + }) = &errors[1].0 + else { + panic!("Got an error other than a type mismatch"); + }; + + assert_eq!(expected_typ, "Field"); + assert_eq!(expr_typ, "bool"); +} + +#[test] +fn numeric_generic_in_function_signature() { + let src = r#" + fn foo(arr: [Field; N]) -> [Field; N] { arr } + "#; + let errors = get_program_errors(src); + assert!(errors.is_empty()); +} + +#[test] +fn numeric_generic_as_struct_field_type() { + let src = r#" + struct Foo { + a: Field, + b: N, + } + "#; + let errors = get_program_errors(src); + assert_eq!(errors.len(), 1); + assert!(matches!( + errors[0].0, + CompilationError::ResolverError(ResolverError::NumericGenericUsedForType { .. }), + )); +} + +#[test] +fn normal_generic_as_array_length() { + let src = r#" + struct Foo { + a: Field, + b: [Field; N], + } + "#; + let errors = get_program_errors(src); + assert_eq!(errors.len(), 1); + // TODO(https://github.com/noir-lang/noir/issues/5156): This should be switched to a hard type error rather than + // the `UseExplicitNumericGeneric` once implicit numeric generics are removed. + assert!(matches!( + errors[0].0, + CompilationError::ResolverError(ResolverError::UseExplicitNumericGeneric { .. }), + )); +} + +#[test] +fn numeric_generic_as_param_type() { + let src = r#" + fn foo(x: I) -> I { + let _q: I = 5; + x + } + "#; + let errors = get_program_errors(src); + assert_eq!(errors.len(), 3); + // Error from the parameter type + assert!(matches!( + errors[0].0, + CompilationError::ResolverError(ResolverError::NumericGenericUsedForType { .. }), + )); + // Error from the let statement annotated type + assert!(matches!( + errors[1].0, + CompilationError::ResolverError(ResolverError::NumericGenericUsedForType { .. }), + )); + // Error from the return type + assert!(matches!( + errors[2].0, + CompilationError::ResolverError(ResolverError::NumericGenericUsedForType { .. 
}), + )); +} + +#[test] +fn numeric_generic_used_in_nested_type_fail() { + let src = r#" + struct Foo { + a: Field, + b: Bar, + } + struct Bar { + inner: N + } + "#; + let errors = get_program_errors(src); + assert_eq!(errors.len(), 1); + assert!(matches!( + errors[0].0, + CompilationError::ResolverError(ResolverError::NumericGenericUsedForType { .. }), + )); +} + +#[test] +fn normal_generic_used_in_nested_array_length_fail() { + let src = r#" + struct Foo { + a: Field, + b: Bar, + } + struct Bar { + inner: [Field; N] + } + "#; + let errors = get_program_errors(src); + // TODO(https://github.com/noir-lang/noir/issues/5156): This should be switched to a hard type error once implicit numeric generics are removed. + assert_eq!(errors.len(), 0); +} + +#[test] +fn numeric_generic_used_in_nested_type_pass() { + // The order of these structs should not be changed to make sure + // that we are accurately resolving all struct generics before struct fields + let src = r#" + struct NestedNumeric { + a: Field, + b: InnerNumeric + } + struct InnerNumeric { + inner: [u64; N], + } + "#; + let errors = get_program_errors(src); + assert!(errors.is_empty()); +} + +#[test] +fn numeric_generic_used_in_trait() { + let src = r#" + struct MyType { + a: Field, + b: Field, + c: Field, + d: T, + } + + impl Deserialize for MyType { + fn deserialize(fields: [Field; N], other: T) -> Self { + MyType { a: fields[0], b: fields[1], c: fields[2], d: other } + } + } + + trait Deserialize { + fn deserialize(fields: [Field; N], other: T) -> Self; + } + "#; + let errors = get_program_errors(src); + // We want to make sure that `N` in `impl Deserialize` does + // not trigger `expected type, found numeric generic parameter N` as the trait + // does in fact expect a numeric generic. + assert!(errors.is_empty()); +} + +#[test] +fn numeric_generic_in_trait_impl_with_extra_impl_generics() { + let src = r#" + trait Default { + fn default() -> Self; + } + + struct MyType { + a: Field, + b: Field, + c: Field, + d: T, + } + + // Make sure that `T` is placed before `N` as we want to test that the order of the generics is correctly maintained. + // `N` is used first in the trait impl generics (`Deserialize for MyType`). + // We want to make sure that the compiler correctly accounts for that `N` has a numeric kind + // while `T` has a normal kind. + impl Deserialize for MyType where T: Default { + fn deserialize(fields: [Field; N]) -> Self { + MyType { a: fields[0], b: fields[1], c: fields[2], d: T::default() } + } + } + + trait Deserialize { + fn deserialize(fields: [Field; N]) -> Self; + } + "#; + let errors = get_program_errors(src); + assert!(errors.is_empty()); +} + +#[test] +fn numeric_generic_used_in_where_clause() { + let src = r#" + trait Deserialize { + fn deserialize(fields: [Field; N]) -> Self; + } + + fn read() -> T where T: Deserialize { + let mut fields: [Field; N] = [0; N]; + for i in 0..N { + fields[i] = i as Field + 1; + } + T::deserialize(fields) + } + "#; + let errors = get_program_errors(src); + assert!(errors.is_empty()); +} + +#[test] +fn numeric_generic_used_in_turbofish() { + let src = r#" + fn double() -> u32 { + // Used as an expression + N * 2 + } + + fn double_numeric_generics_test() { + // Example usage of a numeric generic arguments. 
+ assert(double::<9>() == 18); + assert(double::<7 + 8>() == 30); + } + "#; + let errors = get_program_errors(src); + assert!(errors.is_empty()); +} + +#[test] +fn constant_used_with_numeric_generic() { + let src = r#" + struct ValueNote { + value: Field, + } + + trait Serialize { + fn serialize(self) -> [Field; N]; + } + + impl Serialize<1> for ValueNote { + fn serialize(self) -> [Field; 1] { + [self.value] + } + } + "#; + let errors = get_program_errors(src); + assert!(errors.is_empty()); +} + +#[test] +fn normal_generic_used_when_numeric_expected_in_where_clause() { + let src = r#" + trait Deserialize { + fn deserialize(fields: [Field; N]) -> Self; + } + + fn read() -> T where T: Deserialize { + T::deserialize([0, 1]) + } + "#; + let errors = get_program_errors(src); + assert_eq!(errors.len(), 1); + assert!(matches!( + errors[0].0, + CompilationError::TypeError(TypeCheckError::TypeMismatch { .. }), + )); + + let src = r#" + trait Deserialize { + fn deserialize(fields: [Field; N]) -> Self; + } + + fn read() -> T where T: Deserialize { + let mut fields: [Field; N] = [0; N]; + for i in 0..N { + fields[i] = i as Field + 1; + } + T::deserialize(fields) + } + "#; + let errors = get_program_errors(src); + assert_eq!(errors.len(), 1); + assert!(matches!( + errors[0].0, + CompilationError::ResolverError(ResolverError::VariableNotDeclared { .. }), + )); +} + +// TODO(https://github.com/noir-lang/noir/issues/5156): Remove this test once we ban implicit numeric generics +#[test] +fn implicit_numeric_generics_elaborator() { + let src = r#" + struct BoundedVec { + storage: [T; MaxLen], + len: u64, + } + + impl BoundedVec { + + // Test that we have an implicit numeric generic for "Len" as well as "MaxLen" + pub fn extend_from_bounded_vec(&mut self, _vec: BoundedVec) { + // We do this to avoid an unused variable warning on `self` + let _ = self.len; + for _ in 0..Len { } + } + + pub fn push(&mut self, elem: T) { + assert(self.len < MaxLen, "push out of bounds"); + self.storage[self.len] = elem; + self.len += 1; + } + } + "#; + let errors = get_program_errors(src); + assert_eq!(errors.len(), 4); + + for error in errors.iter() { + if let CompilationError::ResolverError(ResolverError::UseExplicitNumericGeneric { ident }) = + &errors[0].0 + { + assert!(matches!(ident.0.contents.as_str(), "MaxLen" | "Len")); + } else { + panic!("Expected ResolverError::UseExplicitNumericGeneric but got {:?}", error); + } + } +} + #[test] fn quote_code_fragments() { // This test ensures we can quote (and unquote/splice) code fragments diff --git a/noir/noir-repo/deny.toml b/noir/noir-repo/deny.toml index db7e53cad24..2d6d3e658b5 100644 --- a/noir/noir-repo/deny.toml +++ b/noir/noir-repo/deny.toml @@ -73,7 +73,7 @@ exceptions = [ { allow = ["CC0-1.0"], name = "tiny-keccak" }, { allow = ["MPL-2.0"], name = "sized-chunks" }, { allow = ["MPL-2.0"], name = "webpki-roots" }, - + { allow = ["CDDL-1.0"], name = "inferno" }, ] [[licenses.clarify]] diff --git a/noir/noir-repo/docs/docs/how_to/how-to-oracles.md b/noir/noir-repo/docs/docs/how_to/how-to-oracles.md index d6834c09c84..2d2ed5c94b9 100644 --- a/noir/noir-repo/docs/docs/how_to/how-to-oracles.md +++ b/noir/noir-repo/docs/docs/how_to/how-to-oracles.md @@ -141,10 +141,10 @@ server.addMethod("resolve_function_call", async (params) => { if params.function !== "getSqrt" { throw Error("Unexpected foreign call") }; - const values = params.inputs[0].map((field) => { + const values = params.inputs[0].Array.map((field) => { return `${Math.sqrt(parseInt(field, 16))}`; }); - return { 
values }; + return { values: [{ Array: values }] }; }); ``` @@ -236,9 +236,9 @@ const foreignCallHandler = async (name, input) => { // notice that the "inputs" parameter contains *all* the inputs // in this case we to make the RPC request with the first parameter "numbers", which would be input[0] const oracleReturn = await client.request(name, [ - input[0].map((i) => i.toString("hex")), + { Array: input[0].map((i) => i.toString("hex")) }, ]); - return { values: oracleReturn }; + return [oracleReturn.values[0].Array]; }; // the rest of your NoirJS code diff --git a/noir/noir-repo/docs/docs/noir/concepts/data_types/arrays.md b/noir/noir-repo/docs/docs/noir/concepts/data_types/arrays.md index 95d749053e2..9a4ab5d3c1f 100644 --- a/noir/noir-repo/docs/docs/noir/concepts/data_types/arrays.md +++ b/noir/noir-repo/docs/docs/noir/concepts/data_types/arrays.md @@ -199,7 +199,7 @@ fn main() { ### reduce -Same as fold, but uses the first element as starting element. +Same as fold, but uses the first element as the starting element. ```rust fn reduce(self, f: fn(T, T) -> T) -> T diff --git a/noir/noir-repo/docs/docs/noir/concepts/data_types/slices.mdx b/noir/noir-repo/docs/docs/noir/concepts/data_types/slices.mdx index dff08d63ffb..d619117b799 100644 --- a/noir/noir-repo/docs/docs/noir/concepts/data_types/slices.mdx +++ b/noir/noir-repo/docs/docs/noir/concepts/data_types/slices.mdx @@ -191,3 +191,108 @@ fn main() { assert(array[1] == slice[1]); } ``` + +### map + +Applies a function to each element of the slice, returning a new slice containing the mapped elements. + +```rust +fn map(self, f: fn(T) -> U) -> [U] +``` + +example + +```rust +let a = &[1, 2, 3]; +let b = a.map(|a| a * 2); // b is now &[2, 4, 6] +``` + +### fold + +Applies a function to each element of the slice, returning the final accumulated value. The first +parameter is the initial value. + +```rust +fn fold(self, mut accumulator: U, f: fn(U, T) -> U) -> U +``` + +This is a left fold, so the given function will be applied to the accumulator and first element of +the slice, then the second, and so on. For a given call the expected result would be equivalent to: + +```rust +let a1 = &[1]; +let a2 = &[1, 2]; +let a3 = &[1, 2, 3]; + +let f = |a, b| a - b; +a1.fold(10, f) //=> f(10, 1) +a2.fold(10, f) //=> f(f(10, 1), 2) +a3.fold(10, f) //=> f(f(f(10, 1), 2), 3) +``` + +example: + +```rust + +fn main() { + let slice = &[2, 2, 2, 2, 2]; + let folded = slice.fold(0, |a, b| a + b); + assert(folded == 10); +} + +``` + +### reduce + +Same as fold, but uses the first element as the starting element. 
+ +```rust +fn reduce(self, f: fn(T, T) -> T) -> T +``` + +example: + +```rust +fn main() { + let slice = &[2, 2, 2, 2, 2]; + let reduced = slice.reduce(|a, b| a + b); + assert(reduced == 10); +} +``` + +### all + +Returns true if all the elements satisfy the given predicate + +```rust +fn all(self, predicate: fn(T) -> bool) -> bool +``` + +example: + +```rust +fn main() { + let slice = &[2, 2, 2, 2, 2]; + let all = slice.all(|a| a == 2); + assert(all); +} +``` + +### any + +Returns true if any of the elements satisfy the given predicate + +```rust +fn any(self, predicate: fn(T) -> bool) -> bool +``` + +example: + +```rust +fn main() { + let slice = &[2, 2, 2, 2, 5]; + let any = slice.any(|a| a == 5); + assert(any); +} + +``` diff --git a/noir/noir-repo/docs/docs/noir/concepts/traits.md b/noir/noir-repo/docs/docs/noir/concepts/traits.md index df7cb9ebda0..51305b38c16 100644 --- a/noir/noir-repo/docs/docs/noir/concepts/traits.md +++ b/noir/noir-repo/docs/docs/noir/concepts/traits.md @@ -147,7 +147,7 @@ fn main() { ### Generic Trait Implementations With Where Clauses -Where clauses can also be placed on trait implementations themselves to restrict generics in a similar way. +Where clauses can be placed on trait implementations themselves to restrict generics in a similar way. For example, while `impl Foo for T` implements the trait `Foo` for every type, `impl Foo for T where T: Bar` will implement `Foo` only for types that also implement `Bar`. This is often used for implementing generic types. For example, here is the implementation for array equality: @@ -169,6 +169,22 @@ impl Eq for [T; N] where T: Eq { } ``` +Where clauses can also be placed on struct implementations. +For example, here is a method utilizing a generic type that implements the equality trait. + +```rust +struct Foo { + a: u32, + b: T, +} + +impl Foo where T: Eq { + fn eq(self, other: Self) -> bool { + (self.a == other.a) & self.b.eq(other.b) + } +} +``` + ## Generic Traits Traits themselves can also be generic by placing the generic arguments after the trait name. 
These generics are in diff --git a/noir/noir-repo/docs/docs/tooling/testing.md b/noir/noir-repo/docs/docs/tooling/testing.md index d3e0c522473..866677da567 100644 --- a/noir/noir-repo/docs/docs/tooling/testing.md +++ b/noir/noir-repo/docs/docs/tooling/testing.md @@ -42,7 +42,7 @@ fn test_add() { } ``` -You can be more specific and make it fail with a specific reason by using `should_fail_with = "`: +You can be more specific and make it fail with a specific reason by using `should_fail_with = ""`: ```rust fn main(african_swallow_avg_speed : Field) { @@ -58,5 +58,22 @@ fn test_king_arthur() { fn test_bridgekeeper() { main(32); } - ``` + +The string given to `should_fail_with` doesn't need to exactly match the failure reason, it just needs to be a substring of it: + +```rust +fn main(african_swallow_avg_speed : Field) { + assert(african_swallow_avg_speed == 65, "What is the airspeed velocity of an unladen swallow"); +} + +#[test] +fn test_king_arthur() { + main(65); +} + +#[test(should_fail_with = "airspeed velocity")] +fn test_bridgekeeper() { + main(32); +} +``` \ No newline at end of file diff --git a/noir/noir-repo/noir_stdlib/src/aes128.nr b/noir/noir-repo/noir_stdlib/src/aes128.nr index e6e2a5e4997..7b0876b86f3 100644 --- a/noir/noir-repo/noir_stdlib/src/aes128.nr +++ b/noir/noir-repo/noir_stdlib/src/aes128.nr @@ -1,4 +1,4 @@ #[foreign(aes128_encrypt)] // docs:start:aes128 -pub fn aes128_encrypt(input: [u8; N], iv: [u8; 16], key: [u8; 16]) -> [u8] {} +pub fn aes128_encrypt(input: [u8; N], iv: [u8; 16], key: [u8; 16]) -> [u8] {} // docs:end:aes128 diff --git a/noir/noir-repo/noir_stdlib/src/array.nr b/noir/noir-repo/noir_stdlib/src/array.nr index 6fba197dd05..ad9c7093d07 100644 --- a/noir/noir-repo/noir_stdlib/src/array.nr +++ b/noir/noir-repo/noir_stdlib/src/array.nr @@ -2,7 +2,7 @@ use crate::cmp::Ord; // TODO: Once we fully move to the new SSA pass this module can be removed and replaced // by the methods in the `slice` module -impl [T; N] { +impl [T; N] { #[builtin(array_len)] pub fn len(self) -> u32 {} @@ -110,7 +110,7 @@ impl [T; N] { // helper function used to look up the position of a value in an array of Field // Note that function returns 0 if the value is not found -unconstrained fn find_index(a: [u32; N], find: u32) -> u32 { +unconstrained fn find_index(a: [u32; N], find: u32) -> u32 { let mut result = 0; for i in 0..a.len() { if a[i] == find { diff --git a/noir/noir-repo/noir_stdlib/src/cmp.nr b/noir/noir-repo/noir_stdlib/src/cmp.nr index 457b2cfa167..bdd5e2bc5ec 100644 --- a/noir/noir-repo/noir_stdlib/src/cmp.nr +++ b/noir/noir-repo/noir_stdlib/src/cmp.nr @@ -18,7 +18,7 @@ impl Eq for i64 { fn eq(self, other: i64) -> bool { self == other } } impl Eq for () { fn eq(_self: Self, _other: ()) -> bool { true } } impl Eq for bool { fn eq(self, other: bool) -> bool { self == other } } -impl Eq for [T; N] where T: Eq { +impl Eq for [T; N] where T: Eq { fn eq(self, other: [T; N]) -> bool { let mut result = true; for i in 0 .. self.len() { @@ -38,7 +38,7 @@ impl Eq for [T] where T: Eq { } } -impl Eq for str { +impl Eq for str { fn eq(self, other: str) -> bool { let self_bytes = self.as_bytes(); let other_bytes = other.as_bytes(); @@ -203,7 +203,7 @@ impl Ord for bool { } } -impl Ord for [T; N] where T: Ord { +impl Ord for [T; N] where T: Ord { // The first non-equal element of both arrays determines // the ordering for the whole array. 
fn cmp(self, other: [T; N]) -> Ordering { diff --git a/noir/noir-repo/noir_stdlib/src/collections/bounded_vec.nr b/noir/noir-repo/noir_stdlib/src/collections/bounded_vec.nr index 6fde9e70f4d..c218ecd2348 100644 --- a/noir/noir-repo/noir_stdlib/src/collections/bounded_vec.nr +++ b/noir/noir-repo/noir_stdlib/src/collections/bounded_vec.nr @@ -1,11 +1,11 @@ use crate::{cmp::Eq, convert::From}; -struct BoundedVec { +struct BoundedVec { storage: [T; MaxLen], len: u32, } -impl BoundedVec { +impl BoundedVec { pub fn new() -> Self { let zeroed = crate::unsafe::zeroed(); BoundedVec { storage: [zeroed; MaxLen], len: 0 } @@ -61,7 +61,7 @@ impl BoundedVec { self.storage } - pub fn extend_from_array(&mut self, array: [T; Len]) { + pub fn extend_from_array(&mut self, array: [T; Len]) { let new_len = self.len + array.len(); assert(new_len <= MaxLen, "extend_from_array out of bounds"); for i in 0..array.len() { @@ -79,7 +79,7 @@ impl BoundedVec { self.len = new_len; } - pub fn extend_from_bounded_vec(&mut self, vec: BoundedVec) { + pub fn extend_from_bounded_vec(&mut self, vec: BoundedVec) { let append_len = vec.len(); let new_len = self.len + append_len; assert(new_len <= MaxLen, "extend_from_bounded_vec out of bounds"); @@ -94,7 +94,7 @@ impl BoundedVec { self.len = new_len; } - pub fn from_array(array: [T; Len]) -> Self { + pub fn from_array(array: [T; Len]) -> Self { assert(Len <= MaxLen, "from array out of bounds"); let mut vec: BoundedVec = BoundedVec::new(); vec.extend_from_array(array); @@ -134,7 +134,7 @@ impl BoundedVec { } } -impl Eq for BoundedVec where T: Eq { +impl Eq for BoundedVec where T: Eq { fn eq(self, other: BoundedVec) -> bool { // TODO: https://github.com/noir-lang/noir/issues/4837 // @@ -145,7 +145,7 @@ impl Eq for BoundedVec where T: Eq { } } -impl From<[T; Len]> for BoundedVec { +impl From<[T; Len]> for BoundedVec { fn from(array: [T; Len]) -> BoundedVec { BoundedVec::from_array(array) } diff --git a/noir/noir-repo/noir_stdlib/src/collections/map.nr b/noir/noir-repo/noir_stdlib/src/collections/map.nr index 84e94166869..8324583632f 100644 --- a/noir/noir-repo/noir_stdlib/src/collections/map.nr +++ b/noir/noir-repo/noir_stdlib/src/collections/map.nr @@ -15,7 +15,7 @@ global MAX_LOAD_FACTOR_DEN0MINATOR = 4; // Size of the underlying table must be known at compile time. // It is advised to select capacity N as a power of two, or a prime number // because utilized probing scheme is best tailored for it. -struct HashMap { +struct HashMap { _table: [Slot; N], // Amount of valid elements in the map. @@ -77,7 +77,7 @@ impl Slot { // While conducting lookup, we iterate attempt from 0 to N - 1 due to heuristic, // that if we have went that far without finding desired, // it is very unlikely to be after - performance will be heavily degraded. -impl HashMap { +impl HashMap { // Creates a new instance of HashMap with specified BuildHasher. // docs:start:with_hasher pub fn with_hasher(_build_hasher: B) -> Self @@ -424,7 +424,7 @@ impl HashMap { // equal sets of key-value entries, // thus one is a subset of the other and vice versa. 
// docs:start:eq -impl Eq for HashMap +impl Eq for HashMap where K: Eq + Hash, V: Eq, @@ -460,7 +460,7 @@ where } // docs:start:default -impl Default for HashMap +impl Default for HashMap where B: BuildHasher + Default, H: Hasher + Default diff --git a/noir/noir-repo/noir_stdlib/src/compat.nr b/noir/noir-repo/noir_stdlib/src/compat.nr index 30b7f73f130..06da8150767 100644 --- a/noir/noir-repo/noir_stdlib/src/compat.nr +++ b/noir/noir-repo/noir_stdlib/src/compat.nr @@ -1,18 +1,7 @@ -global BN254_MODULUS_BE_BYTES: [u8; 32] = [ +global BN254_MODULUS_BE_BYTES: [u8] = &[ 48, 100, 78, 114, 225, 49, 160, 41, 184, 80, 69, 182, 129, 129, 88, 93, 40, 51, 232, 72, 121, 185, 112, 145, 67, 225, 245, 147, 240, 0, 0, 1 ]; pub fn is_bn254() -> bool { - // TODO: refactor this once https://github.com/noir-lang/noir/issues/5245 is resolved. - let modulus_bytes = crate::field::modulus_be_bytes(); - if modulus_bytes.len() == 32 { - let mut modulus_bytes_array: [u8; 32] = [0; 32]; - for i in 0..32 { - modulus_bytes_array[i] = modulus_bytes[i]; - } - - modulus_bytes_array == BN254_MODULUS_BE_BYTES - } else { - false - } + crate::field::modulus_be_bytes() == BN254_MODULUS_BE_BYTES } diff --git a/noir/noir-repo/noir_stdlib/src/default.nr b/noir/noir-repo/noir_stdlib/src/default.nr index bd2f1ce0cd2..0acb3966034 100644 --- a/noir/noir-repo/noir_stdlib/src/default.nr +++ b/noir/noir-repo/noir_stdlib/src/default.nr @@ -17,7 +17,7 @@ impl Default for i64 { fn default() -> i64 { 0 } } impl Default for () { fn default() -> () { () } } impl Default for bool { fn default() -> bool { false } } -impl Default for [T; N] where T: Default { +impl Default for [T; N] where T: Default { fn default() -> [T; N] { [T::default(); N] } diff --git a/noir/noir-repo/noir_stdlib/src/ec/montcurve.nr b/noir/noir-repo/noir_stdlib/src/ec/montcurve.nr index 7dc756781c0..12b48d66b9d 100644 --- a/noir/noir-repo/noir_stdlib/src/ec/montcurve.nr +++ b/noir/noir-repo/noir_stdlib/src/ec/montcurve.nr @@ -114,7 +114,7 @@ mod affine { // Scalar multiplication with scalar represented by a bit array (little-endian convention). // If k is the natural number represented by `bits`, then this computes p + ... + p k times. - fn bit_mul(self, bits: [u1; N], p: Point) -> Point { + fn bit_mul(self, bits: [u1; N], p: Point) -> Point { self.into_tecurve().bit_mul(bits, p.into_tecurve()).into_montcurve() } @@ -124,7 +124,7 @@ mod affine { } // Multi-scalar multiplication (n[0]*p[0] + ... + n[N]*p[N], where * denotes scalar multiplication) - fn msm(self, n: [Field; N], p: [Point; N]) -> Point { + fn msm(self, n: [Field; N], p: [Point; N]) -> Point { let mut out = Point::zero(); for i in 0..N { @@ -315,7 +315,7 @@ mod curvegroup { // Scalar multiplication with scalar represented by a bit array (little-endian convention). // If k is the natural number represented by `bits`, then this computes p + ... + p k times. - fn bit_mul(self, bits: [u1; N], p: Point) -> Point { + fn bit_mul(self, bits: [u1; N], p: Point) -> Point { self.into_tecurve().bit_mul(bits, p.into_tecurve()).into_montcurve() } @@ -325,7 +325,7 @@ mod curvegroup { } // Multi-scalar multiplication (n[0]*p[0] + ... 
+ n[N]*p[N], where * denotes scalar multiplication) - fn msm(self, n: [Field; N], p: [Point; N]) -> Point { + fn msm(self, n: [Field; N], p: [Point; N]) -> Point { let mut out = Point::zero(); for i in 0..N { diff --git a/noir/noir-repo/noir_stdlib/src/ec/swcurve.nr b/noir/noir-repo/noir_stdlib/src/ec/swcurve.nr index 9dd324f3085..3ad3af41cff 100644 --- a/noir/noir-repo/noir_stdlib/src/ec/swcurve.nr +++ b/noir/noir-repo/noir_stdlib/src/ec/swcurve.nr @@ -134,7 +134,7 @@ mod affine { // Scalar multiplication with scalar represented by a bit array (little-endian convention). // If k is the natural number represented by `bits`, then this computes p + ... + p k times. - fn bit_mul(self, bits: [u1; N], p: Point) -> Point { + fn bit_mul(self, bits: [u1; N], p: Point) -> Point { self.into_group().bit_mul(bits, p.into_group()).into_affine() } @@ -144,7 +144,7 @@ mod affine { } // Multi-scalar multiplication (n[0]*p[0] + ... + n[N]*p[N], where * denotes scalar multiplication) - pub fn msm(self, n: [Field; N], p: [Point; N]) -> Point { + pub fn msm(self, n: [Field; N], p: [Point; N]) -> Point { let mut out = Point::zero(); for i in 0..N { @@ -336,7 +336,7 @@ mod curvegroup { // Scalar multiplication with scalar represented by a bit array (little-endian convention). // If k is the natural number represented by `bits`, then this computes p + ... + p k times. - fn bit_mul(self, bits: [u1; N], p: Point) -> Point { + fn bit_mul(self, bits: [u1; N], p: Point) -> Point { let mut out = Point::zero(); for i in 0..N { @@ -363,7 +363,7 @@ mod curvegroup { } // Multi-scalar multiplication (n[0]*p[0] + ... + n[N]*p[N], where * denotes scalar multiplication) - fn msm(self, n: [Field; N], p: [Point; N]) -> Point { + fn msm(self, n: [Field; N], p: [Point; N]) -> Point { let mut out = Point::zero(); for i in 0..N { diff --git a/noir/noir-repo/noir_stdlib/src/ec/tecurve.nr b/noir/noir-repo/noir_stdlib/src/ec/tecurve.nr index 506fe89313a..aaf66f903cc 100644 --- a/noir/noir-repo/noir_stdlib/src/ec/tecurve.nr +++ b/noir/noir-repo/noir_stdlib/src/ec/tecurve.nr @@ -132,7 +132,7 @@ mod affine { // Scalar multiplication with scalar represented by a bit array (little-endian convention). // If k is the natural number represented by `bits`, then this computes p + ... + p k times. - fn bit_mul(self, bits: [u1; N], p: Point) -> Point { + fn bit_mul(self, bits: [u1; N], p: Point) -> Point { self.into_group().bit_mul(bits, p.into_group()).into_affine() } @@ -142,7 +142,7 @@ mod affine { } // Multi-scalar multiplication (n[0]*p[0] + ... + n[N]*p[N], where * denotes scalar multiplication) - fn msm(self, n: [Field; N], p: [Point; N]) -> Point { + fn msm(self, n: [Field; N], p: [Point; N]) -> Point { let mut out = Point::zero(); for i in 0..N { @@ -340,7 +340,7 @@ mod curvegroup { // Scalar multiplication with scalar represented by a bit array (little-endian convention). // If k is the natural number represented by `bits`, then this computes p + ... + p k times. - fn bit_mul(self, bits: [u1; N], p: Point) -> Point { + fn bit_mul(self, bits: [u1; N], p: Point) -> Point { let mut out = Point::zero(); for i in 0..N { @@ -367,7 +367,7 @@ mod curvegroup { } // Multi-scalar multiplication (n[0]*p[0] + ... 
+ n[N]*p[N], where * denotes scalar multiplication) - fn msm(self, n: [Field; N], p: [Point; N]) -> Point { + fn msm(self, n: [Field; N], p: [Point; N]) -> Point { let mut out = Point::zero(); for i in 0..N { diff --git a/noir/noir-repo/noir_stdlib/src/ecdsa_secp256k1.nr b/noir/noir-repo/noir_stdlib/src/ecdsa_secp256k1.nr index f84e2221f57..8a70184dca8 100644 --- a/noir/noir-repo/noir_stdlib/src/ecdsa_secp256k1.nr +++ b/noir/noir-repo/noir_stdlib/src/ecdsa_secp256k1.nr @@ -1,6 +1,6 @@ #[foreign(ecdsa_secp256k1)] // docs:start:ecdsa_secp256k1 -pub fn verify_signature( +pub fn verify_signature( public_key_x: [u8; 32], public_key_y: [u8; 32], signature: [u8; 64], diff --git a/noir/noir-repo/noir_stdlib/src/ecdsa_secp256r1.nr b/noir/noir-repo/noir_stdlib/src/ecdsa_secp256r1.nr index 76e68aeeafa..8772fa7c2ca 100644 --- a/noir/noir-repo/noir_stdlib/src/ecdsa_secp256r1.nr +++ b/noir/noir-repo/noir_stdlib/src/ecdsa_secp256r1.nr @@ -1,6 +1,6 @@ #[foreign(ecdsa_secp256r1)] // docs:start:ecdsa_secp256r1 -pub fn verify_signature( +pub fn verify_signature( public_key_x: [u8; 32], public_key_y: [u8; 32], signature: [u8; 64], diff --git a/noir/noir-repo/noir_stdlib/src/embedded_curve_ops.nr b/noir/noir-repo/noir_stdlib/src/embedded_curve_ops.nr index 8e768b97479..c5617094c0a 100644 --- a/noir/noir-repo/noir_stdlib/src/embedded_curve_ops.nr +++ b/noir/noir-repo/noir_stdlib/src/embedded_curve_ops.nr @@ -68,7 +68,7 @@ impl EmbeddedCurveScalar { // underlying proof system. #[foreign(multi_scalar_mul)] // docs:start:multi_scalar_mul -pub fn multi_scalar_mul( +pub fn multi_scalar_mul( points: [EmbeddedCurvePoint; N], scalars: [EmbeddedCurveScalar; N] ) -> [Field; 3] diff --git a/noir/noir-repo/noir_stdlib/src/hash.nr b/noir/noir-repo/noir_stdlib/src/hash.nr index 62b47b67241..493430c99a4 100644 --- a/noir/noir-repo/noir_stdlib/src/hash.nr +++ b/noir/noir-repo/noir_stdlib/src/hash.nr @@ -9,24 +9,24 @@ use crate::embedded_curve_ops::{EmbeddedCurvePoint, EmbeddedCurveScalar, multi_s #[foreign(sha256)] // docs:start:sha256 -pub fn sha256(input: [u8; N]) -> [u8; 32] +pub fn sha256(input: [u8; N]) -> [u8; 32] // docs:end:sha256 {} #[foreign(blake2s)] // docs:start:blake2s -pub fn blake2s(input: [u8; N]) -> [u8; 32] +pub fn blake2s(input: [u8; N]) -> [u8; 32] // docs:end:blake2s {} #[foreign(blake3)] // docs:start:blake3 -pub fn blake3(input: [u8; N]) -> [u8; 32] +pub fn blake3(input: [u8; N]) -> [u8; 32] // docs:end:blake3 {} // docs:start:pedersen_commitment -pub fn pedersen_commitment(input: [Field; N]) -> EmbeddedCurvePoint { +pub fn pedersen_commitment(input: [Field; N]) -> EmbeddedCurvePoint { // docs:end:pedersen_commitment let value = pedersen_commitment_with_separator(input, 0); if (value.x == 0) & (value.y == 0) { @@ -36,7 +36,7 @@ pub fn pedersen_commitment(input: [Field; N]) -> EmbeddedCurvePoint { } } -fn pedersen_commitment_with_separator_noir(input: [Field; N], separator: u32) -> EmbeddedCurvePoint { +fn pedersen_commitment_with_separator_noir(input: [Field; N], separator: u32) -> EmbeddedCurvePoint { let mut points = [EmbeddedCurveScalar { lo: 0, hi: 0 }; N]; for i in 0..N { points[i] = EmbeddedCurveScalar::from_field(input[i]); @@ -46,20 +46,23 @@ fn pedersen_commitment_with_separator_noir(input: [Field; N], separator: u32) EmbeddedCurvePoint { x: values[0], y: values[1], is_infinite: values[2] as bool } } -pub fn pedersen_commitment_with_separator(input: [Field; N], separator: u32) -> EmbeddedCurvePoint { +pub fn pedersen_commitment_with_separator(input: [Field; N], separator: u32) -> 
EmbeddedCurvePoint { let values = __pedersen_commitment_with_separator(input, separator); EmbeddedCurvePoint { x: values[0], y: values[1], is_infinite: false } } // docs:start:pedersen_hash -pub fn pedersen_hash(input: [Field; N]) -> Field +pub fn pedersen_hash(input: [Field; N]) -> Field // docs:end:pedersen_hash { pedersen_hash_with_separator(input, 0) } #[field(bn254)] -fn derive_generators(domain_separator_bytes: [u8; M], starting_index: u32) -> [EmbeddedCurvePoint; N] { +fn derive_generators( + domain_separator_bytes: [u8; M], + starting_index: u32 +) -> [EmbeddedCurvePoint; N] { crate::assert_constant(domain_separator_bytes); crate::assert_constant(starting_index); __derive_generators(domain_separator_bytes, starting_index) @@ -67,9 +70,9 @@ fn derive_generators(domain_separator_bytes: [u8; M], starting_index: u32) #[builtin(derive_pedersen_generators)] #[field(bn254)] -fn __derive_generators(domain_separator_bytes: [u8; M], starting_index: u32) -> [EmbeddedCurvePoint; N] {} +fn __derive_generators(domain_separator_bytes: [u8; M], starting_index: u32) -> [EmbeddedCurvePoint; N] {} -fn pedersen_hash_with_separator_noir(input: [Field; N], separator: u32) -> Field { +fn pedersen_hash_with_separator_noir(input: [Field; N], separator: u32) -> Field { let v1 = pedersen_commitment_with_separator(input, separator); let length_generator : [EmbeddedCurvePoint; 1] = derive_generators("pedersen_hash_length".as_bytes(), 0); multi_scalar_mul( @@ -79,10 +82,10 @@ fn pedersen_hash_with_separator_noir(input: [Field; N], separator: u32) -> Fi } #[foreign(pedersen_hash)] -pub fn pedersen_hash_with_separator(input: [Field; N], separator: u32) -> Field {} +pub fn pedersen_hash_with_separator(input: [Field; N], separator: u32) -> Field {} #[foreign(pedersen_commitment)] -fn __pedersen_commitment_with_separator(input: [Field; N], separator: u32) -> [Field; 2] {} +fn __pedersen_commitment_with_separator(input: [Field; N], separator: u32) -> [Field; 2] {} pub fn hash_to_field(inputs: [Field]) -> Field { let mut sum = 0; @@ -97,12 +100,12 @@ pub fn hash_to_field(inputs: [Field]) -> Field { #[foreign(keccak256)] // docs:start:keccak256 -pub fn keccak256(input: [u8; N], message_size: u32) -> [u8; 32] +pub fn keccak256(input: [u8; N], message_size: u32) -> [u8; 32] // docs:end:keccak256 {} #[foreign(poseidon2_permutation)] -pub fn poseidon2_permutation(_input: [Field; N], _state_length: u32) -> [Field; N] {} +pub fn poseidon2_permutation(_input: [Field; N], _state_length: u32) -> [Field; N] {} #[foreign(sha256_compression)] pub fn sha256_compression(_input: [u32; 16], _state: [u32; 8]) -> [u32; 8] {} @@ -207,7 +210,7 @@ impl Hash for U128 { } } -impl Hash for [T; N] where T: Hash { +impl Hash for [T; N] where T: Hash { fn hash(self, state: &mut H) where H: Hasher{ for elem in self { elem.hash(state); diff --git a/noir/noir-repo/noir_stdlib/src/hash/mimc.nr b/noir/noir-repo/noir_stdlib/src/hash/mimc.nr index 6c5502c2fbf..a16a73c5bc5 100644 --- a/noir/noir-repo/noir_stdlib/src/hash/mimc.nr +++ b/noir/noir-repo/noir_stdlib/src/hash/mimc.nr @@ -6,7 +6,7 @@ use crate::default::Default; // You must use constants generated for the native field // Rounds number should be ~ log(p)/log(exp) // For 254 bit primes, exponent 7 and 91 rounds seems to be recommended -fn mimc(x: Field, k: Field, constants: [Field; N], exp: Field) -> Field { +fn mimc(x: Field, k: Field, constants: [Field; N], exp: Field) -> Field { //round 0 let mut t = x + k; let mut h = t.pow_32(exp); @@ -116,7 +116,7 @@ global MIMC_BN254_CONSTANTS: [Field; 
MIMC_BN254_ROUNDS] = [ //mimc implementation with hardcoded parameters for BN254 curve. #[field(bn254)] -pub fn mimc_bn254(array: [Field; N]) -> Field { +pub fn mimc_bn254(array: [Field; N]) -> Field { let exponent = 7; let mut r = 0; for elem in array { diff --git a/noir/noir-repo/noir_stdlib/src/hash/poseidon.nr b/noir/noir-repo/noir_stdlib/src/hash/poseidon.nr index c4b5f0fcb6f..963808f6053 100644 --- a/noir/noir-repo/noir_stdlib/src/hash/poseidon.nr +++ b/noir/noir-repo/noir_stdlib/src/hash/poseidon.nr @@ -6,7 +6,7 @@ use crate::default::Default; // A config struct defining the parameters of the Poseidon instance to use. // // A thorough writeup of this method (along with an unoptimized method) can be found at: https://spec.filecoin.io/algorithms/crypto/poseidon/ -struct PoseidonConfig { +struct PoseidonConfig { // State width, should be equal to `T` t: Field, // Number of full rounds. should be even @@ -28,7 +28,7 @@ struct PoseidonConfig { sparse_mds: [Field; X], } -pub fn config( +pub fn config( t: Field, rf: u8, rp: u8, @@ -40,14 +40,17 @@ pub fn config( ) -> PoseidonConfig { // Input checks assert_eq(rf & 1, 0); - assert_eq((t as u8) * rf + rp, N); - assert_eq(t, T); + assert_eq((t as u8) * rf + rp, N as u8); + assert_eq(t, T as Field); assert(alpha != 0); PoseidonConfig { t, rf, rp, alpha, round_constants, mds, presparse_mds, sparse_mds } } -pub fn permute(pos_conf: PoseidonConfig, mut state: [Field; T]) -> [Field; T] { +pub fn permute( + pos_conf: PoseidonConfig, + mut state: [Field; T] +) -> [Field; T] { let PoseidonConfig {t, rf, rp, alpha, round_constants, mds, presparse_mds, sparse_mds } = pos_conf; for i in 0..state.len() { @@ -109,7 +112,7 @@ pub fn permute(pos_conf: PoseidonConfig, mut state: [Field; T] } // Performs matrix multiplication on a vector -fn apply_matrix(matrix: [[Field; N]; N], vec: [Field; N]) -> [Field; N] { +fn apply_matrix(matrix: [[Field; N]; N], vec: [Field; N]) -> [Field; N] { let mut out = [0; N]; for i in 0..N { @@ -122,7 +125,7 @@ fn apply_matrix(matrix: [[Field; N]; N], vec: [Field; N]) -> [Field; N] { } // Corresponding absorption. 
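// For instance, the variable-length BN254 sponge further down in this patch drives this as
// `absorb(consts::x5_5_config(), [0; 5], 4, 1, msg)[1]` (see `poseidon/bn254.nr` below).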
-fn absorb( +fn absorb( pos_conf: PoseidonConfig, // Initial state; usually [0; O] mut state: [Field; T], @@ -152,7 +155,7 @@ fn absorb( state } -fn sigma(x: [Field; O]) -> [Field; O] { +fn sigma(x: [Field; O]) -> [Field; O] { let mut y = x; for i in 0..O { let t = y[i]; diff --git a/noir/noir-repo/noir_stdlib/src/hash/poseidon/bn254.nr b/noir/noir-repo/noir_stdlib/src/hash/poseidon/bn254.nr index 54f22884e29..0e47ca11e20 100644 --- a/noir/noir-repo/noir_stdlib/src/hash/poseidon/bn254.nr +++ b/noir/noir-repo/noir_stdlib/src/hash/poseidon/bn254.nr @@ -6,7 +6,7 @@ use crate::hash::poseidon::{PoseidonConfig, absorb}; // Variable-length Poseidon-128 sponge as suggested in second bullet point of §3 of https://eprint.iacr.org/2019/458.pdf #[field(bn254)] -pub fn sponge(msg: [Field; N]) -> Field { +pub fn sponge(msg: [Field; N]) -> Field { absorb(consts::x5_5_config(), [0; 5], 4, 1, msg)[1] } diff --git a/noir/noir-repo/noir_stdlib/src/hash/poseidon2.nr b/noir/noir-repo/noir_stdlib/src/hash/poseidon2.nr index 313544f8884..e34992364ab 100644 --- a/noir/noir-repo/noir_stdlib/src/hash/poseidon2.nr +++ b/noir/noir-repo/noir_stdlib/src/hash/poseidon2.nr @@ -11,7 +11,7 @@ struct Poseidon2 { } impl Poseidon2 { - pub fn hash(input: [Field; N], message_size: u32) -> Field { + pub fn hash(input: [Field; N], message_size: u32) -> Field { if message_size == N { Poseidon2::hash_internal(input, N, false) } else { @@ -94,7 +94,7 @@ impl Poseidon2 { result } - fn hash_internal(input: [Field; N], in_len: u32, is_variable_length: bool) -> Field { + fn hash_internal(input: [Field; N], in_len: u32, is_variable_length: bool) -> Field { let two_pow_64 = 18446744073709551616; let iv : Field = (in_len as Field) * two_pow_64; let mut sponge = Poseidon2::new(iv); diff --git a/noir/noir-repo/noir_stdlib/src/merkle.nr b/noir/noir-repo/noir_stdlib/src/merkle.nr index 9b15fe7313d..17e539ab9b7 100644 --- a/noir/noir-repo/noir_stdlib/src/merkle.nr +++ b/noir/noir-repo/noir_stdlib/src/merkle.nr @@ -2,7 +2,7 @@ // Currently we assume that it is a binary tree, so depth k implies a width of 2^k // XXX: In the future we can add an arity parameter // Returns the merkle root of the tree from the provided leaf, its hashpath, using a pedersen hash function. 
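// A minimal usage sketch (illustrative only; `leaf`, `index`, `hash_path`, and `expected_root`
// are assumed bindings, not part of this patch):
//   let root = std::merkle::compute_merkle_root(leaf, index, hash_path);
//   assert(root == expected_root);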
-pub fn compute_merkle_root(leaf: Field, index: Field, hash_path: [Field; N]) -> Field { +pub fn compute_merkle_root(leaf: Field, index: Field, hash_path: [Field; N]) -> Field { let n = hash_path.len(); let index_bits = index.to_le_bits(n as u32); let mut current = leaf; diff --git a/noir/noir-repo/noir_stdlib/src/option.nr b/noir/noir-repo/noir_stdlib/src/option.nr index c94a1cf836e..df020e75615 100644 --- a/noir/noir-repo/noir_stdlib/src/option.nr +++ b/noir/noir-repo/noir_stdlib/src/option.nr @@ -57,7 +57,7 @@ impl Option { } /// Asserts `self.is_some()` with a provided custom message and returns the contained `Some` value - fn expect(self, message: fmtstr) -> T { + fn expect(self, message: fmtstr) -> T { assert(self.is_some(), message); self._value } diff --git a/noir/noir-repo/noir_stdlib/src/schnorr.nr b/noir/noir-repo/noir_stdlib/src/schnorr.nr index c63915061cb..24ca514025c 100644 --- a/noir/noir-repo/noir_stdlib/src/schnorr.nr +++ b/noir/noir-repo/noir_stdlib/src/schnorr.nr @@ -1,6 +1,6 @@ #[foreign(schnorr_verify)] // docs:start:schnorr_verify -pub fn verify_signature( +pub fn verify_signature( public_key_x: Field, public_key_y: Field, signature: [u8; 64], diff --git a/noir/noir-repo/noir_stdlib/src/sha256.nr b/noir/noir-repo/noir_stdlib/src/sha256.nr index d856043fcfa..0161756c1d0 100644 --- a/noir/noir-repo/noir_stdlib/src/sha256.nr +++ b/noir/noir-repo/noir_stdlib/src/sha256.nr @@ -16,8 +16,8 @@ fn msg_u8_to_u32(msg: [u8; 64]) -> [u32; 16] { msg32 } // SHA-256 hash function -pub fn digest(msg: [u8; N]) -> [u8; 32] { - sha256_var(msg, N) +pub fn digest(msg: [u8; N]) -> [u8; 32] { + sha256_var(msg, N as u64) } fn hash_final_block(msg_block: [u8; 64], mut state: [u32; 8]) -> [u8; 32] { @@ -38,12 +38,12 @@ fn hash_final_block(msg_block: [u8; 64], mut state: [u32; 8]) -> [u8; 32] { } // Variable size SHA-256 hash -pub fn sha256_var(msg: [u8; N], message_size: u64) -> [u8; 32] { +pub fn sha256_var(msg: [u8; N], message_size: u64) -> [u8; 32] { let mut msg_block: [u8; 64] = [0; 64]; let mut h: [u32; 8] = [1779033703, 3144134277, 1013904242, 2773480762, 1359893119, 2600822924, 528734635, 1541459225]; // Intermediate hash, starting with the canonical initial value let mut i: u64 = 0; // Message byte pointer for k in 0..N { - if k < message_size { + if k as u64 < message_size { // Populate msg_block msg_block[i] = msg[k]; i = i + 1; diff --git a/noir/noir-repo/noir_stdlib/src/sha512.nr b/noir/noir-repo/noir_stdlib/src/sha512.nr index 0f8ffcfcb1c..aed6c2878b3 100644 --- a/noir/noir-repo/noir_stdlib/src/sha512.nr +++ b/noir/noir-repo/noir_stdlib/src/sha512.nr @@ -87,7 +87,7 @@ fn msg_u8_to_u64(msg: [u8; 128]) -> [u64; 16] { msg64 } // SHA-512 hash function -pub fn digest(msg: [u8; N]) -> [u8; 64] { +pub fn digest(msg: [u8; N]) -> [u8; 64] { let mut msg_block: [u8; 128] = [0; 128]; // noir-fmt:ignore let mut h: [u64; 8] = [7640891576956012808, 13503953896175478587, 4354685564936845355, 11912009170470909681, 5840696475078001361, 11170449401992604703, 2270897969802886507, 6620516959819538809]; // Intermediate hash, starting with the canonical initial value diff --git a/noir/noir-repo/noir_stdlib/src/slice.nr b/noir/noir-repo/noir_stdlib/src/slice.nr index bf05ae0cf64..1a40abcf704 100644 --- a/noir/noir-repo/noir_stdlib/src/slice.nr +++ b/noir/noir-repo/noir_stdlib/src/slice.nr @@ -44,7 +44,7 @@ impl [T] { self } - pub fn as_array(self) -> [T; N] { + pub fn as_array(self) -> [T; N] { assert(self.len() == N); let mut array = [crate::unsafe::zeroed(); N]; @@ -53,4 +53,53 @@ impl [T] { } array } 
+ + // Apply a function to each element of the slice, returning a new slice + // containing the mapped elements. + pub fn map(self, f: fn[Env](T) -> U) -> [U] { + let mut ret = &[]; + for elem in self { + ret = ret.push_back(f(elem)); + } + ret + } + + // Apply a function to each element of the slice and an accumulator value, + // returning the final accumulated value. This function is also sometimes + // called `foldl`, `fold_left`, `reduce`, or `inject`. + pub fn fold(self, mut accumulator: U, f: fn[Env](U, T) -> U) -> U { + for elem in self { + accumulator = f(accumulator, elem); + } + accumulator + } + + // Apply a function to each element of the slice and an accumulator value, + // returning the final accumulated value. Unlike fold, reduce uses the first + // element of the given slice as its starting accumulator value. + pub fn reduce(self, f: fn[Env](T, T) -> T) -> T { + let mut accumulator = self[0]; + for i in 1..self.len() { + accumulator = f(accumulator, self[i]); + } + accumulator + } + + // Returns true if all elements in the slice satisfy the predicate + pub fn all(self, predicate: fn[Env](T) -> bool) -> bool { + let mut ret = true; + for elem in self { + ret &= predicate(elem); + } + ret + } + + // Returns true if any element in the slice satisfies the predicate + pub fn any(self, predicate: fn[Env](T) -> bool) -> bool { + let mut ret = false; + for elem in self { + ret |= predicate(elem); + } + ret + } } diff --git a/noir/noir-repo/noir_stdlib/src/string.nr b/noir/noir-repo/noir_stdlib/src/string.nr index 12b5a1e75ec..5f8f3de775d 100644 --- a/noir/noir-repo/noir_stdlib/src/string.nr +++ b/noir/noir-repo/noir_stdlib/src/string.nr @@ -1,5 +1,5 @@ use crate::collections::vec::Vec; -impl str { +impl str { /// Converts the given string into a byte array #[builtin(str_as_bytes)] pub fn as_bytes(self) -> [u8; N] {} diff --git a/noir/noir-repo/noir_stdlib/src/test.nr b/noir/noir-repo/noir_stdlib/src/test.nr index e6a7e03fefc..f8db6079193 100644 --- a/noir/noir-repo/noir_stdlib/src/test.nr +++ b/noir/noir-repo/noir_stdlib/src/test.nr @@ -1,5 +1,5 @@ #[oracle(create_mock)] -unconstrained fn create_mock_oracle(name: str) -> Field {} +unconstrained fn create_mock_oracle(name: str) -> Field {} #[oracle(set_mock_params)] unconstrained fn set_mock_params_oracle
<P>
(id: Field, params: P) {} @@ -21,7 +21,7 @@ struct OracleMock { } impl OracleMock { - unconstrained pub fn mock(name: str) -> Self { + unconstrained pub fn mock(name: str) -> Self { Self { id: create_mock_oracle(name) } } diff --git a/noir/noir-repo/noir_stdlib/src/uint128.nr b/noir/noir-repo/noir_stdlib/src/uint128.nr index 829ab09ee1e..e99818bafa0 100644 --- a/noir/noir-repo/noir_stdlib/src/uint128.nr +++ b/noir/noir-repo/noir_stdlib/src/uint128.nr @@ -66,7 +66,7 @@ impl U128 { bytes } - pub fn from_hex(hex: str) -> U128 { + pub fn from_hex(hex: str) -> U128 { let N = N as u32; let bytes = hex.as_bytes(); // string must starts with "0x" diff --git a/noir/noir-repo/scripts/install_bb.sh b/noir/noir-repo/scripts/install_bb.sh index c3ed476200a..b0d55b6ff1d 100755 --- a/noir/noir-repo/scripts/install_bb.sh +++ b/noir/noir-repo/scripts/install_bb.sh @@ -1,6 +1,6 @@ #!/bin/bash -VERSION="0.41.0" +VERSION="0.43.0" BBUP_PATH=~/.bb/bbup diff --git a/noir/noir-repo/test_programs/benchmarks/bench_eddsa_poseidon/src/main.nr b/noir/noir-repo/test_programs/benchmarks/bench_eddsa_poseidon/src/main.nr index 2e42c483360..cb853e48c30 100644 --- a/noir/noir-repo/test_programs/benchmarks/bench_eddsa_poseidon/src/main.nr +++ b/noir/noir-repo/test_programs/benchmarks/bench_eddsa_poseidon/src/main.nr @@ -1,4 +1,4 @@ -use std::eddsa::{eddsa_poseidon_verify}; +use std::eddsa::eddsa_poseidon_verify; fn main( msg: pub Field, @@ -9,4 +9,4 @@ fn main( s: Field ) -> pub bool { eddsa_poseidon_verify(pub_key_x, pub_key_y, s, r8_x, r8_y, msg) -} \ No newline at end of file +} diff --git a/noir/noir-repo/test_programs/benchmarks/bench_poseidon_hash_100/src/main.nr b/noir/noir-repo/test_programs/benchmarks/bench_poseidon_hash_100/src/main.nr index 5fc9e313179..1c9bbfe61bf 100644 --- a/noir/noir-repo/test_programs/benchmarks/bench_poseidon_hash_100/src/main.nr +++ b/noir/noir-repo/test_programs/benchmarks/bench_poseidon_hash_100/src/main.nr @@ -9,4 +9,4 @@ fn main(input: [[Field; 2]; SIZE]) -> pub [Field; SIZE] { } results -} \ No newline at end of file +} diff --git a/noir/noir-repo/test_programs/benchmarks/bench_poseidon_hash_30/src/main.nr b/noir/noir-repo/test_programs/benchmarks/bench_poseidon_hash_30/src/main.nr index 3e319d2b025..3edb47e9f72 100644 --- a/noir/noir-repo/test_programs/benchmarks/bench_poseidon_hash_30/src/main.nr +++ b/noir/noir-repo/test_programs/benchmarks/bench_poseidon_hash_30/src/main.nr @@ -9,4 +9,4 @@ fn main(input: [[Field; 2]; SIZE]) -> pub [Field; SIZE] { } results -} \ No newline at end of file +} diff --git a/noir/noir-repo/test_programs/benchmarks/bench_sha256_100/src/main.nr b/noir/noir-repo/test_programs/benchmarks/bench_sha256_100/src/main.nr index 48b7fbac93c..6df856a83fc 100644 --- a/noir/noir-repo/test_programs/benchmarks/bench_sha256_100/src/main.nr +++ b/noir/noir-repo/test_programs/benchmarks/bench_sha256_100/src/main.nr @@ -1,4 +1,3 @@ - global SIZE = 100; fn main(input: [[u8; 2]; SIZE]) -> pub [[u8; 32]; SIZE] { @@ -8,4 +7,4 @@ fn main(input: [[u8; 2]; SIZE]) -> pub [[u8; 32]; SIZE] { } results -} \ No newline at end of file +} diff --git a/noir/noir-repo/test_programs/benchmarks/bench_sha256_30/src/main.nr b/noir/noir-repo/test_programs/benchmarks/bench_sha256_30/src/main.nr index 37c742f9667..220c1cfbbed 100644 --- a/noir/noir-repo/test_programs/benchmarks/bench_sha256_30/src/main.nr +++ b/noir/noir-repo/test_programs/benchmarks/bench_sha256_30/src/main.nr @@ -1,4 +1,3 @@ - global SIZE = 30; fn main(input: [[u8; 2]; SIZE]) -> pub [[u8; 32]; SIZE] { @@ -8,4 +7,4 @@ fn 
main(input: [[u8; 2]; SIZE]) -> pub [[u8; 32]; SIZE] { } results -} \ No newline at end of file +} diff --git a/noir/noir-repo/test_programs/compile_failure/negate_unsigned/src/main.nr b/noir/noir-repo/test_programs/compile_failure/negate_unsigned/src/main.nr index af4802a4ce6..4d3c5abe5a4 100644 --- a/noir/noir-repo/test_programs/compile_failure/negate_unsigned/src/main.nr +++ b/noir/noir-repo/test_programs/compile_failure/negate_unsigned/src/main.nr @@ -1,4 +1,3 @@ - fn main() { let var = -1 as u8; std::println(var); diff --git a/noir/noir-repo/test_programs/compile_failure/non_comptime_local_fn_call/Nargo.toml b/noir/noir-repo/test_programs/compile_failure/non_comptime_local_fn_call/Nargo.toml new file mode 100644 index 00000000000..2597f0c4653 --- /dev/null +++ b/noir/noir-repo/test_programs/compile_failure/non_comptime_local_fn_call/Nargo.toml @@ -0,0 +1,7 @@ +[package] +name = "non_comptime_local_fn_call" +type = "bin" +authors = [""] +compiler_version = ">=0.31.0" + +[dependencies] diff --git a/noir/noir-repo/test_programs/compile_failure/non_comptime_local_fn_call/src/main.nr b/noir/noir-repo/test_programs/compile_failure/non_comptime_local_fn_call/src/main.nr new file mode 100644 index 00000000000..d75bb1a922a --- /dev/null +++ b/noir/noir-repo/test_programs/compile_failure/non_comptime_local_fn_call/src/main.nr @@ -0,0 +1,9 @@ +fn main() { + comptime { + let _a = id(3); + } +} + +fn id(x: Field) -> Field { + x +} diff --git a/noir/noir-repo/test_programs/compile_failure/orphaned_trait_impl/src/main.nr b/noir/noir-repo/test_programs/compile_failure/orphaned_trait_impl/src/main.nr index dfd88d8f074..dd04aa454b2 100644 --- a/noir/noir-repo/test_programs/compile_failure/orphaned_trait_impl/src/main.nr +++ b/noir/noir-repo/test_programs/compile_failure/orphaned_trait_impl/src/main.nr @@ -1,4 +1,4 @@ -impl dep::crate1::MyTrait for dep::crate2::MyStruct { +impl crate1::MyTrait for crate2::MyStruct { } fn main(x: Field, y: pub Field) { diff --git a/noir/noir-repo/test_programs/compile_failure/restricted_bit_sizes/src/main.nr b/noir/noir-repo/test_programs/compile_failure/restricted_bit_sizes/src/main.nr index 4298c2052a7..a3fea13cc3a 100644 --- a/noir/noir-repo/test_programs/compile_failure/restricted_bit_sizes/src/main.nr +++ b/noir/noir-repo/test_programs/compile_failure/restricted_bit_sizes/src/main.nr @@ -1,3 +1,5 @@ +use std::assert_constant; + fn main() -> pub u63 { 5 } diff --git a/noir/noir-repo/test_programs/compile_failure/turbofish_generic_count/src/main.nr b/noir/noir-repo/test_programs/compile_failure/turbofish_generic_count/src/main.nr index a5f46adb6a5..4091b2f0581 100644 --- a/noir/noir-repo/test_programs/compile_failure/turbofish_generic_count/src/main.nr +++ b/noir/noir-repo/test_programs/compile_failure/turbofish_generic_count/src/main.nr @@ -1,4 +1,3 @@ - struct Bar { one: Field, two: Field, diff --git a/noir/noir-repo/test_programs/compile_success_contract/abi_attribute/Nargo.toml b/noir/noir-repo/test_programs/compile_success_contract/abi_attribute/Nargo.toml new file mode 100644 index 00000000000..56fa88ccb68 --- /dev/null +++ b/noir/noir-repo/test_programs/compile_success_contract/abi_attribute/Nargo.toml @@ -0,0 +1,6 @@ +[package] +name = "abi_attribute" +type = "contract" +authors = [""] + +[dependencies] diff --git a/noir/noir-repo/test_programs/compile_success_contract/abi_attribute/src/main.nr b/noir/noir-repo/test_programs/compile_success_contract/abi_attribute/src/main.nr new file mode 100644 index 00000000000..d658823d519 --- /dev/null +++ 
b/noir/noir-repo/test_programs/compile_success_contract/abi_attribute/src/main.nr @@ -0,0 +1,9 @@ +contract Foo { + #[abi(foo)] + global foo: Field = 42; + + #[abi(bar)] + struct Bar { + inner: Field + } +} diff --git a/noir/noir-repo/test_programs/compile_success_empty/derive_impl/src/main.nr b/noir/noir-repo/test_programs/compile_success_empty/derive_impl/src/main.nr index abad6d4f8e1..9636e4c7383 100644 --- a/noir/noir-repo/test_programs/compile_success_empty/derive_impl/src/main.nr +++ b/noir/noir-repo/test_programs/compile_success_empty/derive_impl/src/main.nr @@ -21,9 +21,12 @@ comptime fn derive_default(typ: TypeDefinition) -> Quoted { #[derive_default] struct Foo { x: Field, - y: u32, + y: Bar, } +#[derive_default] +struct Bar {} + comptime fn make_field_exprs(fields: [(Quoted, Quoted)]) -> [Quoted] { let mut result = &[]; for my_field in fields { @@ -41,4 +44,6 @@ comptime fn join(slice: [Quoted]) -> Quoted { result } -fn main() {} +fn main() { + let _foo: Foo = Default::default(); +} diff --git a/noir/noir-repo/test_programs/compile_success_empty/impl_where_clause/Nargo.toml b/noir/noir-repo/test_programs/compile_success_empty/impl_where_clause/Nargo.toml new file mode 100644 index 00000000000..7d0d5f3513e --- /dev/null +++ b/noir/noir-repo/test_programs/compile_success_empty/impl_where_clause/Nargo.toml @@ -0,0 +1,7 @@ +[package] +name = "impl_where_clause" +type = "bin" +authors = [""] +compiler_version = ">=0.31.0" + +[dependencies] \ No newline at end of file diff --git a/noir/noir-repo/test_programs/compile_success_empty/impl_where_clause/src/main.nr b/noir/noir-repo/test_programs/compile_success_empty/impl_where_clause/src/main.nr new file mode 100644 index 00000000000..2f3223efaae --- /dev/null +++ b/noir/noir-repo/test_programs/compile_success_empty/impl_where_clause/src/main.nr @@ -0,0 +1,34 @@ +struct MyStruct { + a: u32, + b: T, +} + +struct InnerStruct { + a: Field, + b: Field, +} + +trait MyEq { + fn my_eq(self, other: Self) -> bool; +} + +impl MyEq for InnerStruct { + fn my_eq(self, other: InnerStruct) -> bool { + (self.a == other.a) & (self.b == other.b) + } +} + +impl MyStruct where T: MyEq { + fn my_eq(self, other: Self) -> bool { + (self.a == other.a) & self.b.my_eq(other.b) + } +} + +fn main() { + let inner = InnerStruct { a: 1, b: 2 }; + let my_struct = MyStruct { a: 5, b: inner }; + assert(my_struct.my_eq(my_struct)); + + let mut my_struct_new = MyStruct { a: 5, b: InnerStruct { a: 10, b: 15 } }; + assert(my_struct_new.my_eq(my_struct_new)); +} diff --git a/noir/noir-repo/test_programs/compile_success_empty/intrinsic_die/src/main.nr b/noir/noir-repo/test_programs/compile_success_empty/intrinsic_die/src/main.nr index c6e269c155d..17aaf02c283 100644 --- a/noir/noir-repo/test_programs/compile_success_empty/intrinsic_die/src/main.nr +++ b/noir/noir-repo/test_programs/compile_success_empty/intrinsic_die/src/main.nr @@ -1,6 +1,5 @@ // This test checks that we perform dead-instruction-elimination on intrinsic functions. 
fn main(x: Field) { - let hash = std::hash::pedersen_commitment([x]); let g1_x = 0x0000000000000000000000000000000000000000000000000000000000000001; let g1_y = 0x0000000000000002cf135e7506a45d632d270d45f1181294833fc48d823f272c; let g1 = std::embedded_curve_ops::EmbeddedCurvePoint { x: g1_x, y: g1_y, is_infinite: false }; diff --git a/noir/noir-repo/test_programs/compile_success_empty/numeric_generics/src/main.nr b/noir/noir-repo/test_programs/compile_success_empty/numeric_generics/src/main.nr index 1e03a382fed..340c18c2a1d 100644 --- a/noir/noir-repo/test_programs/compile_success_empty/numeric_generics/src/main.nr +++ b/noir/noir-repo/test_programs/compile_success_empty/numeric_generics/src/main.nr @@ -36,4 +36,3 @@ fn foo(mut s: MyStruct<2+1>) -> MyStruct<10/2-2> { s.data[0] = s.data[0] + 1; s } - diff --git a/noir/noir-repo/test_programs/compile_success_empty/numeric_generics_explicit/Nargo.toml b/noir/noir-repo/test_programs/compile_success_empty/numeric_generics_explicit/Nargo.toml new file mode 100644 index 00000000000..bc3d43498db --- /dev/null +++ b/noir/noir-repo/test_programs/compile_success_empty/numeric_generics_explicit/Nargo.toml @@ -0,0 +1,7 @@ +[package] +name = "numeric_generics_explicit" +type = "bin" +authors = [""] +compiler_version = ">=0.30.0" + +[dependencies] \ No newline at end of file diff --git a/noir/noir-repo/test_programs/compile_success_empty/numeric_generics_explicit/src/main.nr b/noir/noir-repo/test_programs/compile_success_empty/numeric_generics_explicit/src/main.nr new file mode 100644 index 00000000000..7c4f7761ff6 --- /dev/null +++ b/noir/noir-repo/test_programs/compile_success_empty/numeric_generics_explicit/src/main.nr @@ -0,0 +1,111 @@ +// Regression that a global of the same name does not trigger a duplicate definition error +global N = 1000; + +fn main() { + let a = id([1, 2]); + let b = id([1, 2, 3]); + + let itWorks1 = MyStruct { data: a }; + assert(itWorks1.data[1] == 2); + let itWorks2 = MyStruct { data: b }; + assert(itWorks2.data[1] == 2); + + let c = [1, 2]; + let itAlsoWorks = MyStruct { data: c }; + assert(itAlsoWorks.data[1] == 2); + + assert(foo(itWorks2).data[0] == itWorks2.data[0] + 1); + + double_numeric_generics_test(); + + let my_type = PublicStorage::read::(); + assert(my_type.a == 1); + assert(my_type.b == 2); + assert(my_type.c == 3); + + let foo = baz::<10>(); + assert(foo.data == [1; 10]); +} + +// Used in the signature of a function +fn id(x: [Field; I]) -> [Field; I] { + x +} + +// Used as a field of a struct +struct MyStruct { + data: [Field; S], +} + +// Used in an impl +impl MyStruct { + fn insert(mut self: Self, index: Field, elem: Field) -> Self { + // Regression test for numeric generics on impls + assert(index as u32 < S); + + self.data[index] = elem; + self + } +} + +fn foo(mut s: MyStruct<2+1>) -> MyStruct<10/2-2> { + s.data[0] = s.data[0] + 1; + s +} + +fn baz() -> MyStruct { + MyStruct { data: [1; N] } +} + +fn double() -> u32 { + // Used as an expression + N * 2 +} + +fn double_numeric_generics_test() { + // Example usage of a numeric generic arguments. 
+ assert(double::<9>() == 18); + assert(double::<123>() == 246); + assert(double::<7 + 8>() == 30); +} + +struct MyType { + a: Field, + b: Field, + c: Field, +} + +impl Deserialize for MyType { + fn deserialize(fields: [Field; N]) -> Self { + MyType { a: fields[0], b: fields[1], c: fields[2] } + } +} + +trait Deserialize { + fn deserialize(fields: [Field; N]) -> Self; +} + +struct PublicStorage {} + +impl PublicStorage { + fn read() -> T where T: Deserialize { + // Used as a type within a function body + let mut fields: [Field; N] = [0; N]; + // Used a loop bound + for i in 0..N { + fields[i] = i as Field + 1; + } + T::deserialize(fields) + } +} + +// Check that we can thread numeric generics into nested structs +// and also that we can handle nested structs with numeric generics +// which are declared after the parent struct +struct NestedNumeric { + a: Field, + b: InnerNumeric +} +struct InnerNumeric { + inner: [u32; N], +} diff --git a/noir/noir-repo/test_programs/compile_success_empty/reexports/src/main.nr b/noir/noir-repo/test_programs/compile_success_empty/reexports/src/main.nr index ed469ff77d0..0fd65a33564 100644 --- a/noir/noir-repo/test_programs/compile_success_empty/reexports/src/main.nr +++ b/noir/noir-repo/test_programs/compile_success_empty/reexports/src/main.nr @@ -1,4 +1,4 @@ -use dep::reexporting_lib::{FooStruct, MyStruct, lib}; +use reexporting_lib::{FooStruct, MyStruct, lib}; fn main() { let x: FooStruct = MyStruct { inner: 0 }; diff --git a/noir/noir-repo/test_programs/compile_success_empty/regression_4635/src/main.nr b/noir/noir-repo/test_programs/compile_success_empty/regression_4635/src/main.nr index 23918e30785..350b60ba3f7 100644 --- a/noir/noir-repo/test_programs/compile_success_empty/regression_4635/src/main.nr +++ b/noir/noir-repo/test_programs/compile_success_empty/regression_4635/src/main.nr @@ -8,7 +8,7 @@ impl FromField for Field { } } -trait Deserialize { +trait Deserialize { fn deserialize(fields: [Field; N]) -> Self; } diff --git a/noir/noir-repo/test_programs/compile_success_empty/trait_generics/src/main.nr b/noir/noir-repo/test_programs/compile_success_empty/trait_generics/src/main.nr index 30b2e79d579..56ce7e8970c 100644 --- a/noir/noir-repo/test_programs/compile_success_empty/trait_generics/src/main.nr +++ b/noir/noir-repo/test_programs/compile_success_empty/trait_generics/src/main.nr @@ -29,7 +29,7 @@ impl MyInto for Field { /// Serialize example -trait Serializable { +trait Serializable { fn serialize(self) -> [Field; N]; } diff --git a/noir/noir-repo/test_programs/compile_success_empty/impl_with_where_clause/Nargo.toml b/noir/noir-repo/test_programs/compile_success_empty/trait_impl_with_where_clause/Nargo.toml similarity index 58% rename from noir/noir-repo/test_programs/compile_success_empty/impl_with_where_clause/Nargo.toml rename to noir/noir-repo/test_programs/compile_success_empty/trait_impl_with_where_clause/Nargo.toml index ef9bdce2640..672569634ea 100644 --- a/noir/noir-repo/test_programs/compile_success_empty/impl_with_where_clause/Nargo.toml +++ b/noir/noir-repo/test_programs/compile_success_empty/trait_impl_with_where_clause/Nargo.toml @@ -1,5 +1,5 @@ [package] -name = "impl_with_where_clause" +name = "trait_impl_with_where_clause" type = "bin" authors = [""] diff --git a/noir/noir-repo/test_programs/compile_success_empty/impl_with_where_clause/src/main.nr b/noir/noir-repo/test_programs/compile_success_empty/trait_impl_with_where_clause/src/main.nr similarity index 100% rename from 
noir/noir-repo/test_programs/compile_success_empty/impl_with_where_clause/src/main.nr rename to noir/noir-repo/test_programs/compile_success_empty/trait_impl_with_where_clause/src/main.nr diff --git a/noir/noir-repo/test_programs/compile_success_empty/workspace_reexport_bug/binary/src/main.nr b/noir/noir-repo/test_programs/compile_success_empty/workspace_reexport_bug/binary/src/main.nr index ab0ae9a48b8..a4207794a8a 100644 --- a/noir/noir-repo/test_programs/compile_success_empty/workspace_reexport_bug/binary/src/main.nr +++ b/noir/noir-repo/test_programs/compile_success_empty/workspace_reexport_bug/binary/src/main.nr @@ -1,2 +1,2 @@ -use dep::library::ReExportMeFromAnotherLib; +use library::ReExportMeFromAnotherLib; fn main(_x: ReExportMeFromAnotherLib) {} diff --git a/noir/noir-repo/test_programs/compile_success_empty/workspace_reexport_bug/library/src/lib.nr b/noir/noir-repo/test_programs/compile_success_empty/workspace_reexport_bug/library/src/lib.nr index 8e84662ed03..e3a1539ea65 100644 --- a/noir/noir-repo/test_programs/compile_success_empty/workspace_reexport_bug/library/src/lib.nr +++ b/noir/noir-repo/test_programs/compile_success_empty/workspace_reexport_bug/library/src/lib.nr @@ -1,2 +1,2 @@ // Re-export -use dep::library2::ReExportMeFromAnotherLib; +use library2::ReExportMeFromAnotherLib; diff --git a/noir/noir-repo/test_programs/execution_failure/div_by_zero_numerator_witness/src/main.nr b/noir/noir-repo/test_programs/execution_failure/div_by_zero_numerator_witness/src/main.nr index 7c6cae4932e..012e823b297 100644 --- a/noir/noir-repo/test_programs/execution_failure/div_by_zero_numerator_witness/src/main.nr +++ b/noir/noir-repo/test_programs/execution_failure/div_by_zero_numerator_witness/src/main.nr @@ -1,4 +1,3 @@ - fn main(x: Field) { let a: Field = x / 0; std::println(a); diff --git a/noir/noir-repo/test_programs/execution_success/diamond_deps_0/src/main.nr b/noir/noir-repo/test_programs/execution_success/diamond_deps_0/src/main.nr index ca95c6e0aa8..690d6fc9fc8 100644 --- a/noir/noir-repo/test_programs/execution_success/diamond_deps_0/src/main.nr +++ b/noir/noir-repo/test_programs/execution_success/diamond_deps_0/src/main.nr @@ -1,6 +1,6 @@ -use dep::dep1::call_dep1_then_dep2; -use dep::dep2::call_dep2; -use dep::dep2::RESOLVE_THIS; +use dep1::call_dep1_then_dep2; +use dep2::call_dep2; +use dep2::RESOLVE_THIS; fn main(x: Field, y: pub Field) -> pub Field { call_dep1_then_dep2(x, y) + call_dep2(x, y) + RESOLVE_THIS diff --git a/noir/noir-repo/test_programs/execution_success/hashmap/src/main.nr b/noir/noir-repo/test_programs/execution_success/hashmap/src/main.nr index 56b13d6779b..8cf70cc5970 100644 --- a/noir/noir-repo/test_programs/execution_success/hashmap/src/main.nr +++ b/noir/noir-repo/test_programs/execution_success/hashmap/src/main.nr @@ -3,6 +3,7 @@ mod utils; use std::collections::map::HashMap; use std::hash::BuildHasherDefault; use std::hash::poseidon2::Poseidon2Hasher; +use std::cmp::Eq; use utils::cut; diff --git a/noir/noir-repo/test_programs/execution_success/regression_4088/src/main.nr b/noir/noir-repo/test_programs/execution_success/regression_4088/src/main.nr index 9e4d7892fc3..12a7afca68c 100644 --- a/noir/noir-repo/test_programs/execution_success/regression_4088/src/main.nr +++ b/noir/noir-repo/test_programs/execution_success/regression_4088/src/main.nr @@ -1,4 +1,4 @@ -trait Serialize { +trait Serialize { fn serialize(self) -> [Field; N]; } @@ -12,7 +12,7 @@ impl Serialize<1> for ValueNote { } } -fn check(serialized_note: [Field; N]) { +fn 
check(serialized_note: [Field; N]) { assert(serialized_note[0] == 0); } diff --git a/noir/noir-repo/test_programs/execution_success/regression_4124/src/main.nr b/noir/noir-repo/test_programs/execution_success/regression_4124/src/main.nr index 2b0e65a0b6c..6caea017798 100644 --- a/noir/noir-repo/test_programs/execution_success/regression_4124/src/main.nr +++ b/noir/noir-repo/test_programs/execution_success/regression_4124/src/main.nr @@ -1,6 +1,6 @@ use std::option::Option; -trait MyDeserialize { +trait MyDeserialize { fn deserialize(fields: [Field; N]) -> Self; } @@ -10,7 +10,7 @@ impl MyDeserialize<1> for Field { } } -pub fn storage_read() -> [Field; N] { +pub fn storage_read() -> [Field; N] { std::unsafe::zeroed() } diff --git a/noir/noir-repo/test_programs/execution_success/slices/src/main.nr b/noir/noir-repo/test_programs/execution_success/slices/src/main.nr index 8be79cdc3c4..2bd4dbd97b0 100644 --- a/noir/noir-repo/test_programs/execution_success/slices/src/main.nr +++ b/noir/noir-repo/test_programs/execution_success/slices/src/main.nr @@ -45,6 +45,14 @@ fn main(x: Field, y: pub Field) { assert(append[0] == 1); assert(append[4] == 5); + let mapped = &[1, 2].map(|x| x + 1); + assert_eq(mapped, &[2, 3]); + + assert_eq(&[1, 2, 3].fold(0, |acc, x| acc + x), 6); + assert_eq(&[1, 2, 3].reduce(|acc, x| acc + x), 6); + assert(&[2, 4, 6].all(|x| x > 0)); + assert(&[2, 4, 6].any(|x| x > 5)); + regression_2083(); // The parameters to this function must come from witness values (inputs to main) regression_merge_slices(x, y); diff --git a/noir/noir-repo/test_programs/execution_success/traits_in_crates_1/crate1/src/lib.nr b/noir/noir-repo/test_programs/execution_success/traits_in_crates_1/crate1/src/lib.nr index 62dd5a2c111..e36a263093a 100644 --- a/noir/noir-repo/test_programs/execution_success/traits_in_crates_1/crate1/src/lib.nr +++ b/noir/noir-repo/test_programs/execution_success/traits_in_crates_1/crate1/src/lib.nr @@ -2,7 +2,7 @@ trait MyTrait { fn Add10(&mut self); } -impl MyTrait for dep::crate2::MyStruct { +impl MyTrait for crate2::MyStruct { fn Add10(&mut self) { self.Q += 10; } diff --git a/noir/noir-repo/test_programs/execution_success/traits_in_crates_1/src/main.nr b/noir/noir-repo/test_programs/execution_success/traits_in_crates_1/src/main.nr index 7ba2f63c5c0..2afec29ee1f 100644 --- a/noir/noir-repo/test_programs/execution_success/traits_in_crates_1/src/main.nr +++ b/noir/noir-repo/test_programs/execution_success/traits_in_crates_1/src/main.nr @@ -1,5 +1,5 @@ fn main(x: Field, y: pub Field) { - let mut V = dep::crate2::MyStruct { Q: x }; + let mut V = crate2::MyStruct { Q: x }; V.Add10(); assert(V.Q == y); } diff --git a/noir/noir-repo/test_programs/execution_success/traits_in_crates_2/crate2/src/lib.nr b/noir/noir-repo/test_programs/execution_success/traits_in_crates_2/crate2/src/lib.nr index 38870489131..fe6a94a4a95 100644 --- a/noir/noir-repo/test_programs/execution_success/traits_in_crates_2/crate2/src/lib.nr +++ b/noir/noir-repo/test_programs/execution_success/traits_in_crates_2/crate2/src/lib.nr @@ -2,7 +2,7 @@ struct MyStruct { Q: Field, } -impl dep::crate1::MyTrait for MyStruct { +impl crate1::MyTrait for MyStruct { fn Add10(&mut self) { self.Q += 10; } diff --git a/noir/noir-repo/test_programs/execution_success/traits_in_crates_2/src/main.nr b/noir/noir-repo/test_programs/execution_success/traits_in_crates_2/src/main.nr index 7ba2f63c5c0..2afec29ee1f 100644 --- a/noir/noir-repo/test_programs/execution_success/traits_in_crates_2/src/main.nr +++ 
b/noir/noir-repo/test_programs/execution_success/traits_in_crates_2/src/main.nr @@ -1,5 +1,5 @@ fn main(x: Field, y: pub Field) { - let mut V = dep::crate2::MyStruct { Q: x }; + let mut V = crate2::MyStruct { Q: x }; V.Add10(); assert(V.Q == y); } diff --git a/noir/noir-repo/test_programs/execution_success/verify_honk_proof/Nargo.toml b/noir/noir-repo/test_programs/execution_success/verify_honk_proof/Nargo.toml index d298dabb560..8fce1bf44b6 100644 --- a/noir/noir-repo/test_programs/execution_success/verify_honk_proof/Nargo.toml +++ b/noir/noir-repo/test_programs/execution_success/verify_honk_proof/Nargo.toml @@ -2,6 +2,5 @@ name = "verify_honk_proof" type = "bin" authors = [""] -compiler_version = ">=0.30.0" -[dependencies] \ No newline at end of file +[dependencies] diff --git a/noir/noir-repo/test_programs/noir_test_failure/should_fail_mismatch/src/main.nr b/noir/noir-repo/test_programs/noir_test_failure/should_fail_mismatch/src/main.nr index 08a9234a752..59b99c85c0b 100644 --- a/noir/noir-repo/test_programs/noir_test_failure/should_fail_mismatch/src/main.nr +++ b/noir/noir-repo/test_programs/noir_test_failure/should_fail_mismatch/src/main.nr @@ -2,14 +2,9 @@ fn test_different_string() { assert_eq(0, 1, "Different string"); } -// The assert message has a space -#[test(should_fail_with = "Not equal")] -fn test_with_extra_space() { - assert_eq(0, 1, "Not equal "); -} -// The assert message has a space -#[test(should_fail_with = "Not equal")] -fn test_runtime_mismatch() { - // We use a pedersen commitment here so that the assertion failure is only known at runtime. - assert_eq(std::hash::pedersen_commitment([27]).x, 0, "Not equal "); + +// The failure reason is a substring of the expected message, but it should be the other way around +#[test(should_fail_with = "Definitely Not equal!")] +fn test_wrong_expectation() { + assert_eq(0, 1, "Not equal"); } diff --git a/noir/noir-repo/test_programs/noir_test_success/comptime_globals/src/main.nr b/noir/noir-repo/test_programs/noir_test_success/comptime_globals/src/main.nr index efe9f0742b9..95c54b96609 100644 --- a/noir/noir-repo/test_programs/noir_test_success/comptime_globals/src/main.nr +++ b/noir/noir-repo/test_programs/noir_test_success/comptime_globals/src/main.nr @@ -5,7 +5,7 @@ comptime global FOO: Field = foo(); // Due to this function's mutability and branching, SSA currently fails // to fold this function into a constant before the assert_constant check // is evaluated before loop unrolling. 
-fn foo() -> Field { +comptime fn foo() -> Field { let mut three = 3; if three == 3 { 5 } else { 6 } } diff --git a/noir/noir-repo/test_programs/noir_test_success/should_fail_with_matches/src/main.nr b/noir/noir-repo/test_programs/noir_test_success/should_fail_with_matches/src/main.nr index b713976643b..42696762ffe 100644 --- a/noir/noir-repo/test_programs/noir_test_success/should_fail_with_matches/src/main.nr +++ b/noir/noir-repo/test_programs/noir_test_success/should_fail_with_matches/src/main.nr @@ -3,6 +3,11 @@ fn test_should_fail_with_match() { assert_eq(0, 1, "Not equal"); } +#[test(should_fail_with = "Not equal")] +fn test_should_fail_with_match_partial_match() { + assert_eq(0, 1, "Definitely Not equal!"); +} + #[test(should_fail)] fn test_should_fail_without_match() { assert_eq(0, 1); @@ -48,6 +53,11 @@ unconstrained fn unconstrained_test_should_fail_with_match() { assert_eq(0, 1, "Not equal"); } +#[test(should_fail_with = "Not equal")] +unconstrained fn unconstrained_test_should_fail_with_match_partial_match() { + assert_eq(0, 1, "Definitely Not equal!"); +} + #[test(should_fail)] unconstrained fn unconstrained_test_should_fail_without_match() { assert_eq(0, 1); diff --git a/noir/noir-repo/test_programs/test_libraries/diamond_deps_1/src/lib.nr b/noir/noir-repo/test_programs/test_libraries/diamond_deps_1/src/lib.nr index 60c001ec64e..d76ce5a05e9 100644 --- a/noir/noir-repo/test_programs/test_libraries/diamond_deps_1/src/lib.nr +++ b/noir/noir-repo/test_programs/test_libraries/diamond_deps_1/src/lib.nr @@ -1,4 +1,4 @@ -use dep::dep2::call_dep2; +use dep2::call_dep2; pub fn call_dep1_then_dep2(x: Field, y: Field) -> Field { call_dep2(x, y) diff --git a/noir/noir-repo/test_programs/test_libraries/reexporting_lib/src/lib.nr b/noir/noir-repo/test_programs/test_libraries/reexporting_lib/src/lib.nr index f12dfe01ecd..1bced548304 100644 --- a/noir/noir-repo/test_programs/test_libraries/reexporting_lib/src/lib.nr +++ b/noir/noir-repo/test_programs/test_libraries/reexporting_lib/src/lib.nr @@ -1,3 +1,3 @@ -use dep::exporting_lib::{MyStruct, FooStruct}; +use exporting_lib::{MyStruct, FooStruct}; -use dep::exporting_lib as lib; +use exporting_lib as lib; diff --git a/noir/noir-repo/tooling/acvm_cli/Cargo.toml b/noir/noir-repo/tooling/acvm_cli/Cargo.toml index a592f2d65f3..1cfd1f3b270 100644 --- a/noir/noir-repo/tooling/acvm_cli/Cargo.toml +++ b/noir/noir-repo/tooling/acvm_cli/Cargo.toml @@ -20,7 +20,7 @@ path = "src/main.rs" [dependencies] thiserror.workspace = true toml.workspace = true -color-eyre.workspace = true +color-eyre = "0.6.2" clap.workspace = true acvm.workspace = true nargo.workspace = true diff --git a/noir/noir-repo/tooling/nargo/src/ops/test.rs b/noir/noir-repo/tooling/nargo/src/ops/test.rs index ace2e9f0d0c..18c6f2530b9 100644 --- a/noir/noir-repo/tooling/nargo/src/ops/test.rs +++ b/noir/noir-repo/tooling/nargo/src/ops/test.rs @@ -128,7 +128,7 @@ fn check_expected_failure_message( }; let expected_failure_message_matches = - matches!(&failed_assertion, Some(message) if message == expected_failure_message); + matches!(&failed_assertion, Some(message) if message.contains(expected_failure_message)); if expected_failure_message_matches { return TestStatus::Pass; } diff --git a/noir/noir-repo/tooling/nargo_cli/Cargo.toml b/noir/noir-repo/tooling/nargo_cli/Cargo.toml index e0e54449a6f..b9d7d7e3e48 100644 --- a/noir/noir-repo/tooling/nargo_cli/Cargo.toml +++ b/noir/noir-repo/tooling/nargo_cli/Cargo.toml @@ -43,16 +43,11 @@ prettytable-rs = "0.10" rayon = "1.8.0" thiserror.workspace = 
true tower.workspace = true -async-lsp = { workspace = true, features = [ - "client-monitor", - "stdio", - "tracing", - "tokio", -] } +async-lsp = { workspace = true, features = ["client-monitor", "stdio", "tracing", "tokio"] } const_format.workspace = true similar-asserts.workspace = true termcolor = "1.1.2" -color-eyre.workspace = true +color-eyre = "0.6.2" tokio = { version = "1.0", features = ["io-std", "rt"] } dap.workspace = true clap-markdown = { git = "https://github.com/noir-lang/clap-markdown", rev = "450d759532c88f0dba70891ceecdbc9ff8f25d2b", optional = true } diff --git a/noir/noir-repo/tooling/nargo_cli/benches/criterion.rs b/noir/noir-repo/tooling/nargo_cli/benches/criterion.rs index 9f67bcffd6e..effab7d7c27 100644 --- a/noir/noir-repo/tooling/nargo_cli/benches/criterion.rs +++ b/noir/noir-repo/tooling/nargo_cli/benches/criterion.rs @@ -28,16 +28,10 @@ macro_rules! criterion_command { }; } criterion_command!(execution, "execute"); -criterion_command!(prove, "prove"); criterion_group! { name = execution_benches; config = Criterion::default().sample_size(20).measurement_time(Duration::from_secs(20)).with_profiler(PProfProfiler::new(100, Output::Flamegraph(None))); targets = criterion_selected_tests_execution } -criterion_group! { - name = prove_benches; - config = Criterion::default().sample_size(10).measurement_time(Duration::from_secs(20)).with_profiler(PProfProfiler::new(100, Output::Flamegraph(None))); - targets = criterion_selected_tests_prove -} -criterion_main!(execution_benches, prove_benches); +criterion_main!(execution_benches); diff --git a/noir/noir-repo/tooling/nargo_cli/build.rs b/noir/noir-repo/tooling/nargo_cli/build.rs index f2da161267d..a6873910524 100644 --- a/noir/noir-repo/tooling/nargo_cli/build.rs +++ b/noir/noir-repo/tooling/nargo_cli/build.rs @@ -61,8 +61,13 @@ const IGNORED_BRILLIG_TESTS: [&str; 11] = [ /// Certain features are only available in the elaborator. /// We skip these tests for non-elaborator code since they are not /// expected to work there. This can be removed once the old code is removed. 
-const IGNORED_NEW_FEATURE_TESTS: [&str; 3] = - ["macros", "wildcard_type", "type_definition_annotation"]; +const IGNORED_NEW_FEATURE_TESTS: [&str; 5] = [ + "macros", + "wildcard_type", + "type_definition_annotation", + "numeric_generics_explicit", + "derive_impl", +]; fn read_test_cases( test_data_dir: &Path, diff --git a/noir/noir-repo/tooling/nargo_cli/tests/stdlib-tests.rs b/noir/noir-repo/tooling/nargo_cli/tests/stdlib-tests.rs index 7fd396d6961..bf6614860e2 100644 --- a/noir/noir-repo/tooling/nargo_cli/tests/stdlib-tests.rs +++ b/noir/noir-repo/tooling/nargo_cli/tests/stdlib-tests.rs @@ -32,7 +32,7 @@ fn run_stdlib_tests() { let (mut context, dummy_crate_id) = prepare_package(&file_manager, &parsed_files, &dummy_package); - let result = check_crate(&mut context, dummy_crate_id, true, false, false); + let result = check_crate(&mut context, dummy_crate_id, false, false, false); report_errors(result, &context.file_manager, true, false) .expect("Error encountered while compiling standard library"); diff --git a/noir/noir-repo/tooling/nargo_fmt/src/items.rs b/noir/noir-repo/tooling/nargo_fmt/src/items.rs index 7f998f45b59..80b641fd830 100644 --- a/noir/noir-repo/tooling/nargo_fmt/src/items.rs +++ b/noir/noir-repo/tooling/nargo_fmt/src/items.rs @@ -74,7 +74,8 @@ impl<'me, T> Items<'me, T> { let mut different_line = false; let leading = self.visitor.slice(start..end); - let leading_trimmed = leading.trim(); + // Trim any possible whitespace before and after a comma separator + let leading_trimmed = leading.trim().trim_start_matches(',').trim(); let starts_with_block_comment = leading_trimmed.starts_with("/*"); let ends_with_block_comment = leading_trimmed.ends_with("*/"); diff --git a/noir/noir-repo/tooling/nargo_fmt/src/rewrite/typ.rs b/noir/noir-repo/tooling/nargo_fmt/src/rewrite/typ.rs index 3eb398346c3..3298ed8ae73 100644 --- a/noir/noir-repo/tooling/nargo_fmt/src/rewrite/typ.rs +++ b/noir/noir-repo/tooling/nargo_fmt/src/rewrite/typ.rs @@ -55,6 +55,10 @@ pub(crate) fn rewrite(visitor: &FmtVisitor, _shape: Shape, typ: UnresolvedType) format!("fn{env}({args}) -> {return_type}") } + UnresolvedTypeData::Resolved(_) => { + unreachable!("Unexpected macro expansion of a type in nargo fmt input") + } + UnresolvedTypeData::Unspecified => todo!(), UnresolvedTypeData::FieldElement | UnresolvedTypeData::Integer(_, _) diff --git a/noir/noir-repo/tooling/nargo_fmt/src/utils.rs b/noir/noir-repo/tooling/nargo_fmt/src/utils.rs index 2c5c3085e66..020f411ae2f 100644 --- a/noir/noir-repo/tooling/nargo_fmt/src/utils.rs +++ b/noir/noir-repo/tooling/nargo_fmt/src/utils.rs @@ -3,7 +3,7 @@ use std::borrow::Cow; use crate::items::HasItem; use crate::rewrite; use crate::visitor::{FmtVisitor, Shape}; -use noirc_frontend::ast::{Expression, Ident, Param, Visibility}; +use noirc_frontend::ast::{Expression, Ident, Param, UnresolvedGeneric, Visibility}; use noirc_frontend::hir::resolution::errors::Span; use noirc_frontend::lexer::Lexer; use noirc_frontend::token::Token; @@ -80,6 +80,7 @@ pub(crate) fn find_comment_end(slice: &str, is_last: bool) -> usize { std::cmp::max(find_comment_end(slice) + block, separator_index + 1) } (_, Some(newline)) if newline > separator_index => newline + 1, + (None, None) => 0, _ => slice.len(), } } else if let Some(newline_index) = newline_index { @@ -170,6 +171,26 @@ impl HasItem for Ident { } } +impl HasItem for UnresolvedGeneric { + fn span(&self) -> Span { + self.span() + } + + fn format(self, visitor: &FmtVisitor, _shape: Shape) -> String { + match self { + 
UnresolvedGeneric::Variable(_) => visitor.slice(self.span()).into(), + UnresolvedGeneric::Numeric { ident, typ } => { + let mut result = "".to_owned(); + result.push_str(&ident.0.contents); + result.push_str(": "); + let typ = rewrite::typ(visitor, _shape, typ); + result.push_str(&typ); + result + } + } + } +} + pub(crate) fn first_line_width(exprs: &str) -> usize { exprs.lines().next().map_or(0, |line: &str| line.chars().count()) } diff --git a/noir/noir-repo/tooling/nargo_fmt/src/visitor/item.rs b/noir/noir-repo/tooling/nargo_fmt/src/visitor/item.rs index 3cfee4f46ad..5aaaf20ff47 100644 --- a/noir/noir-repo/tooling/nargo_fmt/src/visitor/item.rs +++ b/noir/noir-repo/tooling/nargo_fmt/src/visitor/item.rs @@ -188,8 +188,8 @@ impl super::FmtVisitor<'_> { continue; } - let slice = - self.slice(self.last_position..impl_.object_type.span.unwrap().end()); + let before_brace = self.span_before(span, Token::LeftBrace).start(); + let slice = self.slice(self.last_position..before_brace).trim(); let after_brace = self.span_after(span, Token::LeftBrace).start(); self.last_position = after_brace; diff --git a/noir/noir-repo/tooling/nargo_fmt/tests/expected/contract.nr b/noir/noir-repo/tooling/nargo_fmt/tests/expected/contract.nr index 14b1af4a848..e3a5877725a 100644 --- a/noir/noir-repo/tooling/nargo_fmt/tests/expected/contract.nr +++ b/noir/noir-repo/tooling/nargo_fmt/tests/expected/contract.nr @@ -3,11 +3,11 @@ // Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. // Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum. contract Benchmarking { - use dep::aztec::protocol_types::abis::function_selector::FunctionSelector; + use aztec::protocol_types::abis::function_selector::FunctionSelector; use value_note::{utils::{increment, decrement}, value_note::{VALUE_NOTE_LEN, ValueNote, ValueNoteMethods}}; - use dep::aztec::{ + use aztec::{ context::Context, note::{note_getter_options::NoteGetterOptions, note_header::NoteHeader}, log::emit_unencrypted_log, state_vars::{Map, PublicMutable, PrivateSet}, types::type_serialization::field_serialization::{FieldSerializationMethods, FIELD_SERIALIZED_LEN}, diff --git a/noir/noir-repo/tooling/nargo_fmt/tests/expected/fn.nr b/noir/noir-repo/tooling/nargo_fmt/tests/expected/fn.nr index 961e67faf1c..4dde9a1b3ec 100644 --- a/noir/noir-repo/tooling/nargo_fmt/tests/expected/fn.nr +++ b/noir/noir-repo/tooling/nargo_fmt/tests/expected/fn.nr @@ -62,6 +62,10 @@ fn main( pub fn from_baz(x: [Field; crate::foo::MAGIC_NUMBER]) {} +fn id(x: [Field; I]) -> [Field; I] {} + +fn id_two(x: [Field; I]) -> [Field; I] {} + fn whitespace_before_generics(foo: T) {} fn more_whitespace_before_generics(foo: T) {} diff --git a/noir/noir-repo/tooling/nargo_fmt/tests/expected/impl.nr b/noir/noir-repo/tooling/nargo_fmt/tests/expected/impl.nr index ec734b57970..3c2fa42837a 100644 --- a/noir/noir-repo/tooling/nargo_fmt/tests/expected/impl.nr +++ b/noir/noir-repo/tooling/nargo_fmt/tests/expected/impl.nr @@ -19,3 +19,9 @@ impl MyType { impl MyType { fn method(self) {} } + +impl MyStruct where T: MyEq { + fn my_eq(self, other: Self) -> bool { + (self.a == other.a) & self.b.my_eq(other.b) + } +} diff --git a/noir/noir-repo/tooling/nargo_fmt/tests/expected/let.nr b/noir/noir-repo/tooling/nargo_fmt/tests/expected/let.nr index 7ff69e74306..0edc0eaf922 100644 --- a/noir/noir-repo/tooling/nargo_fmt/tests/expected/let.nr +++ b/noir/noir-repo/tooling/nargo_fmt/tests/expected/let.nr @@ -51,10 +51,10 @@ fn 
let_() { let expr = MyExpr { /*A boolean literal (true, false).*/ kind: ExprKind::Bool(true) }; - let mut V = dep::crate2::MyStruct { Q: x }; - let mut V = dep::crate2::MyStruct {}; - let mut V = dep::crate2::MyStruct {/*test*/}; - let mut V = dep::crate2::MyStruct { + let mut V = crate2::MyStruct { Q: x }; + let mut V = crate2::MyStruct {}; + let mut V = crate2::MyStruct {/*test*/}; + let mut V = crate2::MyStruct { // sad }; } diff --git a/noir/noir-repo/tooling/nargo_fmt/tests/input/contract.nr b/noir/noir-repo/tooling/nargo_fmt/tests/input/contract.nr index 14b1af4a848..e3a5877725a 100644 --- a/noir/noir-repo/tooling/nargo_fmt/tests/input/contract.nr +++ b/noir/noir-repo/tooling/nargo_fmt/tests/input/contract.nr @@ -3,11 +3,11 @@ // Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. // Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum. contract Benchmarking { - use dep::aztec::protocol_types::abis::function_selector::FunctionSelector; + use aztec::protocol_types::abis::function_selector::FunctionSelector; use value_note::{utils::{increment, decrement}, value_note::{VALUE_NOTE_LEN, ValueNote, ValueNoteMethods}}; - use dep::aztec::{ + use aztec::{ context::Context, note::{note_getter_options::NoteGetterOptions, note_header::NoteHeader}, log::emit_unencrypted_log, state_vars::{Map, PublicMutable, PrivateSet}, types::type_serialization::field_serialization::{FieldSerializationMethods, FIELD_SERIALIZED_LEN}, diff --git a/noir/noir-repo/tooling/nargo_fmt/tests/input/fn.nr b/noir/noir-repo/tooling/nargo_fmt/tests/input/fn.nr index 03806b0fef9..16ed95a540d 100644 --- a/noir/noir-repo/tooling/nargo_fmt/tests/input/fn.nr +++ b/noir/noir-repo/tooling/nargo_fmt/tests/input/fn.nr @@ -45,6 +45,11 @@ fn main( pub fn from_baz(x: [Field; crate::foo::MAGIC_NUMBER]) {} +fn id< T , let I : Field > ( x : [ Field ; I ] ) -> [Field; I ] { } + +fn id_two(x: [Field ; I]) -> [ Field; I] {} + fn whitespace_before_generics < T > (foo: T) {} fn more_whitespace_before_generics < diff --git a/noir/noir-repo/tooling/nargo_fmt/tests/input/impl.nr b/noir/noir-repo/tooling/nargo_fmt/tests/input/impl.nr index ea909dfad44..21ce6a2e175 100644 --- a/noir/noir-repo/tooling/nargo_fmt/tests/input/impl.nr +++ b/noir/noir-repo/tooling/nargo_fmt/tests/input/impl.nr @@ -19,3 +19,9 @@ fn method(self) {} impl MyType { fn method(self) {} } + +impl MyStruct where T: MyEq { + fn my_eq(self, other: Self) -> bool { + (self.a == other.a) & self.b.my_eq(other.b) + } +} diff --git a/noir/noir-repo/tooling/nargo_fmt/tests/input/let.nr b/noir/noir-repo/tooling/nargo_fmt/tests/input/let.nr index 37cdc6655c7..16ce0a9d7f1 100644 --- a/noir/noir-repo/tooling/nargo_fmt/tests/input/let.nr +++ b/noir/noir-repo/tooling/nargo_fmt/tests/input/let.nr @@ -26,10 +26,10 @@ kind: ExprKind::Bool(true), let expr = MyExpr {/*A boolean literal (true, false).*/kind: ExprKind::Bool(true),}; - let mut V = dep::crate2::MyStruct { Q: x }; - let mut V = dep::crate2::MyStruct {}; - let mut V = dep::crate2::MyStruct {/*test*/}; - let mut V = dep::crate2::MyStruct { + let mut V = crate2::MyStruct { Q: x }; + let mut V = crate2::MyStruct {}; + let mut V = crate2::MyStruct {/*test*/}; + let mut V = crate2::MyStruct { // sad }; } diff --git a/noir/noir-repo/tooling/noir_js_backend_barretenberg/src/backend.ts b/noir/noir-repo/tooling/noir_js_backend_barretenberg/src/backend.ts index ce2c2712491..96c4d13aa61 100644 --- 
a/noir/noir-repo/tooling/noir_js_backend_barretenberg/src/backend.ts +++ b/noir/noir-repo/tooling/noir_js_backend_barretenberg/src/backend.ts @@ -45,9 +45,10 @@ export class BarretenbergVerifierBackend implements VerifierBackend { const { Barretenberg, RawBuffer, Crs } = await import('@aztec/bb.js'); const api = await Barretenberg.new(this.options); + const honkRecursion = false; const [_exact, _total, subgroupSize] = await api.acirGetCircuitSizes( this.acirUncompressedBytecode, - /*honkRecursion=*/ false, // TODO(https://github.com/AztecProtocol/barretenberg/issues/1013): Remove this flag + honkRecursion, ); const crs = await Crs.new(subgroupSize + 1); await api.commonInitSlabAllocator(subgroupSize); From 77c304ee70de3cf47f68b45c35c776a31d61af46 Mon Sep 17 00:00:00 2001 From: esau <152162806+sklppy88@users.noreply.github.com> Date: Fri, 28 Jun 2024 22:36:15 +0200 Subject: [PATCH 16/29] feat: add outgoing keys support to getEvents (#7239) Some considerations is the potential inefficiency with the current impl, but I think the UX is better passing in an array of heterogeneous keys. --- .../contracts/test_log_contract/src/main.nr | 24 +++++- .../aztec.js/src/wallet/base_wallet.ts | 7 +- .../circuit-types/src/interfaces/pxe.ts | 4 +- .../end-to-end/src/e2e_event_logs.test.ts | 78 ++++++++++++++++--- .../pxe/src/pxe_service/pxe_service.ts | 36 ++++++--- 5 files changed, 120 insertions(+), 29 deletions(-) diff --git a/noir-projects/noir-contracts/contracts/test_log_contract/src/main.nr b/noir-projects/noir-contracts/contracts/test_log_contract/src/main.nr index ef63b330603..41f9772ec52 100644 --- a/noir-projects/noir-contracts/contracts/test_log_contract/src/main.nr +++ b/noir-projects/noir-contracts/contracts/test_log_contract/src/main.nr @@ -1,6 +1,9 @@ contract TestLog { use dep::aztec::prelude::PrivateSet; - use dep::aztec::protocol_types::{traits::Serialize, grumpkin_point::GrumpkinPoint, grumpkin_private_key::GrumpkinPrivateKey}; + use dep::aztec::protocol_types::{ + traits::Serialize, grumpkin_point::GrumpkinPoint, grumpkin_private_key::GrumpkinPrivateKey, + address::AztecAddress + }; use dep::value_note::value_note::ValueNote; use dep::aztec::encrypted_logs::incoming_body::EncryptedLogIncomingBody; use dep::aztec::event::event_interface::EventInterface; @@ -42,25 +45,38 @@ contract TestLog { } #[aztec(private)] - fn emit_encrypted_events(randomness: [Field; 2], preimages: [Field; 4]) { + fn emit_encrypted_events(other: AztecAddress, randomness: [Field; 2], preimages: [Field; 4]) { let event0 = ExampleEvent0 { value0: preimages[0], value1: preimages[1] }; event0.emit( encode_and_encrypt_event( &mut context, randomness[0], - context.msg_sender(), + // outgoing is set to other, incoming is set to msg sender + other, context.msg_sender() ) ); + // We duplicate the emission, but specifying different incoming and outgoing parties + event0.emit( + encode_and_encrypt_event( + &mut context, + randomness[0], + // outgoing is set to msg sender, incoming is set to other + context.msg_sender(), + other + ) + ); + let event1 = ExampleEvent1 { value2: preimages[2], value3: preimages[3] }; event1.emit( encode_and_encrypt_event( &mut context, randomness[1], - context.msg_sender(), + // outgoing is set to other, incoming is set to msg sender + other, context.msg_sender() ) ); diff --git a/yarn-project/aztec.js/src/wallet/base_wallet.ts b/yarn-project/aztec.js/src/wallet/base_wallet.ts index 737247c2270..43b6753efce 100644 --- a/yarn-project/aztec.js/src/wallet/base_wallet.ts +++ 
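For context on the getEvents change described in the commit message above (PATCH 16/29): the call now accepts an array of viewing public keys (incoming and/or outgoing) rather than a single incoming viewing public key. The following is a minimal usage sketch modelled on the e2e test included in this patch; the wallet/contract setup and the fromBlock/numBlocks values are assumptions for illustration, not part of the patch itself.

import { EventType } from '@aztec/aztec.js';

// Sketch only. Assumes `wallet` (an account wallet) and `TestLogContract` are already
// deployed/registered as in the e2e test further below, and that `fromBlock`/`numBlocks`
// describe the block range to scan for logs.
const keys = wallet.getCompleteAddress().publicKeys;
const events = await wallet.getEvents(
  EventType.Encrypted,
  TestLogContract.events.ExampleEvent0,
  fromBlock,
  numBlocks,
  // Any mix of incoming/outgoing viewing public keys may be passed; omitting the
  // argument defaults to both of the wallet's own viewing keys.
  [keys.masterIncomingViewingPublicKey, keys.masterOutgoingViewingPublicKey],
);

The TypeScript diffs that implement this (base_wallet.ts, pxe.ts, e2e_event_logs.test.ts, pxe_service.ts) follow.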
b/yarn-project/aztec.js/src/wallet/base_wallet.ts @@ -187,8 +187,11 @@ export abstract class BaseWallet implements Wallet { eventMetadata: EventMetadata, from: number, limit: number, - ivpk: Point = this.getCompleteAddress().publicKeys.masterIncomingViewingPublicKey, + vpks: Point[] = [ + this.getCompleteAddress().publicKeys.masterIncomingViewingPublicKey, + this.getCompleteAddress().publicKeys.masterOutgoingViewingPublicKey, + ], ) { - return this.pxe.getEvents(type, eventMetadata, from, limit, ivpk); + return this.pxe.getEvents(type, eventMetadata, from, limit, vpks); } } diff --git a/yarn-project/circuit-types/src/interfaces/pxe.ts b/yarn-project/circuit-types/src/interfaces/pxe.ts index 1cdc9d9f6fe..b4e6e6eda2e 100644 --- a/yarn-project/circuit-types/src/interfaces/pxe.ts +++ b/yarn-project/circuit-types/src/interfaces/pxe.ts @@ -386,7 +386,7 @@ export interface PXE { * @param eventMetadata - Identifier of the event. This should be the class generated from the contract. e.g. Contract.events.Event * @param from - The block number to search from. * @param limit - The amount of blocks to search. - * @param ivpk - (Used for encrypted logs only) The incoming viewing public key that corresponds to the incoming viewing secret key that can decrypt the log. + * @param vpks - (Used for encrypted logs only) The viewing (incoming and outgoing) public keys that correspond to the viewing secret keys that can decrypt the log. * @returns - The deserialized events. */ getEvents( @@ -394,7 +394,7 @@ export interface PXE { eventMetadata: EventMetadata, from: number, limit: number, - ivpk: Point, + vpks: Point[], ): Promise; } // docs:end:pxe-interface diff --git a/yarn-project/end-to-end/src/e2e_event_logs.test.ts b/yarn-project/end-to-end/src/e2e_event_logs.test.ts index 8421e87b8dc..d49bef63dd1 100644 --- a/yarn-project/end-to-end/src/e2e_event_logs.test.ts +++ b/yarn-project/end-to-end/src/e2e_event_logs.test.ts @@ -4,6 +4,7 @@ import { EventType, Fr, L1EventPayload, + type PXE, TaggedLog, } from '@aztec/aztec.js'; import { deriveMasterIncomingViewingSecretKey } from '@aztec/circuits.js'; @@ -24,30 +25,36 @@ describe('Logs', () => { let wallets: AccountWalletWithSecretKey[]; let node: AztecNode; + let pxe: PXE; let teardown: () => Promise; beforeAll(async () => { - ({ teardown, wallets, aztecNode: node } = await setup(2)); + ({ teardown, wallets, aztecNode: node, pxe } = await setup(2)); await publicDeployAccounts(wallets[0], wallets.slice(0, 2)); testLogContract = await TestLogContract.deploy(wallets[0]).send().deployed(); + + await pxe.registerRecipient(wallets[1].getCompleteAddress()); }); afterAll(() => teardown()); describe('functionality around emitting an encrypted log', () => { - it('emits multiple events as encrypted logs and decodes a single one manually', async () => { + it('emits multiple events as encrypted logs and decodes them one manually', async () => { const randomness = makeTuple(2, Fr.random); const preimage = makeTuple(4, Fr.random); - const tx = await testLogContract.methods.emit_encrypted_events(randomness, preimage).send().wait(); + const tx = await testLogContract.methods + .emit_encrypted_events(wallets[1].getAddress(), randomness, preimage) + .send() + .wait(); const txEffect = await node.getTxEffect(tx.txHash); const encryptedLogs = txEffect!.encryptedLogs.unrollLogs(); - expect(encryptedLogs.length).toBe(2); + expect(encryptedLogs.length).toBe(3); const decryptedLog0 = TaggedLog.decryptAsIncoming( encryptedLogs[0], @@ -73,7 +80,8 @@ describe('Logs', () => { 
expect(badEvent0).toBe(undefined); const decryptedLog1 = TaggedLog.decryptAsIncoming( - encryptedLogs[1], + // We want to skip the second emitted log as it is irrelevant in this test. + encryptedLogs[2], deriveMasterIncomingViewingSecretKey(wallets[0].getSecretKey()), L1EventPayload, ); @@ -101,14 +109,24 @@ describe('Logs', () => { const preimage = makeTuple(5, makeTuple.bind(undefined, 4, Fr.random)) as Tuple, 5>; let i = 0; - const firstTx = await testLogContract.methods.emit_encrypted_events(randomness[i], preimage[i]).send().wait(); + const firstTx = await testLogContract.methods + .emit_encrypted_events(wallets[1].getAddress(), randomness[i], preimage[i]) + .send() + .wait(); await Promise.all( [...new Array(3)].map(() => - testLogContract.methods.emit_encrypted_events(randomness[++i], preimage[i]).send().wait(), + testLogContract.methods + .emit_encrypted_events(wallets[1].getAddress(), randomness[++i], preimage[i]) + .send() + .wait(), ), ); - const lastTx = await testLogContract.methods.emit_encrypted_events(randomness[++i], preimage[i]).send().wait(); + const lastTx = await testLogContract.methods + .emit_encrypted_events(wallets[1].getAddress(), randomness[++i], preimage[i]) + .send() + .wait(); + // We get all the events we can decrypt with either our incoming or outgoing viewing keys const collectedEvent0s = await wallets[0].getEvents( EventType.Encrypted, TestLogContract.events.ExampleEvent0, @@ -116,23 +134,59 @@ describe('Logs', () => { lastTx.blockNumber! - firstTx.blockNumber! + 1, ); + const collectedEvent0sWithIncoming = await wallets[0].getEvents( + EventType.Encrypted, + TestLogContract.events.ExampleEvent0, + firstTx.blockNumber!, + lastTx.blockNumber! - firstTx.blockNumber! + 1, + // This function can be called specifying the viewing public keys associated with the encrypted event. + [wallets[0].getCompleteAddress().publicKeys.masterIncomingViewingPublicKey], + ); + + const collectedEvent0sWithOutgoing = await wallets[0].getEvents( + EventType.Encrypted, + TestLogContract.events.ExampleEvent0, + firstTx.blockNumber!, + lastTx.blockNumber! - firstTx.blockNumber! + 1, + [wallets[0].getCompleteAddress().publicKeys.masterOutgoingViewingPublicKey], + ); + const collectedEvent1s = await wallets[0].getEvents( EventType.Encrypted, TestLogContract.events.ExampleEvent1, firstTx.blockNumber!, lastTx.blockNumber! - firstTx.blockNumber! + 1, - // This function can also be called specifying the incoming viewing public key associated with the encrypted event. - wallets[0].getCompleteAddress().publicKeys.masterIncomingViewingPublicKey, + [wallets[0].getCompleteAddress().publicKeys.masterIncomingViewingPublicKey], ); - expect(collectedEvent0s.length).toBe(5); + expect(collectedEvent0sWithIncoming.length).toBe(5); + expect(collectedEvent0sWithOutgoing.length).toBe(5); + expect(collectedEvent0s.length).toBe(10); expect(collectedEvent1s.length).toBe(5); + const emptyEvent1s = await wallets[0].getEvents( + EventType.Encrypted, + TestLogContract.events.ExampleEvent1, + firstTx.blockNumber!, + lastTx.blockNumber! - firstTx.blockNumber! + 1, + [wallets[0].getCompleteAddress().publicKeys.masterOutgoingViewingPublicKey], + ); + + expect(emptyEvent1s.length).toBe(0); + const exampleEvent0Sort = (a: ExampleEvent0, b: ExampleEvent0) => (a.value0 > b.value0 ? 
1 : -1); - expect(collectedEvent0s.sort(exampleEvent0Sort)).toStrictEqual( + expect(collectedEvent0sWithIncoming.sort(exampleEvent0Sort)).toStrictEqual( + preimage.map(preimage => ({ value0: preimage[0], value1: preimage[1] })).sort(exampleEvent0Sort), + ); + + expect(collectedEvent0sWithOutgoing.sort(exampleEvent0Sort)).toStrictEqual( preimage.map(preimage => ({ value0: preimage[0], value1: preimage[1] })).sort(exampleEvent0Sort), ); + expect([...collectedEvent0sWithIncoming, ...collectedEvent0sWithOutgoing].sort(exampleEvent0Sort)).toStrictEqual( + collectedEvent0s.sort(exampleEvent0Sort), + ); + const exampleEvent1Sort = (a: ExampleEvent1, b: ExampleEvent1) => (a.value2 > b.value2 ? 1 : -1); expect(collectedEvent1s.sort(exampleEvent1Sort)).toStrictEqual( preimage.map(preimage => ({ value2: preimage[2], value3: preimage[3] })).sort(exampleEvent1Sort), diff --git a/yarn-project/pxe/src/pxe_service/pxe_service.ts b/yarn-project/pxe/src/pxe_service/pxe_service.ts index e9b76b6d07c..87c74997393 100644 --- a/yarn-project/pxe/src/pxe_service/pxe_service.ts +++ b/yarn-project/pxe/src/pxe_service/pxe_service.ts @@ -43,7 +43,7 @@ import { FunctionSelector, encodeArguments, } from '@aztec/foundation/abi'; -import { type Fq, Fr, Point } from '@aztec/foundation/fields'; +import { type Fq, Fr, type Point } from '@aztec/foundation/fields'; import { SerialQueue } from '@aztec/foundation/fifo'; import { type DebugLogger, createDebugLogger } from '@aztec/foundation/log'; import { type KeyStore } from '@aztec/key-store'; @@ -846,7 +846,7 @@ export class PXEService implements PXE { eventMetadata: EventMetadata, from: number, limit: number, - ivpk: Point, + vpks: Point[], ): Promise; public getEvents( type: EventType.Unencrypted, @@ -859,16 +859,25 @@ export class PXEService implements PXE { eventMetadata: EventMetadata, from: number, limit: number, - ivpk: Point = Point.ZERO, + vpks: Point[] = [], ): Promise { if (type.includes(EventType.Encrypted)) { - return this.getEncryptedEvents(from, limit, eventMetadata, ivpk); + return this.getEncryptedEvents(from, limit, eventMetadata, vpks); } return this.getUnencryptedEvents(from, limit, eventMetadata); } - async getEncryptedEvents(from: number, limit: number, eventMetadata: EventMetadata, ivpk: Point): Promise { + async getEncryptedEvents( + from: number, + limit: number, + eventMetadata: EventMetadata, + vpks: Point[], + ): Promise { + if (vpks.length === 0) { + throw new Error('Tried to get encrypted events without supplying any viewing public keys'); + } + const blocks = await this.node.getBlocks(from, limit); const txEffects = blocks.flatMap(block => block.body.txEffects); @@ -876,11 +885,20 @@ export class PXEService implements PXE { const encryptedLogs = encryptedTxLogs.flatMap(encryptedTxLog => encryptedTxLog.unrollLogs()); - const ivsk = await this.keyStore.getMasterSecretKey(ivpk); + const vsks = await Promise.all(vpks.map(vpk => this.keyStore.getMasterSecretKey(vpk))); - const visibleEvents = encryptedLogs - .map(encryptedLog => TaggedLog.decryptAsIncoming(encryptedLog, ivsk, L1EventPayload)) - .filter(item => item !== undefined) as TaggedLog[]; + const visibleEvents = encryptedLogs.flatMap(encryptedLog => { + for (const sk of vsks) { + const decryptedLog = + TaggedLog.decryptAsIncoming(encryptedLog, sk, L1EventPayload) ?? 
+ TaggedLog.decryptAsOutgoing(encryptedLog, sk, L1EventPayload); + if (decryptedLog !== undefined) { + return [decryptedLog]; + } + } + + return []; + }); const decodedEvents = visibleEvents .map(visibleEvent => { From 17c8d3a00f3a2e500d5caa1fb438504bcd357e8a Mon Sep 17 00:00:00 2001 From: Cody Gunton Date: Fri, 28 Jun 2024 17:48:27 -0400 Subject: [PATCH 17/29] feat: Constant Honk proof sizes (#6954) Introduces logic in the Honk provers/verifiers to 1) produce a proof of fixed size regardless of the size of the circuit, and 2) produce a recursive verifier circuit that is independent of the size of the circuit whose proof it is verifying. This was done largely for convenience so that no additional logic is needed downstream of bberg to handle the fact that Honk proofs and recursive verifier circuits are in general dependent on the size of the inner circuit (unlike for Plonk). --------- Co-authored-by: lucasxia01 Co-authored-by: ledwards2225 --- barretenberg/Earthfile | 2 +- .../flows/prove_then_verify_ultra_honk.sh | 2 +- barretenberg/cpp/src/CMakeLists.txt | 1 + barretenberg/cpp/src/barretenberg/bb/main.cpp | 68 ++++--- .../benchmark/goblin_bench/goblin.bench.cpp | 144 -------------- .../commitment_schemes/verification_key.hpp | 4 +- .../zeromorph/zeromorph.hpp | 177 +++++++++++++----- .../zeromorph/zeromorph.test.cpp | 28 +-- .../CMakeLists.txt | 1 + .../zeromorph.test.cpp | 140 ++++++++++++++ .../cpp/src/barretenberg/constants.hpp | 7 + .../dsl/acir_format/acir_format.cpp | 2 +- .../acir_format/honk_recursion_constraint.cpp | 88 ++++++++- .../src/barretenberg/eccvm/eccvm_flavor.hpp | 10 +- .../src/barretenberg/eccvm/eccvm_prover.cpp | 3 +- .../eccvm/eccvm_transcript.test.cpp | 4 +- .../src/barretenberg/eccvm/eccvm_verifier.cpp | 3 +- .../eccvm_recursive_verifier.cpp | 9 +- .../verifier_commitment_key.hpp | 5 +- .../cpp/src/barretenberg/flavor/flavor.hpp | 45 +++++ .../src/barretenberg/goblin/mock_circuits.hpp | 35 ---- .../goblin/mock_circuits_pinning.test.cpp | 27 --- .../library/grand_product_delta.hpp | 22 --- .../cpp/src/barretenberg/polynomials/pow.hpp | 32 ++++ .../protogalaxy/decider_verifier.cpp | 4 +- .../honk_recursion/transcript/transcript.hpp | 1 + .../verifier/decider_recursive_verifier.cpp | 3 +- .../protogalaxy_recursive_verifier.cpp | 5 +- .../verifier/ultra_recursive_verifier.cpp | 13 +- .../honk_recursion/verifier/verifier.test.cpp | 97 ++++++---- .../stdlib/primitives/biggroup/biggroup.hpp | 4 + .../primitives/biggroup/biggroup.test.cpp | 2 + .../stdlib/primitives/curves/bn254.hpp | 1 + .../stdlib/primitives/curves/grumpkin.hpp | 2 +- .../stdlib/primitives/field/field.cpp | 13 ++ .../stdlib/primitives/field/field.hpp | 3 + .../stdlib_circuit_builders/mega_flavor.hpp | 38 +--- .../stdlib_circuit_builders/ultra_flavor.hpp | 39 +--- .../src/barretenberg/sumcheck/sumcheck.hpp | 40 +++- .../barretenberg/sumcheck/sumcheck_round.hpp | 58 +++++- .../translator_vm/translator_prover.cpp | 3 +- .../translator_vm/translator_verifier.cpp | 3 +- .../translator_recursive_verifier.cpp | 5 +- .../ultra_honk/decider_prover.cpp | 3 +- .../ultra_honk/mega_transcript.test.cpp | 4 +- .../barretenberg/ultra_honk/oink_verifier.cpp | 3 - .../ultra_honk/ultra_transcript.test.cpp | 4 +- .../ultra_honk/ultra_verifier.cpp | 5 +- .../barretenberg/vm/generated/avm_prover.cpp | 3 +- .../vm/generated/avm_verifier.cpp | 13 +- .../bb-pil-backend/src/prover_builder.rs | 3 +- .../bb-pil-backend/src/verifier_builder.rs | 4 +- .../benchmarks/bench_sha256/Prover.toml | 1 - .../verify_honk_proof/Prover.toml | 
2 +- .../verify_honk_proof/src/main.nr | 3 +- 55 files changed, 738 insertions(+), 503 deletions(-) delete mode 100644 barretenberg/cpp/src/barretenberg/benchmark/goblin_bench/goblin.bench.cpp create mode 100644 barretenberg/cpp/src/barretenberg/commitment_schemes_recursion/CMakeLists.txt create mode 100644 barretenberg/cpp/src/barretenberg/commitment_schemes_recursion/zeromorph.test.cpp create mode 100644 barretenberg/cpp/src/barretenberg/constants.hpp delete mode 100644 noir/noir-repo/test_programs/benchmarks/bench_sha256/Prover.toml diff --git a/barretenberg/Earthfile b/barretenberg/Earthfile index 73dc1443992..1eb57ff2507 100644 --- a/barretenberg/Earthfile +++ b/barretenberg/Earthfile @@ -42,7 +42,7 @@ barretenberg-acir-tests-bb: RUN FLOW=prove_and_verify_mega_honk_program ./run_acir_tests.sh # Fold and verify an ACIR program stack using ClientIvc RUN FLOW=fold_and_verify_program ./run_acir_tests.sh fold_basic - # Fold and verify an ACIR program stack using ClientIvc, recursively verify as part of the Tube circuit and produce and verify a Honk proof + # Fold and verify an ACIR program stack using ClientIvc, recursively verify as part of the Tube circuit and produce and verify a Honk proof RUN FLOW=prove_then_verify_tube ./run_acir_tests.sh fold_basic # Construct and separately verify a UltraHonk proof for a single program that recursively verifies a Honk proof RUN FLOW=prove_then_verify_ultra_honk ./run_acir_tests.sh verify_honk_proof diff --git a/barretenberg/acir_tests/flows/prove_then_verify_ultra_honk.sh b/barretenberg/acir_tests/flows/prove_then_verify_ultra_honk.sh index fd559e256c6..ac3bb9bc962 100755 --- a/barretenberg/acir_tests/flows/prove_then_verify_ultra_honk.sh +++ b/barretenberg/acir_tests/flows/prove_then_verify_ultra_honk.sh @@ -1,5 +1,5 @@ #!/bin/sh -set -eu +set -eux VFLAG=${VERBOSE:+-v} BFLAG="-b ./target/program.json" diff --git a/barretenberg/cpp/src/CMakeLists.txt b/barretenberg/cpp/src/CMakeLists.txt index be6d254b723..077d20a4c3b 100644 --- a/barretenberg/cpp/src/CMakeLists.txt +++ b/barretenberg/cpp/src/CMakeLists.txt @@ -56,6 +56,7 @@ add_subdirectory(barretenberg/bb) add_subdirectory(barretenberg/circuit_checker) add_subdirectory(barretenberg/client_ivc) add_subdirectory(barretenberg/commitment_schemes) +add_subdirectory(barretenberg/commitment_schemes_recursion) add_subdirectory(barretenberg/common) add_subdirectory(barretenberg/crypto) add_subdirectory(barretenberg/dsl) diff --git a/barretenberg/cpp/src/barretenberg/bb/main.cpp b/barretenberg/cpp/src/barretenberg/bb/main.cpp index 114e2f0caf2..1045d543d21 100644 --- a/barretenberg/cpp/src/barretenberg/bb/main.cpp +++ b/barretenberg/cpp/src/barretenberg/bb/main.cpp @@ -741,18 +741,15 @@ bool avm_verify(const std::filesystem::path& proof_path, const std::filesystem:: #endif /** - * @brief Creates a proof for an ACIR circuit - * - * Communication: - * - stdout: The proof is written to stdout as a byte array - * - Filesystem: The proof is written to the path specified by outputPath + * @brief Create a Honk a prover from program bytecode and an optional witness * - * @param bytecodePath Path to the file containing the serialized circuit - * @param witnessPath Path to the file containing the serialized witness - * @param outputPath Path to write the proof to + * @tparam Flavor + * @param bytecodePath + * @param witnessPath + * @return UltraProver_ */ -template -void prove_honk(const std::string& bytecodePath, const std::string& witnessPath, const std::string& outputPath) +template +UltraProver_ 
compute_valid_prover(const std::string& bytecodePath, const std::string& witnessPath) { using Builder = Flavor::CircuitBuilder; using Prover = UltraProver_; @@ -762,7 +759,10 @@ void prove_honk(const std::string& bytecodePath, const std::string& witnessPath, honk_recursion = true; } auto constraint_system = get_constraint_system(bytecodePath, honk_recursion); - auto witness = get_witness(witnessPath); + acir_format::WitnessVector witness = {}; + if (!witnessPath.empty()) { + witness = get_witness(witnessPath); + } auto builder = acir_format::create_circuit(constraint_system, 0, witness, honk_recursion); @@ -770,8 +770,29 @@ void prove_honk(const std::string& bytecodePath, const std::string& witnessPath, size_t srs_size = builder.get_circuit_subgroup_size(builder.get_total_circuit_size() + num_extra_gates); init_bn254_crs(srs_size); - // Construct Honk proof Prover prover{ builder }; + return prover; +} + +/** + * @brief Creates a proof for an ACIR circuit + * + * Communication: + * - stdout: The proof is written to stdout as a byte array + * - Filesystem: The proof is written to the path specified by outputPath + * + * @param bytecodePath Path to the file containing the serialized circuit + * @param witnessPath Path to the file containing the serialized witness + * @param outputPath Path to write the proof to + */ +template +void prove_honk(const std::string& bytecodePath, const std::string& witnessPath, const std::string& outputPath) +{ + // using Builder = Flavor::CircuitBuilder; + using Prover = UltraProver_; + + // Construct Honk proof + Prover prover = compute_valid_prover(bytecodePath, witnessPath); auto proof = prover.construct_proof(); if (outputPath == "-") { @@ -807,10 +828,9 @@ template bool verify_honk(const std::string& proof_path, auto g2_data = get_bn254_g2_data(CRS_PATH); srs::init_crs_factory({}, g2_data); auto proof = from_buffer>(read_file(proof_path)); - auto verification_key = std::make_shared(from_buffer(read_file(vk_path))); - verification_key->pcs_verification_key = std::make_shared(); - - Verifier verifier{ verification_key }; + auto vk = std::make_shared(from_buffer(read_file(vk_path))); + vk->pcs_verification_key = std::make_shared(); + Verifier verifier{ vk }; bool verified = verifier.verify_proof(proof); @@ -830,22 +850,12 @@ template bool verify_honk(const std::string& proof_path, */ template void write_vk_honk(const std::string& bytecodePath, const std::string& outputPath) { - using Builder = Flavor::CircuitBuilder; + using Prover = UltraProver_; using ProverInstance = ProverInstance_; using VerificationKey = Flavor::VerificationKey; - bool honk_recursion = false; - if constexpr (IsAnyOf) { - honk_recursion = true; - } - auto constraint_system = get_constraint_system(bytecodePath, honk_recursion); - auto builder = acir_format::create_circuit(constraint_system, 0, {}, honk_recursion); - - auto num_extra_gates = builder.get_num_gates_added_to_ensure_nonzero_polynomials(); - size_t srs_size = builder.get_circuit_subgroup_size(builder.get_total_circuit_size() + num_extra_gates); - init_bn254_crs(srs_size); - - ProverInstance prover_inst(builder); + Prover prover = compute_valid_prover(bytecodePath, ""); + ProverInstance& prover_inst = *prover.instance; VerificationKey vk( prover_inst.proving_key); // uses a partial form of the proving key which only has precomputed entities diff --git a/barretenberg/cpp/src/barretenberg/benchmark/goblin_bench/goblin.bench.cpp b/barretenberg/cpp/src/barretenberg/benchmark/goblin_bench/goblin.bench.cpp deleted file mode 100644 
index 5ad7944451a..00000000000 --- a/barretenberg/cpp/src/barretenberg/benchmark/goblin_bench/goblin.bench.cpp +++ /dev/null @@ -1,144 +0,0 @@ - -#include - -#include "barretenberg/common/op_count_google_bench.hpp" -#include "barretenberg/goblin/goblin.hpp" -#include "barretenberg/goblin/mock_circuits.hpp" -#include "barretenberg/stdlib_circuit_builders/ultra_circuit_builder.hpp" - -using namespace benchmark; -using namespace bb; - -namespace { - -class GoblinBench : public benchmark::Fixture { - public: - GoblinAccumulationOutput kernel_accum; - - // Number of function circuits to accumulate(based on Zacs target numbers) - static constexpr size_t NUM_ITERATIONS_MEDIUM_COMPLEXITY = 6; - - void SetUp([[maybe_unused]] const ::benchmark::State& state) override - { - bb::srs::init_crs_factory("../srs_db/ignition"); - bb::srs::init_grumpkin_crs_factory("../srs_db/grumpkin"); - } - - /** - * @brief Perform a specified number of function circuit accumulation rounds - * @details Each round "accumulates" a mock function circuit and a mock kernel circuit. Each round thus consists of - * the generation of two circuits, two MegaHonk proofs and two Merge proofs. To match the sizes called out in the - * spec - * (https://github.com/AztecProtocol/aztec-packages/blob/master/yellow-paper/docs/cryptography/performance-targets.md) - * we set the size of the function circuit to be 2^17 except for the first one which is 2^19. - * - * @param state - */ - void perform_goblin_accumulation_rounds(State& state, GoblinProver& goblin) - { - auto NUM_CIRCUITS = static_cast(state.range(0)); - for (size_t circuit_idx = 0; circuit_idx < NUM_CIRCUITS; ++circuit_idx) { - - // Construct and accumulate a mock function circuit - MegaCircuitBuilder function_circuit{ goblin.op_queue }; - // On the first iteration construct a "large" function circuit (2^19), otherwise medium (2^17) - GoblinMockCircuits::construct_mock_function_circuit(function_circuit, /*large=*/circuit_idx == 0); - auto function_accum = goblin.accumulate(function_circuit); - - // Construct and accumulate the mock kernel circuit - // Note: in first round, kernel_accum is empty since there is no previous kernel to recursively verify - MegaCircuitBuilder circuit_builder{ goblin.op_queue }; - GoblinMockCircuits::construct_mock_recursion_kernel_circuit( - circuit_builder, - { function_accum.proof, function_accum.verification_key }, - { kernel_accum.proof, kernel_accum.verification_key }); - kernel_accum = goblin.accumulate(circuit_builder); - } - } -}; - -/** - * @brief Benchmark the full Goblin IVC protocol - * - */ -BENCHMARK_DEFINE_F(GoblinBench, GoblinFull)(benchmark::State& state) -{ - GoblinProver goblin; - - for (auto _ : state) { - BB_REPORT_OP_COUNT_IN_BENCH(state); - // Perform a specified number of iterations of function/kernel accumulation - perform_goblin_accumulation_rounds(state, goblin); - - // Construct proofs for ECCVM and Translator - goblin.prove(); - } -} - -/** - * @brief Benchmark only the accumulation rounds - * - */ -BENCHMARK_DEFINE_F(GoblinBench, GoblinAccumulate)(benchmark::State& state) -{ - GoblinProver goblin; - - // Perform a specified number of iterations of function/kernel accumulation - for (auto _ : state) { - perform_goblin_accumulation_rounds(state, goblin); - } -} - -/** - * @brief Benchmark only the ECCVM component - * - */ -BENCHMARK_DEFINE_F(GoblinBench, GoblinECCVMProve)(benchmark::State& state) -{ - GoblinProver goblin; - - // Perform a specified number of iterations of function/kernel accumulation - 
perform_goblin_accumulation_rounds(state, goblin); - - // Prove ECCVM only - for (auto _ : state) { - goblin.prove_eccvm(); - } -} - -/** - * @brief Benchmark only the Translator component - * - */ -BENCHMARK_DEFINE_F(GoblinBench, TranslatorProve)(benchmark::State& state) -{ - GoblinProver goblin; - - // Perform a specified number of iterations of function/kernel accumulation - perform_goblin_accumulation_rounds(state, goblin); - - // Prove ECCVM (unmeasured) and Translator (measured) - goblin.prove_eccvm(); - for (auto _ : state) { - goblin.prove_translator(); - } -} - -#define ARGS \ - Arg(GoblinBench::NUM_ITERATIONS_MEDIUM_COMPLEXITY) \ - ->Arg(1 << 0) \ - ->Arg(1 << 1) \ - ->Arg(1 << 2) \ - ->Arg(1 << 3) \ - ->Arg(1 << 4) \ - ->Arg(1 << 5) \ - ->Arg(1 << 6) - -BENCHMARK_REGISTER_F(GoblinBench, GoblinFull)->Unit(benchmark::kMillisecond)->ARGS; -BENCHMARK_REGISTER_F(GoblinBench, GoblinAccumulate)->Unit(benchmark::kMillisecond)->ARGS; -BENCHMARK_REGISTER_F(GoblinBench, GoblinECCVMProve)->Unit(benchmark::kMillisecond)->ARGS; -BENCHMARK_REGISTER_F(GoblinBench, TranslatorProve)->Unit(benchmark::kMillisecond)->ARGS; - -} // namespace - -BENCHMARK_MAIN(); diff --git a/barretenberg/cpp/src/barretenberg/commitment_schemes/verification_key.hpp b/barretenberg/cpp/src/barretenberg/commitment_schemes/verification_key.hpp index 23fb76a9502..909a40cd439 100644 --- a/barretenberg/cpp/src/barretenberg/commitment_schemes/verification_key.hpp +++ b/barretenberg/cpp/src/barretenberg/commitment_schemes/verification_key.hpp @@ -29,11 +29,11 @@ template class VerifierCommitmentKey; * @tparam curve::BN254 */ template <> class VerifierCommitmentKey { + public: using Curve = curve::BN254; using GroupElement = typename Curve::Element; using Commitment = typename Curve::AffineElement; - public: VerifierCommitmentKey() { srs::init_crs_factory("../srs_db/ignition"); @@ -69,11 +69,11 @@ template <> class VerifierCommitmentKey { * @tparam curve::Grumpkin */ template <> class VerifierCommitmentKey { + public: using Curve = curve::Grumpkin; using GroupElement = typename Curve::Element; using Commitment = typename Curve::AffineElement; - public: /** * @brief Construct a new IPA Verification Key object from existing SRS * diff --git a/barretenberg/cpp/src/barretenberg/commitment_schemes/zeromorph/zeromorph.hpp b/barretenberg/cpp/src/barretenberg/commitment_schemes/zeromorph/zeromorph.hpp index f6a77ba302c..2b507523865 100644 --- a/barretenberg/cpp/src/barretenberg/commitment_schemes/zeromorph/zeromorph.hpp +++ b/barretenberg/cpp/src/barretenberg/commitment_schemes/zeromorph/zeromorph.hpp @@ -6,6 +6,8 @@ #include "barretenberg/common/ref_vector.hpp" #include "barretenberg/common/zip_view.hpp" #include "barretenberg/polynomials/polynomial.hpp" +#include "barretenberg/stdlib/primitives/biggroup/biggroup.hpp" +#include "barretenberg/stdlib/primitives/witness/witness.hpp" #include "barretenberg/transcript/transcript.hpp" namespace bb { @@ -69,9 +71,6 @@ template class ZeroMorphProver_ { std::span u_challenge) { size_t log_N = numeric::get_msb(polynomial.size()); - // The size of the multilinear challenge must equal the log of the polynomial size - ASSERT(log_N == u_challenge.size()); - // Define the vector of quotients q_k, k = 0, ..., log_N-1 std::vector quotients; for (size_t k = 0; k < log_N; ++k) { @@ -323,7 +322,8 @@ template class ZeroMorphProver_ { * * @todo https://github.com/AztecProtocol/barretenberg/issues/1030: document concatenation trick */ - static OpeningClaim prove(RefSpan f_polynomials, + static OpeningClaim 
prove(FF circuit_size, + RefSpan f_polynomials, RefSpan g_polynomials, RefSpan f_evaluations, RefSpan g_shift_evaluations, @@ -339,7 +339,7 @@ template class ZeroMorphProver_ { // Extract multilinear challenge u and claimed multilinear evaluations from Sumcheck output std::span u_challenge = multilinear_challenge; - size_t log_N = u_challenge.size(); + size_t log_N = numeric::get_msb(static_cast(circuit_size)); size_t N = 1 << log_N; // Compute batching of unshifted polynomials f_i and to-be-shifted polynomials g_i: @@ -392,15 +392,18 @@ template class ZeroMorphProver_ { f_polynomial += concatenated_batched; // Compute the multilinear quotients q_k = q_k(X_0, ..., X_{k-1}) - auto quotients = compute_multilinear_quotients(f_polynomial, u_challenge); - + std::vector quotients = compute_multilinear_quotients(f_polynomial, u_challenge); // Compute and send commitments C_{q_k} = [q_k], k = 0,...,d-1 - std::vector q_k_commitments; - q_k_commitments.reserve(log_N); for (size_t idx = 0; idx < log_N; ++idx) { - q_k_commitments[idx] = commitment_key->commit(quotients[idx]); + Commitment q_k_commitment = commitment_key->commit(quotients[idx]); std::string label = "ZM:C_q_" + std::to_string(idx); - transcript->send_to_verifier(label, q_k_commitments[idx]); + transcript->send_to_verifier(label, q_k_commitment); + } + // Add buffer elements to remove log_N dependence in proof + for (size_t idx = log_N; idx < CONST_PROOF_SIZE_LOG_N; ++idx) { + auto buffer_element = Commitment::one(); + std::string label = "ZM:C_q_" + std::to_string(idx); + transcript->send_to_verifier(label, buffer_element); } // Get challenge y @@ -462,10 +465,19 @@ template class ZeroMorphVerifier_ { static Commitment compute_C_zeta_x(const Commitment& C_q, std::vector& C_q_k, FF y_challenge, - FF x_challenge) + FF x_challenge, + const FF log_circuit_size, + const FF circuit_size) { - size_t log_N = C_q_k.size(); - size_t N = 1 << log_N; + size_t N{ 0 }; + size_t log_N{ 0 }; + if constexpr (Curve::is_stdlib_type) { + N = static_cast(circuit_size.get_value()); + log_N = static_cast(log_circuit_size.get_value()); + } else { + N = static_cast(circuit_size); + log_N = static_cast(log_circuit_size); + } // Instantiate containers for input to batch mul std::vector scalars; @@ -480,21 +492,40 @@ template class ZeroMorphVerifier_ { } commitments.emplace_back(C_q); - // Contribution from C_q_k, k = 0,...,log_N - for (size_t k = 0; k < log_N; ++k) { + // Contribution from C_q_k, k = 0,...,log_N-1 + for (size_t k = 0; k < CONST_PROOF_SIZE_LOG_N; ++k) { + // Utilize dummy rounds in order to make verifier circuit independent of proof size + bool is_dummy_round = k >= log_N; auto deg_k = static_cast((1 << k) - 1); // Compute scalar y^k * x^{N - deg_k - 1} - auto scalar = y_challenge.pow(k); - scalar *= x_challenge.pow(N - deg_k - 1); + // TODO(https://github.com/AztecProtocol/barretenberg/issues/1039): pow may not add proper constraints + FF scalar = y_challenge.pow(k); + size_t x_exponent = is_dummy_round ? 0 : N - deg_k - 1; + scalar *= x_challenge.pow(x_exponent); scalar *= FF(-1); - + if constexpr (Curve::is_stdlib_type) { + auto builder = x_challenge.get_context(); + FF zero = FF::from_witness(builder, 0); + stdlib::bool_t dummy_round = stdlib::witness_t(builder, is_dummy_round); + // TODO(https://github.com/AztecProtocol/barretenberg/issues/1039): is it kosher to reassign like this? 
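The prover now always sends CONST_PROOF_SIZE_LOG_N quotient commitments, padding rounds log N, ..., CONST_PROOF_SIZE_LOG_N - 1 with the group generator, and the verifier zeroes the scalars of those padded rounds. Written out, the batched commitment computed by compute_C_zeta_x is intended to be (a restatement of the code above, with deg_k = 2^k - 1):

    C_{\zeta_x} \;=\; C_{\hat q} \;-\; \sum_{k=0}^{\mathrm{CONST\_PROOF\_SIZE\_LOG\_N}-1} s_k\, C_{q_k},
    \qquad
    s_k \;=\;
    \begin{cases}
      y^{k}\, x^{\,N - \deg_k - 1}, & k < \log N,\\
      0, & k \ge \log N,
    \end{cases}

so the padded commitments contribute nothing to the result while the number of scalars, commitments and transcript entries stays fixed, which is what makes the verifier circuit independent of the actual proof size.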
+ scalar = FF::conditional_assign(dummy_round, zero, scalar); + } else { + if (is_dummy_round) { + scalar = 0; + } + } scalars.emplace_back(scalar); commitments.emplace_back(C_q_k[k]); } // Compute batch mul to get the result if constexpr (Curve::is_stdlib_type) { - return Commitment::batch_mul(commitments, scalars); + // If Ultra and using biggroup, handle edge cases in batch_mul + if constexpr (IsUltraBuilder && stdlib::IsBigGroup) { + return Commitment::batch_mul(commitments, scalars, /*max_num_bits=*/0, /*with_edgecases=*/true); + } else { + return Commitment::batch_mul(commitments, scalars); + } } else { return batch_mul_native(commitments, scalars); } @@ -533,15 +564,25 @@ template class ZeroMorphVerifier_ { FF batched_evaluation, FF x_challenge, std::span u_challenge, + const FF log_circuit_size, + const FF circuit_size, const std::vector>& concatenation_groups_commitments = {}) { - size_t log_N = C_q_k.size(); - size_t N = 1 << log_N; + size_t N{ 0 }; + size_t log_N{ 0 }; + if constexpr (Curve::is_stdlib_type) { + N = static_cast(circuit_size.get_value()); + log_N = static_cast(log_circuit_size.get_value()); + } else { + N = static_cast(circuit_size); + log_N = static_cast(log_circuit_size); + } std::vector scalars; std::vector commitments; // Phi_n(x) = (x^N - 1) / (x - 1) + // TODO(https://github.com/AztecProtocol/barretenberg/issues/1039): pow may not add proper constraints auto phi_numerator = x_challenge.pow(N) - 1; // x^N - 1 auto phi_n_x = phi_numerator / (x_challenge - 1); @@ -590,26 +631,57 @@ template class ZeroMorphVerifier_ { // scalar = -x * (x^{2^k} * \Phi_{n-k-1}(x^{2^{k+1}}) - u_k * \Phi_{n-k}(x^{2^k})) auto x_pow_2k = x_challenge; // x^{2^k} auto x_pow_2kp1 = x_challenge * x_challenge; // x^{2^{k + 1}} - for (size_t k = 0; k < log_N; ++k) { - - auto phi_term_1 = phi_numerator / (x_pow_2kp1 - 1); // \Phi_{n-k-1}(x^{2^{k + 1}}) - auto phi_term_2 = phi_numerator / (x_pow_2k - 1); // \Phi_{n-k}(x^{2^k}) - - auto scalar = x_pow_2k * phi_term_1; - scalar -= u_challenge[k] * phi_term_2; - scalar *= x_challenge; - scalar *= FF(-1); - - scalars.emplace_back(scalar); - commitments.emplace_back(C_q_k[k]); - - // Update powers of challenge x - x_pow_2k = x_pow_2kp1; - x_pow_2kp1 *= x_pow_2kp1; + for (size_t k = 0; k < CONST_PROOF_SIZE_LOG_N; ++k) { + // Utilize dummy rounds in order to make verifier circuit independent of proof size + bool is_dummy_round = k >= log_N; + if constexpr (Curve::is_stdlib_type) { + auto builder = x_challenge.get_context(); + stdlib::bool_t dummy_scalar = stdlib::witness_t(builder, is_dummy_round); + auto phi_term_1 = phi_numerator / (x_pow_2kp1 - 1); // \Phi_{n-k-1}(x^{2^{k + 1}}) + auto phi_term_2 = phi_numerator / (x_pow_2k - 1); // \Phi_{n-k}(x^{2^k}) + + auto scalar = x_pow_2k * phi_term_1; + scalar -= u_challenge[k] * phi_term_2; + scalar *= x_challenge; + scalar *= -FF(1); + + FF zero = FF::from_witness(builder, 0); + scalar = FF::conditional_assign(dummy_scalar, zero, scalar); + scalars.emplace_back(scalar); + commitments.emplace_back(C_q_k[k]); + + x_pow_2k = FF::conditional_assign(dummy_scalar, x_pow_2k, x_pow_2kp1); + x_pow_2kp1 = FF::conditional_assign(dummy_scalar, x_pow_2kp1, x_pow_2kp1 * x_pow_2kp1); + } else { + if (is_dummy_round) { + scalars.emplace_back(0); + commitments.emplace_back(C_q_k[k]); + } else { + auto phi_term_1 = phi_numerator / (x_pow_2kp1 - 1); // \Phi_{n-k-1}(x^{2^{k + 1}}) + auto phi_term_2 = phi_numerator / (x_pow_2k - 1); // \Phi_{n-k}(x^{2^k}) + + auto scalar = x_pow_2k * phi_term_1; + scalar -= u_challenge[k] * 
phi_term_2; + scalar *= x_challenge; + scalar *= FF(-1); + + scalars.emplace_back(scalar); + commitments.emplace_back(C_q_k[k]); + + // Update powers of challenge x + x_pow_2k = x_pow_2kp1; + x_pow_2kp1 *= x_pow_2kp1; + } + } } if constexpr (Curve::is_stdlib_type) { - return Commitment::batch_mul(commitments, scalars); + // If Ultra and using biggroup, handle edge cases in batch_mul + if constexpr (IsUltraBuilder && stdlib::IsBigGroup) { + return Commitment::batch_mul(commitments, scalars, /*max_num_bits=*/0, /*with_edgecases=*/true); + } else { + return Commitment::batch_mul(commitments, scalars); + } } else { return batch_mul_native(commitments, scalars); } @@ -638,7 +710,8 @@ template class ZeroMorphVerifier_ { * @param transcript * @return VerifierAccumulator Inputs to the final PCS verification check that will be accumulated */ - static OpeningClaim verify(RefSpan unshifted_commitments, + static OpeningClaim verify(FF circuit_size, + RefSpan unshifted_commitments, RefSpan to_be_shifted_commitments, RefSpan unshifted_evaluations, RefSpan shifted_evaluations, @@ -648,7 +721,13 @@ template class ZeroMorphVerifier_ { const std::vector>& concatenation_group_commitments = {}, RefSpan concatenated_evaluations = {}) { - size_t log_N = multivariate_challenge.size(); + FF log_N; + // TODO(https://github.com/AztecProtocol/barretenberg/issues/1039): Connect witness log_N to circuit size + if constexpr (Curve::is_stdlib_type) { + log_N = FF(static_cast(numeric::get_msb(static_cast(circuit_size.get_value())))); + } else { + log_N = numeric::get_msb(static_cast(circuit_size)); + } FF rho = transcript->template get_challenge("rho"); // Construct batched evaluation v = sum_{i=0}^{m-1}\rho^i*f_i(u) + sum_{i=0}^{l-1}\rho^{m+i}*h_i(u) @@ -669,8 +748,8 @@ template class ZeroMorphVerifier_ { // Receive commitments [q_k] std::vector C_q_k; - C_q_k.reserve(log_N); - for (size_t i = 0; i < log_N; ++i) { + C_q_k.reserve(CONST_PROOF_SIZE_LOG_N); + for (size_t i = 0; i < CONST_PROOF_SIZE_LOG_N; ++i) { C_q_k.emplace_back(transcript->template receive_from_prover("ZM:C_q_" + std::to_string(i))); } @@ -684,7 +763,7 @@ template class ZeroMorphVerifier_ { auto [x_challenge, z_challenge] = transcript->template get_challenges("ZM:x", "ZM:z"); // Compute commitment C_{\zeta_x} - auto C_zeta_x = compute_C_zeta_x(C_q, C_q_k, y_challenge, x_challenge); + auto C_zeta_x = compute_C_zeta_x(C_q, C_q_k, y_challenge, x_challenge, log_N, circuit_size); // Compute commitment C_{Z_x} Commitment C_Z_x = compute_C_Z_x(g1_identity, @@ -695,17 +774,23 @@ template class ZeroMorphVerifier_ { batched_evaluation, x_challenge, multivariate_challenge, + log_N, + circuit_size, concatenation_group_commitments); // Compute commitment C_{\zeta,Z} Commitment C_zeta_Z; if constexpr (Curve::is_stdlib_type) { - // Express operation as a batch_mul in order to use Goblinization if available auto builder = z_challenge.get_context(); std::vector scalars = { FF(builder, 1), z_challenge }; std::vector points = { C_zeta_x, C_Z_x }; - C_zeta_Z = Commitment::batch_mul(points, scalars); + // If Ultra and using biggroup, handle edge cases in batch_mul + if constexpr (IsUltraBuilder && stdlib::IsBigGroup) { + C_zeta_Z = Commitment::batch_mul(points, scalars, /*max_num_bits=*/0, /*with_edgecases=*/true); + } else { + C_zeta_Z = Commitment::batch_mul(points, scalars); + } } else { C_zeta_Z = C_zeta_x + C_Z_x * z_challenge; } diff --git a/barretenberg/cpp/src/barretenberg/commitment_schemes/zeromorph/zeromorph.test.cpp 
b/barretenberg/cpp/src/barretenberg/commitment_schemes/zeromorph/zeromorph.test.cpp index 122fcb1187f..146ce53b461 100644 --- a/barretenberg/cpp/src/barretenberg/commitment_schemes/zeromorph/zeromorph.test.cpp +++ b/barretenberg/cpp/src/barretenberg/commitment_schemes/zeromorph/zeromorph.test.cpp @@ -1,9 +1,7 @@ #include "zeromorph.hpp" -#include "../commitment_key.test.hpp" +#include "barretenberg/commitment_schemes/commitment_key.test.hpp" #include "barretenberg/commitment_schemes/ipa/ipa.hpp" #include "barretenberg/commitment_schemes/kzg/kzg.hpp" -#include "barretenberg/transcript/transcript.hpp" - #include namespace bb { @@ -89,10 +87,10 @@ template class ZeroMorphTest : public CommitmentTest class ZeroMorphTest : public CommitmentTest u_challenge) { auto prover_transcript = NativeTranscript::prover_init_empty(); // Execute Prover protocol - auto prover_opening_claim = ZeroMorphProver::prove(RefVector(unshifted.polynomials), // unshifted + auto prover_opening_claim = ZeroMorphProver::prove(N, + RefVector(unshifted.polynomials), // unshifted RefVector(shifted.polynomials), // to-be shifted RefVector(unshifted.evaluations), // unshifted RefVector(shifted.evaluations), // shifted @@ -224,7 +224,8 @@ template class ZeroMorphTest : public CommitmentTest class ZeroMorphTest : public CommitmentTest u_challenge, size_t NUM_CONCATENATED) @@ -261,7 +263,8 @@ template class ZeroMorphTest : public CommitmentTest class ZeroMorphTest : public CommitmentTestexecute_zeromorph_protocol(num_unshifted, num_shifted, num_concatenated); EXPECT_TRUE(verified); } -} // namespace bb +} // namespace bb \ No newline at end of file diff --git a/barretenberg/cpp/src/barretenberg/commitment_schemes_recursion/CMakeLists.txt b/barretenberg/cpp/src/barretenberg/commitment_schemes_recursion/CMakeLists.txt new file mode 100644 index 00000000000..71ce791bd34 --- /dev/null +++ b/barretenberg/cpp/src/barretenberg/commitment_schemes_recursion/CMakeLists.txt @@ -0,0 +1 @@ +barretenberg_module(commitment_schemes_recursion commitment_schemes stdlib_primitives) \ No newline at end of file diff --git a/barretenberg/cpp/src/barretenberg/commitment_schemes_recursion/zeromorph.test.cpp b/barretenberg/cpp/src/barretenberg/commitment_schemes_recursion/zeromorph.test.cpp new file mode 100644 index 00000000000..93cfbc82d89 --- /dev/null +++ b/barretenberg/cpp/src/barretenberg/commitment_schemes_recursion/zeromorph.test.cpp @@ -0,0 +1,140 @@ +#include "barretenberg/commitment_schemes/zeromorph/zeromorph.hpp" +#include "barretenberg/circuit_checker/circuit_checker.hpp" +#include "barretenberg/commitment_schemes/commitment_key.test.hpp" +#include "barretenberg/commitment_schemes/ipa/ipa.hpp" +#include "barretenberg/commitment_schemes/kzg/kzg.hpp" +#include "barretenberg/srs/global_crs.hpp" +#include "barretenberg/stdlib/honk_recursion/transcript/transcript.hpp" +#include "barretenberg/stdlib/primitives/curves/bn254.hpp" +#include "barretenberg/stdlib/primitives/curves/grumpkin.hpp" +#include "barretenberg/stdlib_circuit_builders/ultra_circuit_builder.hpp" +#include + +#include + +using namespace bb; + +template class ZeroMorphRecursionTest : public CommitmentTest {}; + +numeric::RNG& engine = numeric::get_debug_randomness(); + +/** + * @brief Test full Prover/Verifier protocol for proving single multilinear evaluation + * + */ +TEST(ZeroMorphRecursionTest, ProveAndVerifySingle) +{ + // Define some useful type aliases + using Builder = UltraCircuitBuilder; + using Curve = typename stdlib::bn254; + using NativeCurve = typename 
Curve::NativeCurve; + using Commitment = typename Curve::AffineElement; + using NativeCommitment = typename Curve::AffineElementNative; + using NativeCurve = typename Curve::NativeCurve; + using NativePCS = std::conditional_t, KZG, IPA>; + using CommitmentKey = typename NativePCS::CK; + using ZeroMorphProver = ZeroMorphProver_; + using Fr = typename Curve::ScalarField; + using NativeFr = typename Curve::NativeCurve::ScalarField; + using Polynomial = bb::Polynomial; + using ZeroMorphVerifier = ZeroMorphVerifier_; + using Transcript = bb::BaseTranscript>; + + constexpr size_t N = 2; + constexpr size_t NUM_UNSHIFTED = 1; + constexpr size_t NUM_SHIFTED = 0; + + srs::init_crs_factory("../srs_db/ignition"); + + std::vector u_challenge = { NativeFr::random_element(&engine) }; + + // Construct some random multilinear polynomials f_i and their evaluations v_i = f_i(u) + std::vector f_polynomials; // unshifted polynomials + std::vector v_evaluations; + for (size_t i = 0; i < NUM_UNSHIFTED; ++i) { + f_polynomials.emplace_back(Polynomial::random(N)); + f_polynomials[i][0] = NativeFr(0); // ensure f is "shiftable" + v_evaluations.emplace_back(f_polynomials[i].evaluate_mle(u_challenge)); + } + + // Construct some "shifted" multilinear polynomials h_i as the left-shift-by-1 of f_i + std::vector g_polynomials; // to-be-shifted polynomials + std::vector h_polynomials; // shifts of the to-be-shifted polynomials + std::vector w_evaluations; + for (size_t i = 0; i < NUM_SHIFTED; ++i) { + g_polynomials.emplace_back(f_polynomials[i]); + h_polynomials.emplace_back(g_polynomials[i].shifted()); + w_evaluations.emplace_back(h_polynomials[i].evaluate_mle(u_challenge)); + } + + // Compute commitments [f_i] + std::vector f_commitments; + auto commitment_key = std::make_shared(1024); + for (size_t i = 0; i < NUM_UNSHIFTED; ++i) { + f_commitments.emplace_back(commitment_key->commit(f_polynomials[i])); + } + + // Construct container of commitments of the "to-be-shifted" polynomials [g_i] (= [f_i]) + std::vector g_commitments; + for (size_t i = 0; i < NUM_SHIFTED; ++i) { + g_commitments.emplace_back(f_commitments[i]); + } + + // Initialize an empty NativeTranscript + auto prover_transcript = NativeTranscript::prover_init_empty(); + + // Execute Prover protocol + ZeroMorphProver::prove(N, + RefVector(f_polynomials), + RefVector(g_polynomials), + RefVector(v_evaluations), + RefVector(w_evaluations), + u_challenge, + commitment_key, + prover_transcript); + + Builder builder; + StdlibProof stdlib_proof = bb::convert_proof_to_witness(&builder, prover_transcript->proof_data); + auto stdlib_verifier_transcript = std::make_shared(stdlib_proof); + [[maybe_unused]] auto _ = stdlib_verifier_transcript->template receive_from_prover("Init"); + + // Execute Verifier protocol without the need for vk prior the final check + const auto commitments_to_witnesses = [&builder](const auto& commitments) { + std::vector commitments_in_biggroup(commitments.size()); + std::transform(commitments.begin(), + commitments.end(), + commitments_in_biggroup.begin(), + [&builder](const auto& native_commitment) { + return Commitment::from_witness(&builder, native_commitment); + }); + return commitments_in_biggroup; + }; + const auto elements_to_witness = [&](const auto& elements) { + std::vector elements_in_circuit(elements.size()); + std::transform(elements.begin(), + elements.end(), + elements_in_circuit.begin(), + [&builder](const auto& native_element) { return Fr::from_witness(&builder, native_element); }); + return elements_in_circuit; + }; + auto 
stdlib_f_commitments = commitments_to_witnesses(f_commitments); + auto stdlib_g_commitments = commitments_to_witnesses(g_commitments); + auto stdlib_v_evaluations = elements_to_witness(v_evaluations); + auto stdlib_w_evaluations = elements_to_witness(w_evaluations); + + std::vector u_challenge_in_circuit(CONST_PROOF_SIZE_LOG_N); + std::fill_n(u_challenge_in_circuit.begin(), CONST_PROOF_SIZE_LOG_N, Fr::from_witness(&builder, 0)); + u_challenge_in_circuit[0] = Fr::from_witness(&builder, u_challenge[0]); + + [[maybe_unused]] auto opening_claim = ZeroMorphVerifier::verify(Fr::from_witness(&builder, N), + RefVector(stdlib_f_commitments), // unshifted + RefVector(stdlib_g_commitments), // to-be-shifted + RefVector(stdlib_v_evaluations), // unshifted + RefVector(stdlib_w_evaluations), // shifted + u_challenge_in_circuit, + Commitment::one(&builder), + stdlib_verifier_transcript, + {}, + {}); + EXPECT_TRUE(CircuitChecker::check(builder)); +} diff --git a/barretenberg/cpp/src/barretenberg/constants.hpp b/barretenberg/cpp/src/barretenberg/constants.hpp new file mode 100644 index 00000000000..4adf2c4a6b1 --- /dev/null +++ b/barretenberg/cpp/src/barretenberg/constants.hpp @@ -0,0 +1,7 @@ +#pragma once +#include + +namespace bb { +// The log of the max circuit size assumed in order to achieve constant sized proofs +static constexpr uint32_t CONST_PROOF_SIZE_LOG_N = 28; +} // namespace bb \ No newline at end of file diff --git a/barretenberg/cpp/src/barretenberg/dsl/acir_format/acir_format.cpp b/barretenberg/cpp/src/barretenberg/dsl/acir_format/acir_format.cpp index bcfcce6776f..528fd947ed3 100644 --- a/barretenberg/cpp/src/barretenberg/dsl/acir_format/acir_format.cpp +++ b/barretenberg/cpp/src/barretenberg/dsl/acir_format/acir_format.cpp @@ -341,7 +341,7 @@ void build_constraints(Builder& builder, // Add recursion constraints for (size_t i = 0; i < constraint_system.honk_recursion_constraints.size(); ++i) { - auto constraint = constraint_system.honk_recursion_constraints.at(i); + auto& constraint = constraint_system.honk_recursion_constraints.at(i); // A proof passed into the constraint should be stripped of its inner public inputs, but not the // nested aggregation object itself. The verifier circuit requires that the indices to a nested // proof aggregation state are a circuit constant. 
The user tells us they how they want these diff --git a/barretenberg/cpp/src/barretenberg/dsl/acir_format/honk_recursion_constraint.cpp b/barretenberg/cpp/src/barretenberg/dsl/acir_format/honk_recursion_constraint.cpp index 4b32ec0a14f..fb778929845 100644 --- a/barretenberg/cpp/src/barretenberg/dsl/acir_format/honk_recursion_constraint.cpp +++ b/barretenberg/cpp/src/barretenberg/dsl/acir_format/honk_recursion_constraint.cpp @@ -117,8 +117,6 @@ std::array create_ho } } - // Recursively verify the proof - auto vkey = std::make_shared(builder, key_fields); if (!has_valid_witness_assignments) { // Set vkey->circuit_size correctly based on the proof size size_t num_frs_comm = bb::field_conversion::calc_num_bn254_frs(); @@ -128,14 +126,86 @@ std::array create_ho 2 * num_frs_comm) % (num_frs_comm + num_frs_fr * UltraFlavor::BATCHED_RELATION_PARTIAL_LENGTH) == 0); - vkey->log_circuit_size = (input.proof.size() - HonkRecursionConstraint::inner_public_input_offset - - UltraFlavor::NUM_WITNESS_ENTITIES * num_frs_comm - - UltraFlavor::NUM_ALL_ENTITIES * num_frs_fr - 2 * num_frs_comm) / - (num_frs_comm + num_frs_fr * UltraFlavor::BATCHED_RELATION_PARTIAL_LENGTH); - vkey->circuit_size = (1 << vkey->log_circuit_size); - vkey->num_public_inputs = input.public_inputs.size(); - vkey->pub_inputs_offset = UltraFlavor::has_zero_row ? 1 : 0; + // Note: this computation should always result in log_circuit_size = CONST_PROOF_SIZE_LOG_N + auto log_circuit_size = (input.proof.size() - HonkRecursionConstraint::inner_public_input_offset - + UltraFlavor::NUM_WITNESS_ENTITIES * num_frs_comm - + UltraFlavor::NUM_ALL_ENTITIES * num_frs_fr - 2 * num_frs_comm) / + (num_frs_comm + num_frs_fr * UltraFlavor::BATCHED_RELATION_PARTIAL_LENGTH); + builder.assert_equal(builder.add_variable(1 << log_circuit_size), key_fields[0].witness_index); + builder.assert_equal(builder.add_variable(input.public_inputs.size()), key_fields[1].witness_index); + builder.assert_equal(builder.add_variable(UltraFlavor::has_zero_row ? 1 : 0), key_fields[2].witness_index); + uint32_t offset = 3; + + for (size_t i = 0; i < Flavor::NUM_PRECOMPUTED_ENTITIES; ++i) { + auto comm = curve::BN254::AffineElement::one() * fr::random_element(); + auto frs = field_conversion::convert_to_bn254_frs(comm); + builder.assert_equal(builder.add_variable(frs[0]), key_fields[offset].witness_index); + builder.assert_equal(builder.add_variable(frs[1]), key_fields[offset + 1].witness_index); + builder.assert_equal(builder.add_variable(frs[2]), key_fields[offset + 2].witness_index); + builder.assert_equal(builder.add_variable(frs[3]), key_fields[offset + 3].witness_index); + offset += 4; + } + + offset = HonkRecursionConstraint::inner_public_input_offset; + // first 3 things + builder.assert_equal(builder.add_variable(1 << log_circuit_size), proof_fields[0].witness_index); + builder.assert_equal(builder.add_variable(input.public_inputs.size()), proof_fields[1].witness_index); + builder.assert_equal(builder.add_variable(UltraFlavor::has_zero_row ? 
1 : 0), proof_fields[2].witness_index); + + // the public inputs + for (size_t i = 0; i < input.public_inputs.size(); i++) { + builder.assert_equal(builder.add_variable(fr::random_element()), proof_fields[offset].witness_index); + offset++; + } + + // first 7 commitments + for (size_t i = 0; i < Flavor::NUM_WITNESS_ENTITIES; i++) { + auto comm = curve::BN254::AffineElement::one() * fr::random_element(); + auto frs = field_conversion::convert_to_bn254_frs(comm); + builder.assert_equal(builder.add_variable(frs[0]), proof_fields[offset].witness_index); + builder.assert_equal(builder.add_variable(frs[1]), proof_fields[offset + 1].witness_index); + builder.assert_equal(builder.add_variable(frs[2]), proof_fields[offset + 2].witness_index); + builder.assert_equal(builder.add_variable(frs[3]), proof_fields[offset + 3].witness_index); + offset += 4; + } + + // now the univariates, which can just be 0s (7*CONST_PROOF_SIZE_LOG_N Frs) + for (size_t i = 0; i < CONST_PROOF_SIZE_LOG_N * Flavor::BATCHED_RELATION_PARTIAL_LENGTH; i++) { + builder.assert_equal(builder.add_variable(fr::random_element()), proof_fields[offset].witness_index); + offset++; + } + + // now the sumcheck evalutions, which is just 43 0s + for (size_t i = 0; i < Flavor::NUM_ALL_ENTITIES; i++) { + builder.assert_equal(builder.add_variable(fr::random_element()), proof_fields[offset].witness_index); + offset++; + } + + // now the zeromorph commitments, which are CONST_PROOF_SIZE_LOG_N comms + for (size_t i = 0; i < CONST_PROOF_SIZE_LOG_N; i++) { + auto comm = curve::BN254::AffineElement::one() * fr::random_element(); + auto frs = field_conversion::convert_to_bn254_frs(comm); + builder.assert_equal(builder.add_variable(frs[0]), proof_fields[offset].witness_index); + builder.assert_equal(builder.add_variable(frs[1]), proof_fields[offset + 1].witness_index); + builder.assert_equal(builder.add_variable(frs[2]), proof_fields[offset + 2].witness_index); + builder.assert_equal(builder.add_variable(frs[3]), proof_fields[offset + 3].witness_index); + offset += 4; + } + + // lastly the 2 commitments + for (size_t i = 0; i < 2; i++) { + auto comm = curve::BN254::AffineElement::one() * fr::random_element(); + auto frs = field_conversion::convert_to_bn254_frs(comm); + builder.assert_equal(builder.add_variable(frs[0]), proof_fields[offset].witness_index); + builder.assert_equal(builder.add_variable(frs[1]), proof_fields[offset + 1].witness_index); + builder.assert_equal(builder.add_variable(frs[2]), proof_fields[offset + 2].witness_index); + builder.assert_equal(builder.add_variable(frs[3]), proof_fields[offset + 3].witness_index); + offset += 4; + } + ASSERT(offset == input.proof.size() + input.public_inputs.size()); } + // Recursively verify the proof + auto vkey = std::make_shared(builder, key_fields); RecursiveVerifier verifier(&builder, vkey); std::array pairing_points = verifier.verify_proof(proof_fields); diff --git a/barretenberg/cpp/src/barretenberg/eccvm/eccvm_flavor.hpp b/barretenberg/cpp/src/barretenberg/eccvm/eccvm_flavor.hpp index 4a87e300d09..bdb969415cb 100644 --- a/barretenberg/cpp/src/barretenberg/eccvm/eccvm_flavor.hpp +++ b/barretenberg/cpp/src/barretenberg/eccvm/eccvm_flavor.hpp @@ -936,7 +936,6 @@ class ECCVMFlavor { size_t num_frs_read = 0; circuit_size = NativeTranscript::template deserialize_from_buffer(NativeTranscript::proof_data, num_frs_read); - size_t log_n = numeric::get_msb(circuit_size); transcript_add_comm = NativeTranscript::template deserialize_from_buffer( NativeTranscript::proof_data, num_frs_read); 
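Because the log_circuit_size computed from the stripped proof length must come out to CONST_PROOF_SIZE_LOG_N (per the note above), the proof fed into a Honk recursion constraint has a length that depends only on flavor constants. Rearranging the computation above, with n_G = num_frs_comm and n_F = num_frs_fr, the stripped proof length should satisfy (a restatement of the code, not text from the patch):

    |\pi| \;=\; \text{inner\_public\_input\_offset}
      \;+\; \text{NUM\_WITNESS\_ENTITIES}\cdot n_{\mathbb G}
      \;+\; \text{CONST\_PROOF\_SIZE\_LOG\_N}\cdot\bigl(n_{\mathbb G} + \text{BATCHED\_RELATION\_PARTIAL\_LENGTH}\cdot n_{\mathbb F}\bigr)
      \;+\; \text{NUM\_ALL\_ENTITIES}\cdot n_{\mathbb F}
      \;+\; 2\,n_{\mathbb G},

and the dummy-witness loops above fill exactly these proof slots plus one slot per public input, matching the final ASSERT that offset equals input.proof.size() + input.public_inputs.size(). The same constant bounds the sumcheck-univariate and ZeroMorph-commitment loops in the ECCVM transcript (de)serialization below.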
transcript_mul_comm = NativeTranscript::template deserialize_from_buffer( @@ -1113,14 +1112,14 @@ class ECCVMFlavor { NativeTranscript::proof_data, num_frs_read); z_perm_comm = NativeTranscript::template deserialize_from_buffer(NativeTranscript::proof_data, num_frs_read); - for (size_t i = 0; i < log_n; ++i) { + for (size_t i = 0; i < CONST_PROOF_SIZE_LOG_N; ++i) { sumcheck_univariates.emplace_back(NativeTranscript::template deserialize_from_buffer< bb::Univariate>( NativeTranscript::proof_data, num_frs_read)); } sumcheck_evaluations = NativeTranscript::template deserialize_from_buffer>( NativeTranscript::proof_data, num_frs_read); - for (size_t i = 0; i < log_n; ++i) { + for (size_t i = 0; i < CONST_PROOF_SIZE_LOG_N; ++i) { zm_cq_comms.push_back( NativeTranscript::template deserialize_from_buffer(proof_data, num_frs_read)); } @@ -1164,7 +1163,6 @@ class ECCVMFlavor { NativeTranscript::proof_data.clear(); NativeTranscript::template serialize_to_buffer(circuit_size, NativeTranscript::proof_data); - size_t log_n = numeric::get_msb(circuit_size); NativeTranscript::template serialize_to_buffer(transcript_add_comm, NativeTranscript::proof_data); NativeTranscript::template serialize_to_buffer(transcript_mul_comm, NativeTranscript::proof_data); @@ -1264,11 +1262,11 @@ class ECCVMFlavor { NativeTranscript::proof_data); NativeTranscript::template serialize_to_buffer(lookup_inverses_comm, NativeTranscript::proof_data); NativeTranscript::template serialize_to_buffer(z_perm_comm, NativeTranscript::proof_data); - for (size_t i = 0; i < log_n; ++i) { + for (size_t i = 0; i < CONST_PROOF_SIZE_LOG_N; ++i) { NativeTranscript::template serialize_to_buffer(sumcheck_univariates[i], NativeTranscript::proof_data); } NativeTranscript::template serialize_to_buffer(sumcheck_evaluations, NativeTranscript::proof_data); - for (size_t i = 0; i < log_n; ++i) { + for (size_t i = 0; i < CONST_PROOF_SIZE_LOG_N; ++i) { NativeTranscript::template serialize_to_buffer(zm_cq_comms[i], NativeTranscript::proof_data); } NativeTranscript::template serialize_to_buffer(zm_cq_comm, NativeTranscript::proof_data); diff --git a/barretenberg/cpp/src/barretenberg/eccvm/eccvm_prover.cpp b/barretenberg/cpp/src/barretenberg/eccvm/eccvm_prover.cpp index 7d049b16970..830eb760683 100644 --- a/barretenberg/cpp/src/barretenberg/eccvm/eccvm_prover.cpp +++ b/barretenberg/cpp/src/barretenberg/eccvm/eccvm_prover.cpp @@ -121,7 +121,8 @@ void ECCVMProver::execute_pcs_rounds() // Execute the ZeroMorph protocol to produce a univariate opening claim for the multilinear evaluations produced by // Sumcheck auto multivariate_to_univariate_opening_claim = - ZeroMorph::prove(key->polynomials.get_unshifted(), + ZeroMorph::prove(key->circuit_size, + key->polynomials.get_unshifted(), key->polynomials.get_to_be_shifted(), sumcheck_output.claimed_evaluations.get_unshifted(), sumcheck_output.claimed_evaluations.get_shifted(), diff --git a/barretenberg/cpp/src/barretenberg/eccvm/eccvm_transcript.test.cpp b/barretenberg/cpp/src/barretenberg/eccvm/eccvm_transcript.test.cpp index 9eaedc9df93..2f8999ada5d 100644 --- a/barretenberg/cpp/src/barretenberg/eccvm/eccvm_transcript.test.cpp +++ b/barretenberg/cpp/src/barretenberg/eccvm/eccvm_transcript.test.cpp @@ -140,7 +140,7 @@ class ECCVMTranscriptTests : public ::testing::Test { manifest_expected.add_challenge(round, label); } - for (size_t i = 0; i < log_n; ++i) { + for (size_t i = 0; i < CONST_PROOF_SIZE_LOG_N; ++i) { round++; std::string idx = std::to_string(i); manifest_expected.add_entry(round, "Sumcheck:univariate_" + 
idx, frs_per_uni); @@ -153,7 +153,7 @@ class ECCVMTranscriptTests : public ::testing::Test { manifest_expected.add_challenge(round, "rho"); round++; - for (size_t i = 0; i < log_n; ++i) { + for (size_t i = 0; i < CONST_PROOF_SIZE_LOG_N; ++i) { std::string idx = std::to_string(i); manifest_expected.add_entry(round, "ZM:C_q_" + idx, frs_per_G); } diff --git a/barretenberg/cpp/src/barretenberg/eccvm/eccvm_verifier.cpp b/barretenberg/cpp/src/barretenberg/eccvm/eccvm_verifier.cpp index 2c1e3d6dc57..26bd5ac6ce6 100644 --- a/barretenberg/cpp/src/barretenberg/eccvm/eccvm_verifier.cpp +++ b/barretenberg/cpp/src/barretenberg/eccvm/eccvm_verifier.cpp @@ -61,7 +61,8 @@ bool ECCVMVerifier::verify_proof(const HonkProof& proof) } // Reduce the multivariate evaluation claims produced by sumcheck to a single univariate opening claim - auto multivariate_to_univariate_opening_claim = ZeroMorph::verify(commitments.get_unshifted(), + auto multivariate_to_univariate_opening_claim = ZeroMorph::verify(circuit_size, + commitments.get_unshifted(), commitments.get_to_be_shifted(), claimed_evaluations.get_unshifted(), claimed_evaluations.get_shifted(), diff --git a/barretenberg/cpp/src/barretenberg/eccvm_recursion/eccvm_recursive_verifier.cpp b/barretenberg/cpp/src/barretenberg/eccvm_recursion/eccvm_recursive_verifier.cpp index 4ceb6478179..e2821b7789b 100644 --- a/barretenberg/cpp/src/barretenberg/eccvm_recursion/eccvm_recursive_verifier.cpp +++ b/barretenberg/cpp/src/barretenberg/eccvm_recursion/eccvm_recursive_verifier.cpp @@ -31,7 +31,11 @@ template void ECCVMRecursiveVerifier_::verify_proof(co VerifierCommitments commitments{ key }; CommitmentLabels commitment_labels; - const auto circuit_size = transcript->template receive_from_prover("circuit_size"); + // TODO(https://github.com/AztecProtocol/barretenberg/issues/1040): Extract circuit size as BF (field_t) then + // convert to FF (bigfield fq) since this is what's expected by ZM. See issue for more details. 
+ const BF circuit_size_bf = transcript->template receive_from_prover("circuit_size"); + const FF circuit_size{ static_cast(static_cast(circuit_size_bf.get_value())) }; + for (auto [comm, label] : zip_view(commitments.get_wires(), commitment_labels.get_wires())) { comm = transcript->template receive_from_prover(label); // TODO(https://github.com/AztecProtocol/barretenberg/issues/1017): This is a hack to ensure zero commitments @@ -75,7 +79,8 @@ template void ECCVMRecursiveVerifier_::verify_proof(co auto [multivariate_challenge, claimed_evaluations, sumcheck_verified] = sumcheck.verify(relation_parameters, alpha, gate_challenges); - auto multivariate_to_univariate_opening_claim = ZeroMorph::verify(commitments.get_unshifted(), + auto multivariate_to_univariate_opening_claim = ZeroMorph::verify(circuit_size, + commitments.get_unshifted(), commitments.get_to_be_shifted(), claimed_evaluations.get_unshifted(), claimed_evaluations.get_shifted(), diff --git a/barretenberg/cpp/src/barretenberg/eccvm_recursion/verifier_commitment_key.hpp b/barretenberg/cpp/src/barretenberg/eccvm_recursion/verifier_commitment_key.hpp index 5dcb13ffacb..b25e5369adb 100644 --- a/barretenberg/cpp/src/barretenberg/eccvm_recursion/verifier_commitment_key.hpp +++ b/barretenberg/cpp/src/barretenberg/eccvm_recursion/verifier_commitment_key.hpp @@ -7,12 +7,13 @@ namespace bb { * * @tparam Builder */ -template class VerifierCommitmentKey { +template class VerifierCommitmentKey { + public: + using Curve = Curve_; using Builder = Curve::Builder; using Commitment = Curve::AffineElement; using NativeEmbeddedCurve = typename Builder::EmbeddedCurve; - public: /** * @brief Construct a new Verifier Commitment Key object from its native counterpart. instantiated on Grumpkin. * This will be part of the ECCVMRecursiveFlavor once implemented. 
The Grumpkin SRS points are represented after diff --git a/barretenberg/cpp/src/barretenberg/flavor/flavor.hpp b/barretenberg/cpp/src/barretenberg/flavor/flavor.hpp index 301597a6299..0059b7e166f 100644 --- a/barretenberg/cpp/src/barretenberg/flavor/flavor.hpp +++ b/barretenberg/cpp/src/barretenberg/flavor/flavor.hpp @@ -68,6 +68,9 @@ #include "barretenberg/common/std_array.hpp" #include "barretenberg/common/std_vector.hpp" #include "barretenberg/common/zip_view.hpp" +#include "barretenberg/constants.hpp" +#include "barretenberg/crypto/sha256/sha256.hpp" +#include "barretenberg/ecc/fields/field_conversion.hpp" #include "barretenberg/plonk_honk_shared/types/circuit_type.hpp" #include "barretenberg/polynomials/barycentric.hpp" #include "barretenberg/polynomials/evaluation_domain.hpp" @@ -181,6 +184,8 @@ class ProvingKeyAvm_ : public PrecomputedPolynomials, public WitnessPolynomials template class VerificationKey_ : public PrecomputedCommitments { public: + using FF = typename VerifierCommitmentKey::Curve::ScalarField; + using Commitment = typename VerifierCommitmentKey::Commitment; std::shared_ptr pcs_verification_key; uint64_t pub_inputs_offset = 0; @@ -191,6 +196,46 @@ class VerificationKey_ : public PrecomputedCommitments { this->log_circuit_size = numeric::get_msb(circuit_size); this->num_public_inputs = num_public_inputs; }; + + /** + * @brief Serialize verification key to field elements + * + * @return std::vector + */ + std::vector to_field_elements() + { + std::vector elements; + std::vector circuit_size_elements = bb::field_conversion::convert_to_bn254_frs(this->circuit_size); + elements.insert(elements.end(), circuit_size_elements.begin(), circuit_size_elements.end()); + // do the same for the rest of the fields + std::vector num_public_inputs_elements = + bb::field_conversion::convert_to_bn254_frs(this->num_public_inputs); + elements.insert(elements.end(), num_public_inputs_elements.begin(), num_public_inputs_elements.end()); + std::vector pub_inputs_offset_elements = + bb::field_conversion::convert_to_bn254_frs(this->pub_inputs_offset); + elements.insert(elements.end(), pub_inputs_offset_elements.begin(), pub_inputs_offset_elements.end()); + + for (Commitment& comm : this->get_all()) { + std::vector comm_elements = bb::field_conversion::convert_to_bn254_frs(comm); + elements.insert(elements.end(), comm_elements.begin(), comm_elements.end()); + } + return elements; + } + + uint256_t hash() + { + std::vector field_elements = to_field_elements(); + std::vector to_hash(field_elements.size() * sizeof(FF)); + + const auto convert_and_insert = [&to_hash](auto& vector) { + std::vector buffer = to_buffer(vector); + to_hash.insert(to_hash.end(), buffer.begin(), buffer.end()); + }; + + convert_and_insert(field_elements); + + return from_buffer(crypto::sha256(to_hash)); + } }; // Because of how Gemini is written, is importat to put the polynomials out in this order. 
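With to_field_elements and hash now on the shared VerificationKey_ base (the MegaFlavor-specific copy is deleted further down), any Honk flavor can derive a single field-element encoding and a sha256 digest of its verification key. A rough usage sketch, assuming UltraFlavor and an already-built UltraCircuitBuilder named builder (illustrative only, not part of this patch):

    auto instance = std::make_shared<ProverInstance_<UltraFlavor>>(builder);
    UltraFlavor::VerificationKey vk(instance->proving_key);
    std::vector<UltraFlavor::FF> vk_fields = vk.to_field_elements(); // circuit_size, num_public_inputs,
                                                                     // pub_inputs_offset, then every commitment
    uint256_t vk_hash = vk.hash();                                   // sha256 over the buffered field elements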
diff --git a/barretenberg/cpp/src/barretenberg/goblin/mock_circuits.hpp b/barretenberg/cpp/src/barretenberg/goblin/mock_circuits.hpp index 0fbdef6204f..9e696ae3580 100644 --- a/barretenberg/cpp/src/barretenberg/goblin/mock_circuits.hpp +++ b/barretenberg/cpp/src/barretenberg/goblin/mock_circuits.hpp @@ -136,41 +136,6 @@ class GoblinMockCircuits { MockCircuits::construct_arithmetic_circuit(builder); } - /** - * @brief Construct a size 2^17 mock kernel circuit based on vanilla recursion for benchmarking - * @details This circuit contains (1) some arbitrary operations representing general kernel logic, (2) recursive - * verification of a function circuit proof, and optionally (3) recursive verification of a previous kernel circuit - * proof. The arbitrary kernel logic is structured to bring the final dyadic circuit size of the kernel to 2^17. - * - * TODO(https://github.com/AztecProtocol/barretenberg/issues/801): Pairing point aggregation not implemented - * @param builder - * @param function_accum {proof, vkey} for function circuit to be recursively verified - * @param prev_kernel_accum {proof, vkey} for previous kernel circuit to be recursively verified - */ - static void construct_mock_recursion_kernel_circuit(MegaBuilder& builder, - const KernelInput& function_accum, - const KernelInput& prev_kernel_accum) - { - // Add operations representing general kernel logic e.g. state updates. Note: these are structured to make the - // kernel "full" within the dyadic size 2^17 (130914 gates) - const size_t NUM_MERKLE_CHECKS = 40; - const size_t NUM_ECDSA_VERIFICATIONS = 1; - const size_t NUM_SHA_HASHES = 1; - stdlib::generate_merkle_membership_test_circuit(builder, NUM_MERKLE_CHECKS); - stdlib::generate_ecdsa_verification_test_circuit(builder, NUM_ECDSA_VERIFICATIONS); - stdlib::generate_sha256_test_circuit(builder, NUM_SHA_HASHES); - - // Execute recursive aggregation of function proof - RecursiveVerifier verifier1{ &builder, function_accum.verification_key }; - verifier1.verify_proof(function_accum.proof); - - // Execute recursive aggregation of previous kernel proof if one exists - if (!prev_kernel_accum.proof.empty()) { - RecursiveVerifier verifier2{ &builder, prev_kernel_accum.verification_key }; - verifier2.verify_proof(prev_kernel_accum.proof); - } - } - /** * @brief Construct a mock kernel circuit * @details Construct an arbitrary circuit meant to represent the aztec private function execution kernel. 
Recursive diff --git a/barretenberg/cpp/src/barretenberg/goblin/mock_circuits_pinning.test.cpp b/barretenberg/cpp/src/barretenberg/goblin/mock_circuits_pinning.test.cpp index a31ee0cc09b..b37cdd16206 100644 --- a/barretenberg/cpp/src/barretenberg/goblin/mock_circuits_pinning.test.cpp +++ b/barretenberg/cpp/src/barretenberg/goblin/mock_circuits_pinning.test.cpp @@ -32,31 +32,4 @@ TEST_F(MegaMockCircuitsPinning, FunctionSizes) }; run_test(true); run_test(false); -} - -TEST_F(MegaMockCircuitsPinning, RecursionKernelSizes) -{ - const auto run_test = [](bool large) { - { - GoblinProver goblin; - GoblinAccumulationOutput kernel_accum; - MegaCircuitBuilder app_circuit{ goblin.op_queue }; - GoblinMockCircuits::construct_mock_function_circuit(app_circuit, large); - auto function_accum = goblin.accumulate(app_circuit); - MegaCircuitBuilder kernel_circuit{ goblin.op_queue }; - GoblinMockCircuits::construct_mock_recursion_kernel_circuit( - kernel_circuit, - { function_accum.proof, function_accum.verification_key }, - { kernel_accum.proof, kernel_accum.verification_key }); - - auto instance = std::make_shared(kernel_circuit); - if (large) { - EXPECT_EQ(instance->proving_key.log_circuit_size, 17); - } else { - EXPECT_EQ(instance->proving_key.log_circuit_size, 17); - }; - } - }; - run_test(true); - run_test(false); } \ No newline at end of file diff --git a/barretenberg/cpp/src/barretenberg/plonk_honk_shared/library/grand_product_delta.hpp b/barretenberg/cpp/src/barretenberg/plonk_honk_shared/library/grand_product_delta.hpp index a68b398b118..b967dc0e93a 100644 --- a/barretenberg/cpp/src/barretenberg/plonk_honk_shared/library/grand_product_delta.hpp +++ b/barretenberg/cpp/src/barretenberg/plonk_honk_shared/library/grand_product_delta.hpp @@ -61,26 +61,4 @@ typename Flavor::FF compute_public_input_delta(std::span -Field compute_lookup_grand_product_delta(const Field& beta, const Field& gamma, const auto domain_size) -{ - Field gamma_by_one_plus_beta = gamma * (Field(1) + beta); // γ(1 + β) - return gamma_by_one_plus_beta.pow(domain_size); // (γ(1 + β))^n -} - } // namespace bb \ No newline at end of file diff --git a/barretenberg/cpp/src/barretenberg/polynomials/pow.hpp b/barretenberg/cpp/src/barretenberg/polynomials/pow.hpp index 583a9d3ddf1..7dda9aaa485 100644 --- a/barretenberg/cpp/src/barretenberg/polynomials/pow.hpp +++ b/barretenberg/cpp/src/barretenberg/polynomials/pow.hpp @@ -2,6 +2,7 @@ #include "barretenberg/common/compiler_hints.hpp" #include "barretenberg/common/op_count.hpp" #include "barretenberg/common/thread.hpp" +#include "barretenberg/stdlib/primitives/bool/bool.hpp" #include #include @@ -63,6 +64,21 @@ template struct PowPolynomial { */ FF univariate_eval(FF challenge) const { return (FF(1) + (challenge * (betas[current_element_idx] - FF(1)))); }; + /** + * @brief Evaluate \f$ ((1−X_{i}) + X_{i}\cdot \beta_{i})\f$ at the challenge point \f$ X_{i}=u_{i} \f$. 
+ */ + template FF univariate_eval(const FF& challenge, const Bool& dummy_round) const + { + FF beta_or_dummy; + if (!dummy_round.get_value()) { + beta_or_dummy = betas[current_element_idx]; + } else { + beta_or_dummy = FF::from_witness(challenge.get_context(), 1); + } + FF beta_val = FF::conditional_assign(dummy_round, FF::from_witness(challenge.get_context(), 1), beta_or_dummy); + return (FF(1) + (challenge * (beta_val - FF(1)))); + } + /** * @brief Partially evaluate the \f$pow_{\beta} \f$-polynomial at the new challenge and update \f$ c_i \f$ * @details Update the constant \f$c_{i} \to c_{i+1} \f$ multiplying it by \f$pow_{\beta}\f$'s factor \f$\left( @@ -77,6 +93,22 @@ template struct PowPolynomial { periodicity *= 2; } + /** + * @brief Partially evaluate the \f$pow_{\beta} \f$-polynomial at the new challenge and update \f$ c_i \f$ + * @details Update the constant \f$c_{i} \to c_{i+1} \f$ multiplying it by \f$pow_{\beta}\f$'s factor \f$\left( + * (1-X_i) + X_i\cdot \beta_i\right)\vert_{X_i = u_i}\f$ computed by \ref univariate_eval. + * @param challenge \f$ i \f$-th verifier challenge \f$ u_{i}\f$ + */ + template void partially_evaluate(const FF& challenge, const stdlib::bool_t& dummy) + { + FF current_univariate_eval = univariate_eval(challenge, dummy); + // If dummy round, make no update to the partial_evaluation_result + partial_evaluation_result = FF::conditional_assign( + dummy, partial_evaluation_result, partial_evaluation_result * current_univariate_eval); + current_element_idx++; + periodicity *= 2; + } + /** * @brief Given \f$ \vec\beta = (\beta_0,...,\beta_{d-1})\f$ compute \f$ pow_{\ell}(\vec \beta) = pow_{\beta}(\vec * \ell)\f$ for \f$ \ell =0,\ldots,2^{d}-1\f$. diff --git a/barretenberg/cpp/src/barretenberg/protogalaxy/decider_verifier.cpp b/barretenberg/cpp/src/barretenberg/protogalaxy/decider_verifier.cpp index 5ae609a0e9a..080a4f322f0 100644 --- a/barretenberg/cpp/src/barretenberg/protogalaxy/decider_verifier.cpp +++ b/barretenberg/cpp/src/barretenberg/protogalaxy/decider_verifier.cpp @@ -44,12 +44,14 @@ template bool DeciderVerifier_::verify_proof(const Hon // If Sumcheck did not verify, return false if (sumcheck_verified.has_value() && !sumcheck_verified.value()) { + info("Sumcheck verification failed."); return false; } // Execute ZeroMorph rounds. See https://hackmd.io/dlf9xEwhTQyE3hiGbq4FsA?view for a complete description of the // unrolled protocol. 
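The new PowPolynomial overloads mirror the same padding idea inside sumcheck: in a dummy round the challenge factor is evaluated with beta_i forced to 1, so it contributes a factor of 1 and the running partial evaluation is left untouched. In formula form (restating the two overloads above):

    \bigl((1-u_i) + u_i\,\beta_i\bigr)\Big|_{\beta_i = 1} \;=\; 1
    \qquad\Longrightarrow\qquad
    c_{i+1} \;=\; c_i \quad\text{for every dummy round } i \ge \log N,

which keeps the pow-polynomial bookkeeping consistent while the recursive verifier iterates over a fixed CONST_PROOF_SIZE_LOG_N rounds.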
- auto opening_claim = ZeroMorph::verify(commitments.get_unshifted(), + auto opening_claim = ZeroMorph::verify(accumulator->verification_key->circuit_size, + commitments.get_unshifted(), commitments.get_to_be_shifted(), claimed_evaluations.get_unshifted(), claimed_evaluations.get_shifted(), diff --git a/barretenberg/cpp/src/barretenberg/stdlib/honk_recursion/transcript/transcript.hpp b/barretenberg/cpp/src/barretenberg/stdlib/honk_recursion/transcript/transcript.hpp index 6b09876cd15..5aeeaf8a6c6 100644 --- a/barretenberg/cpp/src/barretenberg/stdlib/honk_recursion/transcript/transcript.hpp +++ b/barretenberg/cpp/src/barretenberg/stdlib/honk_recursion/transcript/transcript.hpp @@ -17,6 +17,7 @@ template struct StdlibTranscriptParams { Builder* builder = data[0].get_context(); return stdlib::poseidon2::hash(*builder, data); } else { + // TODO(https://github.com/AztecProtocol/barretenberg/issues/1035): Add constraints for hashing in Ultra using NativeFr = bb::fr; ASSERT(!data.empty() && data[0].get_context() != nullptr); Builder* builder = data[0].get_context(); diff --git a/barretenberg/cpp/src/barretenberg/stdlib/honk_recursion/verifier/decider_recursive_verifier.cpp b/barretenberg/cpp/src/barretenberg/stdlib/honk_recursion/verifier/decider_recursive_verifier.cpp index 44c083a544c..e3d29e360a5 100644 --- a/barretenberg/cpp/src/barretenberg/stdlib/honk_recursion/verifier/decider_recursive_verifier.cpp +++ b/barretenberg/cpp/src/barretenberg/stdlib/honk_recursion/verifier/decider_recursive_verifier.cpp @@ -33,7 +33,8 @@ std::array DeciderRecursiveVerifier_:: // Execute ZeroMorph rounds. See https://hackmd.io/dlf9xEwhTQyE3hiGbq4FsA?view for a complete description of the // unrolled protocol. - auto opening_claim = ZeroMorph::verify(commitments.get_unshifted(), + auto opening_claim = ZeroMorph::verify(accumulator->verification_key->circuit_size, + commitments.get_unshifted(), commitments.get_to_be_shifted(), claimed_evaluations.get_unshifted(), claimed_evaluations.get_shifted(), diff --git a/barretenberg/cpp/src/barretenberg/stdlib/honk_recursion/verifier/protogalaxy_recursive_verifier.cpp b/barretenberg/cpp/src/barretenberg/stdlib/honk_recursion/verifier/protogalaxy_recursive_verifier.cpp index 0a1b48068bd..bac5ef6dd7b 100644 --- a/barretenberg/cpp/src/barretenberg/stdlib/honk_recursion/verifier/protogalaxy_recursive_verifier.cpp +++ b/barretenberg/cpp/src/barretenberg/stdlib/honk_recursion/verifier/protogalaxy_recursive_verifier.cpp @@ -91,10 +91,7 @@ void ProtoGalaxyRecursiveVerifier_::receive_and_finalise_inst gamma, inst->verification_key->circuit_size, static_cast(inst->verification_key->pub_inputs_offset)); - const FF lookup_grand_product_delta = - compute_lookup_grand_product_delta(beta, gamma, inst->verification_key->circuit_size); - inst->relation_parameters = - RelationParameters{ eta, eta_two, eta_three, beta, gamma, public_input_delta, lookup_grand_product_delta }; + inst->relation_parameters = RelationParameters{ eta, eta_two, eta_three, beta, gamma, public_input_delta }; // Get the relation separation challenges for (size_t idx = 0; idx < NUM_SUBRELATIONS - 1; idx++) { diff --git a/barretenberg/cpp/src/barretenberg/stdlib/honk_recursion/verifier/ultra_recursive_verifier.cpp b/barretenberg/cpp/src/barretenberg/stdlib/honk_recursion/verifier/ultra_recursive_verifier.cpp index f609464efec..38100654bd9 100644 --- a/barretenberg/cpp/src/barretenberg/stdlib/honk_recursion/verifier/ultra_recursive_verifier.cpp +++ 
b/barretenberg/cpp/src/barretenberg/stdlib/honk_recursion/verifier/ultra_recursive_verifier.cpp @@ -53,7 +53,7 @@ std::array UltraRecursiveVerifier_::ve VerifierCommitments commitments{ key }; CommitmentLabels commitment_labels; - transcript->template receive_from_prover("circuit_size"); + FF circuit_size = transcript->template receive_from_prover("circuit_size"); transcript->template receive_from_prover("public_input_size"); transcript->template receive_from_prover("pub_inputs_offset"); @@ -117,15 +117,12 @@ std::array UltraRecursiveVerifier_::ve commitments.return_data_inverses = transcript->template receive_from_prover(commitment_labels.return_data_inverses); } - const FF public_input_delta = compute_public_input_delta( - public_inputs, beta, gamma, key->circuit_size, static_cast(key->pub_inputs_offset)); - const FF lookup_grand_product_delta = compute_lookup_grand_product_delta(beta, gamma, key->circuit_size); + public_inputs, beta, gamma, circuit_size, static_cast(key->pub_inputs_offset)); relation_parameters.beta = beta; relation_parameters.gamma = gamma; relation_parameters.public_input_delta = public_input_delta; - relation_parameters.lookup_grand_product_delta = lookup_grand_product_delta; // Get commitment to permutation and lookup grand products commitments.z_perm = transcript->template receive_from_prover(commitment_labels.z_perm); @@ -139,6 +136,9 @@ std::array UltraRecursiveVerifier_::ve alpha[idx] = transcript->template get_challenge("alpha_" + std::to_string(idx)); } + // TODO(https://github.com/AztecProtocol/barretenberg/issues/1041): Once hashing produces constraints for Ultra in + // the transcript, a fixed number of gate_challenges must be generated by the prover/verifier in order to achieve a + // verification circuit that is independent of proof size. 
auto gate_challenges = std::vector(log_circuit_size); for (size_t idx = 0; idx < log_circuit_size; idx++) { gate_challenges[idx] = transcript->template get_challenge("Sumcheck:gate_challenge_" + std::to_string(idx)); @@ -147,7 +147,8 @@ std::array UltraRecursiveVerifier_::ve sumcheck.verify(relation_parameters, alpha, gate_challenges); // Execute ZeroMorph to produce an opening claim subsequently verified by a univariate PCS - auto opening_claim = ZeroMorph::verify(commitments.get_unshifted(), + auto opening_claim = ZeroMorph::verify(circuit_size, + commitments.get_unshifted(), commitments.get_to_be_shifted(), claimed_evaluations.get_unshifted(), claimed_evaluations.get_shifted(), diff --git a/barretenberg/cpp/src/barretenberg/stdlib/honk_recursion/verifier/verifier.test.cpp b/barretenberg/cpp/src/barretenberg/stdlib/honk_recursion/verifier/verifier.test.cpp index 605b44b702f..c1d2acecf79 100644 --- a/barretenberg/cpp/src/barretenberg/stdlib/honk_recursion/verifier/verifier.test.cpp +++ b/barretenberg/cpp/src/barretenberg/stdlib/honk_recursion/verifier/verifier.test.cpp @@ -50,19 +50,12 @@ template class RecursiveVerifierTest : public testing */ static InnerBuilder create_inner_circuit(size_t log_num_gates = 10) { - using fr_ct = InnerCurve::ScalarField; - using fq_ct = InnerCurve::BaseField; - using point_ct = InnerCurve::AffineElement; - using public_witness_ct = InnerCurve::public_witness_ct; - using witness_ct = InnerCurve::witness_ct; - using byte_array_ct = InnerCurve::byte_array_ct; using fr = typename InnerCurve::ScalarFieldNative; - using point = typename InnerCurve::GroupNative::affine_element; InnerBuilder builder; // Create 2^log_n many add gates based on input log num gates - const size_t num_gates = 1 << log_num_gates; + const size_t num_gates = (1 << log_num_gates); for (size_t i = 0; i < num_gates; ++i) { fr a = fr::random_element(); uint32_t a_idx = builder.add_variable(a); @@ -77,39 +70,6 @@ template class RecursiveVerifierTest : public testing builder.create_big_add_gate({ a_idx, b_idx, c_idx, d_idx, fr(1), fr(1), fr(1), fr(-1), fr(0) }); } - // Perform a batch mul which will add some arbitrary goblin-style ECC op gates if the circuit arithmetic is - // goblinisied otherwise it will add the conventional nonnative gates - size_t num_points = 5; - std::vector circuit_points; - std::vector circuit_scalars; - for (size_t i = 0; i < num_points; ++i) { - circuit_points.push_back(point_ct::from_witness(&builder, point::random_element())); - circuit_scalars.push_back(fr_ct::from_witness(&builder, fr::random_element())); - } - point_ct::batch_mul(circuit_points, circuit_scalars); - - // Define some additional arbitrary convetional circuit logic - fr_ct a(public_witness_ct(&builder, fr::random_element())); - fr_ct b(public_witness_ct(&builder, fr::random_element())); - fr_ct c(public_witness_ct(&builder, fr::random_element())); - - for (size_t i = 0; i < 32; ++i) { - a = (a * b) + b + a; - a = a.madd(b, c); - } - pedersen_hash::hash({ a, b }); - byte_array_ct to_hash(&builder, "nonsense test data"); - blake3s(to_hash); - - fr bigfield_data = fr::random_element(); - fr bigfield_data_a{ bigfield_data.data[0], bigfield_data.data[1], 0, 0 }; - fr bigfield_data_b{ bigfield_data.data[2], bigfield_data.data[3], 0, 0 }; - - fq_ct big_a(fr_ct(witness_ct(&builder, bigfield_data_a.to_montgomery_form())), fr_ct(witness_ct(&builder, 0))); - fq_ct big_b(fr_ct(witness_ct(&builder, bigfield_data_b.to_montgomery_form())), fr_ct(witness_ct(&builder, 0))); - - big_a* big_b; - return builder; }; @@ 
-156,6 +116,52 @@ template class RecursiveVerifierTest : public testing } } + static void test_independent_vk_hash() + { + auto get_blocks = [](size_t inner_size) { // Create an arbitrary inner circuit + auto inner_circuit = create_inner_circuit(inner_size); + + // Generate a proof over the inner circuit + auto instance = std::make_shared(inner_circuit); + InnerProver inner_prover(instance); + info("test circuit size: ", instance->proving_key.circuit_size); + auto verification_key = std::make_shared(instance->proving_key); + auto inner_proof = inner_prover.construct_proof(); + + // Create a recursive verification circuit for the proof of the inner circuit + OuterBuilder outer_circuit; + RecursiveVerifier verifier{ &outer_circuit, verification_key }; + [[maybe_unused]] auto pairing_points = verifier.verify_proof(inner_proof); + return outer_circuit.blocks; + }; + + bool broke(false); + auto check_eq = [&broke](auto& p1, auto& p2) { + for (size_t idx = 0; idx < p1.size(); idx++) { + if (p1[idx] != p2[idx]) { + broke = true; + info("discrepancy at value index: ", idx); + break; + } + } + }; + + auto blocks_10 = get_blocks(10); + auto blocks_11 = get_blocks(11); + size_t block_idx = 0; + for (auto [b_10, b_11] : zip_view(blocks_10.get(), blocks_11.get())) { + info("block index: ", block_idx); + size_t sel_idx = 0; + for (auto [p_10, p_11] : zip_view(b_10.selectors, b_11.selectors)) { + info("sel index: ", sel_idx); + check_eq(p_10, p_11); + sel_idx++; + } + block_idx++; + } + EXPECT_FALSE(broke); + } + /** * @brief Construct a recursive verification circuit for the proof of an inner circuit then call check_circuit on * it. @@ -269,6 +275,15 @@ HEAVY_TYPED_TEST(RecursiveVerifierTest, SingleRecursiveVerification) TestFixture::test_recursive_verification(); }; +HEAVY_TYPED_TEST(RecursiveVerifierTest, IndependentVKHash) +{ + if constexpr (std::same_as>) { + TestFixture::test_independent_vk_hash(); + } else { + GTEST_SKIP() << "Not built for this parameter"; + } +}; + HEAVY_TYPED_TEST(RecursiveVerifierTest, SingleRecursiveVerificationFailure) { TestFixture::test_recursive_verification_fails(); diff --git a/barretenberg/cpp/src/barretenberg/stdlib/primitives/biggroup/biggroup.hpp b/barretenberg/cpp/src/barretenberg/stdlib/primitives/biggroup/biggroup.hpp index f73d389cab0..83ce2a6c7de 100644 --- a/barretenberg/cpp/src/barretenberg/stdlib/primitives/biggroup/biggroup.hpp +++ b/barretenberg/cpp/src/barretenberg/stdlib/primitives/biggroup/biggroup.hpp @@ -22,6 +22,7 @@ namespace bb::stdlib { template class element { public: using bool_ct = stdlib::bool_t; + using biggroup_tag = element; // Facilitates a constexpr check IsBigGroup struct secp256k1_wnaf { std::vector> wnaf; @@ -937,6 +938,9 @@ template class element { typename std::conditional, batch_lookup_table_plookup<>, batch_lookup_table_base>::type; }; +template +concept IsBigGroup = std::is_same_v; + template inline std::ostream& operator<<(std::ostream& os, element const& v) { diff --git a/barretenberg/cpp/src/barretenberg/stdlib/primitives/biggroup/biggroup.test.cpp b/barretenberg/cpp/src/barretenberg/stdlib/primitives/biggroup/biggroup.test.cpp index 49c8a471ca5..84359b97214 100644 --- a/barretenberg/cpp/src/barretenberg/stdlib/primitives/biggroup/biggroup.test.cpp +++ b/barretenberg/cpp/src/barretenberg/stdlib/primitives/biggroup/biggroup.test.cpp @@ -443,6 +443,8 @@ template class stdlib_biggroup : public testing::Test { static void test_batch_mul() { + // TODO(https://github.com/AztecProtocol/barretenberg/issues/1043): this test will fail 
when num_points is 1 + // (and this case gets hit sometimes when handling points at infinity). const size_t num_points = 5; Builder builder; std::vector points; diff --git a/barretenberg/cpp/src/barretenberg/stdlib/primitives/curves/bn254.hpp b/barretenberg/cpp/src/barretenberg/stdlib/primitives/curves/bn254.hpp index d437cee5044..83b8f3b70b1 100644 --- a/barretenberg/cpp/src/barretenberg/stdlib/primitives/curves/bn254.hpp +++ b/barretenberg/cpp/src/barretenberg/stdlib/primitives/curves/bn254.hpp @@ -12,6 +12,7 @@ template struct bn254 { // classes are instantiated with "native" curve types. Eventually, the verifier classes will be instantiated only // with stdlib types, and "native" verification will be acheived via a simulated builder. static constexpr bool is_stdlib_type = true; + using NativeCurve = curve::BN254; // Corresponding native types (used exclusively for testing) using ScalarFieldNative = curve::BN254::ScalarField; diff --git a/barretenberg/cpp/src/barretenberg/stdlib/primitives/curves/grumpkin.hpp b/barretenberg/cpp/src/barretenberg/stdlib/primitives/curves/grumpkin.hpp index 66c704e9d9b..8f8555886e6 100644 --- a/barretenberg/cpp/src/barretenberg/stdlib/primitives/curves/grumpkin.hpp +++ b/barretenberg/cpp/src/barretenberg/stdlib/primitives/curves/grumpkin.hpp @@ -14,8 +14,8 @@ namespace bb::stdlib { */ template struct grumpkin { static constexpr bool is_stdlib_type = true; - using Builder = CircuitBuilder; + using NativeCurve = curve::Grumpkin; // Stdlib types corresponding to those defined in the native description of the curve. // Note: its useful to have these type names match the native analog exactly so that components that digest a diff --git a/barretenberg/cpp/src/barretenberg/stdlib/primitives/field/field.cpp b/barretenberg/cpp/src/barretenberg/stdlib/primitives/field/field.cpp index 44da799f729..5d84e42450f 100644 --- a/barretenberg/cpp/src/barretenberg/stdlib/primitives/field/field.cpp +++ b/barretenberg/cpp/src/barretenberg/stdlib/primitives/field/field.cpp @@ -388,6 +388,19 @@ template field_t field_t::pow(const field_t return accumulator; } +/** + * @brief raise a field_t to a power of an exponent (size_t). Note that the exponent must not exceed 32 bits and is + * implicitly range constrained. + * + * @returns this ** (exponent) + */ +template field_t field_t::pow(const size_t exponent) const +{ + auto* ctx = get_context(); + auto exponent_field_elt = field_t::from_witness(ctx, exponent); + return pow(exponent_field_elt); +} + /** * @returns `this * to_mul + to_add` */ diff --git a/barretenberg/cpp/src/barretenberg/stdlib/primitives/field/field.hpp b/barretenberg/cpp/src/barretenberg/stdlib/primitives/field/field.hpp index c3c4a19c140..2bbfe93989b 100644 --- a/barretenberg/cpp/src/barretenberg/stdlib/primitives/field/field.hpp +++ b/barretenberg/cpp/src/barretenberg/stdlib/primitives/field/field.hpp @@ -129,6 +129,9 @@ template class field_t { // N.B. we implicitly range-constrain 'exponent' to be a 32-bit integer! field_t pow(const field_t& exponent) const; + // TODO(https://github.com/AztecProtocol/barretenberg/issues/1039): Use of this function in ZM verifier is insecure.
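A hedged aside on the pow(size_t) overload added in field.cpp above: it routes the exponent through field_t::from_witness before delegating to pow(field_t), so the exponent enters the circuit as a prover-supplied witness rather than a fixed constant, which appears to be what the TODO above is flagging for its use in the ZeroMorph verifier. A minimal sketch of what the overload does; Builder, base and exponent are illustrative placeholder names, not part of the diff:

    // The caller's size_t exponent becomes a fresh circuit witness, not a circuit constant:
    auto* ctx = base.get_context();
    field_t<Builder> exponent_as_witness = field_t<Builder>::from_witness(ctx, exponent);
    field_t<Builder> result = base.pow(exponent_as_witness); // equivalent to calling base.pow(exponent) on the new overload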
+ field_t pow(size_t exponent) const; + field_t operator+=(const field_t& other) { *this = *this + other; diff --git a/barretenberg/cpp/src/barretenberg/stdlib_circuit_builders/mega_flavor.hpp b/barretenberg/cpp/src/barretenberg/stdlib_circuit_builders/mega_flavor.hpp index 45467f78325..37680a5c543 100644 --- a/barretenberg/cpp/src/barretenberg/stdlib_circuit_builders/mega_flavor.hpp +++ b/barretenberg/cpp/src/barretenberg/stdlib_circuit_builders/mega_flavor.hpp @@ -416,9 +416,6 @@ class MegaFlavor { this->circuit_size, this->pub_inputs_offset); relation_parameters.public_input_delta = public_input_delta; - auto lookup_grand_product_delta = compute_lookup_grand_product_delta( - relation_parameters.beta, relation_parameters.gamma, this->circuit_size); - relation_parameters.lookup_grand_product_delta = lookup_grand_product_delta; // Compute permutation and lookup grand product polynomials compute_grand_products(this->polynomials, relation_parameters); @@ -560,31 +557,6 @@ class MegaFlavor { lagrange_last, lagrange_ecc_op, databus_id); - - /** - * @brief Serialize verification key to field elements - * - * @return std::vector - */ - std::vector to_field_elements() - { - std::vector elements; - std::vector circuit_size_elements = bb::field_conversion::convert_to_bn254_frs(this->circuit_size); - elements.insert(elements.end(), circuit_size_elements.begin(), circuit_size_elements.end()); - // do the same for the rest of the fields - std::vector num_public_inputs_elements = - bb::field_conversion::convert_to_bn254_frs(this->num_public_inputs); - elements.insert(elements.end(), num_public_inputs_elements.begin(), num_public_inputs_elements.end()); - std::vector pub_inputs_offset_elements = - bb::field_conversion::convert_to_bn254_frs(this->pub_inputs_offset); - elements.insert(elements.end(), pub_inputs_offset_elements.begin(), pub_inputs_offset_elements.end()); - - for (Commitment& comm : this->get_all()) { - std::vector comm_elements = bb::field_conversion::convert_to_bn254_frs(comm); - elements.insert(elements.end(), comm_elements.begin(), comm_elements.end()); - } - return elements; - } }; /** * @brief A container for storing the partially evaluated multivariates produced by sumcheck. 
@@ -814,7 +786,6 @@ class MegaFlavor { // take current proof and put them into the struct size_t num_frs_read = 0; circuit_size = deserialize_from_buffer(proof_data, num_frs_read); - size_t log_n = numeric::get_msb(circuit_size); public_input_size = deserialize_from_buffer(proof_data, num_frs_read); pub_inputs_offset = deserialize_from_buffer(proof_data, num_frs_read); @@ -839,13 +810,13 @@ class MegaFlavor { w_4_comm = deserialize_from_buffer(proof_data, num_frs_read); lookup_inverses_comm = deserialize_from_buffer(proof_data, num_frs_read); z_perm_comm = deserialize_from_buffer(proof_data, num_frs_read); - for (size_t i = 0; i < log_n; ++i) { + for (size_t i = 0; i < CONST_PROOF_SIZE_LOG_N; ++i) { sumcheck_univariates.push_back( deserialize_from_buffer>(proof_data, num_frs_read)); } sumcheck_evaluations = deserialize_from_buffer>(proof_data, num_frs_read); - for (size_t i = 0; i < log_n; ++i) { + for (size_t i = 0; i < CONST_PROOF_SIZE_LOG_N; ++i) { zm_cq_comms.push_back(deserialize_from_buffer(proof_data, num_frs_read)); } zm_cq_comm = deserialize_from_buffer(proof_data, num_frs_read); @@ -856,7 +827,6 @@ class MegaFlavor { { size_t old_proof_length = proof_data.size(); proof_data.clear(); - size_t log_n = numeric::get_msb(circuit_size); serialize_to_buffer(circuit_size, proof_data); serialize_to_buffer(public_input_size, proof_data); serialize_to_buffer(pub_inputs_offset, proof_data); @@ -881,11 +851,11 @@ class MegaFlavor { serialize_to_buffer(w_4_comm, proof_data); serialize_to_buffer(lookup_inverses_comm, proof_data); serialize_to_buffer(z_perm_comm, proof_data); - for (size_t i = 0; i < log_n; ++i) { + for (size_t i = 0; i < CONST_PROOF_SIZE_LOG_N; ++i) { serialize_to_buffer(sumcheck_univariates[i], proof_data); } serialize_to_buffer(sumcheck_evaluations, proof_data); - for (size_t i = 0; i < log_n; ++i) { + for (size_t i = 0; i < CONST_PROOF_SIZE_LOG_N; ++i) { serialize_to_buffer(zm_cq_comms[i], proof_data); } serialize_to_buffer(zm_cq_comm, proof_data); diff --git a/barretenberg/cpp/src/barretenberg/stdlib_circuit_builders/ultra_flavor.hpp b/barretenberg/cpp/src/barretenberg/stdlib_circuit_builders/ultra_flavor.hpp index 6ca68d7037e..d9b010c320d 100644 --- a/barretenberg/cpp/src/barretenberg/stdlib_circuit_builders/ultra_flavor.hpp +++ b/barretenberg/cpp/src/barretenberg/stdlib_circuit_builders/ultra_flavor.hpp @@ -344,9 +344,6 @@ class UltraFlavor { this->circuit_size, this->pub_inputs_offset); relation_parameters.public_input_delta = public_input_delta; - auto lookup_grand_product_delta = compute_lookup_grand_product_delta( - relation_parameters.beta, relation_parameters.gamma, this->circuit_size); - relation_parameters.lookup_grand_product_delta = lookup_grand_product_delta; // Compute permutation and lookup grand product polynomials compute_grand_products(this->polynomials, relation_parameters); @@ -361,7 +358,6 @@ class UltraFlavor { * that, and split out separate PrecomputedPolynomials/Commitments data for clarity but also for portability of our * circuits. 
*/ - // using VerificationKey = VerificationKey_, VerifierCommitmentKey>; class VerificationKey : public VerificationKey_, VerifierCommitmentKey> { public: VerificationKey() = default; @@ -472,31 +468,6 @@ class UltraFlavor { table_4, lagrange_first, lagrange_last); - - /** - * @brief Serialize verification key to field elements - * - * @return std::vector - */ - std::vector to_field_elements() - { - std::vector elements; - std::vector circuit_size_elements = bb::field_conversion::convert_to_bn254_frs(this->circuit_size); - elements.insert(elements.end(), circuit_size_elements.begin(), circuit_size_elements.end()); - // do the same for the rest of the fields - std::vector num_public_inputs_elements = - bb::field_conversion::convert_to_bn254_frs(this->num_public_inputs); - elements.insert(elements.end(), num_public_inputs_elements.begin(), num_public_inputs_elements.end()); - std::vector pub_inputs_offset_elements = - bb::field_conversion::convert_to_bn254_frs(this->pub_inputs_offset); - elements.insert(elements.end(), pub_inputs_offset_elements.begin(), pub_inputs_offset_elements.end()); - - for (Commitment& comm : this->get_all()) { - std::vector comm_elements = bb::field_conversion::convert_to_bn254_frs(comm); - elements.insert(elements.end(), comm_elements.begin(), comm_elements.end()); - } - return elements; - } }; /** @@ -696,7 +667,6 @@ class UltraFlavor { // take current proof and put them into the struct size_t num_frs_read = 0; circuit_size = deserialize_from_buffer(proof_data, num_frs_read); - size_t log_n = numeric::get_msb(circuit_size); public_input_size = deserialize_from_buffer(proof_data, num_frs_read); pub_inputs_offset = deserialize_from_buffer(proof_data, num_frs_read); @@ -711,13 +681,13 @@ class UltraFlavor { w_4_comm = deserialize_from_buffer(proof_data, num_frs_read); lookup_inverses_comm = deserialize_from_buffer(proof_data, num_frs_read); z_perm_comm = deserialize_from_buffer(proof_data, num_frs_read); - for (size_t i = 0; i < log_n; ++i) { + for (size_t i = 0; i < CONST_PROOF_SIZE_LOG_N; ++i) { sumcheck_univariates.push_back( deserialize_from_buffer>(proof_data, num_frs_read)); } sumcheck_evaluations = deserialize_from_buffer>(proof_data, num_frs_read); - for (size_t i = 0; i < log_n; ++i) { + for (size_t i = 0; i < CONST_PROOF_SIZE_LOG_N; ++i) { zm_cq_comms.push_back(deserialize_from_buffer(proof_data, num_frs_read)); } zm_cq_comm = deserialize_from_buffer(proof_data, num_frs_read); @@ -733,7 +703,6 @@ class UltraFlavor { { size_t old_proof_length = proof_data.size(); proof_data.clear(); // clear proof_data so the rest of the function can replace it - size_t log_n = numeric::get_msb(circuit_size); serialize_to_buffer(circuit_size, proof_data); serialize_to_buffer(public_input_size, proof_data); serialize_to_buffer(pub_inputs_offset, proof_data); @@ -748,11 +717,11 @@ class UltraFlavor { serialize_to_buffer(w_4_comm, proof_data); serialize_to_buffer(lookup_inverses_comm, proof_data); serialize_to_buffer(z_perm_comm, proof_data); - for (size_t i = 0; i < log_n; ++i) { + for (size_t i = 0; i < CONST_PROOF_SIZE_LOG_N; ++i) { serialize_to_buffer(sumcheck_univariates[i], proof_data); } serialize_to_buffer(sumcheck_evaluations, proof_data); - for (size_t i = 0; i < log_n; ++i) { + for (size_t i = 0; i < CONST_PROOF_SIZE_LOG_N; ++i) { serialize_to_buffer(zm_cq_comms[i], proof_data); } serialize_to_buffer(zm_cq_comm, proof_data); diff --git a/barretenberg/cpp/src/barretenberg/sumcheck/sumcheck.hpp b/barretenberg/cpp/src/barretenberg/sumcheck/sumcheck.hpp index 
71918f18a98..d5f71eec3c1 100644 --- a/barretenberg/cpp/src/barretenberg/sumcheck/sumcheck.hpp +++ b/barretenberg/cpp/src/barretenberg/sumcheck/sumcheck.hpp @@ -213,6 +213,12 @@ template class SumcheckProver { pow_univariate.partially_evaluate(round_challenge); round.round_size = round.round_size >> 1; } + auto zero_univariate = bb::Univariate::zero(); + for (size_t idx = multivariate_d; idx < CONST_PROOF_SIZE_LOG_N; idx++) { + transcript->send_to_verifier("Sumcheck:univariate_" + std::to_string(idx), zero_univariate); + FF round_challenge = transcript->template get_challenge("Sumcheck:u_" + std::to_string(idx)); + multivariate_challenge.emplace_back(round_challenge); + } // Final round: Extract multivariate evaluations from #partially_evaluated_polynomials and add to transcript ClaimedEvaluations multivariate_evaluations; @@ -385,21 +391,39 @@ template class SumcheckVerifier { std::vector multivariate_challenge; multivariate_challenge.reserve(multivariate_d); - - for (size_t round_idx = 0; round_idx < multivariate_d; round_idx++) { + for (size_t round_idx = 0; round_idx < CONST_PROOF_SIZE_LOG_N; round_idx++) { // Obtain the round univariate from the transcript std::string round_univariate_label = "Sumcheck:univariate_" + std::to_string(round_idx); auto round_univariate = transcript->template receive_from_prover>( round_univariate_label); - - bool checked = round.check_sum(round_univariate); - verified = verified && checked; FF round_challenge = transcript->template get_challenge("Sumcheck:u_" + std::to_string(round_idx)); - multivariate_challenge.emplace_back(round_challenge); - round.compute_next_target_sum(round_univariate, round_challenge); - pow_univariate.partially_evaluate(round_challenge); + if constexpr (IsRecursiveFlavor) { + typename Flavor::CircuitBuilder* builder = round_challenge.get_context(); + stdlib::bool_t dummy_round = stdlib::witness_t(builder, round_idx >= multivariate_d); + bool checked = round.check_sum(round_univariate, dummy_round); + // Only utilize the checked value if this is not a constant proof size padding round + if (round_idx < multivariate_d) { + verified = verified && checked; + } + multivariate_challenge.emplace_back(round_challenge); + + round.compute_next_target_sum(round_univariate, round_challenge, dummy_round); + pow_univariate.partially_evaluate(round_challenge, dummy_round); + + } else { + if (round_idx < multivariate_d) { + bool checked = round.check_sum(round_univariate); + verified = verified && checked; + multivariate_challenge.emplace_back(round_challenge); + + round.compute_next_target_sum(round_univariate, round_challenge); + pow_univariate.partially_evaluate(round_challenge); + } else { + multivariate_challenge.emplace_back(round_challenge); + } + } } // Final round diff --git a/barretenberg/cpp/src/barretenberg/sumcheck/sumcheck_round.hpp b/barretenberg/cpp/src/barretenberg/sumcheck/sumcheck_round.hpp index 41aead2179a..6b014b66378 100644 --- a/barretenberg/cpp/src/barretenberg/sumcheck/sumcheck_round.hpp +++ b/barretenberg/cpp/src/barretenberg/sumcheck/sumcheck_round.hpp @@ -5,6 +5,7 @@ #include "barretenberg/relations/relation_parameters.hpp" #include "barretenberg/relations/relation_types.hpp" #include "barretenberg/relations/utils.hpp" +#include "barretenberg/stdlib/primitives/bool/bool.hpp" namespace bb { @@ -393,6 +394,42 @@ template class SumcheckVerifierRound { return !sumcheck_round_failed; }; + /** + * @brief Check that the round target sum is correct + * @details The verifier receives the claimed evaluations of the round 
univariate \f$ \tilde{S}^i \f$ at \f$X_i = + * 0,\ldots, D \f$ and checks \f$\sigma_i = \tilde{S}^{i-1}(u_{i-1}) \stackrel{?}{=} \tilde{S}^i(0) + \tilde{S}^i(1) + * \f$ + * @param univariate Round univariate \f$\tilde{S}^{i}\f$ represented by its evaluations over \f$0,\ldots,D\f$. + * + */ + template + bool check_sum(bb::Univariate& univariate, stdlib::bool_t dummy_round) + { + FF total_sum = + FF::conditional_assign(dummy_round, target_total_sum, univariate.value_at(0) + univariate.value_at(1)); + // TODO(#673): Conditionals like this can go away once native verification is just recursive verification + // with a simulated builder. + bool sumcheck_round_failed(false); + if constexpr (IsRecursiveFlavor) { + if constexpr (IsECCVMRecursiveFlavor) { + // TODO(https://github.com/AztecProtocol/barretenberg/issues/998): Avoids the scenario where the assert_equal + // below fails because we are comparing a constant against a non-constant value and the non-constant + // value is in relaxed form. This happens at the first round when target_total_sum is initially set to + // 0. + total_sum.self_reduce(); + } + target_total_sum.assert_equal(total_sum); + if (!dummy_round.get_value()) { + sumcheck_round_failed = (target_total_sum.get_value() != total_sum.get_value()); + } + } else { + sumcheck_round_failed = (target_total_sum != total_sum); + } + + round_failed = round_failed || sumcheck_round_failed; + return !sumcheck_round_failed; + }; + /** * @brief After checking that the univariate is good for this round, compute the next target sum. * @@ -407,6 +444,23 @@ template class SumcheckVerifierRound { return target_total_sum; } + /** + * @brief After checking that the univariate is good for this round, compute the next target sum. + * + * @param univariate \f$ \tilde{S}^i(X) \f$, given by its evaluations over \f$ \{0,1,2,\ldots, D\}\f$.
+ * @param round_challenge \f$ u_i\f$ + * @return FF \f$ \sigma_{i+1} = \tilde{S}^i(u_i)\f$ + */ + template + FF compute_next_target_sum(bb::Univariate& univariate, + FF& round_challenge, + stdlib::bool_t dummy_round) + { + // Evaluate \f$\tilde{S}^{i}(u_{i}) \f$ + target_total_sum = FF::conditional_assign(dummy_round, target_total_sum, univariate.evaluate(round_challenge)); + return target_total_sum; + } + /** * @brief Given the evaluations \f$P_1(u_0,\ldots, u_{d-1}), \ldots, P_N(u_0,\ldots, u_{d-1}) \f$ of the * ProverPolynomials at the challenge point \f$(u_0,\ldots, u_{d-1})\f$ stored in \p purported_evaluations, this @@ -425,8 +479,8 @@ template class SumcheckVerifierRound { Utils::template accumulate_relation_evaluations_without_skipping<>( purported_evaluations, relation_evaluations, relation_parameters, pow_polynomial.partial_evaluation_result); - auto running_challenge = FF(1); - auto output = FF(0); + FF running_challenge{ 1 }; + FF output{ 0 }; Utils::scale_and_batch_elements(relation_evaluations, alpha, running_challenge, output); return output; } diff --git a/barretenberg/cpp/src/barretenberg/translator_vm/translator_prover.cpp b/barretenberg/cpp/src/barretenberg/translator_vm/translator_prover.cpp index 0d103c30291..560b208f605 100644 --- a/barretenberg/cpp/src/barretenberg/translator_vm/translator_prover.cpp +++ b/barretenberg/cpp/src/barretenberg/translator_vm/translator_prover.cpp @@ -173,7 +173,8 @@ void TranslatorProver::execute_pcs_rounds() using Curve = typename Flavor::Curve; using ZeroMorph = ZeroMorphProver_; auto prover_opening_claim = - ZeroMorph::prove(key->polynomials.get_unshifted_without_concatenated(), + ZeroMorph::prove(key->circuit_size, + key->polynomials.get_unshifted_without_concatenated(), key->polynomials.get_to_be_shifted(), sumcheck_output.claimed_evaluations.get_unshifted_without_concatenated(), sumcheck_output.claimed_evaluations.get_shifted(), diff --git a/barretenberg/cpp/src/barretenberg/translator_vm/translator_verifier.cpp b/barretenberg/cpp/src/barretenberg/translator_vm/translator_verifier.cpp index cfae12f3a5c..59429a35927 100644 --- a/barretenberg/cpp/src/barretenberg/translator_vm/translator_verifier.cpp +++ b/barretenberg/cpp/src/barretenberg/translator_vm/translator_verifier.cpp @@ -113,7 +113,8 @@ bool TranslatorVerifier::verify_proof(const HonkProof& proof) // Execute ZeroMorph rounds. See https://hackmd.io/dlf9xEwhTQyE3hiGbq4FsA?view for a complete description ofthe // unrolled protocol. 
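A hedged reading of the sumcheck changes above: the prover pads the transcript with zero univariates up to CONST_PROOF_SIZE_LOG_N and the verifier marks rounds beyond the true log circuit size as dummy rounds, so the recursive verifier lays down the same gates for every inner circuit size while the actual size is threaded through explicitly (hence the new circuit_size argument passed to ZeroMorph below). A minimal sketch of the dummy-round handling; FF, Builder and round_univariate stand in for the types used above:

    // Padding rounds (round_idx >= multivariate_d) leave the running target sum unchanged,
    // but still add the same gates as a real round, keeping the circuit shape size-independent.
    stdlib::bool_t<Builder> dummy_round = stdlib::witness_t<Builder>(builder, round_idx >= multivariate_d);
    FF total_sum = FF::conditional_assign(
        dummy_round, target_total_sum, round_univariate.value_at(0) + round_univariate.value_at(1));
    target_total_sum.assert_equal(total_sum); // enforced in-circuit for real and padding rounds alike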
- auto opening_claim = ZeroMorph::verify(commitments.get_unshifted_without_concatenated(), + auto opening_claim = ZeroMorph::verify(circuit_size, + commitments.get_unshifted_without_concatenated(), commitments.get_to_be_shifted(), claimed_evaluations.get_unshifted_without_concatenated(), claimed_evaluations.get_shifted(), diff --git a/barretenberg/cpp/src/barretenberg/translator_vm_recursion/translator_recursive_verifier.cpp b/barretenberg/cpp/src/barretenberg/translator_vm_recursion/translator_recursive_verifier.cpp index bf171d2c4a1..a34af2cf2fa 100644 --- a/barretenberg/cpp/src/barretenberg/translator_vm_recursion/translator_recursive_verifier.cpp +++ b/barretenberg/cpp/src/barretenberg/translator_vm_recursion/translator_recursive_verifier.cpp @@ -73,7 +73,7 @@ std::array TranslatorRecursiveVerifier_template receive_from_prover("circuit_size"); + const FF circuit_size = transcript->template receive_from_prover("circuit_size"); ASSERT(static_cast(circuit_size.get_value()) == key->circuit_size); evaluation_input_x = transcript->template receive_from_prover("evaluation_input_x"); @@ -113,7 +113,8 @@ std::array TranslatorRecursiveVerifier_ void DeciderProver_::execute_relation_ch template void DeciderProver_::execute_pcs_rounds() { using ZeroMorph = ZeroMorphProver_; - auto prover_opening_claim = ZeroMorph::prove(accumulator->proving_key.polynomials.get_unshifted(), + auto prover_opening_claim = ZeroMorph::prove(accumulator->proving_key.circuit_size, + accumulator->proving_key.polynomials.get_unshifted(), accumulator->proving_key.polynomials.get_to_be_shifted(), sumcheck_output.claimed_evaluations.get_unshifted(), sumcheck_output.claimed_evaluations.get_shifted(), diff --git a/barretenberg/cpp/src/barretenberg/ultra_honk/mega_transcript.test.cpp b/barretenberg/cpp/src/barretenberg/ultra_honk/mega_transcript.test.cpp index 8dfc816e01a..87dcb15cebe 100644 --- a/barretenberg/cpp/src/barretenberg/ultra_honk/mega_transcript.test.cpp +++ b/barretenberg/cpp/src/barretenberg/ultra_honk/mega_transcript.test.cpp @@ -88,7 +88,7 @@ class MegaTranscriptTests : public ::testing::Test { round++; } - for (size_t i = 0; i < log_n; ++i) { + for (size_t i = 0; i < CONST_PROOF_SIZE_LOG_N; ++i) { std::string idx = std::to_string(i); manifest_expected.add_entry(round, "Sumcheck:univariate_" + idx, frs_per_uni); std::string label = "Sumcheck:u_" + idx; @@ -100,7 +100,7 @@ class MegaTranscriptTests : public ::testing::Test { manifest_expected.add_challenge(round, "rho"); round++; - for (size_t i = 0; i < log_n; ++i) { + for (size_t i = 0; i < CONST_PROOF_SIZE_LOG_N; ++i) { std::string idx = std::to_string(i); manifest_expected.add_entry(round, "ZM:C_q_" + idx, frs_per_G); } diff --git a/barretenberg/cpp/src/barretenberg/ultra_honk/oink_verifier.cpp b/barretenberg/cpp/src/barretenberg/ultra_honk/oink_verifier.cpp index 0a5a1810e4c..c3d89c226df 100644 --- a/barretenberg/cpp/src/barretenberg/ultra_honk/oink_verifier.cpp +++ b/barretenberg/cpp/src/barretenberg/ultra_honk/oink_verifier.cpp @@ -136,11 +136,8 @@ template void OinkVerifier::execute_grand_product relation_parameters.gamma, key->circuit_size, static_cast(key->pub_inputs_offset)); - const FF lookup_grand_product_delta = - compute_lookup_grand_product_delta(relation_parameters.beta, relation_parameters.gamma, key->circuit_size); relation_parameters.public_input_delta = public_input_delta; - relation_parameters.lookup_grand_product_delta = lookup_grand_product_delta; // Get commitment to permutation and lookup grand products witness_comms.z_perm = 
transcript->template receive_from_prover(domain_separator + comm_labels.z_perm); diff --git a/barretenberg/cpp/src/barretenberg/ultra_honk/ultra_transcript.test.cpp b/barretenberg/cpp/src/barretenberg/ultra_honk/ultra_transcript.test.cpp index df541aeb2df..5124a0f64eb 100644 --- a/barretenberg/cpp/src/barretenberg/ultra_honk/ultra_transcript.test.cpp +++ b/barretenberg/cpp/src/barretenberg/ultra_honk/ultra_transcript.test.cpp @@ -79,7 +79,7 @@ class UltraTranscriptTests : public ::testing::Test { round++; } - for (size_t i = 0; i < log_n; ++i) { + for (size_t i = 0; i < CONST_PROOF_SIZE_LOG_N; ++i) { std::string idx = std::to_string(i); manifest_expected.add_entry(round, "Sumcheck:univariate_" + idx, frs_per_uni); std::string label = "Sumcheck:u_" + idx; @@ -91,7 +91,7 @@ class UltraTranscriptTests : public ::testing::Test { manifest_expected.add_challenge(round, "rho"); round++; - for (size_t i = 0; i < log_n; ++i) { + for (size_t i = 0; i < CONST_PROOF_SIZE_LOG_N; ++i) { std::string idx = std::to_string(i); manifest_expected.add_entry(round, "ZM:C_q_" + idx, frs_per_G); } diff --git a/barretenberg/cpp/src/barretenberg/ultra_honk/ultra_verifier.cpp b/barretenberg/cpp/src/barretenberg/ultra_honk/ultra_verifier.cpp index 942af05365b..6804be61836 100644 --- a/barretenberg/cpp/src/barretenberg/ultra_honk/ultra_verifier.cpp +++ b/barretenberg/cpp/src/barretenberg/ultra_honk/ultra_verifier.cpp @@ -70,12 +70,14 @@ template bool UltraVerifier_::verify_proof(const HonkP // If Sumcheck did not verify, return false if (sumcheck_verified.has_value() && !sumcheck_verified.value()) { + info("Sumcheck verification failed."); return false; } // Execute ZeroMorph rounds to produce an opening claim and verify it with a univariate PCS. See // https://hackmd.io/dlf9xEwhTQyE3hiGbq4FsA?view for a complete description of the unrolled protocol. 
- auto opening_claim = ZeroMorph::verify(commitments.get_unshifted(), + auto opening_claim = ZeroMorph::verify(key->circuit_size, + commitments.get_unshifted(), commitments.get_to_be_shifted(), claimed_evaluations.get_unshifted(), claimed_evaluations.get_shifted(), @@ -83,7 +85,6 @@ template bool UltraVerifier_::verify_proof(const HonkP Commitment::one(), transcript); auto pairing_points = PCS::reduce_verify(opening_claim, transcript); - auto pcs_verified = key->pcs_verification_key->pairing_check(pairing_points[0], pairing_points[1]); return sumcheck_verified.value() && pcs_verified; } diff --git a/barretenberg/cpp/src/barretenberg/vm/generated/avm_prover.cpp b/barretenberg/cpp/src/barretenberg/vm/generated/avm_prover.cpp index b35428e51f1..17c2e68976e 100644 --- a/barretenberg/cpp/src/barretenberg/vm/generated/avm_prover.cpp +++ b/barretenberg/cpp/src/barretenberg/vm/generated/avm_prover.cpp @@ -965,7 +965,8 @@ void AvmProver::execute_relation_check_rounds() * */ void AvmProver::execute_pcs_rounds() { - auto prover_opening_claim = ZeroMorph::prove(prover_polynomials.get_unshifted(), + auto prover_opening_claim = ZeroMorph::prove(key->circuit_size, + prover_polynomials.get_unshifted(), prover_polynomials.get_to_be_shifted(), sumcheck_output.claimed_evaluations.get_unshifted(), sumcheck_output.claimed_evaluations.get_shifted(), diff --git a/barretenberg/cpp/src/barretenberg/vm/generated/avm_verifier.cpp b/barretenberg/cpp/src/barretenberg/vm/generated/avm_verifier.cpp index 49302f921bb..6a23f8e56fd 100644 --- a/barretenberg/cpp/src/barretenberg/vm/generated/avm_verifier.cpp +++ b/barretenberg/cpp/src/barretenberg/vm/generated/avm_verifier.cpp @@ -713,27 +713,26 @@ bool AvmVerifier::verify_proof(const HonkProof& proof, const std::vector mle_challenge(multivariate_challenge.begin(), + multivariate_challenge.begin() + static_cast(log_circuit_size)); + FF kernel_kernel_inputs_evaluation = evaluate_public_input_column(public_inputs[0], circuit_size, mle_challenge); if (kernel_kernel_inputs_evaluation != claimed_evaluations.kernel_kernel_inputs) { return false; } - FF kernel_kernel_value_out_evaluation = - evaluate_public_input_column(public_inputs[1], circuit_size, multivariate_challenge); + FF kernel_kernel_value_out_evaluation = evaluate_public_input_column(public_inputs[1], circuit_size, mle_challenge); if (kernel_kernel_value_out_evaluation != claimed_evaluations.kernel_kernel_value_out) { return false; } FF kernel_kernel_side_effect_out_evaluation = - evaluate_public_input_column(public_inputs[2], circuit_size, multivariate_challenge); + evaluate_public_input_column(public_inputs[2], circuit_size, mle_challenge); if (kernel_kernel_side_effect_out_evaluation != claimed_evaluations.kernel_kernel_side_effect_out) { return false; } FF kernel_kernel_metadata_out_evaluation = - evaluate_public_input_column(public_inputs[3], circuit_size, multivariate_challenge); + evaluate_public_input_column(public_inputs[3], circuit_size, mle_challenge); if (kernel_kernel_metadata_out_evaluation != claimed_evaluations.kernel_kernel_metadata_out) { return false; } diff --git a/bb-pilcom/bb-pil-backend/src/prover_builder.rs b/bb-pilcom/bb-pil-backend/src/prover_builder.rs index ee129d6ca83..c0ea19709a5 100644 --- a/bb-pilcom/bb-pil-backend/src/prover_builder.rs +++ b/bb-pilcom/bb-pil-backend/src/prover_builder.rs @@ -192,7 +192,8 @@ impl ProverBuilder for BBFiles { * */ void {name}Prover::execute_pcs_rounds() {{ - auto prover_opening_claim = ZeroMorph::prove(prover_polynomials.get_unshifted(), + auto 
prover_opening_claim = ZeroMorph::prove(key->circuit_size, + prover_polynomials.get_unshifted(), prover_polynomials.get_to_be_shifted(), sumcheck_output.claimed_evaluations.get_unshifted(), sumcheck_output.claimed_evaluations.get_shifted(), diff --git a/bb-pilcom/bb-pil-backend/src/verifier_builder.rs b/bb-pilcom/bb-pil-backend/src/verifier_builder.rs index ed93731b67f..a10a7bea021 100644 --- a/bb-pilcom/bb-pil-backend/src/verifier_builder.rs +++ b/bb-pilcom/bb-pil-backend/src/verifier_builder.rs @@ -56,7 +56,7 @@ impl VerifierBuilder for BBFiles { |public_inputs_column_name: &String, idx: usize| { format!( " - FF {public_inputs_column_name}_evaluation = evaluate_public_input_column(public_inputs[{idx}], circuit_size, multivariate_challenge); + FF {public_inputs_column_name}_evaluation = evaluate_public_input_column(public_inputs[{idx}], circuit_size, mle_challenge); if ({public_inputs_column_name}_evaluation != claimed_evaluations.{public_inputs_column_name}) {{ return false; }} @@ -178,6 +178,8 @@ impl VerifierBuilder for BBFiles { }} // Public columns evaluation checks + std::vector mle_challenge(multivariate_challenge.begin(), + multivariate_challenge.begin() + static_cast(log_circuit_size)); {public_inputs_check} // Execute ZeroMorph rounds. See https://hackmd.io/dlf9xEwhTQyE3hiGbq4FsA?view for a complete description of the diff --git a/noir/noir-repo/test_programs/benchmarks/bench_sha256/Prover.toml b/noir/noir-repo/test_programs/benchmarks/bench_sha256/Prover.toml deleted file mode 100644 index 66779dea9d7..00000000000 --- a/noir/noir-repo/test_programs/benchmarks/bench_sha256/Prover.toml +++ /dev/null @@ -1 +0,0 @@ -input = [1,2] diff --git a/noir/noir-repo/test_programs/execution_success/verify_honk_proof/Prover.toml b/noir/noir-repo/test_programs/execution_success/verify_honk_proof/Prover.toml index 4619fd298dd..c1ec4049fa9 100644 --- a/noir/noir-repo/test_programs/execution_success/verify_honk_proof/Prover.toml +++ b/noir/noir-repo/test_programs/execution_success/verify_honk_proof/Prover.toml @@ -1,4 +1,4 @@ key_hash = "0x096129b1c6e108252fc5c829c4cc9b7e8f0d1fd9f29c2532b563d6396645e08f" -proof = 
["0x0000000000000000000000000000000000000000000000000000000000000020","0x0000000000000000000000000000000000000000000000000000000000000011","0x0000000000000000000000000000000000000000000000000000000000000001","0x0000000000000000000000000000000000000000000000042ab5d6d1986846cf","0x00000000000000000000000000000000000000000000000b75c020998797da78","0x0000000000000000000000000000000000000000000000005a107acb64952eca","0x000000000000000000000000000000000000000000000000000031e97a575e9d","0x00000000000000000000000000000000000000000000000b5666547acf8bd5a4","0x00000000000000000000000000000000000000000000000c410db10a01750aeb","0x00000000000000000000000000000000000000000000000d722669117f9758a4","0x000000000000000000000000000000000000000000000000000178cbf4206471","0x000000000000000000000000000000000000000000000000e91b8a11e7842c38","0x000000000000000000000000000000000000000000000007fd51009034b3357f","0x000000000000000000000000000000000000000000000009889939f81e9c7402","0x0000000000000000000000000000000000000000000000000000f94656a2ca48","0x000000000000000000000000000000000000000000000006fb128b46c1ddb67f","0x0000000000000000000000000000000000000000000000093fe27776f50224bd","0x000000000000000000000000000000000000000000000004a0c80c0da527a081","0x0000000000000000000000000000000000000000000000000001b52c2020d746","0x0000000000000000000000000000005a9bae947e1e91af9e4033d8d6aa6ed632","0x000000000000000000000000000000000025e485e013446d4ac7981c88ba6ecc","0x000000000000000000000000000000ff1e0496e30ab24a63b32b2d1120b76e62","0x00000000000000000000000000000000001afe0a8a685d7cd85d1010e55d9d7c","0x000000000000000000000000000000b0804efd6573805f991458295f510a2004","0x00000000000000000000000000000000000c81a178016e2fe18605022d5a8b0e","0x000000000000000000000000000000eba51e76eb1cfff60a53a0092a3c3dea47","0x000000000000000000000000000000000022e7466247b533282f5936ac4e6c15","0x00000000000000000000000000000071b1d76edf770edff98f00ff4deec264cd","0x00000000000000000000000000000000001e48128e68794d8861fcbb2986a383","0x000000000000000000000000000000d3a2af4915ae6d86b097adc377fafda2d4","0x000000000000000000000000000000000006359de9ca452dab3a4f1f8d9c9d98","0x0000000000000000000000000000000d9d719a8b9f020ad3642d60fe704e696f","0x00000000000000000000000000000000000ddfdbbdefc4ac1580ed38e12cfa49","0x0000000000000000000000000000008289fe9754ce48cd01b7be96a861b5e157","0x00000000000000000000000000000000000ff3e0896bdea021253b3d360fa678","0x0000000000000000000000000000000d9d719a8b9f020ad3642d60fe704e696f","0x00000000000000000000000000000000000ddfdbbdefc4ac1580ed38e12cfa49","0x0000000000000000000000000000008289fe9754ce48cd01b7be96a861b5e157","0x00000000000000000000000000000000000ff3e0896bdea021253b3d360fa678","0x000000000000000000000000000000f968b227a358a305607f3efc933823d288","0x00000000000000000000000000000000000eaf8adb390375a76d95e918b65e08","0x000000000000000000000000000000bb34b4b447aae56f5e24f81c3acd6d547f","0x00000000000000000000000000000000002175d012746260ebcfe339a91a81e1","0x0000000000000000000000000000005b739ed2075f2b046062b8fc6a2d1e9863","0x00000000000000000000000000000000001285cd1030d338c0e1603b4da2c838","0x00000000000000000000000000000027447d6c281eb38b2b937af4a516d60c04","0x000000000000000000000000000000000019bc3d980465fbb4a656a74296fc58","0x000000000000000000000000000000b484788ace8f7df86dd5e325d2e9b12599","0x00000000000000000000000000000000000a2ca0d10eb7b767114ae230b728d3","0x000000000000000000000000000000c6dfc7092f16f95795e437664498b88d53","0x0000000000000000000000000000000000131067b4e4d95a4f6f8cf5c9b5450a","0x0f413f22eec51f2a02800e0cafaeec1d
92d744fbbaef213c687b9edabd6985f5","0x21230f4ff26c80ffb5d037a9d1d26c3f955ca34cbeca4f54db6656b932967a0c","0x0521f877fe35535767f99597cc50effbd283dcae6812ee0a7620d796ccbfd642","0x202b01350a9cc5c20ec0f3eaada338c0a3b793811bd539418ffa3cc4302615e2","0x2d1214d9b0d41058ad4a172d9c0aecc5bdabe95e687c3465050c6b5396509be4","0x1113b344a151b0af091cb28d728b752ebb4865da6cd7ee68471b961ca5cf69b9","0x2aa66d0954bb83e17bd5c9928d3aa7a7df75d741d409f7c15ba596804ba643fb","0x2e26bc7a530771ef7a95d5360d537e41cf94d8a0942764ff09881c107f91a106","0x0f14f32b921bb63ad1df00adab7c82af58ea8aa7f353f14b281208d8c5fab504","0x13429515c0c53b6502bbcdf545defb3cb69a986c9263e070fcbb397391aae1a3","0x1f21cac5e2f262afc1006a21454cc6bcb018c44e53ad8ab61cebbac99e539176","0x2a9886a6ddc8a61b097c668cd362fc8acdee8dde74f7b1af192c3e060bb2948f","0x2d718181e408ead2e9bcd30a84ad1fccbaf8d48ab6d1820bad4933d284b503c4","0x2634c1aafc902f14508f34d3d7e9d485f42d1a4c95b5a1ef73711ed0d3c68d77","0x092ede9777e6472ce5ffd8c963d466006189e960e2c591d338dc8d4af1a057fb","0x1cba45b17fd24f1cb1b4ab7b83eee741f6c77ba70a497dc4de259eceb7d5ea26","0x246e887c7bf2e17f919b2393b6e9b00b33e8822d862544a775aac05cb7bff710","0x04c3f539fe8689971948afcb437f1ecbd444a5bddaca1c8a450348dcd8480047","0x20c6a423ae4fd58e8951aa378d02d77baf90508ceb48856db2319d70938b186e","0x1bcf8786b554b3316d8ebdbc9d006a4e5d4865aad512ffd404b7f83550d3d030","0x09ab038260518f0970564afcd6bf22e2abf6b1fa5e12a327bbf195b6ca5edd78","0x1024e32554746f89c195286ba6ccfc9765e5d14bbe8064bc6fdf22d16ec6b495","0x17706656f8dbd7e47bb257a6428f0cb7278ea02fa9e6ce431d7bcc9133fba9c7","0x25a3e8a33c15ef2a4dd16313a6049bf1d468b4cdc141f238f2d51a1e8e1c22b3","0x1198863f08006edb27aee23164fb117a4ddec1bf1ed89807aa907e5cd24bf068","0x1862b4856b5b4d4a064f873e221703e4e2cd1ebfca1337dedca56485c38ed5a0","0x062214af1ea6dd6bf8895b92d394571c43970b6f967e1c794624d96071b25ad3","0x1e5be9428ddcf1f9b0cbafc28101e792ec5cf73852b0cd0b84fbff71b4490e09","0x2d4189bea5b1e30f63c64bd26df82f18bcaf885ec8887b54634b2557869ce87f","0x0f2e5d9a908850e9d44925e17d8b12d1adb1ed029799c9b5858598504242bbc0","0x3050dc85746a57931d99f3f35e77c2ba561fba0baa018b79ff1fd544026833ae","0x2a591a32437e5e0b875a137fd868bd1b6dbc003ff1b661f26e00627cc7c5cf47","0x27946841e1670ad9c65717016d0cedf524724217236e81b9fd0a264a36ebfb0e","0x0fc396e9d19d6e68e289602e292ee345542d0d28bf6de34fa62cc577cbdfb1df","0x08e7433a07a44c0c9c4dd4b273a2685bbd1a91fd5cf2b43409458fab42a23e1b","0x12bd9bfb029c3503a5c6deea87b0a0f11bb9f7ea584af2d48f3e48d7e09247ae","0x2ccc4810748c0a82dfc0f063d0b8c7999ffe9474653080e6ef92b3cb7a428784","0x08eb574d7fecadadb508c8bd35fdad06b99110609d679763c2e3645229b1b95a","0x0f1a65e747c8021ed7c454a4be1e89b1bce66ead9ed980fa98a7a050eafe98a1","0x1c8ff9e36684ec71614dee4c17859b06c742089f6029d3694a16e00dac9b57f1","0x0303101a8ba712aeca4da85b767ab8d3ecf489ec7d746f8ee20041717cc000e9","0x0aaf64c65e7088e5596108c9601467911fea809ca6540d79af77e6e66e36cd99","0x17caf164ce74ea7edfb1390e07763d2197797ec26661b92cde18a98d61d2fddc","0x18cb055c7ad6d01437725bb457681d81f3ecadc4f35d838a3c13daf25a44456a","0x2d78602b8bbcd32b36a99a6e2d248e7fe044ef1b50813133370412f9ef5299f0","0x2b139276ea86d426a115479e4154f72a6bd83a6253bf13e9670dc6b4664378f0","0x127c7837b384902c39a104036c09546728571c46c8166b1b9b13b3a615ebb781","0x05faa4816f83cf0189a482ad943c94b9ec6474002f2b327f8698763ad0ea0985","0x2f90359cc30ee693fb3aced96523cf7aebd152c22329eee56a398d9a4ac0628e","0x0a71beaf17a59c5a238f04c1f203848d87502c5057a78c13f0cfb0f9876e7714","0x2696c1e6d089556adaeb95c8a5e3065b00a393a38c2d69e9bd6ce8cdc49d87da","0x1f3d165a7dc6564a036e451eb9cb7f1e1cb1e6d29daa75e3f135ea3e58a79ccd","0
x1473a660819bdd838d56122b72b32b267211e9f1103239480ec50fa85c9e1035","0x0a8ccaeb22451f391b3fc3467c8e6e900270a7afb7b510e8acf5a4f06f1c0888","0x03b3080afc0658cc87e307758cebc171921f43eca159b9dedf7f72aa8dd926bd","0x2dd7d6663fa0e1755dfafac352c361fcd64c7f4d53627e3646870ac169cc4a07","0x1ec54b883f5f35ccad0e75695af20790d9860104095bab34c9bf01628dd40cb9","0x193dff50f83c241f7a9e087a29ce72ecf3f6d8563593f786dcd04c32bcfd4ced","0x135122c0dae26cda8ca1c09de8225064ad86d10423ab0aaa53b481aa4626e1d6","0x08d5a56cbfab5aeed56d3cdd7fb6b30fc26b0c1a5b63fccd7fa44c53ba6fd35a","0x0d12f126dfa2daad3726d00ca339284cc22e36c6d81bb7a4b95c6f9598b60e7c","0x2e8b24bbdf2fd839d3c7cae1f0eeb96bfcfaeef30b27476f2fafcb17da78cd5e","0x2364acfe0cea39b7f749c5f303b99504977357925f810f684c60f35d16315211","0x06ca062eb70b8c51cfac35345e7b6b51f33a8ec9ebe204fb9b4911200bf508b7","0x266c0aa1ccb97186815bf69084f600d06ddd934e59a38dfe602ee5d6b9487f22","0x1d817537a49c6d0e3b4b65c6665334b91d7593142e60065048be9e55ceb5e7ab","0x05e9b7256a368df053c691952b59e9327a7c12ed322bbd6f72c669b9b9c26d49","0x05e9b7256a368df053c691952b59e9327a7c12ed322bbd6f72c669b9b9c26d49","0x25b77026673a1e613e50df0e88fb510973739d5f9064bd364079a9f884209632","0x25c9bc7a3f6aae3d43ff68b5614b34b5eaceff37157b37347995d231784ac1fd","0x085f69baef22680ae15f4801ef4361ebe9c7fc24a94b5bc2527dce8fb705439e","0x0d7c6b9ce31bfc32238a205455baf5ffe99cd30eb0f7bb5b504e1d4501e01382","0x1001a8cc4bc1221c814fba0eddcf3c40619b133373640c600de5bed0a0a05b10","0x20f5894be90e52977cb70f4f4cbd5101693db0360848939750db7e91109d54b6","0x22c09cb26db43f0599408b4daed0f4f496c66424e6affa41c14387d8e0af851b","0x24e5f41357798432426a9549d71e8cc681eaebacbe87f6e3bf38e85de5aa2f3d","0x06eb90100c736fbf2b87432d7821ecdc0b365024739bc36363d48b905973f5b9","0x0000000000000000000000000000007f36e0b4f59927ebbb2302e76cbe8bd44e","0x00000000000000000000000000000000001b95777c6c98640c80638c195909ca","0x0000000000000000000000000000006d4b1ad71244248cb2070fbbbb0ac9df88","0x00000000000000000000000000000000001abada4d5d816a67b6fc75746cb723","0x000000000000000000000000000000465811089df032ceb5269254547a101e57","0x000000000000000000000000000000000011a4a909c59776a6df9c7615e8e87d","0x000000000000000000000000000000311f6f724e7199351c9774225f15c25f20","0x00000000000000000000000000000000001ddba8eb0ab208ad3d96c70941fcbc","0x0000000000000000000000000000000dfa80bdf5be151b21ad89466b7201b63d","0x000000000000000000000000000000000015ca7dc258adab8ea406d94e00c56d","0x000000000000000000000000000000507ea3454165f92295b6e435c7d30d14f0","0x00000000000000000000000000000000002f522608db7b7d389d1df67eab104d","0x000000000000000000000000000000950102cce743fadb23965fc72e31efd36c","0x000000000000000000000000000000000018b4a7ec90df68dfe97d3c5367d1bf","0x000000000000000000000000000000118d90258b25dba8bc0f99d9f7547c6a62","0x000000000000000000000000000000000012d78638701da6322abbf325693b0f","0x000000000000000000000000000000144743e0d082f35295b51561af65f94c6b","0x00000000000000000000000000000000002322a615615e5405836374bb3c5336","0x000000000000000000000000000000e6f08dd5904ee42f826cde680919b41a96","0x00000000000000000000000000000000002d3f823ea255b68465e4b5360bf864","0x00000000000000000000000000000076d4db93683b6363ae92a5a20d8bb9922e","0x00000000000000000000000000000000002f8a7009cac72c9599b81cb9054308","0x00000000000000000000000000000085c12dd2be9f2b29e54c1a4bc3cbf9b6ce","0x000000000000000000000000000000000024e3688a1f4f50b0c6bd6c068f32b2","0x00000000000000000000000000000023a2015e7ea351e444c9405adfbd81e84d","0x00000000000000000000000000000000001fb3e4228c15dc4380db796925ec49","0x000000000000000000000000000000834ad
9406b8ded7208b872373be7445e47","0x0000000000000000000000000000000000267544d6a9f5cc46d10555f2617c65"] +proof = ["0x0000000000000000000000000000000000000000000000000000000000000020","0x0000000000000000000000000000000000000000000000000000000000000011","0x0000000000000000000000000000000000000000000000000000000000000001","0x0000000000000000000000000000000000000000000000042ab5d6d1986846cf","0x00000000000000000000000000000000000000000000000b75c020998797da78","0x0000000000000000000000000000000000000000000000005a107acb64952eca","0x000000000000000000000000000000000000000000000000000031e97a575e9d","0x00000000000000000000000000000000000000000000000b5666547acf8bd5a4","0x00000000000000000000000000000000000000000000000c410db10a01750aeb","0x00000000000000000000000000000000000000000000000d722669117f9758a4","0x000000000000000000000000000000000000000000000000000178cbf4206471","0x000000000000000000000000000000000000000000000000e91b8a11e7842c38","0x000000000000000000000000000000000000000000000007fd51009034b3357f","0x000000000000000000000000000000000000000000000009889939f81e9c7402","0x0000000000000000000000000000000000000000000000000000f94656a2ca48","0x000000000000000000000000000000000000000000000006fb128b46c1ddb67f","0x0000000000000000000000000000000000000000000000093fe27776f50224bd","0x000000000000000000000000000000000000000000000004a0c80c0da527a081","0x0000000000000000000000000000000000000000000000000001b52c2020d746","0x0000000000000000000000000000005a9bae947e1e91af9e4033d8d6aa6ed632","0x000000000000000000000000000000000025e485e013446d4ac7981c88ba6ecc","0x000000000000000000000000000000ff1e0496e30ab24a63b32b2d1120b76e62","0x00000000000000000000000000000000001afe0a8a685d7cd85d1010e55d9d7c","0x000000000000000000000000000000b0804efd6573805f991458295f510a2004","0x00000000000000000000000000000000000c81a178016e2fe18605022d5a8b0e","0x000000000000000000000000000000eba51e76eb1cfff60a53a0092a3c3dea47","0x000000000000000000000000000000000022e7466247b533282f5936ac4e6c15","0x00000000000000000000000000000071b1d76edf770edff98f00ff4deec264cd","0x00000000000000000000000000000000001e48128e68794d8861fcbb2986a383","0x000000000000000000000000000000d3a2af4915ae6d86b097adc377fafda2d4","0x000000000000000000000000000000000006359de9ca452dab3a4f1f8d9c9d98","0x0000000000000000000000000000000d9d719a8b9f020ad3642d60fe704e696f","0x00000000000000000000000000000000000ddfdbbdefc4ac1580ed38e12cfa49","0x0000000000000000000000000000008289fe9754ce48cd01b7be96a861b5e157","0x00000000000000000000000000000000000ff3e0896bdea021253b3d360fa678","0x0000000000000000000000000000000d9d719a8b9f020ad3642d60fe704e696f","0x00000000000000000000000000000000000ddfdbbdefc4ac1580ed38e12cfa49","0x0000000000000000000000000000008289fe9754ce48cd01b7be96a861b5e157","0x00000000000000000000000000000000000ff3e0896bdea021253b3d360fa678","0x000000000000000000000000000000f968b227a358a305607f3efc933823d288","0x00000000000000000000000000000000000eaf8adb390375a76d95e918b65e08","0x000000000000000000000000000000bb34b4b447aae56f5e24f81c3acd6d547f","0x00000000000000000000000000000000002175d012746260ebcfe339a91a81e1","0x0000000000000000000000000000005b739ed2075f2b046062b8fc6a2d1e9863","0x00000000000000000000000000000000001285cd1030d338c0e1603b4da2c838","0x00000000000000000000000000000027447d6c281eb38b2b937af4a516d60c04","0x000000000000000000000000000000000019bc3d980465fbb4a656a74296fc58","0x000000000000000000000000000000b484788ace8f7df86dd5e325d2e9b12599","0x00000000000000000000000000000000000a2ca0d10eb7b767114ae230b728d3","0x000000000000000000000000000000c6dfc7092f16f95795e437664498b8
8d53","0x0000000000000000000000000000000000131067b4e4d95a4f6f8cf5c9b5450a","0x0f413f22eec51f2a02800e0cafaeec1d92d744fbbaef213c687b9edabd6985f5","0x21230f4ff26c80ffb5d037a9d1d26c3f955ca34cbeca4f54db6656b932967a0c","0x0521f877fe35535767f99597cc50effbd283dcae6812ee0a7620d796ccbfd642","0x202b01350a9cc5c20ec0f3eaada338c0a3b793811bd539418ffa3cc4302615e2","0x2d1214d9b0d41058ad4a172d9c0aecc5bdabe95e687c3465050c6b5396509be4","0x1113b344a151b0af091cb28d728b752ebb4865da6cd7ee68471b961ca5cf69b9","0x2aa66d0954bb83e17bd5c9928d3aa7a7df75d741d409f7c15ba596804ba643fb","0x2e26bc7a530771ef7a95d5360d537e41cf94d8a0942764ff09881c107f91a106","0x0f14f32b921bb63ad1df00adab7c82af58ea8aa7f353f14b281208d8c5fab504","0x13429515c0c53b6502bbcdf545defb3cb69a986c9263e070fcbb397391aae1a3","0x1f21cac5e2f262afc1006a21454cc6bcb018c44e53ad8ab61cebbac99e539176","0x2a9886a6ddc8a61b097c668cd362fc8acdee8dde74f7b1af192c3e060bb2948f","0x2d718181e408ead2e9bcd30a84ad1fccbaf8d48ab6d1820bad4933d284b503c4","0x2634c1aafc902f14508f34d3d7e9d485f42d1a4c95b5a1ef73711ed0d3c68d77","0x092ede9777e6472ce5ffd8c963d466006189e960e2c591d338dc8d4af1a057fb","0x1cba45b17fd24f1cb1b4ab7b83eee741f6c77ba70a497dc4de259eceb7d5ea26","0x246e887c7bf2e17f919b2393b6e9b00b33e8822d862544a775aac05cb7bff710","0x04c3f539fe8689971948afcb437f1ecbd444a5bddaca1c8a450348dcd8480047","0x20c6a423ae4fd58e8951aa378d02d77baf90508ceb48856db2319d70938b186e","0x1bcf8786b554b3316d8ebdbc9d006a4e5d4865aad512ffd404b7f83550d3d030","0x09ab038260518f0970564afcd6bf22e2abf6b1fa5e12a327bbf195b6ca5edd78","0x1024e32554746f89c195286ba6ccfc9765e5d14bbe8064bc6fdf22d16ec6b495","0x17706656f8dbd7e47bb257a6428f0cb7278ea02fa9e6ce431d7bcc9133fba9c7","0x25a3e8a33c15ef2a4dd16313a6049bf1d468b4cdc141f238f2d51a1e8e1c22b3","0x1198863f08006edb27aee23164fb117a4ddec1bf1ed89807aa907e5cd24bf068","0x1862b4856b5b4d4a064f873e221703e4e2cd1ebfca1337dedca56485c38ed5a0","0x062214af1ea6dd6bf8895b92d394571c43970b6f967e1c794624d96071b25ad3","0x1e5be9428ddcf1f9b0cbafc28101e792ec5cf73852b0cd0b84fbff71b4490e09","0x2d4189bea5b1e30f63c64bd26df82f18bcaf885ec8887b54634b2557869ce87f","0x0f2e5d9a908850e9d44925e17d8b12d1adb1ed029799c9b5858598504242bbc0","0x3050dc85746a57931d99f3f35e77c2ba561fba0baa018b79ff1fd544026833ae","0x2a591a32437e5e0b875a137fd868bd1b6dbc003ff1b661f26e00627cc7c5cf47","0x27946841e1670ad9c65717016d0cedf524724217236e81b9fd0a264a36ebfb0e","0x0fc396e9d19d6e68e289602e292ee345542d0d28bf6de34fa62cc577cbdfb1df","0x08e7433a07a44c0c9c4dd4b273a2685bbd1a91fd5cf2b43409458fab42a23e1b","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x000000000000000000000000000
0000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x000000000000000000000000000000000000000000000000000000000000000
0","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x000000000000000000000000000000
0000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x12bd9bfb029c3503a5c6deea87b0a0f11bb9f7ea584af2d48f3e48d7e09247ae","0x2ccc4810748c0a82dfc0f063d0b8c7999ffe9474653080e6ef92b3cb7a428784","0x08eb574d7fecadadb508c8bd35fdad06b99110609d679763c2e3645229b1b95a","0x0f1a65e747c8021ed7c454a4be1e89b1bce66ead9ed980fa98a7a050eafe98a1","0x1c8ff9e36684ec71614dee4c17859b06c742089f6029d3694a16e00dac9b57f1","0x0303101a8ba712aeca4da85b767ab8d3ecf489ec7d746f8ee20041717cc000e9","0x0aaf64c65e7088e5596108c9601467911fea809ca6540d79af77e6e66e36cd99","0x17caf164ce74ea7edfb1390e07763d2197797ec26661b92cde18a98d61d2fddc","0x18cb055c7ad6d01437725bb457681d81f3ecadc4f35d838a3c13daf25a44456a",
"0x2d78602b8bbcd32b36a99a6e2d248e7fe044ef1b50813133370412f9ef5299f0","0x2b139276ea86d426a115479e4154f72a6bd83a6253bf13e9670dc6b4664378f0","0x127c7837b384902c39a104036c09546728571c46c8166b1b9b13b3a615ebb781","0x05faa4816f83cf0189a482ad943c94b9ec6474002f2b327f8698763ad0ea0985","0x2f90359cc30ee693fb3aced96523cf7aebd152c22329eee56a398d9a4ac0628e","0x0a71beaf17a59c5a238f04c1f203848d87502c5057a78c13f0cfb0f9876e7714","0x2696c1e6d089556adaeb95c8a5e3065b00a393a38c2d69e9bd6ce8cdc49d87da","0x1f3d165a7dc6564a036e451eb9cb7f1e1cb1e6d29daa75e3f135ea3e58a79ccd","0x1473a660819bdd838d56122b72b32b267211e9f1103239480ec50fa85c9e1035","0x0a8ccaeb22451f391b3fc3467c8e6e900270a7afb7b510e8acf5a4f06f1c0888","0x03b3080afc0658cc87e307758cebc171921f43eca159b9dedf7f72aa8dd926bd","0x2dd7d6663fa0e1755dfafac352c361fcd64c7f4d53627e3646870ac169cc4a07","0x1ec54b883f5f35ccad0e75695af20790d9860104095bab34c9bf01628dd40cb9","0x193dff50f83c241f7a9e087a29ce72ecf3f6d8563593f786dcd04c32bcfd4ced","0x135122c0dae26cda8ca1c09de8225064ad86d10423ab0aaa53b481aa4626e1d6","0x08d5a56cbfab5aeed56d3cdd7fb6b30fc26b0c1a5b63fccd7fa44c53ba6fd35a","0x0d12f126dfa2daad3726d00ca339284cc22e36c6d81bb7a4b95c6f9598b60e7c","0x2e8b24bbdf2fd839d3c7cae1f0eeb96bfcfaeef30b27476f2fafcb17da78cd5e","0x2364acfe0cea39b7f749c5f303b99504977357925f810f684c60f35d16315211","0x06ca062eb70b8c51cfac35345e7b6b51f33a8ec9ebe204fb9b4911200bf508b7","0x266c0aa1ccb97186815bf69084f600d06ddd934e59a38dfe602ee5d6b9487f22","0x1d817537a49c6d0e3b4b65c6665334b91d7593142e60065048be9e55ceb5e7ab","0x05e9b7256a368df053c691952b59e9327a7c12ed322bbd6f72c669b9b9c26d49","0x05e9b7256a368df053c691952b59e9327a7c12ed322bbd6f72c669b9b9c26d49","0x25b77026673a1e613e50df0e88fb510973739d5f9064bd364079a9f884209632","0x25c9bc7a3f6aae3d43ff68b5614b34b5eaceff37157b37347995d231784ac1fd","0x085f69baef22680ae15f4801ef4361ebe9c7fc24a94b5bc2527dce8fb705439e","0x0d7c6b9ce31bfc32238a205455baf5ffe99cd30eb0f7bb5b504e1d4501e01382","0x1001a8cc4bc1221c814fba0eddcf3c40619b133373640c600de5bed0a0a05b10","0x20f5894be90e52977cb70f4f4cbd5101693db0360848939750db7e91109d54b6","0x22c09cb26db43f0599408b4daed0f4f496c66424e6affa41c14387d8e0af851b","0x24e5f41357798432426a9549d71e8cc681eaebacbe87f6e3bf38e85de5aa2f3d","0x06eb90100c736fbf2b87432d7821ecdc0b365024739bc36363d48b905973f5b9","0x000000000000000000000000000000ece6d09ed58e9f5661c01140b10558a8c2","0x000000000000000000000000000000000012b6e4f37adcb34b8e88ff8b6eebce","0x000000000000000000000000000000b226a2bb93593fa1fab19a44767828a3f5","0x00000000000000000000000000000000002b5b518342030543092e1428a7e33c","0x00000000000000000000000000000022ba33857034a0574c216eb3c1ddff3025","0x00000000000000000000000000000000001918e58df857985a7cf9eae7802165","0x00000000000000000000000000000045c2d840b96fb6106cc14dcad89dd5f675","0x00000000000000000000000000000000000afdfac1e3a1febdd0208867d44f98","0x00000000000000000000000000000042ebed6c5ec45d794f119aef24c192af0f","0x00000000000000000000000000000000002d05ef250900bbcc5751bbeb210d6a","0x00000000000000000000000000000060d604bdda48eecc90ed065bd9770e1323","0x00000000000000000000000000000000001fed91c63d0041660c1cbc84c2ffbb","0x00000000000000000000000000000054196b549cde36092e8184c7f4f7d878de","0x00000000000000000000000000000000000153f26a01294329922b492485cc31","0x00000000000000000000000000000056ebea579d10dbb440f0222931df2c0059","0x00000000000000000000000000000000000d2cbc61ce5b7cdd7fce398da4637b","0x000000000000000000000000000000e2b9512360b9797d96675d8a2fd2f7aa5d","0x000000000000000000000000000000000025742905f105ff895f74e7c3daa34a","0x000000000000000000000000000000a2d
d7df55db59bd41b83518d4403fbc382","0x00000000000000000000000000000000002c1d9c3cbb9371d4cc4e9f900b9a46","0x0000000000000000000000000000000000000000000000000000000000000001","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000002","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000001","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000002","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000001","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000002","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000001","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000002","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000001","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000002","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000001","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000002","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000001","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000002","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000001","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000002","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000001","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000002","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000001","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000002","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000001","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000002","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000001","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000002","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000001","0x0000000000000000000000000000000000000000000000000000000000000000","0x
0000000000000000000000000000000000000000000000000000000000000002","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000001","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000002","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000001","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000002","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000001","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000002","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000001","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000002","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000001","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000002","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000001","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000002","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000001","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000002","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000001","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000002","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000001","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000002","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000001","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000002","0x0000000000000000000000000000000000000000000000000000000000000000","0x000000000000000000000000000000bcf12ae40c9425c3e67654b84181f90502","0x00000000000000000000000000000000000b6d3faa8a71ff6ef1aa887b7307cf","0x0000000000000000000000000000001f6f719acc23b8f84808c0275d61cfb456","0x0000000000000000000000000000000000296030933ed0c134457ae71c393dfe","0x000000000000000000000000000000ebe1a57cdd7d3d763289b40ef5ed9a7ae0","0x000000000000000000000000000000000010f30483e7df51fca2316d3367603c","0x0000000000000000000000000000000149b7b283ab18060618c8e051864c03cd","0x00000000000000000000000000000000001ef7763235a3a25e241a5f06704dc3"] public_inputs = ["0x0000000000000000000000000000000000000000000000000000000000000003"] verification_key = 
["0x0000000000000000000000000000000000000000000000000000000000000020","0x0000000000000000000000000000000000000000000000000000000000000011","0x0000000000000000000000000000000000000000000000000000000000000001","0x00000000000000000000000000000060e430ad1c23bfcf3514323aae3f206e84","0x00000000000000000000000000000000001b5c3ff4c2458d8f481b1c068f27ae","0x000000000000000000000000000000bb510ab2112def34980e4fc6998ad9dd16","0x00000000000000000000000000000000000576e7c105b43e061e13cb877fefe1","0x000000000000000000000000000000ced074785d11857b065d8199e6669a601c","0x00000000000000000000000000000000000053b48a4098c1c0ae268f273952f7","0x000000000000000000000000000000d1d4b26e941db8168cee8f6de548ae0fd8","0x00000000000000000000000000000000001a9adf5a6dadc3d948bb61dfd63f4c","0x0000000000000000000000000000009ce1faac6f8de6ebb18f1db17372c82ad5","0x00000000000000000000000000000000002002681bb417184b2df070a16a3858","0x000000000000000000000000000000161baa651a8092e0e84725594de5aba511","0x00000000000000000000000000000000000be0064399c2a1efff9eb0cdcb2223","0x0000000000000000000000000000008673be6fd1bdbe980a29d8c1ded54381e7","0x000000000000000000000000000000000008a5158a7d9648cf1d234524c9fa0c","0x0000000000000000000000000000002b4fce6e4b1c72062b296d49bca2aa4130","0x00000000000000000000000000000000002e45a9eff4b6769e55fb710cded44f","0x00000000000000000000000000000072b85bf733758b76bcf97333efb85a23e3","0x000000000000000000000000000000000017da0ea508994fc82862715e4b5592","0x00000000000000000000000000000094fa74695cf058dba8ff35aec95456c6c3","0x0000000000000000000000000000000000211acddb851061c24b8f159e832bd1","0x000000000000000000000000000000303b5e5c531384b9a792e11702ad3bcab0","0x00000000000000000000000000000000000d336dff51a60b8833d5d7f6d4314c","0x0000000000000000000000000000009f825dde88092070747180d581c342444a","0x0000000000000000000000000000000000237fbd6511a03cca8cac01b555fe01","0x0000000000000000000000000000007c313205159495df6d8de292079a4844ff","0x000000000000000000000000000000000018facdfc468530dd45e8f7a1d38ce9","0x0000000000000000000000000000000d1ce33446fc3dc4ab40ca38d92dac74e1","0x00000000000000000000000000000000000852d8e3e0e8f4435af3e94222688b","0x0000000000000000000000000000006c04ee19ec1dfec87ed47d6d04aa158de2","0x000000000000000000000000000000000013240f97a584b45184c8ec31319b5f","0x000000000000000000000000000000cefb5d240b07ceb4be26ea429b6dc9d9e0","0x00000000000000000000000000000000002dad22022121d689f57fb38ca21349","0x000000000000000000000000000000c9f189f2a91aeb664ce376d8b157ba98f8","0x00000000000000000000000000000000002531a51ad54f124d58094b219818d2","0x000000000000000000000000000000ef1e6db71809307f677677e62b4163f556","0x0000000000000000000000000000000000272da4396fb2a7ee0638b9140e523d","0x0000000000000000000000000000002e54c0244a7732c87bc4712a76dd8c83fb","0x000000000000000000000000000000000007db77b3e04b7eba9643da57cbbe4d","0x000000000000000000000000000000e0dfe1ddd7f74ae0d636c910c3e85830d8","0x00000000000000000000000000000000000466fa9b57ec4664abd1505b490862","0x0000000000000000000000000000009ee55ae8a32fe5384c79907067cc27192e","0x00000000000000000000000000000000000799d0e465cec07ecb5238c854e830","0x0000000000000000000000000000001d5910ad361e76e1c241247a823733c39f","0x00000000000000000000000000000000002b03f2ccf7507564da2e6678bef8fe","0x000000000000000000000000000000231147211b3c75e1f47d150e4bbd2fb22e","0x00000000000000000000000000000000000d19ee104a10d3c701cfd87473cbbe","0x0000000000000000000000000000006705f3f382637d00f698e2c5c94ed05ae9","0x00000000000000000000000000000000000b9c792da28bb60601dd7ce4b74e68","0x000000000000000000000000000000ac
5acc8cc21e4ddb225c510670f80c80b3","0x00000000000000000000000000000000002da9d3fa57343e6998aba19429b9fa","0x0000000000000000000000000000004bacbf54b7c17a560df0af18b6d0d527be","0x00000000000000000000000000000000000faea33aeca2025b22c288964b21eb","0x000000000000000000000000000000492e756298d68d6e95de096055cc0336c3","0x00000000000000000000000000000000001a12a12f004859e5a3675c7315121b","0x000000000000000000000000000000893d521d512f30e6d32afbbc0cecd8ee00","0x00000000000000000000000000000000001674b3c1ef12c6da690631e0d86c04","0x000000000000000000000000000000aa6cb02a52e7a613873d4ac9b411349945","0x00000000000000000000000000000000001ecb1fe9c493add46751f9940f73e1","0x00000000000000000000000000000045b3d362ca82cba69fb2b9c733a5b8c351","0x000000000000000000000000000000000019a683586af466e331945b732d2f8c","0x000000000000000000000000000000fc79b052dfdfe67c0ecfc06b4267ffd694","0x00000000000000000000000000000000001336a70c396393038d5e9913744ac2","0x0000000000000000000000000000005450d29af1e9438e91cd33ddeb2548226e","0x000000000000000000000000000000000000993a602891cfd0e6f6ecf7404933","0x000000000000000000000000000000498efddab90a32e9b2db729ed6e9b40192","0x00000000000000000000000000000000002425efebe9628c63ca6fc28bdb5901","0x000000000000000000000000000000d8488157f875a21ab5f93f1c2b641f3de9","0x0000000000000000000000000000000000290f95ada3936604dc4b14df7504e3","0x0000000000000000000000000000005d6902187f3ed60dcce06fca211b40329a","0x00000000000000000000000000000000002b5870a6ba0b20aaa0178e5adfbc36","0x000000000000000000000000000000e5c2519171fa0e548fc3c4966ffc1ce570","0x00000000000000000000000000000000001cb8d8f4793b7debbdc429389dbf2d","0x000000000000000000000000000000a3ee22dd60456277b86c32a18982dcb185","0x00000000000000000000000000000000002493c99a3d068b03f8f2b8d28b57ce","0x000000000000000000000000000000f6c3731486320082c20ec71bbdc92196c1","0x00000000000000000000000000000000001ded39c4c8366469843cd63f09ecac","0x000000000000000000000000000000494997477ab161763e46601d95844837ef","0x00000000000000000000000000000000002e0cddbc5712d79b59cb3b41ebbcdd","0x000000000000000000000000000000426db4c64531d350750df62dbbc41a1bd9","0x0000000000000000000000000000000000303126892f664d8d505964d14315ec","0x00000000000000000000000000000076a6b2c6040c0c62bd59acfe3e3e125672","0x000000000000000000000000000000000000874a5ad262eecc6b565e0b085074","0x000000000000000000000000000000ef082fb517183c9c6841c2b8ef2ca1df04","0x0000000000000000000000000000000000127b2a745a1b74968c3edc18982b9b","0x000000000000000000000000000000c9efd4f8c3d56e1eb23d789a8f710d5be6","0x000000000000000000000000000000000015a18748490ff4c2b1871081954e86","0x000000000000000000000000000000a0011ef987dc016ab110eacd554a1d8bbf","0x00000000000000000000000000000000002097c84955059442a95df075833071","0x000000000000000000000000000000d38e9426ad3085b68b00a93c17897c2877","0x00000000000000000000000000000000002aecd48089890ea0798eb952c66824","0x00000000000000000000000000000078d8a9ce405ce559f441f2e71477ff3ddb","0x00000000000000000000000000000000001216bdb2f0d961bb8a7a23331d2150","0x0000000000000000000000000000000000000000000000000000000000000001","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000002","0x0000000000000000000000000000000000000000000000000000000000000000","0x000000000000000000000000000000ee40d90bea71fba7a412dd61fcf34e8ceb","0x0000000000000000000000000000000000140b0936c323fd2471155617b6af56","0x0000000000000000000000000000002b90071823185c5ff8e440fd3d73b6fefc","0x00000000000000000000000000000000002b6c10790a5f6631c87d652e059df4"] 
\ No newline at end of file diff --git a/noir/noir-repo/test_programs/execution_success/verify_honk_proof/src/main.nr b/noir/noir-repo/test_programs/execution_success/verify_honk_proof/src/main.nr index 9fd55df16e1..ecfd18f3837 100644 --- a/noir/noir-repo/test_programs/execution_success/verify_honk_proof/src/main.nr +++ b/noir/noir-repo/test_programs/execution_success/verify_honk_proof/src/main.nr @@ -1,11 +1,12 @@ // This circuit aggregates a single Honk proof from `assert_statement_recursive`. +global SIZE_OF_PROOF_IF_LOGN_IS_28 : u32 = 409; fn main( verification_key: [Field; 103], // This is the proof without public inputs attached. // // This means: the size of this does not change with the number of public inputs. - proof: [Field; 156], + proof: [Field; SIZE_OF_PROOF_IF_LOGN_IS_28], public_inputs: pub [Field; 1], // This is currently not public. It is fine given that the vk is a part of the circuit definition. // I believe we want to eventually make it public too though. From 99ce26f568b5210ac800889b28d396aa9c9d7e3e Mon Sep 17 00:00:00 2001 From: Lasse Herskind <16536249+LHerskind@users.noreply.github.com> Date: Fri, 28 Jun 2024 23:01:30 +0100 Subject: [PATCH 18/29] feat: example of private token transfer event (#7242) A small extension on some of the work @sklppy88 have been doing lately to satisfy @rahul-kothari and have an example where you end up with your "transfer history". The events currently only have neat macros autogenerated for when we are using a struct of fields only, but works just fine for us here. --------- Co-authored-by: esau <152162806+sklppy88@users.noreply.github.com> --- .../encrypted_event_emission.nr | 28 ++++++++++++++++++- .../contracts/test_contract/src/main.nr | 6 ++-- .../contracts/test_log_contract/src/main.nr | 6 ++-- .../contracts/token_contract/src/main.nr | 14 +++++++++- .../transfer_private.test.ts | 12 +++++++- 5 files changed, 57 insertions(+), 9 deletions(-) diff --git a/noir-projects/aztec-nr/aztec/src/encrypted_logs/encrypted_event_emission.nr b/noir-projects/aztec-nr/aztec/src/encrypted_logs/encrypted_event_emission.nr index a027c168515..935cd0fa48f 100644 --- a/noir-projects/aztec-nr/aztec/src/encrypted_logs/encrypted_event_emission.nr +++ b/noir-projects/aztec-nr/aztec/src/encrypted_logs/encrypted_event_emission.nr @@ -1,6 +1,7 @@ use crate::{ context::PrivateContext, event::event_interface::EventInterface, - encrypted_logs::payload::compute_encrypted_event_log, oracle::logs_traits::LensForEncryptedEvent + encrypted_logs::payload::compute_encrypted_event_log, oracle::logs_traits::LensForEncryptedEvent, + oracle::unsafe_rand::unsafe_rand }; use dep::protocol_types::{address::AztecAddress, grumpkin_point::GrumpkinPoint}; @@ -20,6 +21,20 @@ fn emit_with_keys( } pub fn encode_and_encrypt_event( + context: &mut PrivateContext, + ov: AztecAddress, + iv: AztecAddress +) -> fn[(&mut PrivateContext, AztecAddress, AztecAddress)](Event) -> () where Event: EventInterface, [u8; NB]: LensForEncryptedEvent { + | e: Event | { + let header = context.get_header(); + let ovpk = header.get_ovpk_m(context, ov); + let ivpk = header.get_ivpk_m(context, iv); + let randomness = unsafe_rand(); + emit_with_keys(context, randomness, e, ovpk, ivpk); + } +} + +pub fn encode_and_encrypt_event_with_randomness( context: &mut PrivateContext, randomness: Field, ov: AztecAddress, @@ -34,6 +49,17 @@ pub fn encode_and_encrypt_event( } pub fn encode_and_encrypt_event_with_keys( + context: &mut PrivateContext, + ovpk: GrumpkinPoint, + ivpk: GrumpkinPoint +) -> fn[(&mut 
PrivateContext, GrumpkinPoint, GrumpkinPoint)](Event) -> () where Event: EventInterface, [u8; NB]: LensForEncryptedEvent { + | e: Event | { + let randomness = unsafe_rand(); + emit_with_keys(context, randomness, e, ovpk, ivpk); + } +} + +pub fn encode_and_encrypt_event_with_keys_with_randomness( context: &mut PrivateContext, randomness: Field, ovpk: GrumpkinPoint, diff --git a/noir-projects/noir-contracts/contracts/test_contract/src/main.nr b/noir-projects/noir-contracts/contracts/test_contract/src/main.nr index eb34674b095..1db5e91ff24 100644 --- a/noir-projects/noir-contracts/contracts/test_contract/src/main.nr +++ b/noir-projects/noir-contracts/contracts/test_contract/src/main.nr @@ -8,7 +8,7 @@ contract Test { PrivateContext, PrivateImmutable, PrivateSet, SharedImmutable }; use dep::aztec::encrypted_logs::encrypted_note_emission::encode_and_encrypt_note; - use dep::aztec::encrypted_logs::encrypted_event_emission::encode_and_encrypt_event_with_keys; + use dep::aztec::encrypted_logs::encrypted_event_emission::encode_and_encrypt_event_with_keys_with_randomness; use dep::aztec::protocol_types::{ abis::private_circuit_public_inputs::PrivateCircuitPublicInputs, @@ -271,7 +271,7 @@ contract Test { let event = ExampleEvent { value0: fields[0], value1: fields[1], value2: fields[2], value3: fields[3], value4: fields[4] }; event.emit( - encode_and_encrypt_event_with_keys( + encode_and_encrypt_event_with_keys_with_randomness( &mut context, // testing only - a secret random value is passed in here to salt / mask the address 5, @@ -288,7 +288,7 @@ contract Test { let otherEvent = ExampleEvent { value0: 1, value1: 2, value2: 3, value3: 4, value4: 5 }; otherEvent.emit( - encode_and_encrypt_event_with_keys( + encode_and_encrypt_event_with_keys_with_randomness( &mut context, // testing only - a randomness of 0 signals the kerels to not mask the address 0, diff --git a/noir-projects/noir-contracts/contracts/test_log_contract/src/main.nr b/noir-projects/noir-contracts/contracts/test_log_contract/src/main.nr index 41f9772ec52..26b1462de58 100644 --- a/noir-projects/noir-contracts/contracts/test_log_contract/src/main.nr +++ b/noir-projects/noir-contracts/contracts/test_log_contract/src/main.nr @@ -7,7 +7,7 @@ contract TestLog { use dep::value_note::value_note::ValueNote; use dep::aztec::encrypted_logs::incoming_body::EncryptedLogIncomingBody; use dep::aztec::event::event_interface::EventInterface; - use dep::aztec::encrypted_logs::encrypted_event_emission::{encode_and_encrypt_event, encode_and_encrypt_event_with_keys}; + use dep::aztec::encrypted_logs::encrypted_event_emission::encode_and_encrypt_event_with_randomness; use dep::aztec::unencrypted_logs::unencrypted_event_emission::encode_event; #[aztec(event)] @@ -49,7 +49,7 @@ contract TestLog { let event0 = ExampleEvent0 { value0: preimages[0], value1: preimages[1] }; event0.emit( - encode_and_encrypt_event( + encode_and_encrypt_event_with_randomness( &mut context, randomness[0], // outgoing is set to other, incoming is set to msg sender @@ -72,7 +72,7 @@ contract TestLog { let event1 = ExampleEvent1 { value2: preimages[2], value3: preimages[3] }; event1.emit( - encode_and_encrypt_event( + encode_and_encrypt_event_with_randomness( &mut context, randomness[1], // outgoing is set to other, incoming is set to msg sender diff --git a/noir-projects/noir-contracts/contracts/token_contract/src/main.nr b/noir-projects/noir-contracts/contracts/token_contract/src/main.nr index d13145c89e2..dce1ac83275 100644 --- 
a/noir-projects/noir-contracts/contracts/token_contract/src/main.nr +++ b/noir-projects/noir-contracts/contracts/token_contract/src/main.nr @@ -18,7 +18,10 @@ contract Token { use dep::aztec::{ hash::compute_secret_hash, prelude::{NoteGetterOptions, Map, PublicMutable, SharedImmutable, PrivateSet, AztecAddress}, - encrypted_logs::encrypted_note_emission::{encode_and_encrypt_note, encode_and_encrypt_note_with_keys} + encrypted_logs::{ + encrypted_note_emission::{encode_and_encrypt_note, encode_and_encrypt_note_with_keys}, + encrypted_event_emission::{encode_and_encrypt_event, encode_and_encrypt_event_with_keys} + } }; // docs:start:import_authwit @@ -28,6 +31,13 @@ contract Token { use crate::types::{transparent_note::TransparentNote, token_note::{TokenNote, TOKEN_NOTE_LEN}, balances_map::BalancesMap}; // docs:end::imports + #[aztec(event)] + struct Transfer { + from: Field, + to: Field, + amount: Field, + } + // docs:start:storage_struct #[aztec(storage)] struct Storage { @@ -324,6 +334,8 @@ contract Token { let amount = U128::from_integer(amount); storage.balances.sub(from, amount).emit(encode_and_encrypt_note_with_keys(&mut context, from_ovpk, from_ivpk)); storage.balances.add(to, amount).emit(encode_and_encrypt_note_with_keys(&mut context, from_ovpk, to_ivpk)); + + Transfer { from: from.to_field(), to: to.to_field(), amount: amount.to_field() }.emit(encode_and_encrypt_event_with_keys(&mut context, from_ovpk, to_ivpk)); } // docs:end:transfer diff --git a/yarn-project/end-to-end/src/e2e_token_contract/transfer_private.test.ts b/yarn-project/end-to-end/src/e2e_token_contract/transfer_private.test.ts index 4d1536e4df4..8ac1bf96cda 100644 --- a/yarn-project/end-to-end/src/e2e_token_contract/transfer_private.test.ts +++ b/yarn-project/end-to-end/src/e2e_token_contract/transfer_private.test.ts @@ -1,10 +1,12 @@ import { AztecAddress, CompleteAddress, + EventType, Fr, computeAuthWitMessageHash, computeInnerAuthWitHashFromAction, } from '@aztec/aztec.js'; +import { TokenContract } from '@aztec/noir-contracts.js'; import { DUPLICATE_NULLIFIER_ERROR } from '../fixtures/fixtures.js'; import { TokenContractTest } from './token_contract_test.js'; @@ -32,8 +34,16 @@ describe('e2e_token_contract transfer private', () => { const balance0 = await asset.methods.balance_of_private(accounts[0].address).simulate(); const amount = balance0 / 2n; expect(amount).toBeGreaterThan(0n); - await asset.methods.transfer(accounts[1].address, amount).send().wait(); + const tx = await asset.methods.transfer(accounts[1].address, amount).send().wait(); tokenSim.transferPrivate(accounts[0].address, accounts[1].address, amount); + + const events = await wallets[1].getEvents(EventType.Encrypted, TokenContract.events.Transfer, tx.blockNumber!, 1); + + expect(events[0]).toEqual({ + from: accounts[0].address, + to: accounts[1].address, + amount: new Fr(amount), + }); }); it('transfer less than balance to non-deployed account', async () => { From 153b2010c5d79f308779370d240dfaa2a086ca3c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Nicol=C3=A1s=20Venturo?= Date: Fri, 28 Jun 2024 21:12:44 -0300 Subject: [PATCH 19/29] feat!: extend storage read oracle to receive address and block number (#7243) Follow up to #7237, closes https://github.com/AztecProtocol/aztec-packages/issues/7230. I only changed the oracles and not the PXE interface to keep this change as small as possible. I did change the node interface, but made it so you can still do it the old way by passing `'latest'`, which I had to do in a couple places. 
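For reference, a minimal sketch (not part of the patch itself) of how a caller can use the extended node interface after this change. It assumes `node` is an already-created AztecNode instance, and the contract address, slot, and block number below are placeholder values; the calls themselves mirror the `getPublicStorageAt(contract, slot, blockNumber)` signature introduced by this patch:

    import { AztecAddress, Fr } from '@aztec/aztec.js';

    // Placeholder contract address and storage slot for illustration only.
    const contractAddress = AztecAddress.random();
    const slot = new Fr(1);

    // Read the slot as of a specific historical block number...
    const valueAtBlock = await node.getPublicStorageAt(contractAddress, slot, 42);
    // ...or keep the old behaviour by passing 'latest'.
    const latestValue = await node.getPublicStorageAt(contractAddress, slot, 'latest');
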
Finally, I added getters for `UnconstrainedContext`, mirroring the work in #7320, which I imagine are the ones we'll use in the vast majority of cases. --- .../src/context/unconstrained_context.nr | 11 ++++- .../aztec-nr/aztec/src/oracle/storage.nr | 42 +++++++++++++------ .../aztec/src/state_vars/public_immutable.nr | 2 +- .../aztec/src/state_vars/public_mutable.nr | 2 +- .../aztec/src/state_vars/shared_immutable.nr | 2 +- .../token_contract/src/test/utils.nr | 3 +- .../aztec-node/src/aztec-node/server.ts | 5 ++- .../src/interfaces/aztec-node.ts | 3 +- .../benchmarks/bench_process_history.test.ts | 2 +- .../benchmarks/bench_publish_rollup.test.ts | 2 +- yarn-project/noir-contracts.js/package.json | 2 +- .../pxe/src/pxe_service/pxe_service.ts | 2 +- .../simulator/src/acvm/oracle/oracle.ts | 14 ++++++- .../simulator/src/acvm/oracle/typed_oracle.ts | 7 +++- .../src/client/client_execution_context.ts | 15 +++++-- .../simulator/src/client/view_data_oracle.ts | 15 +++++-- yarn-project/txe/src/oracle/txe_oracle.ts | 9 +++- .../txe/src/txe_service/txe_service.ts | 9 +++- 18 files changed, 110 insertions(+), 37 deletions(-) diff --git a/noir-projects/aztec-nr/aztec/src/context/unconstrained_context.nr b/noir-projects/aztec-nr/aztec/src/context/unconstrained_context.nr index b36691616e0..eedd97aa8d6 100644 --- a/noir-projects/aztec-nr/aztec/src/context/unconstrained_context.nr +++ b/noir-projects/aztec-nr/aztec/src/context/unconstrained_context.nr @@ -1,4 +1,5 @@ -use dep::protocol_types::address::AztecAddress; +use dep::protocol_types::{address::AztecAddress, traits::Deserialize}; +use crate::oracle::storage::{raw_storage_read, storage_read}; struct UnconstrainedContext { block_number: u32, @@ -35,6 +36,14 @@ impl UnconstrainedContext { fn chain_id(self) -> Field { self.chain_id } + + unconstrained fn raw_storage_read(self: Self, storage_slot: Field) -> [Field; N] { + storage_read(self.this_address(), storage_slot, self.block_number()) + } + + unconstrained fn storage_read(self, storage_slot: Field) -> T where T: Deserialize { + T::deserialize(self.raw_storage_read(storage_slot)) + } } #[oracle(getContractAddress)] diff --git a/noir-projects/aztec-nr/aztec/src/oracle/storage.nr b/noir-projects/aztec-nr/aztec/src/oracle/storage.nr index 92925f889fa..b4a6b1f9102 100644 --- a/noir-projects/aztec-nr/aztec/src/oracle/storage.nr +++ b/noir-projects/aztec-nr/aztec/src/oracle/storage.nr @@ -1,42 +1,58 @@ -use dep::protocol_types::traits::Deserialize; +use dep::protocol_types::{address::AztecAddress, traits::Deserialize}; #[oracle(storageRead)] -unconstrained fn storage_read_oracle(storage_slot: Field, length: Field) -> [Field; N] {} - -unconstrained pub fn raw_storage_read(storage_slot: Field) -> [Field; N] { - storage_read_oracle(storage_slot, N) +unconstrained fn storage_read_oracle( + address: Field, + storage_slot: Field, + block_number: Field, + length: Field +) -> [Field; N] {} + +unconstrained pub fn raw_storage_read( + address: AztecAddress, + storage_slot: Field, + block_number: u32 +) -> [Field; N] { + storage_read_oracle(address.to_field(), storage_slot, block_number as Field, N) } -unconstrained pub fn storage_read(storage_slot: Field) -> T where T: Deserialize { - T::deserialize(raw_storage_read(storage_slot)) +unconstrained pub fn storage_read( + address: AztecAddress, + storage_slot: Field, + block_number: u32 +) -> T where T: Deserialize { + T::deserialize(raw_storage_read(address, storage_slot, block_number)) } mod tests { use crate::oracle::storage::{raw_storage_read, storage_read}; + 
use dep::protocol_types::address::AztecAddress; use std::test::OracleMock; use crate::test::mocks::mock_struct::MockStruct; + global address = AztecAddress::from_field(29); + global slot = 7; + global block_number = 17; + #[test] fn test_raw_storage_read() { - let slot = 7; let written = MockStruct { a: 13, b: 42 }; - let _ = OracleMock::mock("storageRead").with_params((slot, 2)).returns(written.serialize()); + let _ = OracleMock::mock("storageRead").returns(written.serialize()); - let read: [Field; 2] = raw_storage_read(slot); + let read: [Field; 2] = raw_storage_read(address, slot, block_number); assert_eq(read[0], 13); assert_eq(read[1], 42); } #[test] fn test_storage_read() { - let slot = 7; let written = MockStruct { a: 13, b: 42 }; - let _ = OracleMock::mock("storageRead").with_params((slot, 2)).returns(written.serialize()); + let _ = OracleMock::mock("storageRead").returns(written.serialize()); - let read: MockStruct = storage_read(slot); + let read: MockStruct = storage_read(address, slot, block_number); assert_eq(read.a, 13); assert_eq(read.b, 42); } diff --git a/noir-projects/aztec-nr/aztec/src/state_vars/public_immutable.nr b/noir-projects/aztec-nr/aztec/src/state_vars/public_immutable.nr index 32955df7593..a8322463054 100644 --- a/noir-projects/aztec-nr/aztec/src/state_vars/public_immutable.nr +++ b/noir-projects/aztec-nr/aztec/src/state_vars/public_immutable.nr @@ -56,6 +56,6 @@ impl PublicImmutable { impl PublicImmutable { unconstrained pub fn read(self) -> T where T: Deserialize { - storage_read(self.storage_slot) + self.context.storage_read(self.storage_slot) } } diff --git a/noir-projects/aztec-nr/aztec/src/state_vars/public_mutable.nr b/noir-projects/aztec-nr/aztec/src/state_vars/public_mutable.nr index 0d463051717..07038e14984 100644 --- a/noir-projects/aztec-nr/aztec/src/state_vars/public_mutable.nr +++ b/noir-projects/aztec-nr/aztec/src/state_vars/public_mutable.nr @@ -41,6 +41,6 @@ impl PublicMutable { impl PublicMutable { unconstrained pub fn read(self) -> T where T: Deserialize { - storage_read(self.storage_slot) + self.context.storage_read(self.storage_slot) } } diff --git a/noir-projects/aztec-nr/aztec/src/state_vars/shared_immutable.nr b/noir-projects/aztec-nr/aztec/src/state_vars/shared_immutable.nr index f71448809d4..a29f5e9c737 100644 --- a/noir-projects/aztec-nr/aztec/src/state_vars/shared_immutable.nr +++ b/noir-projects/aztec-nr/aztec/src/state_vars/shared_immutable.nr @@ -48,7 +48,7 @@ impl SharedImmutable { impl SharedImmutable { unconstrained pub fn read_public(self) -> T where T: Deserialize { - storage_read(self.storage_slot) + self.context.storage_read(self.storage_slot) } } diff --git a/noir-projects/noir-contracts/contracts/token_contract/src/test/utils.nr b/noir-projects/noir-contracts/contracts/token_contract/src/test/utils.nr index cb5b51339d6..5431b94bc10 100644 --- a/noir-projects/noir-contracts/contracts/token_contract/src/test/utils.nr +++ b/noir-projects/noir-contracts/contracts/token_contract/src/test/utils.nr @@ -72,10 +72,11 @@ pub fn setup_and_mint(with_account_contracts: bool) -> (&mut TestEnvironment, Az pub fn check_public_balance(token_contract_address: AztecAddress, address: AztecAddress, address_amount: Field) { let current_contract_address = cheatcodes::get_contract_address(); cheatcodes::set_contract_address(token_contract_address); + let block_number = cheatcodes::get_block_number(); let balances_slot = Token::storage().public_balances.slot; let address_slot = derive_storage_slot_in_map(balances_slot, address); - let amount: 
U128 = storage_read(address_slot); + let amount: U128 = storage_read(token_contract_address, address_slot, block_number); assert(amount.to_field() == address_amount, "Public balance is not correct"); cheatcodes::set_contract_address(current_contract_address); } diff --git a/yarn-project/aztec-node/src/aztec-node/server.ts b/yarn-project/aztec-node/src/aztec-node/server.ts index 4e1eb36c75d..b7cbfdb577a 100644 --- a/yarn-project/aztec-node/src/aztec-node/server.ts +++ b/yarn-project/aztec-node/src/aztec-node/server.ts @@ -704,10 +704,11 @@ export class AztecNodeService implements AztecNode { * * @param contract - Address of the contract to query. * @param slot - Slot to query. + * @param blockNumber - The block number at which to get the data or 'latest'. * @returns Storage value at the given contract slot. */ - public async getPublicStorageAt(contract: AztecAddress, slot: Fr): Promise { - const committedDb = await this.#getWorldState('latest'); + public async getPublicStorageAt(contract: AztecAddress, slot: Fr, blockNumber: L2BlockNumber): Promise { + const committedDb = await this.#getWorldState(blockNumber); const leafSlot = computePublicDataTreeLeafSlot(contract, slot); const lowLeafResult = await committedDb.getPreviousValueIndex(MerkleTreeId.PUBLIC_DATA_TREE, leafSlot.toBigInt()); diff --git a/yarn-project/circuit-types/src/interfaces/aztec-node.ts b/yarn-project/circuit-types/src/interfaces/aztec-node.ts index 0890b0c6904..a79a28ece15 100644 --- a/yarn-project/circuit-types/src/interfaces/aztec-node.ts +++ b/yarn-project/circuit-types/src/interfaces/aztec-node.ts @@ -291,9 +291,10 @@ export interface AztecNode { * * @param contract - Address of the contract to query. * @param slot - Slot to query. + * @param blockNumber - The block number at which to get the data or 'latest'. * @returns Storage value at the given contract slot. */ - getPublicStorageAt(contract: AztecAddress, slot: Fr): Promise; + getPublicStorageAt(contract: AztecAddress, slot: Fr, blockNumber: L2BlockNumber): Promise; /** * Returns the currently committed block header. 
diff --git a/yarn-project/end-to-end/src/benchmarks/bench_process_history.test.ts b/yarn-project/end-to-end/src/benchmarks/bench_process_history.test.ts index 0f146badc93..1932986a768 100644 --- a/yarn-project/end-to-end/src/benchmarks/bench_process_history.test.ts +++ b/yarn-project/end-to-end/src/benchmarks/bench_process_history.test.ts @@ -54,7 +54,7 @@ describe('benchmarks/process_history', () => { const node = await AztecNodeService.createAndSync(nodeConfig); // call getPublicStorageAt (which calls #getWorldState, which calls #syncWorldState) to force a sync with // world state to ensure the node has caught up - await node.getPublicStorageAt(AztecAddress.random(), Fr.random()); + await node.getPublicStorageAt(AztecAddress.random(), Fr.random(), 'latest'); return node; }); diff --git a/yarn-project/end-to-end/src/benchmarks/bench_publish_rollup.test.ts b/yarn-project/end-to-end/src/benchmarks/bench_publish_rollup.test.ts index b3bb55d6489..67b3a8f1c7f 100644 --- a/yarn-project/end-to-end/src/benchmarks/bench_publish_rollup.test.ts +++ b/yarn-project/end-to-end/src/benchmarks/bench_publish_rollup.test.ts @@ -37,7 +37,7 @@ describe('benchmarks/publish_rollup', () => { // world state to ensure the node has caught up context.logger.info(`Starting new aztec node`); const node = await AztecNodeService.createAndSync({ ...context.config, disableSequencer: true }); - await node.getPublicStorageAt(AztecAddress.random(), Fr.random()); + await node.getPublicStorageAt(AztecAddress.random(), Fr.random(), 'latest'); // Spin up a new pxe and sync it, we'll use it to test sync times of new accounts for the last block context.logger.info(`Starting new pxe`); diff --git a/yarn-project/noir-contracts.js/package.json b/yarn-project/noir-contracts.js/package.json index 0a86d66706b..6bf009796df 100644 --- a/yarn-project/noir-contracts.js/package.json +++ b/yarn-project/noir-contracts.js/package.json @@ -73,4 +73,4 @@ "engines": { "node": ">=18" } -} \ No newline at end of file +} diff --git a/yarn-project/pxe/src/pxe_service/pxe_service.ts b/yarn-project/pxe/src/pxe_service/pxe_service.ts index 87c74997393..7549f45efef 100644 --- a/yarn-project/pxe/src/pxe_service/pxe_service.ts +++ b/yarn-project/pxe/src/pxe_service/pxe_service.ts @@ -293,7 +293,7 @@ export class PXEService implements PXE { if (!(await this.getContractInstance(contract))) { throw new Error(`Contract ${contract.toString()} is not deployed`); } - return await this.node.getPublicStorageAt(contract, slot); + return await this.node.getPublicStorageAt(contract, slot, 'latest'); } public async getIncomingNotes(filter: IncomingNotesFilter): Promise { diff --git a/yarn-project/simulator/src/acvm/oracle/oracle.ts b/yarn-project/simulator/src/acvm/oracle/oracle.ts index d2cd8ddf7d1..b3f04b97429 100644 --- a/yarn-project/simulator/src/acvm/oracle/oracle.ts +++ b/yarn-project/simulator/src/acvm/oracle/oracle.ts @@ -287,8 +287,18 @@ export class Oracle { return message.toFields().map(toACVMField); } - async storageRead([startStorageSlot]: ACVMField[], [numberOfElements]: ACVMField[]): Promise { - const values = await this.typedOracle.storageRead(fromACVMField(startStorageSlot), +numberOfElements); + async storageRead( + [contractAddress]: ACVMField[], + [startStorageSlot]: ACVMField[], + [blockNumber]: ACVMField[], + [numberOfElements]: ACVMField[], + ): Promise { + const values = await this.typedOracle.storageRead( + fromACVMField(contractAddress), + fromACVMField(startStorageSlot), + +blockNumber, + +numberOfElements, + ); return 
values.map(toACVMField); } diff --git a/yarn-project/simulator/src/acvm/oracle/typed_oracle.ts b/yarn-project/simulator/src/acvm/oracle/typed_oracle.ts index 690ccf8ac86..fdda81ac6ae 100644 --- a/yarn-project/simulator/src/acvm/oracle/typed_oracle.ts +++ b/yarn-project/simulator/src/acvm/oracle/typed_oracle.ts @@ -190,7 +190,12 @@ export abstract class TypedOracle { throw new OracleMethodNotAvailableError('getL1ToL2MembershipWitness'); } - storageRead(_startStorageSlot: Fr, _numberOfElements: number): Promise { + storageRead( + _contractAddress: Fr, + _startStorageSlot: Fr, + _blockNumber: number, + _numberOfElements: number, + ): Promise { throw new OracleMethodNotAvailableError('storageRead'); } diff --git a/yarn-project/simulator/src/client/client_execution_context.ts b/yarn-project/simulator/src/client/client_execution_context.ts index 7da0e48928d..f8f78336619 100644 --- a/yarn-project/simulator/src/client/client_execution_context.ts +++ b/yarn-project/simulator/src/client/client_execution_context.ts @@ -678,16 +678,25 @@ export class ClientExecutionContext extends ViewDataOracle { /** * Read the public storage data. + * @param contractAddress - The address to read storage from. * @param startStorageSlot - The starting storage slot. + * @param blockNumber - The block number to read storage at. * @param numberOfElements - Number of elements to read from the starting storage slot. */ - public override async storageRead(startStorageSlot: Fr, numberOfElements: number): Promise { + public override async storageRead( + contractAddress: Fr, + startStorageSlot: Fr, + blockNumber: number, + numberOfElements: number, + ): Promise { const values = []; for (let i = 0n; i < numberOfElements; i++) { const storageSlot = new Fr(startStorageSlot.value + i); - const value = await this.aztecNode.getPublicStorageAt(this.callContext.storageContractAddress, storageSlot); - this.log.debug(`Oracle storage read: slot=${storageSlot.toString()} value=${value}`); + const value = await this.aztecNode.getPublicStorageAt(contractAddress, storageSlot, blockNumber); + this.log.debug( + `Oracle storage read: slot=${storageSlot.toString()} address-${contractAddress.toString()} value=${value}`, + ); values.push(value); } diff --git a/yarn-project/simulator/src/client/view_data_oracle.ts b/yarn-project/simulator/src/client/view_data_oracle.ts index fd1710205dc..75e7f180233 100644 --- a/yarn-project/simulator/src/client/view_data_oracle.ts +++ b/yarn-project/simulator/src/client/view_data_oracle.ts @@ -260,16 +260,25 @@ export class ViewDataOracle extends TypedOracle { /** * Read the public storage data. + * @param contractAddress - The address to read storage from. * @param startStorageSlot - The starting storage slot. + * @param blockNumber - The block number to read storage at. * @param numberOfElements - Number of elements to read from the starting storage slot. 
*/ - public override async storageRead(startStorageSlot: Fr, numberOfElements: number) { + public override async storageRead( + contractAddress: Fr, + startStorageSlot: Fr, + blockNumber: number, + numberOfElements: number, + ) { const values = []; for (let i = 0n; i < numberOfElements; i++) { const storageSlot = new Fr(startStorageSlot.value + i); - const value = await this.aztecNode.getPublicStorageAt(this.contractAddress, storageSlot); + const value = await this.aztecNode.getPublicStorageAt(contractAddress, storageSlot, blockNumber); - this.log.debug(`Oracle storage read: slot=${storageSlot.toString()} value=${value}`); + this.log.debug( + `Oracle storage read: slot=${storageSlot.toString()} address-${contractAddress.toString()} value=${value}`, + ); values.push(value); } return values; diff --git a/yarn-project/txe/src/oracle/txe_oracle.ts b/yarn-project/txe/src/oracle/txe_oracle.ts index 567bf0a656d..eb805c21d08 100644 --- a/yarn-project/txe/src/oracle/txe_oracle.ts +++ b/yarn-project/txe/src/oracle/txe_oracle.ts @@ -434,13 +434,18 @@ export class TXE implements TypedOracle { throw new Error('Method not implemented.'); } - async storageRead(startStorageSlot: Fr, numberOfElements: number): Promise { + async storageRead( + contractAddress: Fr, + startStorageSlot: Fr, + blockNumber: number, // TODO(#7230): use block number + numberOfElements: number, + ): Promise { const db = this.trees.asLatest(); const values = []; for (let i = 0n; i < numberOfElements; i++) { const storageSlot = startStorageSlot.add(new Fr(i)); - const leafSlot = computePublicDataTreeLeafSlot(this.contractAddress, storageSlot).toBigInt(); + const leafSlot = computePublicDataTreeLeafSlot(contractAddress, storageSlot).toBigInt(); const lowLeafResult = await db.getPreviousValueIndex(MerkleTreeId.PUBLIC_DATA_TREE, leafSlot); diff --git a/yarn-project/txe/src/txe_service/txe_service.ts b/yarn-project/txe/src/txe_service/txe_service.ts index de25cc81299..bfd99bd74d1 100644 --- a/yarn-project/txe/src/txe_service/txe_service.ts +++ b/yarn-project/txe/src/txe_service/txe_service.ts @@ -308,9 +308,16 @@ export class TXEService { return toForeignCallResult([]); } - async storageRead(startStorageSlot: ForeignCallSingle, numberOfElements: ForeignCallSingle) { + async storageRead( + contractAddress: ForeignCallSingle, + startStorageSlot: ForeignCallSingle, + blockNumber: ForeignCallSingle, + numberOfElements: ForeignCallSingle, + ) { const values = await this.typedOracle.storageRead( + fromSingle(contractAddress), fromSingle(startStorageSlot), + fromSingle(blockNumber).toNumber(), fromSingle(numberOfElements).toNumber(), ); return toForeignCallResult([toArray(values)]); From d068f99b4a5095dd45ac22044bd5e5b0e5db6d2e Mon Sep 17 00:00:00 2001 From: AztecBot Date: Sat, 29 Jun 2024 02:14:04 +0000 Subject: [PATCH 20/29] git subrepo push --branch=master barretenberg subrepo: subdir: "barretenberg" merged: "b6c8d0277c" upstream: origin: "https://github.com/AztecProtocol/barretenberg" branch: "master" commit: "b6c8d0277c" git-subrepo: version: "0.4.6" origin: "???" commit: "???" 
[skip ci] --- barretenberg/.gitrepo | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/barretenberg/.gitrepo b/barretenberg/.gitrepo index 1d7dc1ca5e2..2311e903e76 100644 --- a/barretenberg/.gitrepo +++ b/barretenberg/.gitrepo @@ -6,7 +6,7 @@ [subrepo] remote = https://github.com/AztecProtocol/barretenberg branch = master - commit = 696e195e46d64aeb8fdc0cb1f73aa336dd208b2a - parent = 2740d600c0d4a18ce90df24e334e572a80233832 + commit = b6c8d0277c75d3e51a9333ac87771344445feb23 + parent = 153b2010c5d79f308779370d240dfaa2a086ca3c method = merge cmdver = 0.4.6 From b9ddf43faa0184692917d543e39507192b2ac64b Mon Sep 17 00:00:00 2001 From: AztecBot Date: Sat, 29 Jun 2024 02:14:39 +0000 Subject: [PATCH 21/29] chore: replace relative paths to noir-protocol-circuits --- noir-projects/aztec-nr/aztec/Nargo.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/noir-projects/aztec-nr/aztec/Nargo.toml b/noir-projects/aztec-nr/aztec/Nargo.toml index 7a1f1af5863..3e6f8c8bf4c 100644 --- a/noir-projects/aztec-nr/aztec/Nargo.toml +++ b/noir-projects/aztec-nr/aztec/Nargo.toml @@ -5,4 +5,4 @@ compiler_version = ">=0.18.0" type = "lib" [dependencies] -protocol_types = { path = "../../noir-protocol-circuits/crates/types" } +protocol_types = { git="https://github.com/AztecProtocol/aztec-packages", tag="aztec-packages-v0.44.0", directory="noir-projects/noir-protocol-circuits/crates/types" } From aaf733ac6162a95015df333aee2605cc2626f280 Mon Sep 17 00:00:00 2001 From: AztecBot Date: Sat, 29 Jun 2024 02:14:39 +0000 Subject: [PATCH 22/29] git_subrepo.sh: Fix parent in .gitrepo file. [skip ci] --- noir-projects/aztec-nr/.gitrepo | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/noir-projects/aztec-nr/.gitrepo b/noir-projects/aztec-nr/.gitrepo index 0266e13fd62..289bce31021 100644 --- a/noir-projects/aztec-nr/.gitrepo +++ b/noir-projects/aztec-nr/.gitrepo @@ -9,4 +9,4 @@ commit = 497d868c8a62f7792cebf999e9e0dea2be3b0c81 method = merge cmdver = 0.4.6 - parent = 81cfdc9971d0b750aa9ea40327c5c6ce5aa4508b + parent = 1d4ca2b77dac07448b06e9023fa9b91c745d24e7 From 3b1879647342c4edc4f1e19f66868202697a1658 Mon Sep 17 00:00:00 2001 From: AztecBot Date: Sat, 29 Jun 2024 02:14:44 +0000 Subject: [PATCH 23/29] git subrepo push --branch=master noir-projects/aztec-nr subrepo: subdir: "noir-projects/aztec-nr" merged: "63adfd13bd" upstream: origin: "https://github.com/AztecProtocol/aztec-nr" branch: "master" commit: "63adfd13bd" git-subrepo: version: "0.4.6" origin: "???" commit: "???" 
[skip ci] --- noir-projects/aztec-nr/.gitrepo | 4 ++-- noir-projects/aztec-nr/aztec/Nargo.toml | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/noir-projects/aztec-nr/.gitrepo b/noir-projects/aztec-nr/.gitrepo index 289bce31021..b52b640e67c 100644 --- a/noir-projects/aztec-nr/.gitrepo +++ b/noir-projects/aztec-nr/.gitrepo @@ -6,7 +6,7 @@ [subrepo] remote = https://github.com/AztecProtocol/aztec-nr branch = master - commit = 497d868c8a62f7792cebf999e9e0dea2be3b0c81 + commit = 63adfd13bdb3dbb68bb4164697218f1cf8a63fb0 method = merge cmdver = 0.4.6 - parent = 1d4ca2b77dac07448b06e9023fa9b91c745d24e7 + parent = 3c8c520e6d320b490ebeae79787cd2a77fbce758 diff --git a/noir-projects/aztec-nr/aztec/Nargo.toml b/noir-projects/aztec-nr/aztec/Nargo.toml index 3e6f8c8bf4c..7a1f1af5863 100644 --- a/noir-projects/aztec-nr/aztec/Nargo.toml +++ b/noir-projects/aztec-nr/aztec/Nargo.toml @@ -5,4 +5,4 @@ compiler_version = ">=0.18.0" type = "lib" [dependencies] -protocol_types = { git="https://github.com/AztecProtocol/aztec-packages", tag="aztec-packages-v0.44.0", directory="noir-projects/noir-protocol-circuits/crates/types" } +protocol_types = { path = "../../noir-protocol-circuits/crates/types" } From fa15a450408181ffc50946ee56c4ae0fd8c5a61f Mon Sep 17 00:00:00 2001 From: David Banks <47112877+dbanks12@users.noreply.github.com> Date: Sat, 29 Jun 2024 14:07:19 -0400 Subject: [PATCH 24/29] fix: reran pil->cpp codegen & encode_and_encrypt_event_with_randomness fix (#7247) Co-authored-by: sklppy88 --- .../cpp/src/barretenberg/vm/generated/avm_verifier.cpp | 3 ++- .../noir-contracts/contracts/test_log_contract/src/main.nr | 2 +- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/barretenberg/cpp/src/barretenberg/vm/generated/avm_verifier.cpp b/barretenberg/cpp/src/barretenberg/vm/generated/avm_verifier.cpp index 6a23f8e56fd..b341ba30e09 100644 --- a/barretenberg/cpp/src/barretenberg/vm/generated/avm_verifier.cpp +++ b/barretenberg/cpp/src/barretenberg/vm/generated/avm_verifier.cpp @@ -715,6 +715,7 @@ bool AvmVerifier::verify_proof(const HonkProof& proof, const std::vector mle_challenge(multivariate_challenge.begin(), multivariate_challenge.begin() + static_cast(log_circuit_size)); + FF kernel_kernel_inputs_evaluation = evaluate_public_input_column(public_inputs[0], circuit_size, mle_challenge); if (kernel_kernel_inputs_evaluation != claimed_evaluations.kernel_kernel_inputs) { return false; @@ -737,7 +738,7 @@ bool AvmVerifier::verify_proof(const HonkProof& proof, const std::vector Date: Sun, 30 Jun 2024 02:17:03 +0000 Subject: [PATCH 25/29] git subrepo push --branch=master barretenberg subrepo: subdir: "barretenberg" merged: "c0a5796c6a" upstream: origin: "https://github.com/AztecProtocol/barretenberg" branch: "master" commit: "c0a5796c6a" git-subrepo: version: "0.4.6" origin: "???" commit: "???" 
[skip ci] --- barretenberg/.gitrepo | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/barretenberg/.gitrepo b/barretenberg/.gitrepo index 2311e903e76..d65386ea822 100644 --- a/barretenberg/.gitrepo +++ b/barretenberg/.gitrepo @@ -6,7 +6,7 @@ [subrepo] remote = https://github.com/AztecProtocol/barretenberg branch = master - commit = b6c8d0277c75d3e51a9333ac87771344445feb23 - parent = 153b2010c5d79f308779370d240dfaa2a086ca3c + commit = c0a5796c6a7664918b3b5bd9a76e63ed61f1f2ce + parent = fa15a450408181ffc50946ee56c4ae0fd8c5a61f method = merge cmdver = 0.4.6 From eca858775f8f84455cc0a20d9f9fb828cf342b68 Mon Sep 17 00:00:00 2001 From: AztecBot Date: Sun, 30 Jun 2024 02:17:36 +0000 Subject: [PATCH 26/29] chore: replace relative paths to noir-protocol-circuits --- noir-projects/aztec-nr/aztec/Nargo.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/noir-projects/aztec-nr/aztec/Nargo.toml b/noir-projects/aztec-nr/aztec/Nargo.toml index 7a1f1af5863..3e6f8c8bf4c 100644 --- a/noir-projects/aztec-nr/aztec/Nargo.toml +++ b/noir-projects/aztec-nr/aztec/Nargo.toml @@ -5,4 +5,4 @@ compiler_version = ">=0.18.0" type = "lib" [dependencies] -protocol_types = { path = "../../noir-protocol-circuits/crates/types" } +protocol_types = { git="https://github.com/AztecProtocol/aztec-packages", tag="aztec-packages-v0.44.0", directory="noir-projects/noir-protocol-circuits/crates/types" } From 59d6939e99cd89648334772f45a1cb9b63830fd8 Mon Sep 17 00:00:00 2001 From: AztecBot Date: Sun, 30 Jun 2024 02:17:36 +0000 Subject: [PATCH 27/29] git_subrepo.sh: Fix parent in .gitrepo file. [skip ci] --- noir-projects/aztec-nr/.gitrepo | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/noir-projects/aztec-nr/.gitrepo b/noir-projects/aztec-nr/.gitrepo index b52b640e67c..e190d9d3858 100644 --- a/noir-projects/aztec-nr/.gitrepo +++ b/noir-projects/aztec-nr/.gitrepo @@ -9,4 +9,4 @@ commit = 63adfd13bdb3dbb68bb4164697218f1cf8a63fb0 method = merge cmdver = 0.4.6 - parent = 3c8c520e6d320b490ebeae79787cd2a77fbce758 + parent = aaf733ac6162a95015df333aee2605cc2626f280 From 88d43e753079f9b0c263b655bfd779c2098e9097 Mon Sep 17 00:00:00 2001 From: AztecBot Date: Sun, 30 Jun 2024 02:17:39 +0000 Subject: [PATCH 28/29] git subrepo push --branch=master noir-projects/aztec-nr subrepo: subdir: "noir-projects/aztec-nr" merged: "78cf0ff0aa" upstream: origin: "https://github.com/AztecProtocol/aztec-nr" branch: "master" commit: "78cf0ff0aa" git-subrepo: version: "0.4.6" origin: "???" commit: "???" 
[skip ci] --- noir-projects/aztec-nr/.gitrepo | 4 ++-- noir-projects/aztec-nr/aztec/Nargo.toml | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/noir-projects/aztec-nr/.gitrepo b/noir-projects/aztec-nr/.gitrepo index e190d9d3858..c7d2bb77284 100644 --- a/noir-projects/aztec-nr/.gitrepo +++ b/noir-projects/aztec-nr/.gitrepo @@ -6,7 +6,7 @@ [subrepo] remote = https://github.com/AztecProtocol/aztec-nr branch = master - commit = 63adfd13bdb3dbb68bb4164697218f1cf8a63fb0 + commit = 78cf0ff0aa88841df0e344b2f6253313dde889af method = merge cmdver = 0.4.6 - parent = aaf733ac6162a95015df333aee2605cc2626f280 + parent = 8c45343ad837b0e273bc9a946e37644a76b219ec diff --git a/noir-projects/aztec-nr/aztec/Nargo.toml b/noir-projects/aztec-nr/aztec/Nargo.toml index 3e6f8c8bf4c..7a1f1af5863 100644 --- a/noir-projects/aztec-nr/aztec/Nargo.toml +++ b/noir-projects/aztec-nr/aztec/Nargo.toml @@ -5,4 +5,4 @@ compiler_version = ">=0.18.0" type = "lib" [dependencies] -protocol_types = { git="https://github.com/AztecProtocol/aztec-packages", tag="aztec-packages-v0.44.0", directory="noir-projects/noir-protocol-circuits/crates/types" } +protocol_types = { path = "../../noir-protocol-circuits/crates/types" } From 3e6d88e53c3e4c0777e152393d5310b5607baa0a Mon Sep 17 00:00:00 2001 From: Lasse Herskind <16536249+LHerskind@users.noreply.github.com> Date: Mon, 1 Jul 2024 10:04:47 +0100 Subject: [PATCH 29/29] refactor: gets rid of unencrypted emit in private_context (#7236) This pr took a stab at something we had discussed related to #7161: - remove the unencrypted emit functions from the private context - have the logic reside in the contracts that need it - `contract_instance_deployer` - `contract_class_registerer` Since having unencrypted emits easily available in the private context is a huge footgun privacy-wise, we decided that it would be better to get rid of it there. However, as we still needed it for the deployer (as we need something to deploy the "first" public code), we left the public inputs and just insert directly into those instead of using a neat function. The setup is still slightly different from what we are doing in private, because it is dealing with `event_type_id` slightly odd, and doing a lot of inefficient things. So it needs to be revisited at some point for optimisations. When the event macros are refined to also handle structs with non-field elements we should be able to use a `to_be_bytes` value from in there to more cleanly emit the event, and also update the "listener" such that we could get rid of the current `DEPLOYER_CONTRACT_INSTANCE_DEPLOYED_MAGIC_VALUE` and instead be looking at just the `event_type_id` and the contract address to match it. 
--- .../aztec/src/context/private_context.nr | 51 +--------------- .../src/main.nr | 29 +++++++-- .../src/events.nr | 1 - .../src/events/instance_deployed.nr | 33 ---------- .../src/main.nr | 60 ++++++++++++++++--- .../crowdfunding_contract/src/main.nr | 16 ++--- .../contracts/test_contract/src/main.nr | 19 ------ yarn-project/circuits.js/package.json | 2 +- .../end-to-end/src/e2e_block_building.test.ts | 18 ------ .../src/e2e_non_contract_account.test.ts | 25 +------- .../src/client/private_execution.test.ts | 39 +----------- 11 files changed, 94 insertions(+), 199 deletions(-) delete mode 100644 noir-projects/noir-contracts/contracts/contract_instance_deployer_contract/src/events.nr delete mode 100644 noir-projects/noir-contracts/contracts/contract_instance_deployer_contract/src/events/instance_deployed.nr diff --git a/noir-projects/aztec-nr/aztec/src/context/private_context.nr b/noir-projects/aztec-nr/aztec/src/context/private_context.nr index 25bb33ba663..6a5a7048371 100644 --- a/noir-projects/aztec-nr/aztec/src/context/private_context.nr +++ b/noir-projects/aztec-nr/aztec/src/context/private_context.nr @@ -1,17 +1,11 @@ use crate::{ context::{inputs::PrivateContextInputs, packed_returns::PackedReturns}, - messaging::process_l1_to_l2_message, - hash::{hash_args_array, ArgsHasher, compute_unencrypted_log_hash}, + messaging::process_l1_to_l2_message, hash::{hash_args_array, ArgsHasher}, keys::constants::{NULLIFIER_INDEX, OUTGOING_INDEX, NUM_KEY_TYPES, sk_generators}, - note::note_interface::NoteInterface, oracle::{ key_validation_request::get_key_validation_request, arguments, returns::pack_returns, call_private_function::call_private_function_internal, header::get_header_at, - logs::{ - emit_encrypted_note_log, emit_encrypted_event_log, - emit_contract_class_unencrypted_log_private_internal, emit_unencrypted_log_private_internal -}, - logs_traits::{LensForEncryptedLog, ToBytesForUnencryptedLog}, + logs::{emit_encrypted_note_log, emit_encrypted_event_log}, enqueue_public_function_call::{ enqueue_public_function_call_internal, set_public_teardown_function_call_internal, parse_public_call_stack_item_from_oracle @@ -36,10 +30,7 @@ use dep::protocol_types::{ MAX_KEY_VALIDATION_REQUESTS_PER_CALL, MAX_ENCRYPTED_LOGS_PER_CALL, MAX_UNENCRYPTED_LOGS_PER_CALL, MAX_NOTE_ENCRYPTED_LOGS_PER_CALL }, - contrakt::{storage_read::StorageRead, storage_update_request::StorageUpdateRequest}, - grumpkin_private_key::GrumpkinPrivateKey, grumpkin_point::GrumpkinPoint, header::Header, - messaging::l2_to_l1_message::L2ToL1Message, utils::reader::Reader, traits::{is_empty, Empty}, - utils::arrays::find_index + header::Header, messaging::l2_to_l1_message::L2ToL1Message, utils::reader::Reader, traits::Empty }; // When finished, one can call .finish() to convert back to the abi @@ -270,42 +261,6 @@ impl PrivateContext { } // docs:end:consume_l1_to_l2_message - // TODO: We might want to remove this since emitting unencrypted logs from private functions is violating privacy. - // --> might be a better approach to force devs to make a public function call that emits the log if needed then - // it would be less easy to accidentally leak information. - // If we decide to keep this function around would make sense to wait for traits and then merge it with emit_unencrypted_log. - pub fn emit_unencrypted_log(&mut self, log: T) where T: ToBytesForUnencryptedLog { - let event_selector = 5; // TODO: compute actual event selector. 
- let contract_address = self.this_address(); - let counter = self.next_counter(); - let log_slice = log.to_be_bytes_arr(); - let log_hash = compute_unencrypted_log_hash(contract_address, event_selector, log); - // 44 = addr (32) + selector (4) + raw log len (4) + processed log len (4) - let len = 44 + log_slice.len().to_field(); - let side_effect = LogHash { value: log_hash, counter, length: len }; - self.unencrypted_logs_hashes.push(side_effect); - // call oracle - let _void = emit_unencrypted_log_private_internal(contract_address, event_selector, log, counter); - } - - // This fn exists separately from emit_unencrypted_log because sha hashing the preimage - // is too large to compile (16,200 fields, 518,400 bytes) => the oracle hashes it - // It is ONLY used with contract_class_registerer_contract since we already assert correctness: - // - Contract class -> we will commit to the packed bytecode (currently a TODO) - // - Private function -> we provide a membership proof - // - Unconstrained function -> we provide a membership proof - // Ordinary logs are not protected by the above so this fn shouldn't be called by anything else - pub fn emit_contract_class_unencrypted_log(&mut self, log: [Field; N]) { - let event_selector = 5; // TODO: compute actual event selector. - let contract_address = self.this_address(); - let counter = self.next_counter(); - let log_hash = emit_contract_class_unencrypted_log_private_internal(contract_address, event_selector, log, counter); - // 44 = addr (32) + selector (4) + raw log len (4) + processed log len (4) - let len = 44 + N * 32; - let side_effect = LogHash { value: log_hash, counter, length: len }; - self.unencrypted_logs_hashes.push(side_effect); - } - // NB: A randomness value of 0 signals that the kernels should not mask the contract address // used in siloing later on e.g. 'handshaking' contract w/ known address. 
pub fn emit_raw_event_log_with_masked_address(&mut self, randomness: Field, encrypted_log: [u8; M]) { diff --git a/noir-projects/noir-contracts/contracts/contract_class_registerer_contract/src/main.nr b/noir-projects/noir-contracts/contracts/contract_class_registerer_contract/src/main.nr index aa5e3bf242d..16d6f1ceaf6 100644 --- a/noir-projects/noir-contracts/contracts/contract_class_registerer_contract/src/main.nr +++ b/noir-projects/noir-contracts/contracts/contract_class_registerer_contract/src/main.nr @@ -9,9 +9,11 @@ contract ContractClassRegisterer { ARTIFACT_FUNCTION_TREE_MAX_HEIGHT, FUNCTION_TREE_HEIGHT, MAX_PACKED_PUBLIC_BYTECODE_SIZE_IN_FIELDS, REGISTERER_CONTRACT_CLASS_REGISTERED_MAGIC_VALUE }, - traits::Serialize + traits::Serialize, abis::log_hash::LogHash }; + use dep::aztec::{context::PrivateContext, oracle::logs::emit_contract_class_unencrypted_log_private_internal}; + use crate::events::{ class_registered::ContractClassRegistered, private_function_broadcasted::{ClassPrivateFunctionBroadcasted, PrivateFunction}, @@ -52,7 +54,7 @@ contract ContractClassRegisterer { public_bytecode_commitment ] ); - context.emit_contract_class_unencrypted_log(event.serialize()); + emit_contract_class_unencrypted_log(&mut context, event.serialize()); } #[aztec(private)] @@ -87,7 +89,7 @@ contract ContractClassRegisterer { function_data.metadata_hash ] ); - context.emit_contract_class_unencrypted_log(event.serialize()); + emit_contract_class_unencrypted_log(&mut context, event.serialize()); } #[aztec(private)] @@ -117,6 +119,25 @@ contract ContractClassRegisterer { function_data.metadata_hash ] ); - context.emit_contract_class_unencrypted_log(event.serialize()); + emit_contract_class_unencrypted_log(&mut context, event.serialize()); + } + + // This fn exists separately from emit_unencrypted_log because sha hashing the preimage + // is too large to compile (16,200 fields, 518,400 bytes) => the oracle hashes it + // It is ONLY used with contract_class_registerer_contract since we already assert correctness: + // - Contract class -> we will commit to the packed bytecode (currently a TODO) + // - Private function -> we provide a membership proof + // - Unconstrained function -> we provide a membership proof + // Ordinary logs are not protected by the above so this fn shouldn't be called by anything else + #[contract_library_method] + pub fn emit_contract_class_unencrypted_log(context: &mut PrivateContext, log: [Field; N]) { + let event_selector = 5; // TODO: compute actual event selector. 
+ let contract_address = context.this_address(); + let counter = context.next_counter(); + let log_hash = emit_contract_class_unencrypted_log_private_internal(contract_address, event_selector, log, counter); + // 44 = addr (32) + selector (4) + raw log len (4) + processed log len (4) + let len = 44 + N * 32; + let side_effect = LogHash { value: log_hash, counter, length: len }; + context.unencrypted_logs_hashes.push(side_effect); } } diff --git a/noir-projects/noir-contracts/contracts/contract_instance_deployer_contract/src/events.nr b/noir-projects/noir-contracts/contracts/contract_instance_deployer_contract/src/events.nr deleted file mode 100644 index d2b6ed6033f..00000000000 --- a/noir-projects/noir-contracts/contracts/contract_instance_deployer_contract/src/events.nr +++ /dev/null @@ -1 +0,0 @@ -mod instance_deployed; diff --git a/noir-projects/noir-contracts/contracts/contract_instance_deployer_contract/src/events/instance_deployed.nr b/noir-projects/noir-contracts/contracts/contract_instance_deployer_contract/src/events/instance_deployed.nr deleted file mode 100644 index 638a08db001..00000000000 --- a/noir-projects/noir-contracts/contracts/contract_instance_deployer_contract/src/events/instance_deployed.nr +++ /dev/null @@ -1,33 +0,0 @@ -use dep::aztec::protocol_types::{ - contract_class_id::ContractClassId, - address::{AztecAddress, EthAddress, PublicKeysHash, PartialAddress}, - constants::DEPLOYER_CONTRACT_INSTANCE_DEPLOYED_MAGIC_VALUE, traits::Serialize -}; - -// #[aztec(event)] -struct ContractInstanceDeployed { - address: AztecAddress, - version: u8, - salt: Field, - contract_class_id: ContractClassId, - initialization_hash: Field, - public_keys_hash: PublicKeysHash, - deployer: AztecAddress, -} - -global CONTRACT_INSTANCE_DEPLOYED_SERIALIZED_SIZE: Field = 8; - -impl Serialize for ContractInstanceDeployed { - fn serialize(self: Self) -> [Field; CONTRACT_INSTANCE_DEPLOYED_SERIALIZED_SIZE] { - [ - DEPLOYER_CONTRACT_INSTANCE_DEPLOYED_MAGIC_VALUE, - self.address.to_field(), - self.version as Field, - self.salt, - self.contract_class_id.to_field(), - self.initialization_hash, - self.public_keys_hash.to_field(), - self.deployer.to_field(), - ] - } -} diff --git a/noir-projects/noir-contracts/contracts/contract_instance_deployer_contract/src/main.nr b/noir-projects/noir-contracts/contracts/contract_instance_deployer_contract/src/main.nr index 43e7d45c655..56f5e4827f5 100644 --- a/noir-projects/noir-contracts/contracts/contract_instance_deployer_contract/src/main.nr +++ b/noir-projects/noir-contracts/contracts/contract_instance_deployer_contract/src/main.nr @@ -1,13 +1,42 @@ -mod events; - contract ContractInstanceDeployer { use dep::aztec::protocol_types::{ address::{AztecAddress, EthAddress, PublicKeysHash, PartialAddress}, contract_class_id::ContractClassId, constants::DEPLOYER_CONTRACT_INSTANCE_DEPLOYED_MAGIC_VALUE, - traits::Serialize + traits::Serialize, abis::log_hash::LogHash + }; + use dep::aztec::{ + context::PrivateContext, hash::compute_unencrypted_log_hash, + oracle::logs::emit_unencrypted_log_private_internal }; - use crate::events::{instance_deployed::ContractInstanceDeployed}; + // @todo This should be using an event, but currently we only support fields in the struct. 
+ // #[aztec(event)] + struct ContractInstanceDeployed { + address: AztecAddress, + version: u8, + salt: Field, + contract_class_id: ContractClassId, + initialization_hash: Field, + public_keys_hash: PublicKeysHash, + deployer: AztecAddress, + } + + global CONTRACT_INSTANCE_DEPLOYED_SERIALIZED_SIZE: Field = 8; + + impl Serialize for ContractInstanceDeployed { + fn serialize(self: Self) -> [Field; CONTRACT_INSTANCE_DEPLOYED_SERIALIZED_SIZE] { + [ + DEPLOYER_CONTRACT_INSTANCE_DEPLOYED_MAGIC_VALUE, + self.address.to_field(), + self.version as Field, + self.salt, + self.contract_class_id.to_field(), + self.initialization_hash, + self.public_keys_hash.to_field(), + self.deployer.to_field(), + ] + } + } #[aztec(private)] fn deploy( @@ -34,8 +63,25 @@ contract ContractInstanceDeployer { // Broadcast the event let event = ContractInstanceDeployed { contract_class_id, address, public_keys_hash, initialization_hash, salt, deployer, version: 1 }; - let event_payload = event.serialize(); - dep::aztec::oracle::debug_log::debug_log_format("ContractInstanceDeployed: {}", event_payload); - context.emit_unencrypted_log(event_payload); + + let payload = event.serialize(); + dep::aztec::oracle::debug_log::debug_log_format("ContractInstanceDeployed: {}", payload); + + let contract_address = context.this_address(); + let counter = context.next_counter(); + + // The event_type_id is not strictly needed. So i'm setting it to 0 here, and we can then purge it + // later on. + let event_type_id = 0; + + // @todo This is very inefficient, we are doing a lot of back and forth conversions. + let log_slice = payload.to_be_bytes_arr(); + let log_hash = compute_unencrypted_log_hash(contract_address, event_type_id, payload); + // 44 = addr (32) + selector (4) + raw log len (4) + processed log len (4) + let len = 44 + log_slice.len().to_field(); + let side_effect = LogHash { value: log_hash, counter, length: len }; + context.unencrypted_logs_hashes.push(side_effect); + + let _void = emit_unencrypted_log_private_internal(contract_address, event_type_id, payload, counter); } } diff --git a/noir-projects/noir-contracts/contracts/crowdfunding_contract/src/main.nr b/noir-projects/noir-contracts/contracts/crowdfunding_contract/src/main.nr index 9e43661a329..4eb7657462e 100644 --- a/noir-projects/noir-contracts/contracts/crowdfunding_contract/src/main.nr +++ b/noir-projects/noir-contracts/contracts/crowdfunding_contract/src/main.nr @@ -4,13 +4,11 @@ contract Crowdfunding { // docs:start:all-deps use dep::aztec::{ - protocol_types::{ - abis::function_selector::FunctionSelector, address::AztecAddress, traits::Serialize, - grumpkin_point::GrumpkinPoint - }, + protocol_types::address::AztecAddress, encrypted_logs::encrypted_note_emission::encode_and_encrypt_note, state_vars::{PrivateSet, PublicImmutable, SharedImmutable} }; + use dep::aztec::unencrypted_logs::unencrypted_event_emission::encode_event; use dep::value_note::value_note::ValueNote; use dep::token::Token; // docs:end:all-deps @@ -95,10 +93,14 @@ contract Crowdfunding { // 2) Transfer the donation tokens from this contract to the operator Token::at(storage.donation_token.read_private()).transfer(operator_address, amount as Field).call(&mut context); - // 3) Emit an unencrypted event so that anyone can audit how much the operator has withdrawn - let event = WithdrawalProcessed { amount: amount as Field, who: operator_address.to_field() }; - context.emit_unencrypted_log(event.serialize()); + Crowdfunding::at(context.this_address())._publish_donation_receipts(amount, 
operator_address).enqueue(&mut context); } // docs:end:operator-withdrawals + + #[aztec(public)] + #[aztec(internal)] + fn _publish_donation_receipts(amount: u64, to: AztecAddress) { + WithdrawalProcessed { amount: amount as Field, who: to.to_field() }.emit(encode_event(&mut context)); + } } diff --git a/noir-projects/noir-contracts/contracts/test_contract/src/main.nr b/noir-projects/noir-contracts/contracts/test_contract/src/main.nr index 1db5e91ff24..2f8e598aa64 100644 --- a/noir-projects/noir-contracts/contracts/test_contract/src/main.nr +++ b/noir-projects/noir-contracts/contracts/test_contract/src/main.nr @@ -299,25 +299,6 @@ contract Test { } } - #[aztec(private)] - fn emit_msg_sender() { - context.emit_unencrypted_log(context.msg_sender()); - } - - #[aztec(private)] - fn emit_unencrypted_logs(fields: [Field; 5], nest: bool) { - // Merged two fns to avoid hitting max #functions limit: - // nest -> emit_unencrypted_logs_nested - // else -> emit_array_as_unencrypted_log - if nest { - Test::at(context.this_address()).emit_msg_sender().call(&mut context); - Test::at(context.this_address()).emit_unencrypted_logs(fields, false).call(&mut context); - context.emit_unencrypted_log("test"); - } else { - context.emit_unencrypted_log(fields); - } - } - #[aztec(private)] fn emit_encrypted_logs_nested(value: Field, owner: AztecAddress, outgoing_viewer: AztecAddress) { let mut storage_slot = storage.example_constant.get_storage_slot() + 1; diff --git a/yarn-project/circuits.js/package.json b/yarn-project/circuits.js/package.json index 524b4a62210..1444de37a2d 100644 --- a/yarn-project/circuits.js/package.json +++ b/yarn-project/circuits.js/package.json @@ -97,4 +97,4 @@ ] ] } -} +} \ No newline at end of file diff --git a/yarn-project/end-to-end/src/e2e_block_building.test.ts b/yarn-project/end-to-end/src/e2e_block_building.test.ts index 9a308a2e523..206e19e00b4 100644 --- a/yarn-project/end-to-end/src/e2e_block_building.test.ts +++ b/yarn-project/end-to-end/src/e2e_block_building.test.ts @@ -246,24 +246,6 @@ describe('e2e_block_building', () => { testContract = await TestContract.deploy(owner).send().deployed(); }, 60_000); - it('calls a method with nested unencrypted logs', async () => { - const tx = await testContract.methods.emit_unencrypted_logs([1, 2, 3, 4, 5], true).send().wait(); - const logs = (await pxe.getUnencryptedLogs({ txHash: tx.txHash })).logs.map(l => l.log); - - // First log should be contract address - expect(logs[0].data).toEqual(testContract.address.toBuffer()); - - // Second log should be array of fields - let expectedBuffer = Buffer.concat([1, 2, 3, 4, 5].map(num => new Fr(num).toBuffer())); - expect(logs[1].data.subarray(-32 * 5)).toEqual(expectedBuffer); - - // Third log should be string "test" - expectedBuffer = Buffer.concat( - ['t', 'e', 's', 't'].map(num => Buffer.concat([Buffer.alloc(31), Buffer.from(num)])), - ); - expect(logs[2].data.subarray(-32 * 5)).toEqual(expectedBuffer); - }, 60_000); - it('calls a method with nested note encrypted logs', async () => { // account setup const privateKey = new Fr(7n); diff --git a/yarn-project/end-to-end/src/e2e_non_contract_account.test.ts b/yarn-project/end-to-end/src/e2e_non_contract_account.test.ts index 1728f637e1e..54e377ad4e9 100644 --- a/yarn-project/end-to-end/src/e2e_non_contract_account.test.ts +++ b/yarn-project/end-to-end/src/e2e_non_contract_account.test.ts @@ -1,13 +1,4 @@ -import { - type DebugLogger, - ExtendedNote, - Fr, - Note, - type PXE, - SignerlessWallet, - type Wallet, - toBigInt, -} from 
'@aztec/aztec.js'; +import { type DebugLogger, ExtendedNote, Fr, Note, type PXE, SignerlessWallet, type Wallet } from '@aztec/aztec.js'; import { siloNullifier } from '@aztec/circuits.js/hash'; import { TestContract } from '@aztec/noir-contracts.js/Test'; @@ -50,20 +41,6 @@ describe('e2e_non_contract_account', () => { expect(siloedNullifier.equals(expectedSiloedNullifier)).toBeTruthy(); }); - it('msg.sender is 0 when a non-contract account calls a private function on a contract', async () => { - const contractWithNoContractWallet = await TestContract.at(contract.address, nonContractAccountWallet); - - // Send transaction as arbitrary non-contract account - const tx = contractWithNoContractWallet.methods.emit_msg_sender().send(); - await tx.wait({ interval: 0.1 }); - - const logs = (await tx.getUnencryptedLogs()).logs; - expect(logs.length).toBe(1); - - const msgSender = toBigInt(logs[0].log.data); - expect(msgSender).toBe(0n); - }); - // Note: This test doesn't really belong here as it doesn't have anything to do with non-contract accounts. I needed // to test the TestNote functionality and it doesn't really fit anywhere else. Creating a separate e2e test for this // seems wasteful. Move this test if a better place is found. diff --git a/yarn-project/simulator/src/client/private_execution.test.ts b/yarn-project/simulator/src/client/private_execution.test.ts index c46153495fb..e5e1e22ac18 100644 --- a/yarn-project/simulator/src/client/private_execution.test.ts +++ b/yarn-project/simulator/src/client/private_execution.test.ts @@ -67,7 +67,7 @@ import { MessageLoadOracleInputs } from '../acvm/index.js'; import { buildL1ToL2Message } from '../test/utils.js'; import { computeSlotForMapping } from '../utils.js'; import { type DBOracle } from './db_oracle.js'; -import { type ExecutionResult, collectSortedEncryptedLogs, collectSortedUnencryptedLogs } from './execution_result.js'; +import { type ExecutionResult, collectSortedEncryptedLogs } from './execution_result.js'; import { AcirSimulator } from './simulator.js'; jest.setTimeout(60_000); @@ -266,41 +266,6 @@ describe('Private Execution test suite', () => { }); describe('no constructor', () => { - it('emits a field as an unencrypted log', async () => { - const artifact = getFunctionArtifact(TestContractArtifact, 'emit_msg_sender'); - const result = await runSimulator({ artifact, msgSender: owner }); - - const newUnencryptedLogs = getNonEmptyItems(result.callStackItem.publicInputs.unencryptedLogsHashes); - expect(newUnencryptedLogs).toHaveLength(1); - - const functionLogs = collectSortedUnencryptedLogs(result); - expect(functionLogs.logs).toHaveLength(1); - - const [unencryptedLog] = newUnencryptedLogs; - expect(unencryptedLog.value).toEqual(Fr.fromBuffer(functionLogs.logs[0].hash())); - expect(unencryptedLog.length).toEqual(new Fr(functionLogs.getKernelLength())); - // Test that the log payload (ie ignoring address, selector, and header) matches what we emitted - expect(functionLogs.logs[0].data.subarray(-32).toString('hex')).toEqual(owner.toBuffer().toString('hex')); - }); - - it('emits a field array as an unencrypted log', async () => { - const artifact = getFunctionArtifact(TestContractArtifact, 'emit_unencrypted_logs'); - const args = times(5, () => Fr.random()); - const result = await runSimulator({ artifact, msgSender: owner, args: [args, false] }); - - const newUnencryptedLogs = getNonEmptyItems(result.callStackItem.publicInputs.unencryptedLogsHashes); - expect(newUnencryptedLogs).toHaveLength(1); - const functionLogs = 
collectSortedUnencryptedLogs(result); - expect(functionLogs.logs).toHaveLength(1); - - const [unencryptedLog] = newUnencryptedLogs; - expect(unencryptedLog.value).toEqual(Fr.fromBuffer(functionLogs.logs[0].hash())); - expect(unencryptedLog.length).toEqual(new Fr(functionLogs.getKernelLength())); - // Test that the log payload (ie ignoring address, selector, and header) matches what we emitted - const expected = Buffer.concat(args.map(arg => arg.toBuffer())).toString('hex'); - expect(functionLogs.logs[0].data.subarray(-32 * 5).toString('hex')).toEqual(expected); - }); - it('emits a field array as an encrypted log', async () => { // NB: this test does NOT cover correct enc/dec of values, just whether // the kernels correctly populate non-note encrypted logs @@ -952,7 +917,7 @@ describe('Private Execution test suite', () => { describe('setting fee payer', () => { it('should default to not being a fee payer', async () => { // arbitrary random function that doesn't set a fee payer - const entrypoint = getFunctionArtifact(TestContractArtifact, 'emit_msg_sender'); + const entrypoint = getFunctionArtifact(TestContractArtifact, 'get_this_address'); const contractAddress = AztecAddress.random(); const result = await runSimulator({ artifact: entrypoint, contractAddress }); expect(result.callStackItem.publicInputs.isFeePayer).toBe(false);