diff --git a/.cargo/release-version b/.cargo/release-version index 90787b183e..ef9b3919c6 100644 --- a/.cargo/release-version +++ b/.cargo/release-version @@ -1 +1 @@ -v0.16.3 \ No newline at end of file +v0.16.15 \ No newline at end of file diff --git a/.circleci/config.yml b/.circleci/config.yml index c78cf5f13b..2402f7faa9 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -480,6 +480,16 @@ jobs: workspace_member: ledger cache_key: snarkvm-ledger-cache + ledger-with-rocksdb: + docker: + - image: cimg/rust:1.71.1 + resource_class: 2xlarge + steps: + - run_serial: + flags: --features=rocks + workspace_member: ledger + cache_key: snarkvm-ledger-with-rocksdb-cache + ledger-authority: docker: - image: cimg/rust:1.71.1 @@ -854,6 +864,8 @@ workflows: - curves - fields - ledger +# TODO (howardwu) - Implement `open_testing` on all storage, update to `CurrentConsensusStore::open_testing`, then re-enable. +# - ledger-with-rocksdb - ledger-authority - ledger-block - ledger-coinbase diff --git a/.github/workflows/benchmarks.yml b/.github/workflows/benchmarks.yml index f33dfcd481..bd6e7ad390 100644 --- a/.github/workflows/benchmarks.yml +++ b/.github/workflows/benchmarks.yml @@ -60,7 +60,6 @@ jobs: run: | cd console/collections cargo bench --bench merkle_tree -- --output-format bencher | tee -a ../../output.txt - cargo bench --bench kary_merkle_tree -- --output-format bencher | tee -a ../../output.txt cd ../.. - name: Benchmark curves diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 9dda0af373..b0233c6c71 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -20,7 +20,7 @@ snarkVM is a big project, so (non-)adherence to best practices related to perfor ### Memory handling - if the final size is known, pre-allocate the collections (`Vec`, `HashMap` etc.) 
using `with_capacity` or `reserve` - this ensures that there are both fewer allocations (which involve system calls) and that the final allocated capacity is as close to the required size as possible - create the collections right before they are populated/used, as opposed to e.g. creating a few big ones at the beginning of a function and only using them later on; this reduces the amount of time they occupy memory -- if an intermediate vector is avoidable, use an `Iterator` instead; most of the time this just amounts to omitting the call to `.collect()` if a single-pass iteraton follows afterwards, or returning an `impl Iterator` from a function when the caller only needs to iterate over that result once +- if an intermediate vector is avoidable, use an `Iterator` instead; most of the time this just amounts to omitting the call to `.collect()` if a single-pass iteration follows afterwards, or returning an `impl Iterator` from a function when the caller only needs to iterate over that result once - when possible, fill/resize collections "in bulk" instead of pushing a single element in a loop; this is usually (but not always) detected by `clippy`, suggesting to create vectors containing a repeated value with `vec![x; N]` or extending them with `.resize(N, x)` - when a value is to eventually be consumed in a chain of function calls, pass it by value instead of by reference; this has the following benefits: * it makes the fact that the value is needed by value clear to the caller, who can then potentially reclaim it from the object afterwards if it is "heavy", limiting allocations diff --git a/Cargo.lock b/Cargo.lock index 40342afae2..d5dfef8577 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1163,6 +1163,15 @@ version = "0.12.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8a9ee70c43aaf417c914396645a0fa852624801b24ebb7ae78fe8272889ac888" +[[package]] +name = "hashbrown" +version = "0.13.1" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "33ff8ae62cd3a9102e5637afc8452c55acf3844001bd5374e0b0bd7b6616c038" +dependencies = [ + "ahash", +] + [[package]] name = "hashbrown" version = "0.14.1" @@ -1477,6 +1486,15 @@ dependencies = [ "libc", ] +[[package]] +name = "mach2" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6d0d1830bcd151a6fc4aea1369af235b36c1528fe976b8ff678683c9995eade8" +dependencies = [ + "libc", +] + [[package]] name = "matchers" version = "0.1.0" @@ -1501,6 +1519,61 @@ dependencies = [ "autocfg", ] +[[package]] +name = "metrics" +version = "0.21.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fde3af1a009ed76a778cb84fdef9e7dbbdf5775ae3e4cc1f434a6a307f6f76c5" +dependencies = [ + "ahash", + "metrics-macros", + "portable-atomic", +] + +[[package]] +name = "metrics-exporter-prometheus" +version = "0.12.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8a4964177ddfdab1e3a2b37aec7cf320e14169abb0ed73999f558136409178d5" +dependencies = [ + "base64", + "hyper", + "indexmap 1.9.3", + "ipnet", + "metrics", + "metrics-util", + "quanta", + "thiserror", + "tokio", + "tracing", +] + +[[package]] +name = "metrics-macros" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ddece26afd34c31585c74a4db0630c376df271c285d682d1e55012197830b6df" +dependencies = [ + "proc-macro2", + "quote 1.0.33", + "syn 2.0.38", +] + +[[package]] +name = "metrics-util" +version = "0.15.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4de2ed6e491ed114b40b732e4d1659a9d53992ebd87490c44a6ffe23739d973e" +dependencies = [ + "crossbeam-epoch", + "crossbeam-utils", + "hashbrown 0.13.1", + "metrics", + "num_cpus", + "quanta", + "sketches-ddsketch", +] + [[package]] name = "mime" version = "0.3.17" @@ -1879,6 +1952,22 @@ dependencies = [ "unarray", ] +[[package]] +name = "quanta" 
+version = "0.11.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a17e662a7a8291a865152364c20c7abc5e60486ab2001e8ec10b24862de0b9ab" +dependencies = [ + "crossbeam-utils", + "libc", + "mach2", + "once_cell", + "raw-cpuid", + "wasi", + "web-sys", + "winapi", +] + [[package]] name = "quick-error" version = "1.2.3" @@ -1958,6 +2047,15 @@ dependencies = [ "rand_core", ] +[[package]] +name = "raw-cpuid" +version = "10.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6c297679cb867470fa8c9f67dbba74a78d78e3e98d7cf2b08d6d71540f797332" +dependencies = [ + "bitflags 1.3.2", +] + [[package]] name = "rayon" version = "1.8.0" @@ -2447,6 +2545,12 @@ version = "1.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a7cee0529a6d40f580e7a5e6c495c8fbfe21b7b52795ed4bb5e62cdf92bc6380" +[[package]] +name = "sketches-ddsketch" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "68a406c1882ed7f29cd5e248c9848a80e7cb6ae0fea82346d2746f2f941c07e1" + [[package]] name = "slab" version = "0.4.9" @@ -2473,7 +2577,7 @@ dependencies = [ [[package]] name = "snarkvm" -version = "0.16.3" +version = "0.16.15" dependencies = [ "anstyle", "anyhow", @@ -2496,6 +2600,7 @@ dependencies = [ "snarkvm-curves", "snarkvm-fields", "snarkvm-ledger", + "snarkvm-metrics", "snarkvm-parameters", "snarkvm-synthesizer", "snarkvm-utilities", @@ -2508,7 +2613,7 @@ dependencies = [ [[package]] name = "snarkvm-algorithms" -version = "0.16.3" +version = "0.16.15" dependencies = [ "aleo-std", "anyhow", @@ -2547,7 +2652,7 @@ dependencies = [ [[package]] name = "snarkvm-algorithms-cuda" -version = "0.16.3" +version = "0.16.15" dependencies = [ "blst", "cc", @@ -2557,7 +2662,7 @@ dependencies = [ [[package]] name = "snarkvm-circuit" -version = "0.16.3" +version = "0.16.15" dependencies = [ "snarkvm-circuit-account", "snarkvm-circuit-algorithms", @@ -2570,7 +2675,7 @@ dependencies = [ 
[[package]] name = "snarkvm-circuit-account" -version = "0.16.3" +version = "0.16.15" dependencies = [ "anyhow", "snarkvm-circuit-algorithms", @@ -2582,7 +2687,7 @@ dependencies = [ [[package]] name = "snarkvm-circuit-algorithms" -version = "0.16.3" +version = "0.16.15" dependencies = [ "anyhow", "snarkvm-circuit-types", @@ -2594,7 +2699,7 @@ dependencies = [ [[package]] name = "snarkvm-circuit-collections" -version = "0.16.3" +version = "0.16.15" dependencies = [ "anyhow", "snarkvm-circuit-algorithms", @@ -2608,7 +2713,7 @@ dependencies = [ [[package]] name = "snarkvm-circuit-environment" -version = "0.16.3" +version = "0.16.15" dependencies = [ "criterion", "indexmap 2.0.2", @@ -2629,11 +2734,11 @@ dependencies = [ [[package]] name = "snarkvm-circuit-environment-witness" -version = "0.16.3" +version = "0.16.15" [[package]] name = "snarkvm-circuit-network" -version = "0.16.3" +version = "0.16.15" dependencies = [ "snarkvm-circuit-algorithms", "snarkvm-circuit-collections", @@ -2644,7 +2749,7 @@ dependencies = [ [[package]] name = "snarkvm-circuit-program" -version = "0.16.3" +version = "0.16.15" dependencies = [ "anyhow", "paste", @@ -2662,7 +2767,7 @@ dependencies = [ [[package]] name = "snarkvm-circuit-types" -version = "0.16.3" +version = "0.16.15" dependencies = [ "snarkvm-circuit-environment", "snarkvm-circuit-types-address", @@ -2677,7 +2782,7 @@ dependencies = [ [[package]] name = "snarkvm-circuit-types-address" -version = "0.16.3" +version = "0.16.15" dependencies = [ "snarkvm-circuit-environment", "snarkvm-circuit-types-boolean", @@ -2689,7 +2794,7 @@ dependencies = [ [[package]] name = "snarkvm-circuit-types-boolean" -version = "0.16.3" +version = "0.16.15" dependencies = [ "criterion", "snarkvm-circuit-environment", @@ -2698,7 +2803,7 @@ dependencies = [ [[package]] name = "snarkvm-circuit-types-field" -version = "0.16.3" +version = "0.16.15" dependencies = [ "snarkvm-circuit-environment", "snarkvm-circuit-types-boolean", @@ -2707,7 +2812,7 @@ 
dependencies = [ [[package]] name = "snarkvm-circuit-types-group" -version = "0.16.3" +version = "0.16.15" dependencies = [ "snarkvm-circuit-environment", "snarkvm-circuit-types-boolean", @@ -2719,7 +2824,7 @@ dependencies = [ [[package]] name = "snarkvm-circuit-types-integers" -version = "0.16.3" +version = "0.16.15" dependencies = [ "paste", "snarkvm-circuit-environment", @@ -2732,7 +2837,7 @@ dependencies = [ [[package]] name = "snarkvm-circuit-types-scalar" -version = "0.16.3" +version = "0.16.15" dependencies = [ "snarkvm-circuit-environment", "snarkvm-circuit-types-boolean", @@ -2742,7 +2847,7 @@ dependencies = [ [[package]] name = "snarkvm-circuit-types-string" -version = "0.16.3" +version = "0.16.15" dependencies = [ "rand", "snarkvm-circuit-environment", @@ -2755,7 +2860,7 @@ dependencies = [ [[package]] name = "snarkvm-console" -version = "0.16.3" +version = "0.16.15" dependencies = [ "snarkvm-console-account", "snarkvm-console-algorithms", @@ -2767,7 +2872,7 @@ dependencies = [ [[package]] name = "snarkvm-console-account" -version = "0.16.3" +version = "0.16.15" dependencies = [ "bincode", "bs58", @@ -2780,7 +2885,7 @@ dependencies = [ [[package]] name = "snarkvm-console-algorithms" -version = "0.16.3" +version = "0.16.15" dependencies = [ "blake2s_simd", "criterion", @@ -2798,7 +2903,7 @@ dependencies = [ [[package]] name = "snarkvm-console-collections" -version = "0.16.3" +version = "0.16.15" dependencies = [ "aleo-std", "criterion", @@ -2811,7 +2916,7 @@ dependencies = [ [[package]] name = "snarkvm-console-network" -version = "0.16.3" +version = "0.16.15" dependencies = [ "anyhow", "indexmap 2.0.2", @@ -2833,7 +2938,7 @@ dependencies = [ [[package]] name = "snarkvm-console-network-environment" -version = "0.16.3" +version = "0.16.15" dependencies = [ "anyhow", "bech32", @@ -2850,7 +2955,7 @@ dependencies = [ [[package]] name = "snarkvm-console-program" -version = "0.16.3" +version = "0.16.15" dependencies = [ "bincode", "enum_index", @@ -2871,7 
+2976,7 @@ dependencies = [ [[package]] name = "snarkvm-console-types" -version = "0.16.3" +version = "0.16.15" dependencies = [ "snarkvm-console-network-environment", "snarkvm-console-types-address", @@ -2885,7 +2990,7 @@ dependencies = [ [[package]] name = "snarkvm-console-types-address" -version = "0.16.3" +version = "0.16.15" dependencies = [ "bincode", "serde_json", @@ -2897,7 +3002,7 @@ dependencies = [ [[package]] name = "snarkvm-console-types-boolean" -version = "0.16.3" +version = "0.16.15" dependencies = [ "bincode", "serde_json", @@ -2906,7 +3011,7 @@ dependencies = [ [[package]] name = "snarkvm-console-types-field" -version = "0.16.3" +version = "0.16.15" dependencies = [ "bincode", "serde_json", @@ -2917,7 +3022,7 @@ dependencies = [ [[package]] name = "snarkvm-console-types-group" -version = "0.16.3" +version = "0.16.15" dependencies = [ "bincode", "serde_json", @@ -2929,7 +3034,7 @@ dependencies = [ [[package]] name = "snarkvm-console-types-integers" -version = "0.16.3" +version = "0.16.15" dependencies = [ "bincode", "serde_json", @@ -2941,7 +3046,7 @@ dependencies = [ [[package]] name = "snarkvm-console-types-scalar" -version = "0.16.3" +version = "0.16.15" dependencies = [ "bincode", "serde_json", @@ -2953,7 +3058,7 @@ dependencies = [ [[package]] name = "snarkvm-console-types-string" -version = "0.16.3" +version = "0.16.15" dependencies = [ "bincode", "serde_json", @@ -2965,7 +3070,7 @@ dependencies = [ [[package]] name = "snarkvm-curves" -version = "0.16.3" +version = "0.16.15" dependencies = [ "bincode", "criterion", @@ -2980,7 +3085,7 @@ dependencies = [ [[package]] name = "snarkvm-fields" -version = "0.16.3" +version = "0.16.15" dependencies = [ "aleo-std", "anyhow", @@ -2997,7 +3102,7 @@ dependencies = [ [[package]] name = "snarkvm-ledger" -version = "0.16.3" +version = "0.16.15" dependencies = [ "aleo-std", "anyhow", @@ -3024,7 +3129,7 @@ dependencies = [ [[package]] name = "snarkvm-ledger-authority" -version = "0.16.3" +version = "0.16.15" 
dependencies = [ "anyhow", "bincode", @@ -3037,7 +3142,7 @@ dependencies = [ [[package]] name = "snarkvm-ledger-block" -version = "0.16.3" +version = "0.16.15" dependencies = [ "bincode", "indexmap 2.0.2", @@ -3049,6 +3154,7 @@ dependencies = [ "snarkvm-ledger-authority", "snarkvm-ledger-coinbase", "snarkvm-ledger-committee", + "snarkvm-ledger-narwhal-batch-header", "snarkvm-ledger-narwhal-subdag", "snarkvm-ledger-narwhal-transmission-id", "snarkvm-ledger-query", @@ -3060,7 +3166,7 @@ dependencies = [ [[package]] name = "snarkvm-ledger-coinbase" -version = "0.16.3" +version = "0.16.15" dependencies = [ "aleo-std", "anyhow", @@ -3081,7 +3187,7 @@ dependencies = [ [[package]] name = "snarkvm-ledger-committee" -version = "0.16.3" +version = "0.16.15" dependencies = [ "anyhow", "bincode", @@ -3095,12 +3201,14 @@ dependencies = [ "serde_json", "snarkvm-console", "snarkvm-ledger-committee", + "snarkvm-ledger-narwhal-batch-header", + "snarkvm-metrics", "test-strategy", ] [[package]] name = "snarkvm-ledger-narwhal" -version = "0.16.3" +version = "0.16.15" dependencies = [ "snarkvm-ledger-narwhal", "snarkvm-ledger-narwhal-batch-certificate", @@ -3113,21 +3221,21 @@ dependencies = [ [[package]] name = "snarkvm-ledger-narwhal-batch-certificate" -version = "0.16.3" +version = "0.16.15" dependencies = [ "bincode", "indexmap 2.0.2", + "rayon", "serde_json", "snarkvm-console", "snarkvm-ledger-narwhal-batch-certificate", "snarkvm-ledger-narwhal-batch-header", "snarkvm-ledger-narwhal-transmission-id", - "time", ] [[package]] name = "snarkvm-ledger-narwhal-batch-header" -version = "0.16.3" +version = "0.16.15" dependencies = [ "bincode", "indexmap 2.0.2", @@ -3140,7 +3248,7 @@ dependencies = [ [[package]] name = "snarkvm-ledger-narwhal-data" -version = "0.16.3" +version = "0.16.15" dependencies = [ "bytes", "serde_json", @@ -3150,7 +3258,7 @@ dependencies = [ [[package]] name = "snarkvm-ledger-narwhal-subdag" -version = "0.16.3" +version = "0.16.15" dependencies = [ "bincode", 
"indexmap 2.0.2", @@ -3158,13 +3266,14 @@ dependencies = [ "serde_json", "snarkvm-console", "snarkvm-ledger-narwhal-batch-certificate", + "snarkvm-ledger-narwhal-batch-header", "snarkvm-ledger-narwhal-subdag", "snarkvm-ledger-narwhal-transmission-id", ] [[package]] name = "snarkvm-ledger-narwhal-transmission" -version = "0.16.3" +version = "0.16.15" dependencies = [ "bincode", "bytes", @@ -3177,7 +3286,7 @@ dependencies = [ [[package]] name = "snarkvm-ledger-narwhal-transmission-id" -version = "0.16.3" +version = "0.16.15" dependencies = [ "bincode", "serde_json", @@ -3187,7 +3296,7 @@ dependencies = [ [[package]] name = "snarkvm-ledger-query" -version = "0.16.3" +version = "0.16.15" dependencies = [ "async-trait", "reqwest", @@ -3199,7 +3308,7 @@ dependencies = [ [[package]] name = "snarkvm-ledger-store" -version = "0.16.3" +version = "0.16.15" dependencies = [ "aleo-std", "anyhow", @@ -3227,7 +3336,7 @@ dependencies = [ [[package]] name = "snarkvm-ledger-test-helpers" -version = "0.16.3" +version = "0.16.15" dependencies = [ "once_cell", "snarkvm-circuit", @@ -3239,9 +3348,17 @@ dependencies = [ "snarkvm-synthesizer-program", ] +[[package]] +name = "snarkvm-metrics" +version = "0.16.15" +dependencies = [ + "metrics", + "metrics-exporter-prometheus", +] + [[package]] name = "snarkvm-parameters" -version = "0.16.3" +version = "0.16.15" dependencies = [ "aleo-std", "anyhow", @@ -3255,6 +3372,7 @@ dependencies = [ "itertools 0.11.0", "js-sys", "lazy_static", + "parking_lot", "paste", "rand", "serde_json", @@ -3273,7 +3391,7 @@ dependencies = [ [[package]] name = "snarkvm-synthesizer" -version = "0.16.3" +version = "0.16.15" dependencies = [ "aleo-std", "anyhow", @@ -3304,7 +3422,7 @@ dependencies = [ [[package]] name = "snarkvm-synthesizer-process" -version = "0.16.3" +version = "0.16.15" dependencies = [ "aleo-std", "bincode", @@ -3324,12 +3442,13 @@ dependencies = [ "snarkvm-ledger-test-helpers", "snarkvm-synthesizer-program", "snarkvm-synthesizer-snark", + 
"snarkvm-utilities", "tempfile", ] [[package]] name = "snarkvm-synthesizer-program" -version = "0.16.3" +version = "0.16.15" dependencies = [ "bincode", "criterion", @@ -3345,7 +3464,7 @@ dependencies = [ [[package]] name = "snarkvm-synthesizer-snark" -version = "0.16.3" +version = "0.16.15" dependencies = [ "bincode", "colored", @@ -3358,7 +3477,7 @@ dependencies = [ [[package]] name = "snarkvm-utilities" -version = "0.16.3" +version = "0.16.15" dependencies = [ "aleo-std", "anyhow", @@ -3378,7 +3497,7 @@ dependencies = [ [[package]] name = "snarkvm-utilities-derives" -version = "0.16.3" +version = "0.16.15" dependencies = [ "proc-macro2", "quote 1.0.33", @@ -3387,17 +3506,18 @@ dependencies = [ [[package]] name = "snarkvm-wasm" -version = "0.16.3" +version = "0.16.15" dependencies = [ "getrandom", - "rand", - "serde", + "snarkvm-circuit-network", "snarkvm-console", "snarkvm-curves", "snarkvm-fields", + "snarkvm-ledger-block", + "snarkvm-ledger-query", + "snarkvm-ledger-store", "snarkvm-synthesizer", "snarkvm-utilities", - "wasm-bindgen", "wasm-bindgen-test", ] @@ -3962,8 +4082,6 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7706a72ab36d8cb1f80ffbf0e071533974a60d0a308d01a5d0375bf60499a342" dependencies = [ "cfg-if", - "serde", - "serde_json", "wasm-bindgen-macro", ] diff --git a/Cargo.toml b/Cargo.toml index da49a99dd2..2932649638 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "snarkvm" -version = "0.16.3" +version = "0.16.15" authors = [ "The Aleo Team " ] description = "A decentralized virtual machine" homepage = "https://aleo.org" @@ -76,6 +76,7 @@ members = [ "ledger/query", "ledger/store", "ledger/test-helpers", + "metrics", "parameters", "synthesizer", "synthesizer/process", @@ -123,7 +124,8 @@ cli = [ "rand", "self_update", "serde_json", - "thiserror" + "thiserror", + "ureq" ] aleo-cli = [ "snarkvm-synthesizer/aleo-cli" ] async = [ "snarkvm-ledger/async", "snarkvm-synthesizer/async" ] @@ -131,6 
+133,7 @@ cuda = [ "snarkvm-algorithms/cuda" ] parameters_no_std_out = [ "snarkvm-parameters/no_std_out" ] noconfig = [ ] rocks = [ "snarkvm-ledger/rocks" ] +test = [ "snarkvm-ledger/test" ] test-helpers = [ "snarkvm-ledger/test-helpers" ] timer = [ "snarkvm-ledger/timer" ] algorithms = [ "snarkvm-algorithms" ] @@ -139,6 +142,7 @@ console = [ "snarkvm-console" ] curves = [ "snarkvm-curves" ] fields = [ "snarkvm-fields" ] ledger = [ "snarkvm-ledger" ] +metrics = [ "snarkvm-metrics", "snarkvm-ledger/metrics" ] parameters = [ "snarkvm-parameters" ] synthesizer = [ "snarkvm-synthesizer" ] utilities = [ "snarkvm-utilities" ] @@ -146,53 +150,58 @@ wasm = [ "snarkvm-wasm" ] [dependencies.snarkvm-algorithms] path = "./algorithms" -version = "=0.16.3" +version = "=0.16.15" optional = true [dependencies.snarkvm-circuit] path = "./circuit" -version = "=0.16.3" +version = "=0.16.15" optional = true [dependencies.snarkvm-console] path = "./console" -version = "=0.16.3" +version = "=0.16.15" optional = true [dependencies.snarkvm-curves] path = "./curves" -version = "=0.16.3" +version = "=0.16.15" optional = true [dependencies.snarkvm-fields] path = "./fields" -version = "=0.16.3" +version = "=0.16.15" optional = true [dependencies.snarkvm-ledger] path = "./ledger" -version = "=0.16.3" +version = "=0.16.15" +optional = true + +[dependencies.snarkvm-metrics] +path = "./metrics" +version = "=0.16.15" optional = true [dependencies.snarkvm-parameters] path = "./parameters" -version = "=0.16.3" +version = "=0.16.15" optional = true [dependencies.snarkvm-synthesizer] path = "./synthesizer" -version = "=0.16.3" +version = "=0.16.15" default-features = false optional = true [dependencies.snarkvm-utilities] path = "./utilities" -version = "=0.16.3" +version = "=0.16.15" optional = true [dependencies.snarkvm-wasm] path = "./wasm" -version = "=0.16.3" +version = "=0.16.15" optional = true [dependencies.anstyle] @@ -251,6 +260,7 @@ optional = true [dependencies.ureq] version = "2.7" features 
= [ "json" ] +optional = true [dev-dependencies.bincode] version = "1.3" diff --git a/README.md b/README.md index dbd1bf0af6..f496a6c566 100644 --- a/README.md +++ b/README.md @@ -6,7 +6,8 @@ - + +

## Table of Contents @@ -85,7 +86,7 @@ snarkvm ## 4. Contributors -Thank you for helping make SnarkVM better! +Thank you for helping make snarkVM better! [🧐 What do the emojis mean?](https://allcontributors.org/docs/en/emoji-key) @@ -104,21 +105,21 @@ Thank you for helping make SnarkVM better! Collin Chin
Collin Chin

πŸ’» πŸ“– πŸ‘€ - Alessandro Coglio
Alessandro Coglio

πŸ’» πŸ“– ⚠️ + Alessandro Coglio
Alessandro Coglio

πŸ’» πŸ“– ⚠️ Niklas Long
Niklas Long

πŸ’» jules
jules

πŸ’» Ali Mousa
Ali Mousa

πŸ’» Weikeng Chen
Weikeng Chen

πŸ’» - Max Bruce
Max Bruce

πŸ’» + Evan Schott
Evan Schott

πŸ’» + Max Bruce
Max Bruce

πŸ’» zhiqiangxu
zhiqiangxu

πŸ’» Javier RodrΓ­guez Chatruc
Javier RodrΓ­guez Chatruc

πŸ’» Eduardo Morais
Eduardo Morais

πŸ’» Maciej ZwoliΕ„ski
Maciej ZwoliΕ„ski

πŸ’» Ivan Litteri
Ivan Litteri

πŸ’» Francisco Strambini
Francisco Strambini

πŸ’» - swift-mx
swift-mx

πŸ’» Haruka
Haruka

πŸ› πŸ’» @@ -130,11 +131,20 @@ Thank you for helping make SnarkVM better! Psi Vesely
Psi Vesely

πŸ’» + swift-mx
swift-mx

πŸ’» Nacho Avecilla
Nacho Avecilla

πŸ’» qy3u
qy3u

πŸ’» Yt
Yt

πŸ’» Kostyan
Kostyan

πŸ’» + stanlagermin
stanlagermin

πŸ’» + Sukey
Sukey

πŸ’» + + + Alex Zhao
Alex Zhao

πŸ’» + ghost ant
ghost ant

πŸ’» + Psi Vesely
Psi Vesely

πŸ’» Dependabot
Dependabot

πŸ’» + Dependabot Preview
Dependabot Preview

πŸ’» All Contributors
All Contributors

πŸ“– diff --git a/algorithms/Cargo.toml b/algorithms/Cargo.toml index d6cdf4e8e9..af5a76e19b 100644 --- a/algorithms/Cargo.toml +++ b/algorithms/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "snarkvm-algorithms" -version = "0.16.3" +version = "0.16.15" authors = [ "The Aleo Team " ] description = "Algorithms for a decentralized virtual machine" homepage = "https://aleo.org" @@ -47,27 +47,27 @@ required-features = [ "test" ] [dependencies.snarkvm-curves] path = "../curves" -version = "=0.16.3" +version = "=0.16.15" default-features = false [dependencies.snarkvm-fields] path = "../fields" -version = "=0.16.3" +version = "=0.16.15" default-features = false [dependencies.snarkvm-parameters] path = "../parameters" -version = "=0.16.3" +version = "=0.16.15" optional = true [dependencies.snarkvm-utilities] path = "../utilities" -version = "=0.16.3" +version = "=0.16.15" default-features = false [dependencies.snarkvm-algorithms-cuda] path = "./cuda" -version = "=0.16.3" +version = "=0.16.15" optional = true [dependencies.aleo-std] diff --git a/algorithms/benches/snark/varuna.rs b/algorithms/benches/snark/varuna.rs index da095e49eb..ea23d8d4ed 100644 --- a/algorithms/benches/snark/varuna.rs +++ b/algorithms/benches/snark/varuna.rs @@ -25,7 +25,7 @@ use snarkvm_curves::bls12_377::{Bls12_377, Fq, Fr}; use snarkvm_utilities::{CanonicalDeserialize, CanonicalSerialize, TestRng}; use criterion::Criterion; -use std::collections::BTreeMap; +use std::{collections::BTreeMap, time::Duration}; type VarunaInst = VarunaSNARK; type FS = PoseidonSponge; @@ -58,11 +58,12 @@ fn snark_circuit_setup(c: &mut Criterion) { } fn snark_prove(c: &mut Criterion) { + let rng = &mut TestRng::default(); + c.bench_function("snark_prove", move |b| { let num_constraints = 100; let num_variables = 25; let mul_depth = 1; - let rng = &mut TestRng::default(); let max_degree = AHPForR1CS::::max_degree(1000, 1000, 1000).unwrap(); let universal_srs = VarunaInst::universal_setup(max_degree).unwrap(); @@ -77,11 
+78,12 @@ fn snark_prove(c: &mut Criterion) { } fn snark_batch_prove(c: &mut Criterion) { + let rng = &mut TestRng::default(); + c.bench_function("snark_batch_prove", move |b| { let num_constraints_base = 100; let num_variables_base = 25; let mul_depth_base = 1; - let rng = &mut TestRng::default(); let max_degree = AHPForR1CS::::max_degree(1000000, 1000000, 1000000).unwrap(); let universal_srs = VarunaInst::universal_setup(max_degree).unwrap(); @@ -119,11 +121,12 @@ fn snark_batch_prove(c: &mut Criterion) { } fn snark_verify(c: &mut Criterion) { + let rng = &mut TestRng::default(); + c.bench_function("snark_verify", move |b| { let num_constraints = 100; let num_variables = 25; let mul_depth = 1; - let rng = &mut TestRng::default(); let max_degree = AHPForR1CS::::max_degree(100, 100, 100).unwrap(); let universal_srs = VarunaInst::universal_setup(max_degree).unwrap(); @@ -145,10 +148,11 @@ fn snark_verify(c: &mut Criterion) { } fn snark_batch_verify(c: &mut Criterion) { + let rng = &mut TestRng::default(); + c.bench_function("snark_batch_verify", move |b| { let num_constraints_base = 100; let num_variables_base = 25; - let rng = &mut TestRng::default(); let max_degree = AHPForR1CS::::max_degree(1000, 1000, 100).unwrap(); let universal_srs = VarunaInst::universal_setup(max_degree).unwrap(); @@ -200,6 +204,7 @@ fn snark_batch_verify(c: &mut Criterion) { fn snark_vk_serialize(c: &mut Criterion) { use snarkvm_utilities::serialize::Compress; let mut group = c.benchmark_group("snark_vk_serialize"); + let rng = &mut TestRng::default(); for mode in [Compress::Yes, Compress::No] { let name = match mode { Compress::No => "uncompressed", @@ -208,7 +213,6 @@ fn snark_vk_serialize(c: &mut Criterion) { let num_constraints = 100; let num_variables = 25; let mul_depth = 1; - let rng = &mut TestRng::default(); let max_degree = AHPForR1CS::::max_degree(100, 100, 100).unwrap(); let universal_srs = VarunaInst::universal_setup(max_degree).unwrap(); @@ -229,6 +233,7 @@ fn 
snark_vk_serialize(c: &mut Criterion) { fn snark_vk_deserialize(c: &mut Criterion) { use snarkvm_utilities::serialize::{Compress, Validate}; let mut group = c.benchmark_group("snark_vk_deserialize"); + let rng = &mut TestRng::default(); for compress in [Compress::Yes, Compress::No] { let compress_name = match compress { Compress::No => "uncompressed", @@ -243,7 +248,6 @@ fn snark_vk_deserialize(c: &mut Criterion) { let num_constraints = 100; let num_variables = 25; let mul_depth = 1; - let rng = &mut TestRng::default(); let max_degree = AHPForR1CS::::max_degree(100, 100, 100).unwrap(); let universal_srs = VarunaInst::universal_setup(max_degree).unwrap(); @@ -273,13 +277,13 @@ fn snark_certificate_prove(c: &mut Criterion) { let fs_p = &fs_parameters; for size in [100, 1_000, 10_000, 100_000] { - c.bench_function(&format!("snark_certificate_prove_{size}"), |b| { - let num_constraints = size; - let num_variables = size; - let mul_depth = 1; - let (circuit, _) = TestCircuit::gen_rand(mul_depth, num_constraints, num_variables, rng); - let (pk, vk) = VarunaInst::circuit_setup(&universal_srs, &circuit).unwrap(); + let num_constraints = size; + let num_variables = size; + let mul_depth = 1; + let (circuit, _) = TestCircuit::gen_rand(mul_depth, num_constraints, num_variables, rng); + let (pk, vk) = VarunaInst::circuit_setup(&universal_srs, &circuit).unwrap(); + c.bench_function(&format!("snark_certificate_prove_{size}"), |b| { b.iter(|| VarunaInst::prove_vk(universal_prover, fs_p, &vk, &pk).unwrap()) }); } @@ -296,14 +300,14 @@ fn snark_certificate_verify(c: &mut Criterion) { let fs_p = &fs_parameters; for size in [100, 1_000, 10_000, 100_000] { - c.bench_function(&format!("snark_certificate_verify_{size}"), |b| { - let num_constraints = size; - let num_variables = size; - let mul_depth = 1; - let (circuit, _) = TestCircuit::gen_rand(mul_depth, num_constraints, num_variables, rng); - let (pk, vk) = VarunaInst::circuit_setup(&universal_srs, &circuit).unwrap(); - let 
certificate = VarunaInst::prove_vk(universal_prover, fs_p, &vk, &pk).unwrap(); + let num_constraints = size; + let num_variables = size; + let mul_depth = 1; + let (circuit, _) = TestCircuit::gen_rand(mul_depth, num_constraints, num_variables, rng); + let (pk, vk) = VarunaInst::circuit_setup(&universal_srs, &circuit).unwrap(); + let certificate = VarunaInst::prove_vk(universal_prover, fs_p, &vk, &pk).unwrap(); + c.bench_function(&format!("snark_certificate_verify_{size}"), |b| { b.iter(|| VarunaInst::verify_vk(universal_verifier, fs_p, &circuit, &vk, &certificate).unwrap()) }); } @@ -311,7 +315,7 @@ fn snark_certificate_verify(c: &mut Criterion) { criterion_group! { name = varuna_snark; - config = Criterion::default().sample_size(10); + config = Criterion::default().measurement_time(Duration::from_secs(10)); targets = snark_universal_setup, snark_circuit_setup, snark_prove, snark_verify, snark_batch_prove, snark_batch_verify, snark_vk_serialize, snark_vk_deserialize, snark_certificate_prove, snark_certificate_verify, } diff --git a/algorithms/cuda/Cargo.toml b/algorithms/cuda/Cargo.toml index 1e79a92768..f57066d0f1 100644 --- a/algorithms/cuda/Cargo.toml +++ b/algorithms/cuda/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "snarkvm-algorithms-cuda" -version = "0.16.3" +version = "0.16.15" authors = [ "The Aleo Team " ] description = "Cuda optimizations for a decentralized virtual machine" homepage = "https://aleo.org" diff --git a/algorithms/cuda/build.rs b/algorithms/cuda/build.rs index 8b2036d54a..8e85744b6c 100644 --- a/algorithms/cuda/build.rs +++ b/algorithms/cuda/build.rs @@ -20,7 +20,7 @@ fn main() { // account for cross-compilation [by examining environment variable] let target_arch = env::var("CARGO_CFG_TARGET_ARCH").unwrap(); - // Set CC environment variable to choose alternative C compiler. + // Set CC environment variable to choose an alternative C compiler. // Optimization level depends on whether or not --release is passed // or implied. 
let mut cc = cc::Build::new(); diff --git a/algorithms/cuda/cuda/polynomial.cuh b/algorithms/cuda/cuda/polynomial.cuh index e74fa7bad3..745075accc 100644 --- a/algorithms/cuda/cuda/polynomial.cuh +++ b/algorithms/cuda/cuda/polynomial.cuh @@ -116,7 +116,7 @@ public: size_t pcur = 0; size_t ecur = 0; - // Set up the first polynomail / evaluation in dmem0 + // Set up the first polynomial / evaluation in dmem0 if (pcount > 0) { mul_copy_poly(hmem0, dmem0, polynomials[0], plens[0], stream, lg_domain_size); // Perform NTT on the input data diff --git a/algorithms/src/crypto_hash/poseidon.rs b/algorithms/src/crypto_hash/poseidon.rs index 3476c271b8..77a28d8d47 100644 --- a/algorithms/src/crypto_hash/poseidon.rs +++ b/algorithms/src/crypto_hash/poseidon.rs @@ -18,6 +18,7 @@ use snarkvm_utilities::{BigInteger, FromBits, ToBits}; use smallvec::SmallVec; use std::{ + iter::Peekable, ops::{Index, IndexMut}, sync::Arc, }; @@ -40,7 +41,7 @@ impl State impl Iterator { self.capacity_state.iter_mut().chain(self.rate_state.iter_mut()) } @@ -332,7 +333,11 @@ impl PoseidonSponge { /// Compress every two elements if possible. /// Provides a vector of (limb, num_of_additions), both of which are F. - pub fn compress_elements(&self, src_limbs: &[(F, F)], ty: OptimizationType) -> Vec { + pub fn compress_elements>( + &self, + mut src_limbs: Peekable, + ty: OptimizationType, + ) -> Vec { let capacity = F::size_in_bits() - 1; let mut dest_limbs = Vec::::new(); @@ -341,11 +346,8 @@ impl PoseidonSponge { // Prepare a reusable vector to be used in overhead calculation. 
let mut num_bits = Vec::new(); - let mut i = 0; - let src_len = src_limbs.len(); - while i < src_len { - let first = &src_limbs[i]; - let second = if i + 1 < src_len { Some(&src_limbs[i + 1]) } else { None }; + while let Some(first) = src_limbs.next() { + let second = src_limbs.peek(); let first_max_bits_per_limb = params.bits_per_limb + crate::overhead!(first.1 + F::one(), &mut num_bits); let second_max_bits_per_limb = if let Some(second) = second { @@ -359,14 +361,12 @@ impl PoseidonSponge { let adjustment_factor = &self.adjustment_factor_lookup_table[second_max_bits_per_limb]; dest_limbs.push(first.0 * adjustment_factor + second.0); - i += 2; + src_limbs.next(); } else { dest_limbs.push(first.0); - i += 1; } } else { dest_limbs.push(first.0); - i += 1; } } @@ -405,7 +405,7 @@ impl PoseidonSponge { cur_bits.clear(); } - // then we reserve, so that the limbs are ``big limb first'' + // then we reverse, so that the limbs are ``big limb first'' limbs.reverse(); limbs @@ -417,17 +417,16 @@ impl PoseidonSponge { src: impl IntoIterator, ty: OptimizationType, ) { - let mut src_limbs = Vec::<(F, F)>::new(); - - for elem in src { - let limbs = Self::get_limbs_representations(&elem, ty); - for limb in limbs.iter() { - src_limbs.push((*limb, F::one())); + let src_limbs = src + .into_iter() + .flat_map(|elem| { + let limbs = Self::get_limbs_representations(&elem, ty); + limbs.into_iter().map(|limb| (limb, F::one())) // specifically set to one, since most gadgets in the constraint world would not have zero noise (due to the relatively weak normal form testing in `alloc`) - } - } + }) + .peekable(); - let dest_limbs = self.compress_elements::(&src_limbs, ty); + let dest_limbs = self.compress_elements::(src_limbs, ty); self.absorb_native_field_elements(&dest_limbs); } @@ -465,7 +464,7 @@ impl PoseidonSponge { }; let bits = self.get_bits(num_bits_per_nonnative * num_elements); - let mut lookup_table = Vec::::new(); + let mut lookup_table = 
Vec::::with_capacity(num_bits_per_nonnative); let mut cur = TargetField::one(); for _ in 0..num_bits_per_nonnative { lookup_table.push(cur); diff --git a/algorithms/src/errors.rs b/algorithms/src/errors.rs index 0713953dc7..a1dedf9f12 100644 --- a/algorithms/src/errors.rs +++ b/algorithms/src/errors.rs @@ -43,9 +43,6 @@ pub enum SNARKError { #[error("Circuit not found")] CircuitNotFound, - - #[error("terminated")] - Terminated, } impl From for SNARKError { @@ -53,12 +50,3 @@ impl From for SNARKError { SNARKError::Crate("AHPError", format!("{err:?}")) } } - -impl From for SNARKError { - fn from(err: crate::polycommit::PCError) -> Self { - match err { - crate::polycommit::PCError::Terminated => SNARKError::Terminated, - err => SNARKError::Crate("PCError", format!("{err:?}")), - } - } -} diff --git a/algorithms/src/fft/domain.rs b/algorithms/src/fft/domain.rs index 93e7d1ed12..c121e9bfe2 100644 --- a/algorithms/src/fft/domain.rs +++ b/algorithms/src/fft/domain.rs @@ -345,13 +345,13 @@ impl EvaluationDomain { /// Perform O(n) multiplication of two polynomials that are presented by their /// evaluations in the domain. /// Returns the evaluations of the product over the domain. - #[must_use] - pub fn mul_polynomials_in_evaluation_domain(&self, self_evals: Vec, other_evals: &[F]) -> Vec { + pub fn mul_polynomials_in_evaluation_domain(&self, self_evals: Vec, other_evals: &[F]) -> Result> { let mut result = self_evals; + ensure!(result.len() == other_evals.len()); cfg_iter_mut!(result).zip_eq(other_evals).for_each(|(a, b)| *a *= b); - result + Ok(result) } } diff --git a/algorithms/src/fft/evaluations.rs b/algorithms/src/fft/evaluations.rs index f58925a02a..532e06756b 100644 --- a/algorithms/src/fft/evaluations.rs +++ b/algorithms/src/fft/evaluations.rs @@ -38,7 +38,9 @@ pub struct Evaluations { impl Evaluations { /// Construct `Self` from evaluations and a domain. 
- pub fn from_vec_and_domain(evaluations: Vec, domain: EvaluationDomain) -> Self { + pub fn from_vec_and_domain(mut evaluations: Vec, domain: EvaluationDomain) -> Self { + // Pad evaluations to ensure we can always evaluate + evaluations.resize(domain.size(), F::zero()); Self { evaluations, domain } } diff --git a/algorithms/src/fft/polynomial/dense.rs b/algorithms/src/fft/polynomial/dense.rs index 07454c9b5a..80b9658fc1 100644 --- a/algorithms/src/fft/polynomial/dense.rs +++ b/algorithms/src/fft/polynomial/dense.rs @@ -14,10 +14,12 @@ //! A polynomial represented in coefficient form. +use super::PolyMultiplier; use crate::fft::{EvaluationDomain, Evaluations, Polynomial}; use snarkvm_fields::{Field, PrimeField}; use snarkvm_utilities::{cfg_iter_mut, serialize::*}; +use anyhow::Result; use num_traits::CheckedDiv; use rand::Rng; use std::{ @@ -25,14 +27,11 @@ use std::{ ops::{Add, AddAssign, Deref, DerefMut, Div, Mul, MulAssign, Neg, Sub, SubAssign}, }; -#[cfg(feature = "serial")] use itertools::Itertools; #[cfg(not(feature = "serial"))] use rayon::prelude::*; -use super::PolyMultiplier; - /// Stores a polynomial in coefficient form. #[derive(Clone, PartialEq, Eq, Hash, Default, CanonicalSerialize, CanonicalDeserialize)] #[must_use] @@ -75,10 +74,10 @@ impl DensePolynomial { /// Constructs a new polynomial from a list of coefficients. pub fn from_coefficients_vec(mut coeffs: Vec) -> Self { // While there are zeros at the end of the coefficient vector, pop them off. - while coeffs.last().map_or(false, |c| c.is_zero()) { + while let Some(true) = coeffs.last().map(|c| c.is_zero()) { coeffs.pop(); } - // Check that either the coefficients vec is empty or that the last coeff is non-zero. + // Check that either the coefficients vec are empty or that the last coeff is non-zero. 
assert!(coeffs.last().map_or(true, |coeff| !coeff.is_zero())); Self { coeffs } @@ -113,10 +112,16 @@ impl DensePolynomial { crate::cfg_reduce!(mapping, || zero, |a, b| a + b) } - /// Outputs a polynomial of degree `d` where each coefficient is sampled uniformly at random - /// from the field `F`. + /// Outputs a univariate polynomial of degree `d` where each non-leading + /// coefficient is sampled uniformly at random from R and the leading + /// coefficient is sampled uniformly at random from among the non-zero + /// elements of R. pub fn rand(d: usize, rng: &mut R) -> Self { - let random_coeffs = (0..(d + 1)).map(|_| F::rand(rng)).collect(); + let mut random_coeffs = (0..(d + 1)).map(|_| F::rand(rng)).collect_vec(); + while random_coeffs[d].is_zero() { + // In the extremely unlikely event, sample again. + random_coeffs[d] = F::rand(rng); + } Self::from_coefficients_vec(random_coeffs) } @@ -156,7 +161,7 @@ impl DensePolynomial { pub fn divide_by_vanishing_poly( &self, domain: EvaluationDomain, - ) -> Option<(DensePolynomial, DensePolynomial)> { + ) -> Result<(DensePolynomial, DensePolynomial)> { let self_poly = Polynomial::from(self); let vanishing_poly = Polynomial::from(domain.vanishing_polynomial()); self_poly.divide_with_q_and_r(&vanishing_poly) @@ -189,7 +194,7 @@ impl<'a, 'b, F: Field> Add<&'a DensePolynomial> for &'b DensePolynomial { type Output = DensePolynomial; fn add(self, other: &'a DensePolynomial) -> DensePolynomial { - if self.is_zero() { + let mut result = if self.is_zero() { other.clone() } else if other.is_zero() { self.clone() @@ -202,12 +207,13 @@ impl<'a, 'b, F: Field> Add<&'a DensePolynomial> for &'b DensePolynomial { let mut result = other.clone(); // Zip safety: `result` and `other` could have different lengths. cfg_iter_mut!(result.coeffs).zip(&self.coeffs).for_each(|(a, b)| *a += b); - // If the leading coefficient ends up being zero, pop it off. 
- while let Some(true) = self.coeffs.last().map(|c| c.is_zero()) { - result.coeffs.pop(); - } result + }; + // If the leading coefficient ends up being zero, pop it off. + while let Some(true) = result.coeffs.last().map(|c| c.is_zero()) { + result.coeffs.pop(); } + result } } @@ -273,10 +279,10 @@ impl<'a, F: Field> AddAssign<(F, &'a DensePolynomial)> for DensePolynomial cfg_iter_mut!(self.coeffs).zip(&other.coeffs).for_each(|(a, b)| { *a += f * b; }); - // If the leading coefficient ends up being zero, pop it off. - while let Some(true) = self.coeffs.last().map(|c| c.is_zero()) { - self.coeffs.pop(); - } + } + // If the leading coefficient ends up being zero, pop it off. + while let Some(true) = self.coeffs.last().map(|c| c.is_zero()) { + self.coeffs.pop(); } } } @@ -298,7 +304,7 @@ impl<'a, 'b, F: Field> Sub<&'a DensePolynomial> for &'b DensePolynomial { #[inline] fn sub(self, other: &'a DensePolynomial) -> DensePolynomial { - if self.is_zero() { + let mut result = if self.is_zero() { let mut result = other.clone(); for coeff in &mut result.coeffs { *coeff = -(*coeff); @@ -318,15 +324,13 @@ impl<'a, 'b, F: Field> Sub<&'a DensePolynomial> for &'b DensePolynomial { cfg_iter_mut!(result.coeffs).zip(&other.coeffs).for_each(|(a, b)| { *a -= b; }); - if !result.is_zero() { - // If the leading coefficient ends up being zero, pop it off. - while result.coeffs.last().map(|c| c.is_zero()) == Some(true) { - result.coeffs.pop(); - } - } - result + }; + // If the leading coefficient ends up being zero, pop it off. + while let Some(true) = result.coeffs.last().map(|c| c.is_zero()) { + result.coeffs.pop(); } + result } } @@ -348,10 +352,10 @@ impl<'a, F: Field> SubAssign<&'a DensePolynomial> for DensePolynomial { self.coeffs.resize(other.coeffs.len(), F::zero()); // Zip safety: self and other have the same length after the resize. cfg_iter_mut!(self.coeffs).zip(&other.coeffs).for_each(|(a, b)| *a -= b); - // If the leading coefficient ends up being zero, pop it off. 
- while let Some(true) = self.coeffs.last().map(|c| c.is_zero()) { - self.coeffs.pop(); - } + } + // If the leading coefficient ends up being zero, pop it off. + while let Some(true) = self.coeffs.last().map(|c| c.is_zero()) { + self.coeffs.pop(); } } } @@ -421,14 +425,14 @@ impl CheckedDiv for DensePolynomial { let a: Polynomial<_> = self.into(); let b: Polynomial<_> = divisor.into(); match a.divide_with_q_and_r(&b) { - Some((divisor, remainder)) => { + Ok((divisor, remainder)) => { if remainder.is_zero() { Some(divisor) } else { None } } - None => None, + Err(_) => None, } } } @@ -592,11 +596,9 @@ mod tests { for b_degree in 0..70 { let dividend = DensePolynomial::::rand(a_degree, rng); let divisor = DensePolynomial::::rand(b_degree, rng); - if let Some((quotient, remainder)) = - Polynomial::divide_with_q_and_r(&(÷nd).into(), &(&divisor).into()) - { - assert_eq!(dividend, &(&divisor * "ient) + &remainder) - } + let (quotient, remainder) = + Polynomial::divide_with_q_and_r(&(÷nd).into(), &(&divisor).into()).unwrap(); + assert_eq!(dividend, &(&divisor * "ient) + &remainder) } } } @@ -615,6 +617,13 @@ mod tests { } } + #[test] + fn divide_poly_by_zero() { + let a = Polynomial::::zero(); + let b = Polynomial::::zero(); + assert!(a.divide_with_q_and_r(&b).is_err()); + } + #[test] fn mul_polynomials_random() { let rng = &mut TestRng::default(); @@ -691,7 +700,7 @@ mod tests { multiplier.add_polynomial(a.clone(), "a"); assert_eq!(multiplier.multiply().unwrap(), a); - // Note PolyMultiplier doesn't support a evluations with no polynomials + // Note PolyMultiplier doesn't support evaluations with no polynomials } #[test] diff --git a/algorithms/src/fft/polynomial/mod.rs b/algorithms/src/fft/polynomial/mod.rs index fa24d0c72e..8fcb907408 100644 --- a/algorithms/src/fft/polynomial/mod.rs +++ b/algorithms/src/fft/polynomial/mod.rs @@ -17,11 +17,11 @@ use crate::fft::{EvaluationDomain, Evaluations}; use snarkvm_fields::{Field, PrimeField}; use 
snarkvm_utilities::{cfg_iter_mut, serialize::*, SerializationError}; +use Polynomial::*; +use anyhow::{ensure, Result}; use std::{borrow::Cow, convert::TryInto}; -use Polynomial::*; - #[cfg(not(feature = "serial"))] use rayon::prelude::*; @@ -67,7 +67,16 @@ impl<'a, F: Field> CanonicalSerialize for Polynomial<'a, F> { impl<'a, F: Field> Valid for Polynomial<'a, F> { fn check(&self) -> Result<(), SerializationError> { - Ok(()) + // Check that the polynomial contains a trailing zero coefficient. + let has_trailing_zero = match self { + Sparse(p) => p.coeffs().last().map(|(_, c)| c.is_zero()), + Dense(p) => p.coeffs.last().map(|c| c.is_zero()), + }; + // Fail if the trailing coefficient is zero. + match has_trailing_zero { + Some(true) => Err(SerializationError::InvalidData), + Some(false) | None => Ok(()), + } } } @@ -209,13 +218,13 @@ impl<'a, F: Field> Polynomial<'a, F> { } /// Divide self by another (sparse or dense) polynomial, and returns the quotient and remainder. - pub fn divide_with_q_and_r(&self, divisor: &Self) -> Option<(DensePolynomial, DensePolynomial)> { + pub fn divide_with_q_and_r(&self, divisor: &Self) -> Result<(DensePolynomial, DensePolynomial)> { + ensure!(!divisor.is_zero(), "Dividing by zero polynomial is undefined"); + if self.is_zero() { - Some((DensePolynomial::zero(), DensePolynomial::zero())) - } else if divisor.is_zero() { - panic!("Dividing by zero polynomial") + Ok((DensePolynomial::zero(), DensePolynomial::zero())) } else if self.degree() < divisor.degree() { - Some((DensePolynomial::zero(), self.clone().into())) + Ok((DensePolynomial::zero(), self.clone().into())) } else { // Now we know that self.degree() >= divisor.degree(); let mut quotient = vec![F::zero(); self.degree() - divisor.degree() + 1]; @@ -241,7 +250,7 @@ impl<'a, F: Field> Polynomial<'a, F> { remainder.coeffs.pop(); } } - Some((DensePolynomial::from_coefficients_vec(quotient), remainder)) + Ok((DensePolynomial::from_coefficients_vec(quotient), remainder)) } } } diff 
--git a/algorithms/src/fft/polynomial/multiplier.rs b/algorithms/src/fft/polynomial/multiplier.rs index 66d7a82c9e..c93aa4de76 100644 --- a/algorithms/src/fft/polynomial/multiplier.rs +++ b/algorithms/src/fft/polynomial/multiplier.rs @@ -102,7 +102,7 @@ impl<'a, F: PrimeField> PolyMultiplier<'a, F> { } let fft_pc = &self.fft_precomputation.unwrap(); let ifft_pc = &self.ifft_precomputation.unwrap(); - let mut pool = ExecutionPool::new(); + let mut pool = ExecutionPool::with_capacity(self.polynomials.len() + self.evaluations.len()); for (_, p) in self.polynomials { pool.add_job(move || { let mut p = p.clone().into_owned().coeffs; @@ -146,7 +146,7 @@ impl<'a, F: PrimeField> PolyMultiplier<'a, F> { Some(Cow::Owned(self.fft_precomputation.as_ref().unwrap().to_ifft_precomputation())); } let fft_pc = self.fft_precomputation.as_ref().unwrap(); - let mut pool = ExecutionPool::new(); + let mut pool = ExecutionPool::with_capacity(self.polynomials.len() + self.evaluations.len()); for (l, p) in self.polynomials { pool.add_job(move || { let mut p = p.clone().into_owned().coeffs; diff --git a/algorithms/src/fft/polynomial/sparse.rs b/algorithms/src/fft/polynomial/sparse.rs index 23b2b1f0df..f38967f99f 100644 --- a/algorithms/src/fft/polynomial/sparse.rs +++ b/algorithms/src/fft/polynomial/sparse.rs @@ -93,7 +93,7 @@ impl SparsePolynomial { total } - /// Perform a naive n^2 multiplicatoin of `self` by `other`. + /// Perform a naive n^2 multiplication of `self` by `other`. 
pub fn mul(&self, other: &Self) -> Self { if self.is_zero() || other.is_zero() { SparsePolynomial::zero() diff --git a/algorithms/src/msm/variable_base/batched.rs b/algorithms/src/msm/variable_base/batched.rs index 18fe9e9b8e..b1f172d8a4 100644 --- a/algorithms/src/msm/variable_base/batched.rs +++ b/algorithms/src/msm/variable_base/batched.rs @@ -48,7 +48,7 @@ impl PartialOrd for BucketPosition { } } -/// Returns a batch size of sufficient size to amortise the cost of an inversion, +/// Returns a batch size of sufficient size to amortize the cost of an inversion, /// while attempting to reduce strain to the CPU cache. #[inline] const fn batch_size(msm_size: usize) -> usize { @@ -57,7 +57,7 @@ const fn batch_size(msm_size: usize) -> usize { // L1 and L2 cache sizes and dividing them by the size of group elements (i.e. 96 bytes). // // As the algorithm itself requires caching additional values beyond the group elements, - // the ideal batch size is less than expectations, to accommodate those values. + // the ideal batch size is less than expected, to accommodate those values. // In general, it was found that undershooting is better than overshooting this heuristic. if cfg!(target_arch = "x86_64") && msm_size < 500_000 { // Assumes an L1 cache size of 32KiB. Note that larger cache sizes @@ -192,7 +192,7 @@ pub(super) fn batch_add( let mut number_of_bases_in_batch = 0; let mut instr = Vec::<(u32, u32)>::with_capacity(batch_size); - let mut new_bases = Vec::with_capacity(bases.len() * 3 / 8); + let mut new_bases = Vec::with_capacity(bases.len()); let mut scratch_space = Vec::with_capacity(batch_size / 2); // In the first loop, copy the results of the first in-place addition tree to the vector `new_bases`. diff --git a/algorithms/src/polycommit/error.rs b/algorithms/src/polycommit/error.rs index 2be41f7f8e..f101d30224 100644 --- a/algorithms/src/polycommit/error.rs +++ b/algorithms/src/polycommit/error.rs @@ -13,33 +13,33 @@ // limitations under the License. 
/// The error type for `PolynomialCommitment`. -#[derive(Debug)] +#[derive(Debug, Error)] pub enum PCError { - AnyhowError(anyhow::Error), + #[error("{0}")] + AnyhowError(#[from] anyhow::Error), - /// The query set contains a label for a polynomial that was not provided as - /// input to the `PC::open`. + #[error("QuerySet` refers to polynomial \"{label}\", but it was not provided.")] MissingPolynomial { - /// The label of the missing polynomial. + /// The label of the missing polynomial label: String, }, - /// `Evaluations` does not contain an evaluation for the polynomial labelled - /// `label` at a particular query. + #[error("`QuerySet` refers to polynomial \"{label}\", but `Evaluations` does not contain an evaluation for it.")] MissingEvaluation { /// The label of the missing polynomial. label: String, }, - /// The provided polynomial was meant to be hiding, but `rng` was `None`. + #[error("The provided polynomial was meant to be hiding, but `rng` was `None`.")] MissingRng, - /// The degree provided in setup was too small; degree 0 polynomials - /// are not supported. + #[error("The degree provided in setup was too small; degree 0 polynomials are not supported.")] DegreeIsZero, - /// The degree of the polynomial passed to `commit` or `open` - /// was too large. + #[error( + "the number of coefficients in the polynomial ({num_coefficients:?}) is greater than \ + the maximum number of powers in `Powers` ({num_powers:?})" + )] TooManyCoefficients { /// The number of coefficients in the polynomial. num_coefficients: usize, @@ -47,10 +47,12 @@ pub enum PCError { num_powers: usize, }, - /// The hiding bound was not `None`, but the hiding bound was zero. + #[error("The hiding bound was not `None`, but the hiding bound was zero.")] HidingBoundIsZero, - /// The hiding bound was too large for the given `Powers`. 
+ #[error( + "the degree of the hiding poly ({hiding_poly_degree:?}) is not less than the maximum number of powers in `Powers` ({num_powers:?})" + )] HidingBoundToolarge { /// The hiding bound hiding_poly_degree: usize, @@ -58,29 +60,28 @@ pub enum PCError { num_powers: usize, }, - /// The lagrange basis is not a power of two. + #[error("The lagrange basis is not a power of two.")] LagrangeBasisSizeIsNotPowerOfTwo, - /// The lagrange basis is larger than the supported degree, + #[error("The lagrange basis is larger than the supported degree.")] LagrangeBasisSizeIsTooLarge, - /// The degree provided to `trim` was too large. + #[error("The degree provided to `trim` was too large.")] TrimmingDegreeTooLarge, - /// The provided equation contained multiple polynomials, of which least one - /// had a strict degree bound. + #[error("the equation \"{0}\" contained degree-bounded polynomials")] EquationHasDegreeBounds(String), - /// The required degree bound is not supported by ck/vk + #[error("the degree bound ({0}) is not supported by the parameters")] UnsupportedDegreeBound(usize), - /// The provided equation contained multiple polynomials, of which least one - /// had a strict degree bound. + #[error("the Lagrange basis size ({0}) is not supported by the parameters")] UnsupportedLagrangeBasisSize(usize), - /// The degree bound for the `index`-th polynomial passed to `commit`, `open` - /// or `check` was incorrect, that is, `degree_bound >= poly_degree` or - /// `degree_bound <= max_degree`. + #[error( + "the degree bound ({degree_bound}) for the polynomial {label} \ + (having degree {poly_degree}) is greater than the maximum degree ({max_degree})" + )] IncorrectDegreeBound { /// Degree of the polynomial. poly_degree: usize, @@ -91,63 +92,4 @@ pub enum PCError { /// Index of the offending polynomial. 
label: String, }, - - Terminated, -} - -impl snarkvm_utilities::error::Error for PCError {} - -impl From for PCError { - fn from(other: anyhow::Error) -> Self { - Self::AnyhowError(other) - } -} - -impl core::fmt::Display for PCError { - fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { - match self { - Self::AnyhowError(error) => write!(f, "{error}"), - Self::MissingPolynomial { label } => { - write!(f, "`QuerySet` refers to polynomial \"{label}\", but it was not provided.") - } - Self::MissingEvaluation { label } => write!( - f, - "`QuerySet` refers to polynomial \"{label}\", but `Evaluations` does not contain an evaluation for it." - ), - Self::MissingRng => write!(f, "hiding commitments require `Some(rng)`"), - Self::DegreeIsZero => write!(f, "this scheme does not support committing to degree 0 polynomials"), - Self::TooManyCoefficients { num_coefficients, num_powers } => write!( - f, - "the number of coefficients in the polynomial ({num_coefficients:?}) is greater than\ - the maximum number of powers in `Powers` ({num_powers:?})" - ), - Self::HidingBoundIsZero => write!(f, "this scheme does not support non-`None` hiding bounds that are 0"), - Self::HidingBoundToolarge { hiding_poly_degree, num_powers } => write!( - f, - "the degree of the hiding poly ({hiding_poly_degree:?}) is not less than the maximum number of powers in `Powers` ({num_powers:?})" - ), - Self::TrimmingDegreeTooLarge => write!(f, "the degree provided to `trim` was too large"), - Self::EquationHasDegreeBounds(e) => { - write!(f, "the eqaution \"{e}\" contained degree-bounded polynomials") - } - Self::UnsupportedDegreeBound(bound) => { - write!(f, "the degree bound ({bound:?}) is not supported by the parameters") - } - Self::LagrangeBasisSizeIsNotPowerOfTwo => { - write!(f, "the Lagrange Basis size is not a power of two") - } - Self::UnsupportedLagrangeBasisSize(size) => { - write!(f, "the Lagrange basis size ({size:?}) is not supported by the parameters") - } - 
Self::LagrangeBasisSizeIsTooLarge => { - write!(f, "the Lagrange Basis size larger than max supported degree") - } - Self::IncorrectDegreeBound { poly_degree, degree_bound, max_degree, label } => write!( - f, - "the degree bound ({degree_bound}) for the polynomial {label} \ - (having degree {poly_degree}) is greater than the maximum degree ({max_degree})" - ), - Self::Terminated => write!(f, "terminated"), - } - } } diff --git a/algorithms/src/polycommit/kzg10/data_structures.rs b/algorithms/src/polycommit/kzg10/data_structures.rs index c13328439c..495e817788 100644 --- a/algorithms/src/polycommit/kzg10/data_structures.rs +++ b/algorithms/src/polycommit/kzg10/data_structures.rs @@ -31,7 +31,6 @@ use snarkvm_utilities::{ use crate::srs::{UniversalProver, UniversalVerifier}; use anyhow::Result; use core::ops::{Add, AddAssign}; -use parking_lot::RwLock; use rand_core::RngCore; use std::{collections::BTreeMap, io, ops::Range, sync::Arc}; @@ -42,7 +41,7 @@ pub struct UniversalParams { /// and group elements of the form `{ \beta^i \gamma G }`, where `i` ranges from 0 to `degree`. /// This struct provides an abstraction over the powers which are located on-disk /// to reduce memory usage. - powers: Arc>>, + powers: Arc>, /// The generator of G2. pub h: E::G2Affine, /// The generator of G2, prepared for use in pairings. 
@@ -53,16 +52,16 @@ pub struct UniversalParams { impl UniversalParams { pub fn load() -> Result { - let powers = Arc::new(RwLock::new(PowersOfG::::load()?)); + let powers = Arc::new(PowersOfG::::load()?); let h = E::G2Affine::prime_subgroup_generator(); let prepared_h = h.prepare(); - let prepared_beta_h = powers.read().beta_h().prepare(); + let prepared_beta_h = powers.beta_h().prepare(); Ok(Self { powers, h, prepared_h, prepared_beta_h }) } pub fn download_powers_for(&self, range: Range) -> Result<()> { - self.powers.write().download_powers_for(range) + self.powers.download_powers_for(range) } pub fn lagrange_basis(&self, domain: EvaluationDomain) -> Result> { @@ -72,23 +71,23 @@ impl UniversalParams { } pub fn power_of_beta_g(&self, index: usize) -> Result { - self.powers.write().power_of_beta_g(index) + self.powers.power_of_beta_g(index) } pub fn powers_of_beta_g(&self, lower: usize, upper: usize) -> Result> { - Ok(self.powers.write().powers_of_beta_g(lower..upper)?.to_vec()) + self.powers.powers_of_beta_g(lower..upper) } - pub fn powers_of_beta_times_gamma_g(&self) -> Arc> { - self.powers.read().powers_of_beta_gamma_g() + pub fn powers_of_beta_times_gamma_g(&self) -> &BTreeMap { + self.powers.powers_of_beta_gamma_g() } pub fn beta_h(&self) -> E::G2Affine { - self.powers.read().beta_h() + self.powers.beta_h() } pub fn max_degree(&self) -> usize { - self.powers.read().max_num_powers() - 1 + self.powers.max_num_powers() - 1 } pub fn to_universal_prover(&self) -> Result> { @@ -105,7 +104,7 @@ impl UniversalParams { Ok(UniversalVerifier { vk: VerifierKey:: { g, gamma_g, h, beta_h, prepared_h, prepared_beta_h }, - prepared_negative_powers_of_beta_h: self.powers.read().prepared_negative_powers_of_beta_h(), + prepared_negative_powers_of_beta_h: self.powers.prepared_negative_powers_of_beta_h(), }) } } @@ -113,7 +112,7 @@ impl UniversalParams { impl FromBytes for UniversalParams { fn read_le(mut reader: R) -> io::Result { // Deserialize `powers`. 
- let powers = Arc::new(RwLock::new(PowersOfG::read_le(&mut reader)?)); + let powers = Arc::new(PowersOfG::read_le(&mut reader)?); // Deserialize `h`. let h: E::G2Affine = FromBytes::read_le(&mut reader)?; @@ -131,7 +130,7 @@ impl FromBytes for UniversalParams { impl ToBytes for UniversalParams { fn write_le(&self, mut writer: W) -> io::Result<()> { // Serialize powers. - self.powers.read().write_le(&mut writer)?; + self.powers.write_le(&mut writer)?; // Serialize `h`. self.h.write_le(&mut writer)?; diff --git a/algorithms/src/polycommit/kzg10/mod.rs b/algorithms/src/polycommit/kzg10/mod.rs index 8ddaf0f79c..e6fe95d534 100644 --- a/algorithms/src/polycommit/kzg10/mod.rs +++ b/algorithms/src/polycommit/kzg10/mod.rs @@ -24,11 +24,11 @@ use crate::{ msm::VariableBase, polycommit::PCError, }; -use anyhow::anyhow; use snarkvm_curves::traits::{AffineCurve, PairingCurve, PairingEngine, ProjectiveCurve}; use snarkvm_fields::{One, PrimeField, Zero}; use snarkvm_utilities::{cfg_iter, cfg_iter_mut, rand::Uniform, BitIteratorBE}; +use anyhow::{anyhow, ensure, Result}; use core::{marker::PhantomData, ops::Mul}; use itertools::Itertools; use rand_core::RngCore; @@ -276,7 +276,7 @@ impl KZG10 { evaluations: &[E::Fr], point: E::Fr, evaluation_at_point: E::Fr, - ) -> Result, PCError> { + ) -> Result> { Self::check_degree_is_too_large(evaluations.len() - 1, lagrange_basis.size())?; // Ensure that the point is not in the domain if lagrange_basis.domain.evaluate_vanishing_polynomial(point).is_zero() { @@ -290,6 +290,7 @@ impl KZG10 { let mut divisor_evals = cfg_iter!(domain_elements).map(|&e| e - point).collect::>(); snarkvm_fields::batch_inversion(&mut divisor_evals); + ensure!(divisor_evals.len() == evaluations.len()); cfg_iter_mut!(divisor_evals).zip_eq(evaluations).for_each(|(divisor_eval, &eval)| { *divisor_eval *= eval - evaluation_at_point; }); @@ -351,7 +352,7 @@ impl KZG10 { values: &[E::Fr], proofs: &[KZGProof], rng: &mut R, - ) -> Result { + ) -> Result { let check_time = 
start_timer!(|| format!("Checking {} evaluation proofs", commitments.len())); let g = vk.g.to_projective(); let gamma_g = vk.gamma_g.to_projective(); @@ -365,6 +366,9 @@ impl KZG10 { // their coefficients and perform a final multiplication at the end. let mut g_multiplier = E::Fr::zero(); let mut gamma_g_multiplier = E::Fr::zero(); + ensure!(commitments.len() == points.len()); + ensure!(commitments.len() == values.len()); + ensure!(commitments.len() == proofs.len()); for (((c, z), v), proof) in commitments.iter().zip_eq(points).zip_eq(values).zip_eq(proofs) { let w = proof.w; let mut temp = w.mul(*z); @@ -451,11 +455,12 @@ fn skip_leading_zeros_and_convert_to_bigints(p: &DensePolynomial< if p.coeffs.is_empty() { (0, vec![]) } else { - let mut num_leading_zeros = 0; - while p.coeffs[num_leading_zeros].is_zero() && num_leading_zeros < p.coeffs.len() { - num_leading_zeros += 1; - } - let coeffs = convert_to_bigints(&p.coeffs[num_leading_zeros..]); + let num_leading_zeros = p.coeffs.iter().take_while(|c| c.is_zero()).count(); + let coeffs = if num_leading_zeros == p.coeffs.len() { + vec![] + } else { + convert_to_bigints(&p.coeffs[num_leading_zeros..]) + }; (num_leading_zeros, coeffs) } } diff --git a/algorithms/src/polycommit/sonic_pc/data_structures.rs b/algorithms/src/polycommit/sonic_pc/data_structures.rs index 25fbbd8292..6504d198fa 100644 --- a/algorithms/src/polycommit/sonic_pc/data_structures.rs +++ b/algorithms/src/polycommit/sonic_pc/data_structures.rs @@ -36,7 +36,7 @@ pub type Randomness = kzg10::KZGRandomness; pub type Commitment = kzg10::KZGCommitment; /// `CommitterKey` is used to commit to, and create evaluation proofs for, a given polynomial. -#[derive(Clone, Debug, Default, Hash, CanonicalSerialize, CanonicalDeserialize, PartialEq, Eq)] +#[derive(Debug)] pub struct CommitterKey { /// The key used to commit to polynomials. 
pub powers_of_beta_g: Vec, @@ -271,7 +271,7 @@ impl CommitterKey { } /// `CommitterUnionKey` is a union of `CommitterKey`s, useful for multi-circuit batch proofs. -#[derive(Clone, Debug, Hash, PartialEq, Eq)] +#[derive(Debug)] pub struct CommitterUnionKey<'a, E: PairingEngine> { /// The key used to commit to polynomials. pub powers_of_beta_g: Option<&'a Vec>, diff --git a/algorithms/src/polycommit/sonic_pc/mod.rs b/algorithms/src/polycommit/sonic_pc/mod.rs index 3b1858f6e2..847483ca69 100644 --- a/algorithms/src/polycommit/sonic_pc/mod.rs +++ b/algorithms/src/polycommit/sonic_pc/mod.rs @@ -24,7 +24,7 @@ use itertools::Itertools; use snarkvm_curves::traits::{AffineCurve, PairingCurve, PairingEngine, ProjectiveCurve}; use snarkvm_fields::{One, Zero}; -use anyhow::{bail, Result}; +use anyhow::{bail, ensure, Result}; use core::{convert::TryInto, marker::PhantomData, ops::Mul}; use rand_core::{RngCore, SeedableRng}; use std::{ @@ -95,18 +95,17 @@ impl> SonicKZG10 { max_degree - lowest_shift_degree + 1 )); - let shifted_powers_of_beta_g = pp.powers_of_beta_g(lowest_shift_degree, pp.max_degree() + 1)?.to_vec(); + let shifted_powers_of_beta_g = pp.powers_of_beta_g(lowest_shift_degree, pp.max_degree() + 1)?; let mut shifted_powers_of_beta_times_gamma_g = BTreeMap::new(); // Also add degree 0. for degree_bound in enforced_degree_bounds { let shift_degree = max_degree - degree_bound; - let mut powers_for_degree_bound = Vec::with_capacity((max_degree + 2).saturating_sub(shift_degree)); - for i in 0..=supported_hiding_bound + 1 { - // We have an additional degree in `powers_of_beta_times_gamma_g` beyond `powers_of_beta_g`. - if shift_degree + i < max_degree + 2 { - powers_for_degree_bound.push(pp.powers_of_beta_times_gamma_g()[&(shift_degree + i)]); - } - } + // We have an additional degree in `powers_of_beta_times_gamma_g` beyond `powers_of_beta_g`. 
+ let powers_for_degree_bound = pp + .powers_of_beta_times_gamma_g() + .range(shift_degree..max_degree.min(shift_degree + supported_hiding_bound) + 2) + .map(|(_k, v)| *v) + .collect(); shifted_powers_of_beta_times_gamma_g.insert(*degree_bound, powers_for_degree_bound); } @@ -118,15 +117,17 @@ impl> SonicKZG10 { (None, None) }; - let powers_of_beta_g = pp.powers_of_beta_g(0, supported_degree + 1)?.to_vec(); - let powers_of_beta_times_gamma_g = (0..=(supported_hiding_bound + 1)) - .map(|i| { - pp.powers_of_beta_times_gamma_g() - .get(&i) - .copied() - .ok_or(PCError::HidingBoundToolarge { hiding_poly_degree: supported_hiding_bound, num_powers: 0 }) - }) - .collect::, _>>()?; + let powers_of_beta_g = pp.powers_of_beta_g(0, supported_degree + 1)?; + let powers_of_beta_times_gamma_g = pp + .powers_of_beta_times_gamma_g() + .range(0..=(supported_hiding_bound + 1)) + .map(|(_k, v)| *v) + .collect::>(); + if powers_of_beta_times_gamma_g.len() != supported_hiding_bound + 2 { + return Err( + PCError::HidingBoundToolarge { hiding_poly_degree: supported_hiding_bound, num_powers: 0 }.into() + ); + } let mut lagrange_bases_at_beta_g = BTreeMap::new(); for size in supported_lagrange_sizes { @@ -159,7 +160,7 @@ impl> SonicKZG10 { Ok((ck, vk)) } - /// Outputs a commitments to `polynomials`. + /// Outputs commitments to `polynomials`. /// /// If `polynomials[i].is_hiding()`, then the `i`-th commitment is hiding /// up to `polynomials.hiding_bound()` queries. 
@@ -180,8 +181,6 @@ impl> SonicKZG10 { ) -> Result<(Vec>>, Vec>), PCError> { let rng = &mut OptionalRng(rng); let commit_time = start_timer!(|| "Committing to polynomials"); - let mut labeled_comms: Vec>> = Vec::new(); - let mut randomness: Vec> = Vec::new(); let mut pool = snarkvm_utilities::ExecutionPool::>::new(); for p in polynomials { @@ -210,49 +209,42 @@ impl> SonicKZG10 { hiding_bound, )); - let (comm, rand) = p - .sum() - .map(move |p| { - let rng_ref = rng.as_mut().map(|s| s as _); - match p { - PolynomialWithBasis::Lagrange { evaluations } => { - let domain = crate::fft::EvaluationDomain::new(evaluations.evaluations.len()).unwrap(); - let lagrange_basis = ck - .lagrange_basis(domain) - .ok_or(PCError::UnsupportedLagrangeBasisSize(domain.size()))?; - assert!(domain.size().is_power_of_two()); - assert!(lagrange_basis.size().is_power_of_two()); - kzg10::KZG10::commit_lagrange( - &lagrange_basis, - &evaluations.evaluations, - hiding_bound, - rng_ref, - ) - } - PolynomialWithBasis::Monomial { polynomial, degree_bound } => { - let powers = if let Some(degree_bound) = degree_bound { - ck.shifted_powers_of_beta_g(degree_bound).unwrap() - } else { - ck.powers() - }; - - kzg10::KZG10::commit(&powers, &polynomial, hiding_bound, rng_ref) - } + let (comm, rand) = { + let rng_ref = rng.as_mut().map(|s| s as _); + match p.polynomial { + PolynomialWithBasis::Lagrange { evaluations } => { + let domain = crate::fft::EvaluationDomain::new(evaluations.evaluations.len()).unwrap(); + let lagrange_basis = ck + .lagrange_basis(domain) + .ok_or(PCError::UnsupportedLagrangeBasisSize(domain.size()))?; + assert!(domain.size().is_power_of_two()); + assert!(lagrange_basis.size().is_power_of_two()); + kzg10::KZG10::commit_lagrange( + &lagrange_basis, + &evaluations.evaluations, + hiding_bound, + rng_ref, + )? 
+ } + PolynomialWithBasis::Monomial { polynomial, degree_bound } => { + let powers = if let Some(degree_bound) = degree_bound { + ck.shifted_powers_of_beta_g(degree_bound).unwrap() + } else { + ck.powers() + }; + + kzg10::KZG10::commit(&powers, &polynomial, hiding_bound, rng_ref)? } - }) - .collect::, _>>()? - .into_iter() - .fold((E::G1Projective::zero(), Randomness::empty()), |mut a, b| { - a.0.add_assign_mixed(&b.0.0); - a.1 += (E::Fr::one(), &b.1); - a - }); - let comm = kzg10::KZGCommitment(comm.to_affine()); + } + }; Ok((LabeledCommitment::new(label.to_string(), comm, degree_bound), rand)) }); } let results: Vec> = pool.execute_all(); + + let mut labeled_comms = Vec::with_capacity(results.len()); + let mut randomness = Vec::with_capacity(results.len()); for result in results { let (comm, rand) = result?; labeled_comms.push(comm); @@ -266,22 +258,26 @@ impl> SonicKZG10 { pub fn combine_for_open<'a>( universal_prover: &UniversalProver, ck: &CommitterUnionKey, - labeled_polynomials: impl IntoIterator>, - rands: impl IntoIterator>, + labeled_polynomials: impl ExactSizeIterator>, + rands: impl ExactSizeIterator>, fs_rng: &mut S, - ) -> Result<(DensePolynomial, Randomness), PCError> + ) -> Result<(DensePolynomial, Randomness)> where Randomness: 'a, Commitment: 'a, { - Ok(Self::combine_polynomials(labeled_polynomials.into_iter().zip_eq(rands).map(|(p, r)| { + ensure!(labeled_polynomials.len() == rands.len()); + let mut to_combine = Vec::with_capacity(labeled_polynomials.len()); + + for (p, r) in labeled_polynomials.zip_eq(rands) { let enforced_degree_bounds: Option<&[usize]> = ck.enforced_degree_bounds.as_deref(); - kzg10::KZG10::::check_degrees_and_bounds(universal_prover.max_degree, enforced_degree_bounds, p) - .unwrap(); + kzg10::KZG10::::check_degrees_and_bounds(universal_prover.max_degree, enforced_degree_bounds, p)?; let challenge = fs_rng.squeeze_short_nonnative_field_element::(); - (challenge, p.polynomial().to_dense(), r) - }))) + 
to_combine.push((challenge, p.polynomial().to_dense(), r)); + } + + Ok(Self::combine_polynomials(to_combine)) } /// On input a list of labeled polynomials and a query set, `open` outputs a proof of evaluation @@ -289,15 +285,16 @@ impl> SonicKZG10 { pub fn batch_open<'a>( universal_prover: &UniversalProver, ck: &CommitterUnionKey, - labeled_polynomials: impl IntoIterator>, + labeled_polynomials: impl ExactSizeIterator>, query_set: &QuerySet, - rands: impl IntoIterator>, + rands: impl ExactSizeIterator>, fs_rng: &mut S, - ) -> Result, PCError> + ) -> Result> where Randomness: 'a, Commitment: 'a, { + ensure!(labeled_polynomials.len() == rands.len()); let poly_rand: HashMap<_, _> = labeled_polynomials.into_iter().zip_eq(rands).map(|(poly, r)| (poly.label(), (poly, r))).collect(); @@ -326,7 +323,8 @@ impl> SonicKZG10 { query_polys.push(*polynomial); query_rands.push(*rand); } - let (polynomial, rand) = Self::combine_for_open(universal_prover, ck, query_polys, query_rands, fs_rng)?; + let (polynomial, rand) = + Self::combine_for_open(universal_prover, ck, query_polys.into_iter(), query_rands.into_iter(), fs_rng)?; let _randomizer = fs_rng.squeeze_short_nonnative_field_element::(); pool.add_job(move || { @@ -336,7 +334,7 @@ impl> SonicKZG10 { proof }); } - let batch_proof = pool.execute_all().into_iter().collect::>().map(BatchProof); + let batch_proof = pool.execute_all().into_iter().collect::>().map(BatchProof).map_err(Into::into); end_timer!(open_time); batch_proof @@ -349,7 +347,7 @@ impl> SonicKZG10 { values: &Evaluations, proof: &BatchProof, fs_rng: &mut S, - ) -> Result + ) -> Result where Commitment: 'a, { @@ -374,6 +372,7 @@ impl> SonicKZG10 { let mut combined_witness = E::G1Projective::zero(); let mut combined_adjusted_witness = E::G1Projective::zero(); + ensure!(query_to_labels_map.len() == proof.0.len()); for ((_query_name, (query, labels)), p) in query_to_labels_map.into_iter().zip_eq(&proof.0) { let mut comms_to_combine: Vec<&'_ LabeledCommitment<_>> = 
Vec::new(); let mut values_to_combine = Vec::new(); @@ -400,14 +399,14 @@ impl> SonicKZG10 { p, Some(randomizer), fs_rng, - ); + )?; randomizer = fs_rng.squeeze_short_nonnative_field_element::(); } let result = Self::check_elems(vk, combined_comms, combined_witness, combined_adjusted_witness); end_timer!(batch_check_time); - result + result.map_err(Into::into) } pub fn open_combinations<'a>( @@ -418,7 +417,7 @@ impl> SonicKZG10 { rands: impl IntoIterator>, query_set: &QuerySet, fs_rng: &mut S, - ) -> Result, PCError> + ) -> Result> where Randomness: 'a, Commitment: 'a, @@ -445,7 +444,7 @@ impl> SonicKZG10 { label_map.get(label as &str).ok_or(PCError::MissingPolynomial { label: label.to_string() })?; if let Some(cur_degree_bound) = cur_poly.degree_bound() { if num_polys != 1 { - return Err(PCError::EquationHasDegreeBounds(lc_label)); + bail!(PCError::EquationHasDegreeBounds(lc_label)); } assert!(coeff.is_one(), "Coefficient must be one for degree-bounded equations"); if let Some(old_degree_bound) = degree_bound { @@ -482,7 +481,7 @@ impl> SonicKZG10 { evaluations: &Evaluations, proof: &BatchLCProof, fs_rng: &mut S, - ) -> Result + ) -> Result where Commitment: 'a, { @@ -515,10 +514,9 @@ impl> SonicKZG10 { .ok_or(PCError::MissingPolynomial { label: label.to_string() })?; if cur_comm.degree_bound().is_some() { - if num_polys != 1 { - return Err(PCError::EquationHasDegreeBounds(lc_label)); + if num_polys != 1 || !coeff.is_one() { + bail!(PCError::EquationHasDegreeBounds(lc_label)); } - assert!(coeff.is_one(), "Coefficient must be one for degree-bounded equations"); degree_bound = cur_comm.degree_bound(); } coeffs_and_comms.push((*coeff, cur_comm.commitment())); @@ -533,6 +531,7 @@ impl> SonicKZG10 { let combined_comms_norm_time = start_timer!(|| "Normalizing commitments"); let comms = Self::normalize_commitments(lc_commitments); + ensure!(lc_info.len() == comms.len()); let lc_commitments = lc_info .into_iter() .zip_eq(comms) @@ -571,7 +570,7 @@ impl> SonicKZG10 { 
VariableBase::msm(&bases, &scalars) } - fn normalize_commitments(commitments: Vec) -> impl Iterator> { + fn normalize_commitments(commitments: Vec) -> impl ExactSizeIterator> { let comms = E::G1Projective::batch_normalization_into_affine(commitments); comms.into_iter().map(|c| kzg10::KZGCommitment(c)) } @@ -584,18 +583,19 @@ impl> SonicKZG10 { combined_witness: &mut E::G1Projective, combined_adjusted_witness: &mut E::G1Projective, vk: &UniversalVerifier, - commitments: impl IntoIterator>>, + commitments: impl ExactSizeIterator>>, point: E::Fr, - values: impl IntoIterator, + values: impl ExactSizeIterator, proof: &kzg10::KZGProof, randomizer: Option, fs_rng: &mut S, - ) { + ) -> Result<()> { let acc_time = start_timer!(|| "Accumulating elements"); // Keeps track of running combination of values let mut combined_values = E::Fr::zero(); // Iterates through all of the commitments and accumulates common degree_bound elements in a BTreeMap + ensure!(commitments.len() == values.len()); for (labeled_comm, value) in commitments.into_iter().zip_eq(values) { let acc_timer = start_timer!(|| format!("Accumulating {}", labeled_comm.label())); let curr_challenge = fs_rng.squeeze_short_nonnative_field_element::(); @@ -630,6 +630,7 @@ impl> SonicKZG10 { let coeffs = coeffs.into_iter().map(|c| c.into()).collect::>(); *combined_adjusted_witness += VariableBase::msm(&bases, &coeffs); end_timer!(acc_time); + Ok(()) } fn check_elems( @@ -637,7 +638,7 @@ impl> SonicKZG10 { combined_comms: BTreeMap, E::G1Projective>, combined_witness: E::G1Projective, combined_adjusted_witness: E::G1Projective, - ) -> Result { + ) -> Result { let check_time = start_timer!(|| "Checking elems"); let mut g1_projective_elems = Vec::with_capacity(combined_comms.len() + 2); let mut g2_prepared_elems = Vec::with_capacity(combined_comms.len() + 2); @@ -668,6 +669,7 @@ impl> SonicKZG10 { .map(|a| a.prepare()) .collect::>(); + ensure!(g1_prepared_elems_iter.len() == g2_prepared_elems.len()); let g1_g2_prepared = 
g1_prepared_elems_iter.iter().zip_eq(g2_prepared_elems.iter()); let is_one: bool = E::product_of_pairings(g1_g2_prepared).is_one(); end_timer!(check_time); diff --git a/algorithms/src/polycommit/sonic_pc/polynomial.rs b/algorithms/src/polycommit/sonic_pc/polynomial.rs index 4d23d5266a..a18a0bbace 100644 --- a/algorithms/src/polycommit/sonic_pc/polynomial.rs +++ b/algorithms/src/polycommit/sonic_pc/polynomial.rs @@ -17,7 +17,7 @@ use crate::fft::{DensePolynomial, EvaluationDomain, Evaluations as EvaluationsOn use snarkvm_fields::{Field, PrimeField}; use snarkvm_utilities::{cfg_iter, cfg_iter_mut, CanonicalDeserialize, CanonicalSerialize}; -use hashbrown::HashMap; +use anyhow::Result; use std::borrow::Cow; #[cfg(feature = "serial")] @@ -141,7 +141,7 @@ impl LabeledPolynomial { #[derive(Debug, Clone)] pub struct LabeledPolynomialWithBasis<'a, F: PrimeField> { pub info: PolynomialInfo, - pub polynomial: Vec<(F, PolynomialWithBasis<'a, F>)>, + pub polynomial: PolynomialWithBasis<'a, F>, } impl<'a, F: PrimeField> LabeledPolynomialWithBasis<'a, F> { @@ -154,16 +154,6 @@ impl<'a, F: PrimeField> LabeledPolynomialWithBasis<'a, F> { ) -> Self { let polynomial = PolynomialWithBasis::new_monomial_basis_ref(polynomial, degree_bound); let info = PolynomialInfo::new(label, degree_bound, hiding_bound); - Self { info, polynomial: vec![(F::one(), polynomial)] } - } - - /// Construct a new labeled polynomial by consuming `polynomial`. 
- pub fn new_linear_combination( - label: PolynomialLabel, - polynomial: Vec<(F, PolynomialWithBasis<'a, F>)>, - hiding_bound: Option, - ) -> Self { - let info = PolynomialInfo::new(label, None, hiding_bound); Self { info, polynomial } } @@ -174,7 +164,7 @@ impl<'a, F: PrimeField> LabeledPolynomialWithBasis<'a, F> { ) -> Self { let polynomial = PolynomialWithBasis::new_lagrange_basis(polynomial); let info = PolynomialInfo::new(label, None, hiding_bound); - Self { info, polynomial: vec![(F::one(), polynomial)] } + Self { info, polynomial } } pub fn new_lagrange_basis_ref( @@ -184,7 +174,7 @@ impl<'a, F: PrimeField> LabeledPolynomialWithBasis<'a, F> { ) -> Self { let polynomial = PolynomialWithBasis::new_lagrange_basis_ref(polynomial); let info = PolynomialInfo::new(label, None, hiding_bound); - Self { info, polynomial: vec![(F::one(), polynomial)] } + Self { info, polynomial } } /// Return the label for `self`. @@ -198,94 +188,23 @@ impl<'a, F: PrimeField> LabeledPolynomialWithBasis<'a, F> { } pub fn degree(&self) -> usize { - self.polynomial - .iter() - .map(|(_, p)| match p { - PolynomialWithBasis::Lagrange { evaluations } => evaluations.domain().size() - 1, - PolynomialWithBasis::Monomial { polynomial, .. } => polynomial.degree(), - }) - .max() - .unwrap_or(0) + match &self.polynomial { + PolynomialWithBasis::Lagrange { evaluations } => evaluations.domain().size() - 1, + PolynomialWithBasis::Monomial { polynomial, .. } => polynomial.degree(), + } } /// Evaluate the polynomial in `self`. pub fn evaluate(&self, point: F) -> F { - self.polynomial.iter().map(|(coeff, p)| p.evaluate(point) * coeff).sum() - } - - /// Compute a linear combination of the terms in `self.polynomial`, producing an iterator - /// over polynomials of the same time. 
- pub fn sum(&self) -> impl Iterator> { - if self.polynomial.len() == 1 && self.polynomial[0].0.is_one() { - vec![self.polynomial[0].1.clone()].into_iter() - } else { - use PolynomialWithBasis::*; - let mut lagrange_polys = HashMap::>::new(); - let mut dense_polys = HashMap::<_, DensePolynomial>::new(); - let mut sparse_poly = SparsePolynomial::zero(); - // We have sets of polynomials divided along three critera: - // 1. All `Lagrange` polynomials are in the set corresponding to their domain. - // 2. All `Dense` polynomials are in the set corresponding to their degree bound. - // 3. All `Sparse` polynomials are in the set corresponding to their degree bound. - for (c, poly) in self.polynomial.iter() { - match poly { - Monomial { polynomial, degree_bound } => { - use Polynomial::*; - match polynomial.as_ref() { - Dense(p) => { - if let Some(e) = dense_polys.get_mut(degree_bound) { - // Zip safety: `p` could be of smaller degree than `e` (or vice versa), - // so it's okay to just use `zip` here. 
- cfg_iter_mut!(e).zip(&p.coeffs).for_each(|(e, f)| *e += *c * f) - } else { - let mut e: DensePolynomial = p.clone().into_owned(); - cfg_iter_mut!(e).for_each(|e| *e *= c); - dense_polys.insert(degree_bound, e); - } - } - Sparse(p) => sparse_poly += (*c, p.as_ref()), - } - } - Lagrange { evaluations } => { - let domain = evaluations.domain().size(); - if let Some(e) = lagrange_polys.get_mut(&domain) { - cfg_iter_mut!(e).zip_eq(&evaluations.evaluations).for_each(|(e, f)| *e += *c * f) - } else { - let mut e = evaluations.clone().into_owned().evaluations; - cfg_iter_mut!(e).for_each(|e| *e *= c); - lagrange_polys.insert(domain, e); - } - } - } - } - let sparse_poly = Polynomial::from(sparse_poly); - let sparse_poly = Monomial { polynomial: Cow::Owned(sparse_poly), degree_bound: None }; - lagrange_polys - .into_iter() - .map(|(k, v)| { - let domain = EvaluationDomain::new(k).unwrap(); - Lagrange { evaluations: Cow::Owned(EvaluationsOnDomain::from_vec_and_domain(v, domain)) } - }) - .chain({ - dense_polys - .into_iter() - .map(|(degree_bound, p)| PolynomialWithBasis::new_dense_monomial_basis(p, *degree_bound)) - }) - .chain([sparse_poly]) - .collect::>() - .into_iter() - } + self.polynomial.evaluate(point) } /// Retrieve the degree bound in `self`. pub fn degree_bound(&self) -> Option { - self.polynomial - .iter() - .filter_map(|(_, p)| match p { - PolynomialWithBasis::Monomial { degree_bound, .. } => *degree_bound, - _ => None, - }) - .max() + match self.polynomial { + PolynomialWithBasis::Monomial { degree_bound, .. } => degree_bound, + _ => None, + } } /// Retrieve whether the polynomial in `self` should be hidden. 
@@ -305,7 +224,7 @@ impl<'a, F: PrimeField> From<&'a LabeledPolynomial> for LabeledPolynomialWith polynomial: Cow::Borrowed(other.polynomial()), degree_bound: other.degree_bound(), }; - Self { info: other.info.clone(), polynomial: vec![(F::one(), polynomial)] } + Self { info: other.info.clone(), polynomial } } } @@ -315,7 +234,7 @@ impl<'a, F: PrimeField> From> for LabeledPolynomialWithBasi polynomial: Cow::Owned(other.polynomial), degree_bound: other.info.degree_bound, }; - Self { info: other.info.clone(), polynomial: vec![(F::one(), polynomial)] } + Self { info: other.info.clone(), polynomial } } } diff --git a/algorithms/src/polycommit/test_templates.rs b/algorithms/src/polycommit/test_templates.rs index e251db6ebc..5664111254 100644 --- a/algorithms/src/polycommit/test_templates.rs +++ b/algorithms/src/polycommit/test_templates.rs @@ -113,8 +113,14 @@ pub fn bad_degree_bound_test>() - println!("Generated query set"); let mut sponge_for_open = S::new(); - let proof = - SonicKZG10::batch_open(universal_prover, &ck, &polynomials, &query_set, &rands, &mut sponge_for_open)?; + let proof = SonicKZG10::batch_open( + universal_prover, + &ck, + polynomials.iter(), + &query_set, + rands.iter(), + &mut sponge_for_open, + )?; let mut sponge_for_check = S::new(); let result = SonicKZG10::batch_check(&vk, &comms, &query_set, &values, &proof, &mut sponge_for_check)?; assert!(result, "proof was incorrect, Query set: {query_set:#?}"); @@ -203,8 +209,14 @@ pub fn lagrange_test_template>() println!("Generated query set"); let mut sponge_for_open = S::new(); - let proof = - SonicKZG10::batch_open(universal_prover, &ck, &polynomials, &query_set, &rands, &mut sponge_for_open)?; + let proof = SonicKZG10::batch_open( + universal_prover, + &ck, + polynomials.iter(), + &query_set, + rands.iter(), + &mut sponge_for_open, + )?; let mut sponge_for_check = S::new(); let result = SonicKZG10::batch_check(&vk, &comms, &query_set, &values, &proof, &mut sponge_for_check)?; if !result { @@ -327,8 
+339,14 @@ where println!("Generated query set"); let mut sponge_for_open = S::new(); - let proof = - SonicKZG10::batch_open(universal_prover, &ck, &polynomials, &query_set, &rands, &mut sponge_for_open)?; + let proof = SonicKZG10::batch_open( + universal_prover, + &ck, + polynomials.iter(), + &query_set, + rands.iter(), + &mut sponge_for_open, + )?; let mut sponge_for_check = S::new(); let result = SonicKZG10::batch_check(&vk, &comms, &query_set, &values, &proof, &mut sponge_for_check)?; if !result { diff --git a/algorithms/src/r1cs/constraint_system.rs b/algorithms/src/r1cs/constraint_system.rs index c6558570b8..0c73678a1a 100644 --- a/algorithms/src/r1cs/constraint_system.rs +++ b/algorithms/src/r1cs/constraint_system.rs @@ -26,7 +26,7 @@ pub trait ConstraintSynthesizer: Sync { } /// Represents a constraint system which can have new variables -/// allocated and constrains between them formed. +/// allocated and constraints between them formed. pub trait ConstraintSystem: Sized { /// Represents the type of the "root" of this constraint system /// so that nested namespaces can minimize indirection. diff --git a/algorithms/src/r1cs/errors.rs b/algorithms/src/r1cs/errors.rs index f1ab0a42b6..52b386e137 100644 --- a/algorithms/src/r1cs/errors.rs +++ b/algorithms/src/r1cs/errors.rs @@ -34,7 +34,7 @@ pub enum SynthesisError { Unsatisfiable, /// During synthesis, our polynomials ended up being too high of degree #[error("Polynomial degree is too large")] - PolynomialDegreeTooLarge, + PolyTooLarge, /// During proof generation, we encountered an identity in the CRS #[error("Encountered an identity element in the CRS")] UnexpectedIdentity, diff --git a/algorithms/src/r1cs/mod.rs b/algorithms/src/r1cs/mod.rs index 8f788008cc..11fc8042d5 100644 --- a/algorithms/src/r1cs/mod.rs +++ b/algorithms/src/r1cs/mod.rs @@ -67,9 +67,9 @@ impl Variable { /// Represents the index of either a public variable (input) or a private variable (auxiliary). 
#[derive(Copy, Clone, PartialEq, Debug, Eq, Hash)] pub enum Index { - /// Index of an public variable. + /// Index of a public variable. Public(usize), - /// Index of an private variable. + /// Index of a private variable. Private(usize), } diff --git a/algorithms/src/snark/varuna/ahp/ahp.rs b/algorithms/src/snark/varuna/ahp/ahp.rs index fc6bea6d5d..0b88868473 100644 --- a/algorithms/src/snark/varuna/ahp/ahp.rs +++ b/algorithms/src/snark/varuna/ahp/ahp.rs @@ -27,12 +27,12 @@ use crate::{ SNARKMode, }, }; -use anyhow::anyhow; +use anyhow::{anyhow, ensure, Result}; use snarkvm_fields::{Field, PrimeField}; use core::{borrow::Borrow, marker::PhantomData}; use itertools::Itertools; -use std::collections::BTreeMap; +use std::{collections::BTreeMap, fmt::Write}; /// The algebraic holographic proof defined in [CHMMVW19](https://eprint.iacr.org/2019/1047). /// Currently, this AHP only supports inputs of size one @@ -43,7 +43,9 @@ pub struct AHPForR1CS { } pub(crate) fn witness_label(circuit_id: CircuitId, poly: &str, i: usize) -> String { - format!("circuit_{circuit_id}_{poly}_{i:0>8}") + let mut label = String::with_capacity(82 + poly.len()); + let _ = write!(&mut label, "circuit_{circuit_id}_{poly}_{i:0>8}"); + label } pub(crate) struct NonZeroDomains { @@ -79,17 +81,17 @@ impl AHPForR1CS { /// of this protocol. /// The number of the variables must include the "one" variable. That is, it /// must be with respect to the number of formatted public inputs. 
- pub fn max_degree(num_constraints: usize, num_variables: usize, num_non_zero: usize) -> Result { + pub fn max_degree(num_constraints: usize, num_variables: usize, num_non_zero: usize) -> Result { let zk_bound = Self::zk_bound().unwrap_or(0); let constraint_domain_size = - EvaluationDomain::::compute_size_of_domain(num_constraints).ok_or(AHPError::PolynomialDegreeTooLarge)?; + EvaluationDomain::::compute_size_of_domain(num_constraints).ok_or(AHPError::PolyTooLarge)?; let variable_domain_size = - EvaluationDomain::::compute_size_of_domain(num_variables).ok_or(AHPError::PolynomialDegreeTooLarge)?; + EvaluationDomain::::compute_size_of_domain(num_variables).ok_or(AHPError::PolyTooLarge)?; let non_zero_domain_size = - EvaluationDomain::::compute_size_of_domain(num_non_zero).ok_or(AHPError::PolynomialDegreeTooLarge)?; + EvaluationDomain::::compute_size_of_domain(num_non_zero).ok_or(AHPError::PolyTooLarge)?; // these should correspond with the bounds set in the .rs files - Ok(*[ + [ 2 * constraint_domain_size + 2 * zk_bound - 2, 2 * variable_domain_size + 2 * zk_bound - 2, if SM::ZK { variable_domain_size + 3 } else { 0 }, // mask_poly @@ -99,34 +101,40 @@ impl AHPForR1CS { ] .iter() .max() - .unwrap()) + .copied() + .ok_or(anyhow!("Could not find max_degree")) } /// Get all the strict degree bounds enforced in the AHP. 
- pub fn get_degree_bounds(info: &CircuitInfo) -> [usize; 4] { + pub fn get_degree_bounds(info: &CircuitInfo) -> Result<[usize; 4]> { let num_variables = info.num_variables; let num_non_zero_a = info.num_non_zero_a; let num_non_zero_b = info.num_non_zero_b; let num_non_zero_c = info.num_non_zero_c; - [ - EvaluationDomain::::compute_size_of_domain(num_variables).unwrap() - 2, - EvaluationDomain::::compute_size_of_domain(num_non_zero_a).unwrap() - 2, - EvaluationDomain::::compute_size_of_domain(num_non_zero_b).unwrap() - 2, - EvaluationDomain::::compute_size_of_domain(num_non_zero_c).unwrap() - 2, - ] + Ok([ + EvaluationDomain::::compute_size_of_domain(num_variables).ok_or(SynthesisError::PolyTooLarge)? - 2, + EvaluationDomain::::compute_size_of_domain(num_non_zero_a).ok_or(SynthesisError::PolyTooLarge)? - 2, + EvaluationDomain::::compute_size_of_domain(num_non_zero_b).ok_or(SynthesisError::PolyTooLarge)? - 2, + EvaluationDomain::::compute_size_of_domain(num_non_zero_c).ok_or(SynthesisError::PolyTooLarge)? 
- 2, + ]) } pub(crate) fn cmp_non_zero_domains( info: &CircuitInfo, max_candidate: Option>, - ) -> Result, SynthesisError> { - let domain_a = EvaluationDomain::new(info.num_non_zero_a).ok_or(SynthesisError::PolynomialDegreeTooLarge)?; - let domain_b = EvaluationDomain::new(info.num_non_zero_b).ok_or(SynthesisError::PolynomialDegreeTooLarge)?; - let domain_c = EvaluationDomain::new(info.num_non_zero_c).ok_or(SynthesisError::PolynomialDegreeTooLarge)?; - let new_candidate = [domain_a, domain_b, domain_c].into_iter().max_by_key(|d| d.size()).unwrap(); + ) -> Result> { + let domain_a = EvaluationDomain::new(info.num_non_zero_a).ok_or(SynthesisError::PolyTooLarge)?; + let domain_b = EvaluationDomain::new(info.num_non_zero_b).ok_or(SynthesisError::PolyTooLarge)?; + let domain_c = EvaluationDomain::new(info.num_non_zero_c).ok_or(SynthesisError::PolyTooLarge)?; + let new_candidate = [domain_a, domain_b, domain_c] + .into_iter() + .max_by_key(|d| d.size()) + .ok_or(anyhow!("could not find max domain"))?; let mut max_non_zero_domain = Some(new_candidate); - if max_candidate.is_some() && max_candidate.unwrap().size() > new_candidate.size() { - max_non_zero_domain = max_candidate; + if let Some(max_candidate) = max_candidate { + if max_candidate.size() > new_candidate.size() { + max_non_zero_domain = Some(max_candidate); + } } Ok(NonZeroDomains { max_non_zero_domain, domain_a, domain_b, domain_c }) } @@ -158,7 +166,7 @@ impl AHPForR1CS { /// Public input should be unformatted. /// We construct the linear combinations as per section 5 of our protocol documentation. /// We can distinguish between: - /// (1) simple comitments: $\{\cm{g_A}, \cm{g_B}, \cm{g_C}\}$ and $\{\cm{\hat{z}_{B,i,j}}\}_{i \in {[\mathcal{D}]}}$, $\cm{g_1}$ + /// (1) simple commitments: $\{\cm{g_A}, \cm{g_B}, \cm{g_C}\}$ and $\{\cm{\hat{z}_{B,i,j}}\}_{i \in {[\mathcal{D}]}}$, $\cm{g_1}$ /// (2) virtual commitments for the lincheck_sumcheck and matrix_sumcheck. 
These are linear combinations of the simple commitments #[allow(non_snake_case)] pub fn construct_linear_combinations>( @@ -167,8 +175,8 @@ impl AHPForR1CS { prover_third_message: &prover::ThirdMessage, prover_fourth_message: &prover::FourthMessage, state: &verifier::State, - ) -> Result>, AHPError> { - assert!(!public_inputs.is_empty()); + ) -> Result>> { + ensure!(!public_inputs.is_empty()); let max_constraint_domain = state.max_constraint_domain; let max_variable_domain = state.max_variable_domain; let max_non_zero_domain = state.max_non_zero_domain; @@ -181,11 +189,12 @@ impl AHPForR1CS { .iter() .map(|p| { let public_input = prover::ConstraintSystem::format_public_input(p); - Self::formatted_public_input_is_admissible(&public_input).map(|_| public_input) + Self::formatted_public_input_is_admissible(&public_input)?; + Ok::<_, AHPError>(public_input) }) - .collect::, _>>(); - assert_eq!(public_inputs.as_ref().unwrap()[0].len(), input_domain.size()); - public_inputs + .collect::, _>>()?; + ensure!(public_inputs[0].len() == input_domain.size()); + Ok(public_inputs) }) .collect::, _>>()?; @@ -350,6 +359,9 @@ impl AHPForR1CS { let non_zero_domains = [&state_i.non_zero_a_domain, &state_i.non_zero_b_domain, &state_i.non_zero_c_domain]; let sums = sums_fourth_msg[i].iter(); + ensure!(matrices.len() == sums.len()); + ensure!(matrices.len() == deltas.len()); + ensure!(matrices.len() == non_zero_domains.len()); for (((m, sum), delta), non_zero_domain) in matrices.into_iter().zip_eq(sums).zip_eq(deltas).zip_eq(non_zero_domains) { @@ -361,7 +373,7 @@ impl AHPForR1CS { let g_m = LinearCombination::new(g_m_label.clone(), [(F::one(), g_m_label)]); let g_m_at_gamma = evals.get_lc_eval(&g_m, gamma)?; - let (a_poly, b_poly) = Self::construct_matrix_linear_combinations(evals, id, m, v_rc, challenges, rc); + let (a_poly, b_poly) = Self::construct_matrix_linear_combinations(evals, id, m, v_rc, challenges, rc)?; let g_m_term = Self::construct_g_m_term(gamma, g_m_at_gamma, sum, 
*selector, a_poly, b_poly); matrix_sumcheck += (delta, &g_m_term); @@ -402,7 +414,7 @@ impl AHPForR1CS { v_rc_at_alpha_beta: F, challenges: QueryPoints, rc_size: F, - ) -> (LinearCombination, LinearCombination) { + ) -> Result<(LinearCombination, LinearCombination)> { let label_a_poly = format!("circuit_{id}_a_poly_{matrix}"); let label_b_poly = format!("circuit_{id}_b_poly_{matrix}"); let QueryPoints { alpha, beta, gamma } = challenges; @@ -412,9 +424,9 @@ impl AHPForR1CS { let a_poly_eval_available = evals.get_lc_eval(&a_poly, gamma).is_ok(); let b_poly = LinearCombination::new(label_b_poly.clone(), [(F::one(), label_b_poly.clone())]); let b_poly_eval_available = evals.get_lc_eval(&b_poly, gamma).is_ok(); - assert_eq!(a_poly_eval_available, b_poly_eval_available); + ensure!(a_poly_eval_available == b_poly_eval_available); if a_poly_eval_available && b_poly_eval_available { - return (a_poly, b_poly); + return Ok((a_poly, b_poly)); }; // When running as the verifier, we need to construct a(X) and b(X) from the indexing polynomials @@ -431,7 +443,7 @@ impl AHPForR1CS { (F::one(), (label_row_col).into()), ]); b *= rc_size; - (a, b) + Ok((a, b)) } } @@ -441,14 +453,14 @@ impl AHPForR1CS { /// when constructing linear combinations via `AHPForR1CS::construct_linear_combinations`. pub trait EvaluationsProvider: core::fmt::Debug { /// Get the evaluation of linear combination `lc` at `point`. 
- fn get_lc_eval(&self, lc: &LinearCombination, point: F) -> Result; + fn get_lc_eval(&self, lc: &LinearCombination, point: F) -> Result; } /// The `EvaluationsProvider` used by the verifier impl EvaluationsProvider for crate::polycommit::sonic_pc::Evaluations { - fn get_lc_eval(&self, lc: &LinearCombination, point: F) -> Result { + fn get_lc_eval(&self, lc: &LinearCombination, point: F) -> Result { let key = (lc.label.clone(), point); - self.get(&key).copied().ok_or_else(|| AHPError::MissingEval(lc.label.clone())) + self.get(&key).copied().ok_or_else(|| AHPError::MissingEval(lc.label.clone())).map_err(Into::into) } } @@ -458,7 +470,7 @@ where F: PrimeField, T: Borrow> + core::fmt::Debug, { - fn get_lc_eval(&self, lc: &LinearCombination, point: F) -> Result { + fn get_lc_eval(&self, lc: &LinearCombination, point: F) -> Result { let mut eval = F::zero(); for (coeff, term) in lc.iter() { let value = if let LCTerm::PolyLabel(label) = term { @@ -468,7 +480,7 @@ where .borrow() .evaluate(point) } else { - assert!(term.is_one()); + ensure!(term.is_one()); F::one() }; eval += &(*coeff * value) diff --git a/algorithms/src/snark/varuna/ahp/errors.rs b/algorithms/src/snark/varuna/ahp/errors.rs index c4bbb30c69..a18dfb46b2 100644 --- a/algorithms/src/snark/varuna/ahp/errors.rs +++ b/algorithms/src/snark/varuna/ahp/errors.rs @@ -13,24 +13,31 @@ // limitations under the License. /// Describes the failure modes of the AHP scheme. -#[derive(Debug)] +#[derive(Debug, Error)] pub enum AHPError { - /// Anyhow error - Anyhow(anyhow::Error), - /// The batch size is zero. + #[error("{}", _0)] + AnyhowError(#[from] anyhow::Error), + + #[error("Batch size was zero; must be at least 1.")] BatchSizeIsZero, - /// An error occurred during constraint generation. + + #[error("An error occurred during constraint generation.")] ConstraintSystemError(crate::r1cs::errors::SynthesisError), - /// The instance generated during proving does not match that in the index. 
+ + #[error("The instance generated during proving does not match that in the index.")] InstanceDoesNotMatchIndex, - /// The number of public inputs is incorrect. + + #[error("The number of public inputs is incorrect.")] InvalidPublicInputLength, - /// During verification, a required evaluation is missing + + #[error("During verification, a required evaluation is missing: {}", _0)] MissingEval(String), - /// Currently we only support square constraint matrices. + + #[error("Currently we only support square constraint matrices.")] NonSquareMatrix, - /// During synthesis, our polynomials ended up being too high of degree - PolynomialDegreeTooLarge, + + #[error("During synthesis, our polynomials ended up being too high of degree.")] + PolyTooLarge, } impl From for AHPError { @@ -38,9 +45,3 @@ impl From for AHPError { AHPError::ConstraintSystemError(other) } } - -impl From for AHPError { - fn from(other: anyhow::Error) -> Self { - AHPError::Anyhow(other) - } -} diff --git a/algorithms/src/snark/varuna/ahp/indexer/circuit.rs b/algorithms/src/snark/varuna/ahp/indexer/circuit.rs index 00326103a9..53f3ca3d6e 100644 --- a/algorithms/src/snark/varuna/ahp/indexer/circuit.rs +++ b/algorithms/src/snark/varuna/ahp/indexer/circuit.rs @@ -29,6 +29,7 @@ use crate::{ SNARKMode, }, }; +use anyhow::{anyhow, Result}; use blake2::Digest; use hex::FromHex; use snarkvm_fields::PrimeField; @@ -61,7 +62,7 @@ impl CircuitId { /// public input /// 2) `{a,b,c}` are the matrices defining the R1CS instance /// 3) `{a,b,c}_arith` are structs containing information about the arithmetized matrices -#[derive(Clone, Debug)] +#[derive(Debug)] pub struct Circuit { /// Information about the indexed circuit. pub index_info: CircuitInfo, @@ -119,29 +120,30 @@ impl Circuit { } /// The maximum degree required to represent polynomials of this index. 
- pub fn max_degree(&self) -> usize { + pub fn max_degree(&self) -> Result { self.index_info.max_degree::() } /// The size of the constraint domain in this R1CS instance. - pub fn constraint_domain_size(&self) -> usize { - crate::fft::EvaluationDomain::::new(self.index_info.num_constraints).unwrap().size() + pub fn constraint_domain_size(&self) -> Result { + Ok(crate::fft::EvaluationDomain::::new(self.index_info.num_constraints) + .ok_or(anyhow!("Cannot create EvaluationDomain"))? + .size()) } /// The size of the variable domain in this R1CS instance. - pub fn variable_domain_size(&self) -> usize { - crate::fft::EvaluationDomain::::new(self.index_info.num_variables).unwrap().size() - } - - pub fn interpolate_matrix_evals(&self) -> impl Iterator> { - let [a_arith, b_arith, c_arith]: [_; 3] = [("a", &self.a_arith), ("b", &self.b_arith), ("c", &self.c_arith)] - .into_iter() - .map(|(label, evals)| MatrixArithmetization::new(&self.id, label, evals)) - .collect::, _>>() - .unwrap() - .try_into() - .unwrap(); - a_arith.into_iter().chain(b_arith.into_iter()).chain(c_arith.into_iter()) + pub fn variable_domain_size(&self) -> Result { + Ok(crate::fft::EvaluationDomain::::new(self.index_info.num_variables) + .ok_or(anyhow!("Cannot create EvaluationDomain"))? + .size()) + } + + pub fn interpolate_matrix_evals(&self) -> Result>> { + let mut iters = Vec::with_capacity(3); + for (label, evals) in [("a", &self.a_arith), ("b", &self.b_arith), ("c", &self.c_arith)] { + iters.push(MatrixArithmetization::new(&self.id, label, evals)?.into_iter()); + } + Ok(iters.into_iter().flatten()) } /// After indexing, we drop these evaluations to save space in the ProvingKey. 
diff --git a/algorithms/src/snark/varuna/ahp/indexer/circuit_info.rs b/algorithms/src/snark/varuna/ahp/indexer/circuit_info.rs index 1e17028fd3..5db3583414 100644 --- a/algorithms/src/snark/varuna/ahp/indexer/circuit_info.rs +++ b/algorithms/src/snark/varuna/ahp/indexer/circuit_info.rs @@ -13,6 +13,7 @@ // limitations under the License. use crate::snark::varuna::{ahp::AHPForR1CS, SNARKMode}; +use anyhow::Result; use snarkvm_fields::PrimeField; use snarkvm_utilities::{serialize::*, ToBytes}; @@ -37,9 +38,9 @@ pub struct CircuitInfo { impl CircuitInfo { /// The maximum degree of polynomial required to represent this index in the AHP. - pub fn max_degree(&self) -> usize { + pub fn max_degree(&self) -> Result { let max_non_zero = self.num_non_zero_a.max(self.num_non_zero_b).max(self.num_non_zero_c); - AHPForR1CS::::max_degree(self.num_constraints, self.num_variables, max_non_zero).unwrap() + AHPForR1CS::::max_degree(self.num_constraints, self.num_variables, max_non_zero) } } diff --git a/algorithms/src/snark/varuna/ahp/indexer/indexer.rs b/algorithms/src/snark/varuna/ahp/indexer/indexer.rs index c38e4a9970..021a0fcb80 100644 --- a/algorithms/src/snark/varuna/ahp/indexer/indexer.rs +++ b/algorithms/src/snark/varuna/ahp/indexer/indexer.rs @@ -14,12 +14,11 @@ use crate::{ fft::EvaluationDomain, - polycommit::sonic_pc::{PolynomialInfo, PolynomialLabel}, + polycommit::sonic_pc::{LinearCombination, PolynomialInfo, PolynomialLabel}, r1cs::{errors::SynthesisError, ConstraintSynthesizer, ConstraintSystem}, snark::varuna::{ ahp::{ indexer::{Circuit, CircuitId, CircuitInfo, ConstraintSystem as IndexerConstraintSystem}, - AHPError, AHPForR1CS, }, matrices::{matrix_evals, MatrixEvals}, @@ -30,8 +29,9 @@ use crate::{ use snarkvm_fields::PrimeField; use snarkvm_utilities::cfg_into_iter; -use anyhow::{anyhow, Result}; +use anyhow::{anyhow, ensure, Result}; use core::marker::PhantomData; +use itertools::Itertools; use std::collections::BTreeMap; #[cfg(not(feature = "serial"))] @@ 
-101,25 +101,29 @@ impl AHPForR1CS { map } + pub fn index_polynomial_labels_single<'a>( + matrix: &str, + id: &'a CircuitId, + ) -> impl ExactSizeIterator + 'a { + [ + format!("circuit_{id}_row_{matrix}"), + format!("circuit_{id}_col_{matrix}"), + format!("circuit_{id}_row_col_{matrix}"), + format!("circuit_{id}_row_col_val_{matrix}"), + ] + .into_iter() + } + pub fn index_polynomial_labels<'a>( matrices: &'a [&str], ids: impl Iterator + 'a, ) -> impl Iterator + 'a { - ids.flat_map(move |id| { - matrices.iter().flat_map(move |matrix| { - [ - format!("circuit_{id}_row_{matrix}"), - format!("circuit_{id}_col_{matrix}"), - format!("circuit_{id}_row_col_{matrix}"), - format!("circuit_{id}_row_col_val_{matrix}"), - ] - }) - }) + ids.flat_map(move |id| matrices.iter().flat_map(move |matrix| Self::index_polynomial_labels_single(matrix, id))) } /// Generate the indexed circuit evaluations for this constraint system. /// Used by both the Prover and Verifier - pub(crate) fn index_helper>(c: &C) -> Result, AHPError> { + pub(crate) fn index_helper>(c: &C) -> Result> { let index_time = start_timer!(|| "AHP::Index"); let constraint_time = start_timer!(|| "Generating constraints"); @@ -134,7 +138,7 @@ impl AHPForR1CS { crate::snark::varuna::ahp::matrices::add_randomizing_variables::<_, _>(&mut ics, random_assignments) }); - crate::snark::varuna::ahp::matrices::pad_input_for_indexer_and_prover(&mut ics); + crate::snark::varuna::ahp::matrices::pad_input_for_indexer_and_prover(&mut ics)?; let a = ics.a_matrix()?; let b = ics.b_matrix()?; @@ -171,18 +175,13 @@ impl AHPForR1CS { num_non_zero_c, }; - let constraint_domain = - EvaluationDomain::new(num_constraints).ok_or(SynthesisError::PolynomialDegreeTooLarge)?; - let variable_domain = EvaluationDomain::new(num_variables).ok_or(SynthesisError::PolynomialDegreeTooLarge)?; - let input_domain = - EvaluationDomain::new(num_padded_public_variables).ok_or(SynthesisError::PolynomialDegreeTooLarge)?; + let constraint_domain = 
EvaluationDomain::new(num_constraints).ok_or(SynthesisError::PolyTooLarge)?; + let variable_domain = EvaluationDomain::new(num_variables).ok_or(SynthesisError::PolyTooLarge)?; + let input_domain = EvaluationDomain::new(num_padded_public_variables).ok_or(SynthesisError::PolyTooLarge)?; - let non_zero_a_domain = - EvaluationDomain::new(num_non_zero_a).ok_or(SynthesisError::PolynomialDegreeTooLarge)?; - let non_zero_b_domain = - EvaluationDomain::new(num_non_zero_b).ok_or(SynthesisError::PolynomialDegreeTooLarge)?; - let non_zero_c_domain = - EvaluationDomain::new(num_non_zero_c).ok_or(SynthesisError::PolynomialDegreeTooLarge)?; + let non_zero_a_domain = EvaluationDomain::new(num_non_zero_a).ok_or(SynthesisError::PolyTooLarge)?; + let non_zero_b_domain = EvaluationDomain::new(num_non_zero_b).ok_or(SynthesisError::PolyTooLarge)?; + let non_zero_c_domain = EvaluationDomain::new(num_non_zero_c).ok_or(SynthesisError::PolyTooLarge)?; let constraint_domain_elements = constraint_domain.elements().collect::>(); let variable_domain_elements = variable_domain.elements().collect::>(); @@ -203,7 +202,7 @@ impl AHPForR1CS { .try_into() .unwrap(); - let id = Circuit::::hash(&index_info, &a, &b, &c).unwrap(); + let id = Circuit::::hash(&index_info, &a, &b, &c)?; let result = Ok(IndexerState { constraint_domain, @@ -228,25 +227,36 @@ impl AHPForR1CS { result } + /// Evaluate the index polynomials for this constraint system at the given point. + /// Return the LinearCombination of the index polynomials and the sum of the evaluations. 
pub(crate) fn evaluate_index_polynomials( state: IndexerState, id: &CircuitId, point: F, - ) -> Result, AHPError> { - let mut evals = [ - (state.a_arith, state.non_zero_a_domain), - (state.b_arith, state.non_zero_b_domain), - (state.c_arith, state.non_zero_c_domain), - ] - .into_iter() - .flat_map(move |(evals, domain)| { - let labels = Self::index_polynomial_labels(&["a", "b", "c"], std::iter::once(id)); + mut combiners: impl Iterator, + ) -> Result<(LinearCombination, F)> { + let mut lc = LinearCombination::empty("circuit_check"); + let mut all_evals = Vec::with_capacity(3); + let mut sum = F::zero(); + for (evals, domain, label) in [ + (state.a_arith, state.non_zero_a_domain, "a"), + (state.b_arith, state.non_zero_b_domain, "b"), + (state.c_arith, state.non_zero_c_domain, "c"), + ] { + let labels = Self::index_polynomial_labels_single(label, id); let lagrange_coefficients_at_point = domain.evaluate_all_lagrange_coefficients(point); - labels.zip(evals.evaluate(&lagrange_coefficients_at_point).unwrap()) - }) - .collect::>(); - evals.sort_by(|(l1, _), (l2, _)| l1.cmp(l2)); - Ok(evals.into_iter().map(|(_, eval)| eval)) + let evals_at_point = evals.evaluate(&lagrange_coefficients_at_point)?; + ensure!(labels.len() == evals_at_point.len()); + all_evals.push(labels.into_iter().zip_eq(evals_at_point.into_iter())); + } + let sorted_evals = all_evals.into_iter().flatten().sorted_unstable_by(|(l1, _), (l2, _)| l1.cmp(l2)); + for (label, eval) in sorted_evals { + let combiner = combiners.next().ok_or(anyhow!("No combiner left"))?; + lc.add(combiner, label.as_str()); + sum += eval * combiner; + } + ensure!(combiners.next().is_none(), "Found more combiners than sorted_evals"); + Ok((lc, sum)) } } diff --git a/algorithms/src/snark/varuna/ahp/matrices.rs b/algorithms/src/snark/varuna/ahp/matrices.rs index 9374eee05d..56bbe174a8 100644 --- a/algorithms/src/snark/varuna/ahp/matrices.rs +++ b/algorithms/src/snark/varuna/ahp/matrices.rs @@ -26,10 +26,11 @@ use crate::{ use 
snarkvm_fields::{Field, PrimeField}; use snarkvm_utilities::{cfg_iter, cfg_iter_mut, serialize::*}; -use anyhow::{ensure, Result}; -use itertools::Itertools; +use anyhow::{anyhow, ensure, Result}; use std::collections::BTreeMap; +#[cfg(feature = "serial")] +use itertools::Itertools; #[cfg(not(feature = "serial"))] use rayon::prelude::*; @@ -60,7 +61,7 @@ pub(crate) fn to_matrix_helper( pub(crate) fn add_randomizing_variables>( cs: &mut CS, rand_assignments: Option<[F; 3]>, -) { +) -> Result<()> { let mut assignments = [F::one(); 3]; if let Some(r) = rand_assignments { assignments = r; @@ -69,28 +70,30 @@ pub(crate) fn add_randomizing_variables>( let zk_vars = assignments .into_iter() .enumerate() - .map(|(i, assignment)| cs.alloc(|| format!("random_{i}"), || Ok(assignment)).unwrap()) - .collect_vec(); + .map(|(i, assignment)| cs.alloc(|| format!("random_{i}"), || Ok(assignment))) + .collect::, _>>()?; cs.enforce(|| "constraint zk", |lc| lc + zk_vars[0], |lc| lc + zk_vars[1], |lc| lc + zk_vars[2]); + Ok(()) } /// Pads the public variables up to the closest power of two. -pub(crate) fn pad_input_for_indexer_and_prover>(cs: &mut CS) { +pub(crate) fn pad_input_for_indexer_and_prover>(cs: &mut CS) -> Result<()> { let num_public_variables = cs.num_public_variables(); - let power_of_two = EvaluationDomain::::new(num_public_variables); - assert!(power_of_two.is_some()); + let power_of_two = + EvaluationDomain::::new(num_public_variables).ok_or(anyhow!("Could not create EvaluationDomain"))?; // Allocated `zero` variables to pad the public input up to the next power of two. 
- let padded_size = power_of_two.unwrap().size(); + let padded_size = power_of_two.size(); if padded_size > num_public_variables { for i in 0..(padded_size - num_public_variables) { - cs.alloc_input(|| format!("pad_input_{i}"), || Ok(F::zero())).unwrap(); + cs.alloc_input(|| format!("pad_input_{i}"), || Ok(F::zero()))?; } } + Ok(()) } -#[derive(Clone, Debug, CanonicalSerialize, CanonicalDeserialize, PartialEq, Eq)] +#[derive(Debug, CanonicalSerialize, CanonicalDeserialize, PartialEq, Eq)] pub struct MatrixEvals { /// Evaluations of the `row` polynomial. pub row: EvaluationsOnDomain, @@ -105,11 +108,14 @@ pub struct MatrixEvals { impl MatrixEvals { pub(crate) fn evaluate(&self, lagrange_coefficients_at_point: &[F]) -> Result<[F; 4]> { - ensure!(self.row_col.is_some(), "row_col evaluations are not available"); Ok([ self.row.evaluate_with_coeffs(lagrange_coefficients_at_point), self.col.evaluate_with_coeffs(lagrange_coefficients_at_point), - self.row_col.as_ref().unwrap().evaluate_with_coeffs(lagrange_coefficients_at_point), + self.row_col + .as_ref() + .ok_or("row_col evaluations are not available") + .map_err(anyhow::Error::msg)? 
+ .evaluate_with_coeffs(lagrange_coefficients_at_point), self.row_col_val.evaluate_with_coeffs(lagrange_coefficients_at_point), ]) } @@ -207,6 +213,7 @@ impl MatrixArithmetization { let row_col = if let Some(row_col) = matrix_evals.row_col.as_ref() { row_col.clone().interpolate() } else { + ensure!(matrix_evals.row.evaluations.len() == matrix_evals.col.evaluations.len()); let row_col_evals: Vec = cfg_iter!(matrix_evals.row.evaluations) .zip_eq(&matrix_evals.col.evaluations) .map(|(&r, &c)| r * c) @@ -216,8 +223,8 @@ impl MatrixArithmetization { let row_col_val = matrix_evals.row_col_val.clone().interpolate(); end_timer!(interpolate_time); - let label = &[label]; - let mut labels = AHPForR1CS::::index_polynomial_labels(label, std::iter::once(id)); + let mut labels = AHPForR1CS::::index_polynomial_labels_single(label, id); + ensure!(labels.len() == 4); Ok(MatrixArithmetization { row: LabeledPolynomial::new(labels.next().unwrap(), row, None, None), @@ -228,7 +235,7 @@ impl MatrixArithmetization { } /// Iterate over the indexed polynomials. 
- pub fn into_iter(self) -> impl Iterator> { + pub fn into_iter(self) -> impl ExactSizeIterator> { // Alphabetical order [self.col, self.row, self.row_col, self.row_col_val].into_iter() } diff --git a/algorithms/src/snark/varuna/ahp/prover/message.rs b/algorithms/src/snark/varuna/ahp/prover/message.rs index 098bbc76ba..c13ed2fc26 100644 --- a/algorithms/src/snark/varuna/ahp/prover/message.rs +++ b/algorithms/src/snark/varuna/ahp/prover/message.rs @@ -27,7 +27,7 @@ pub struct MatrixSums { impl MatrixSums { /// Iterate over the sums - pub fn iter(&self) -> impl Iterator { + pub fn iter(&self) -> impl ExactSizeIterator { [self.sum_a, self.sum_b, self.sum_c].into_iter() } } diff --git a/algorithms/src/snark/varuna/ahp/prover/oracles.rs b/algorithms/src/snark/varuna/ahp/prover/oracles.rs index b4b645346c..725b6a291d 100644 --- a/algorithms/src/snark/varuna/ahp/prover/oracles.rs +++ b/algorithms/src/snark/varuna/ahp/prover/oracles.rs @@ -112,17 +112,17 @@ impl ThirdOracles { /// The fourth set of prover oracles. #[derive(Debug)] pub struct FourthOracles { - pub(super) gs: BTreeMap>, + pub(in crate::snark::varuna) gs: BTreeMap>, } #[derive(Debug)] -pub(super) struct MatrixGs { +pub(in crate::snark::varuna) struct MatrixGs { /// The polynomial `g_a` resulting from the second sumcheck. - pub(super) g_a: LabeledPolynomial, + pub(in crate::snark::varuna) g_a: LabeledPolynomial, /// The polynomial `g_b` resulting from the second sumcheck. - pub(super) g_b: LabeledPolynomial, + pub(in crate::snark::varuna) g_b: LabeledPolynomial, /// The polynomial `g_c` resulting from the second sumcheck. 
- pub(super) g_c: LabeledPolynomial, + pub(in crate::snark::varuna) g_c: LabeledPolynomial, } impl MatrixGs { diff --git a/algorithms/src/snark/varuna/ahp/prover/round_functions/fifth.rs b/algorithms/src/snark/varuna/ahp/prover/round_functions/fifth.rs index 66f949c149..c8e0b71e19 100644 --- a/algorithms/src/snark/varuna/ahp/prover/round_functions/fifth.rs +++ b/algorithms/src/snark/varuna/ahp/prover/round_functions/fifth.rs @@ -23,11 +23,11 @@ use crate::{ SNARKMode, }, }; +use snarkvm_fields::PrimeField; +use snarkvm_utilities::{cfg_par_bridge, cfg_reduce}; use itertools::Itertools; use rand_core::RngCore; -use snarkvm_fields::PrimeField; -use snarkvm_utilities::{cfg_par_bridge, cfg_reduce}; #[cfg(not(feature = "serial"))] use rayon::prelude::*; diff --git a/algorithms/src/snark/varuna/ahp/prover/round_functions/first.rs b/algorithms/src/snark/varuna/ahp/prover/round_functions/first.rs index 43e243cbc6..df9d33a72c 100644 --- a/algorithms/src/snark/varuna/ahp/prover/round_functions/first.rs +++ b/algorithms/src/snark/varuna/ahp/prover/round_functions/first.rs @@ -12,8 +12,6 @@ // See the License for the specific language governing permissions and // limitations under the License. 
-use std::collections::BTreeMap; - use crate::{ fft::{DensePolynomial, EvaluationDomain, Evaluations as EvaluationsOnDomain, SparsePolynomial}, polycommit::sonic_pc::{LabeledPolynomial, PolynomialInfo, PolynomialLabel}, @@ -26,11 +24,13 @@ use crate::{ SNARKMode, }, }; -use itertools::Itertools; -use rand_core::RngCore; use snarkvm_fields::PrimeField; use snarkvm_utilities::cfg_into_iter; +use itertools::Itertools; +use rand_core::RngCore; +use std::collections::BTreeMap; + #[cfg(not(feature = "serial"))] use rayon::prelude::*; diff --git a/algorithms/src/snark/varuna/ahp/prover/round_functions/mod.rs b/algorithms/src/snark/varuna/ahp/prover/round_functions/mod.rs index 9251aa91fc..a3cc01b438 100644 --- a/algorithms/src/snark/varuna/ahp/prover/round_functions/mod.rs +++ b/algorithms/src/snark/varuna/ahp/prover/round_functions/mod.rs @@ -94,7 +94,7 @@ impl AHPForR1CS { rand_assignments, ) }); - crate::snark::varuna::ahp::matrices::pad_input_for_indexer_and_prover(&mut pcs); + crate::snark::varuna::ahp::matrices::pad_input_for_indexer_and_prover(&mut pcs)?; end_timer!(padding_time); diff --git a/algorithms/src/snark/varuna/ahp/prover/round_functions/third.rs b/algorithms/src/snark/varuna/ahp/prover/round_functions/third.rs index 4e994d4981..bccfedfaac 100644 --- a/algorithms/src/snark/varuna/ahp/prover/round_functions/third.rs +++ b/algorithms/src/snark/varuna/ahp/prover/round_functions/third.rs @@ -218,7 +218,9 @@ impl AHPForR1CS { Ok((h_1_sum, xg_1_sum, msg)) } - fn calculate_assignments(state: &mut prover::State) -> Result>>> { + pub(in crate::snark::varuna) fn calculate_assignments( + state: &mut prover::State, + ) -> Result>>> { let assignments_time = start_timer!(|| "Calculate assignments"); let assignments: BTreeMap<_, _> = state .circuit_specific_states diff --git a/algorithms/src/snark/varuna/ahp/prover/state.rs b/algorithms/src/snark/varuna/ahp/prover/state.rs index f23b57f301..02a90ea688 100644 --- a/algorithms/src/snark/varuna/ahp/prover/state.rs +++ 
b/algorithms/src/snark/varuna/ahp/prover/state.rs @@ -20,6 +20,7 @@ use crate::{ r1cs::{SynthesisError, SynthesisResult}, snark::varuna::{AHPError, AHPForR1CS, Circuit, SNARKMode}, }; +use anyhow::anyhow; use snarkvm_fields::PrimeField; /// Circuit Specific State of the Prover @@ -117,22 +118,23 @@ impl<'a, F: PrimeField, SM: SNARKMode> State<'a, F, SM> { .map(|(circuit, variable_assignments)| { let index_info = &circuit.index_info; - let constraint_domain = EvaluationDomain::new(index_info.num_constraints) - .ok_or(SynthesisError::PolynomialDegreeTooLarge)?; + let constraint_domain = + EvaluationDomain::new(index_info.num_constraints).ok_or(SynthesisError::PolyTooLarge)?; max_num_constraints = max_num_constraints.max(index_info.num_constraints); let variable_domain = - EvaluationDomain::new(index_info.num_variables).ok_or(SynthesisError::PolynomialDegreeTooLarge)?; + EvaluationDomain::new(index_info.num_variables).ok_or(SynthesisError::PolyTooLarge)?; max_num_variables = max_num_variables.max(index_info.num_variables); let non_zero_domains = AHPForR1CS::<_, SM>::cmp_non_zero_domains(index_info, max_non_zero_domain)?; max_non_zero_domain = non_zero_domains.max_non_zero_domain; let first_padded_public_inputs = &variable_assignments[0].0; - let input_domain = EvaluationDomain::new(first_padded_public_inputs.len()).unwrap(); + let input_domain = EvaluationDomain::new(first_padded_public_inputs.len()) + .ok_or(anyhow!("Cannot create EvaluationDomain"))?; let batch_size = variable_assignments.len(); total_instances = - total_instances.checked_add(batch_size).ok_or_else(|| anyhow::anyhow!("Batch size too large"))?; + total_instances.checked_add(batch_size).ok_or_else(|| anyhow!("Batch size too large"))?; let mut z_as = Vec::with_capacity(batch_size); let mut z_bs = Vec::with_capacity(batch_size); let mut z_cs = Vec::with_capacity(batch_size); @@ -174,10 +176,8 @@ impl<'a, F: PrimeField, SM: SNARKMode> State<'a, F, SM> { .collect::>>()?; let max_non_zero_domain = 
max_non_zero_domain.ok_or(AHPError::BatchSizeIsZero)?; - let max_constraint_domain = - EvaluationDomain::new(max_num_constraints).ok_or(SynthesisError::PolynomialDegreeTooLarge)?; - let max_variable_domain = - EvaluationDomain::new(max_num_variables).ok_or(SynthesisError::PolynomialDegreeTooLarge)?; + let max_constraint_domain = EvaluationDomain::new(max_num_constraints).ok_or(SynthesisError::PolyTooLarge)?; + let max_variable_domain = EvaluationDomain::new(max_num_variables).ok_or(SynthesisError::PolyTooLarge)?; Ok(Self { max_constraint_domain, diff --git a/algorithms/src/snark/varuna/ahp/selectors.rs b/algorithms/src/snark/varuna/ahp/selectors.rs index a80de2e0e4..b6831e5423 100644 --- a/algorithms/src/snark/varuna/ahp/selectors.rs +++ b/algorithms/src/snark/varuna/ahp/selectors.rs @@ -12,19 +12,18 @@ // See the License for the specific language governing permissions and // limitations under the License. +use super::verifier::QueryPoints; use crate::fft::{DensePolynomial, EvaluationDomain}; -use anyhow::Result; use snarkvm_fields::{batch_inversion, PrimeField}; use snarkvm_utilities::{cfg_into_iter, cfg_iter_mut, serialize::*}; +use anyhow::{ensure, Result}; use itertools::Itertools; use std::collections::{BTreeMap, HashSet}; #[cfg(not(feature = "serial"))] use rayon::prelude::*; -use super::verifier::QueryPoints; - /// Precompute a batch of selectors at challenges. 
We batch: /// - constraint domain selectors at alpha /// - variable domain selectors at beta @@ -88,11 +87,13 @@ pub(crate) fn apply_randomized_selector( if !remainder_witness { // Substituting in s_i, we get that poly_i * s_i / v_H = poly_i / v_H * (H_i.size() / H.size()); let selector_time = start_timer!(|| "Compute selector without remainder witness"); - let (mut h_i, remainder) = - poly.divide_by_vanishing_poly(*src_domain).ok_or(anyhow::anyhow!("could not divide by vanishing poly"))?; - assert!(remainder.is_zero()); + + let (mut h_i, remainder) = poly.divide_by_vanishing_poly(*src_domain)?; + ensure!(remainder.is_zero(), "Failed to divide by vanishing polynomial - non-zero remainder ({remainder:?})"); + let multiplier = combiner * src_domain.size_as_field_element * target_domain.size_inv; cfg_iter_mut!(h_i.coeffs).for_each(|c| *c *= multiplier); + end_timer!(selector_time); Ok((h_i, None)) } else { @@ -105,12 +106,16 @@ pub(crate) fn apply_randomized_selector( // (\sum_i{c_i*s_i*poly_i})/v_H = h_1*v_H + x_g_1 // That's what we're computing here. 
let selector_time = start_timer!(|| "Compute selector with remainder witness"); + let multiplier = combiner * src_domain.size_as_field_element * target_domain.size_inv; cfg_iter_mut!(poly.coeffs).for_each(|c| *c *= multiplier); - let (h_i, mut xg_i) = poly.divide_by_vanishing_poly(*src_domain).unwrap(); + + let (h_i, mut xg_i) = poly.divide_by_vanishing_poly(*src_domain)?; xg_i = xg_i.mul_by_vanishing_poly(*target_domain); - let (xg_i, remainder) = xg_i.divide_by_vanishing_poly(*src_domain).unwrap(); - assert!(remainder.is_zero()); + + let (xg_i, remainder) = xg_i.divide_by_vanishing_poly(*src_domain)?; + ensure!(remainder.is_zero(), "Failed to divide by vanishing polynomial - non-zero remainder ({remainder:?})"); + end_timer!(selector_time); Ok((h_i, Some(xg_i))) } diff --git a/algorithms/src/snark/varuna/ahp/verifier/messages.rs b/algorithms/src/snark/varuna/ahp/verifier/messages.rs index b6ea8f4146..b939a86c74 100644 --- a/algorithms/src/snark/varuna/ahp/verifier/messages.rs +++ b/algorithms/src/snark/varuna/ahp/verifier/messages.rs @@ -12,9 +12,9 @@ // See the License for the specific language governing permissions and // limitations under the License. 
+use crate::snark::varuna::{witness_label, CircuitId, SNARKMode}; use snarkvm_fields::PrimeField; -use crate::snark::varuna::{witness_label, CircuitId, SNARKMode}; use itertools::Itertools; use std::collections::BTreeMap; diff --git a/algorithms/src/snark/varuna/ahp/verifier/verifier.rs b/algorithms/src/snark/varuna/ahp/verifier/verifier.rs index 92daabfc0d..0ec22a32ef 100644 --- a/algorithms/src/snark/varuna/ahp/verifier/verifier.rs +++ b/algorithms/src/snark/varuna/ahp/verifier/verifier.rs @@ -28,6 +28,7 @@ use crate::{ }, AlgebraicSponge, }; +use anyhow::{ensure, Result}; use smallvec::SmallVec; use snarkvm_fields::PrimeField; use std::collections::BTreeMap; @@ -41,7 +42,7 @@ impl AHPForR1CS { max_variable_domain: EvaluationDomain, max_non_zero_domain: EvaluationDomain, fs_rng: &mut R, - ) -> Result<(FirstMessage, State), AHPError> { + ) -> Result<(FirstMessage, State)> { let mut batch_combiners = BTreeMap::new(); let mut circuit_specific_states = BTreeMap::new(); let mut num_circuit_combiners = vec![1; batch_sizes.len()]; @@ -55,7 +56,7 @@ impl AHPForR1CS { end_timer!(squeeze_time); let (instance_combiners, circuit_combiner) = elems.split_at(*batch_size - 1); - assert_eq!(circuit_combiner.len(), num_c_combiner); + ensure!(circuit_combiner.len() == num_c_combiner); let mut combiners = BatchCombiners { circuit_combiner: TargetField::one(), instance_combiners: vec![TargetField::one()] }; if num_c_combiner == 1 { @@ -66,32 +67,27 @@ impl AHPForR1CS { let constraint_domain_time = start_timer!(|| format!("Constructing constraint domain for {circuit_id}")); let constraint_domain = - EvaluationDomain::new(circuit_info.num_constraints).ok_or(AHPError::PolynomialDegreeTooLarge)?; + EvaluationDomain::new(circuit_info.num_constraints).ok_or(AHPError::PolyTooLarge)?; end_timer!(constraint_domain_time); let variable_domain_time = start_timer!(|| format!("Constructing constraint domain for {circuit_id}")); - let variable_domain = - 
EvaluationDomain::new(circuit_info.num_variables).ok_or(AHPError::PolynomialDegreeTooLarge)?; + let variable_domain = EvaluationDomain::new(circuit_info.num_variables).ok_or(AHPError::PolyTooLarge)?; end_timer!(variable_domain_time); let non_zero_a_time = start_timer!(|| format!("Constructing non-zero-a domain for {circuit_id}")); - let non_zero_a_domain = - EvaluationDomain::new(circuit_info.num_non_zero_a).ok_or(AHPError::PolynomialDegreeTooLarge)?; + let non_zero_a_domain = EvaluationDomain::new(circuit_info.num_non_zero_a).ok_or(AHPError::PolyTooLarge)?; end_timer!(non_zero_a_time); let non_zero_b_time = start_timer!(|| format!("Constructing non-zero-b domain {circuit_id}")); - let non_zero_b_domain = - EvaluationDomain::new(circuit_info.num_non_zero_b).ok_or(AHPError::PolynomialDegreeTooLarge)?; + let non_zero_b_domain = EvaluationDomain::new(circuit_info.num_non_zero_b).ok_or(AHPError::PolyTooLarge)?; end_timer!(non_zero_b_time); let non_zero_c_time = start_timer!(|| format!("Constructing non-zero-c domain for {circuit_id}")); - let non_zero_c_domain = - EvaluationDomain::new(circuit_info.num_non_zero_c).ok_or(AHPError::PolynomialDegreeTooLarge)?; + let non_zero_c_domain = EvaluationDomain::new(circuit_info.num_non_zero_c).ok_or(AHPError::PolyTooLarge)?; end_timer!(non_zero_c_time); let input_domain_time = start_timer!(|| format!("Constructing input domain {circuit_id}")); - let input_domain = - EvaluationDomain::new(circuit_info.num_public_inputs).ok_or(AHPError::PolynomialDegreeTooLarge)?; + let input_domain = EvaluationDomain::new(circuit_info.num_public_inputs).ok_or(AHPError::PolyTooLarge)?; end_timer!(input_domain_time); let circuit_specific_state = CircuitSpecificState { @@ -130,13 +126,13 @@ impl AHPForR1CS { pub fn verifier_second_round>( mut state: State, fs_rng: &mut R, - ) -> Result<(SecondMessage, State), AHPError> { + ) -> Result<(SecondMessage, State)> { let elems = fs_rng.squeeze_nonnative_field_elements(3); let (first, _) = elems.split_at(3); 
- let [alpha, eta_b, eta_c]: [_; 3] = first.try_into().unwrap(); + let [alpha, eta_b, eta_c]: [_; 3] = first.try_into().map_err(anyhow::Error::msg)?; let check_vanish_poly_time = start_timer!(|| "Evaluating vanishing polynomial"); - assert!(!state.max_constraint_domain.evaluate_vanishing_polynomial(alpha).is_zero()); + ensure!(!state.max_constraint_domain.evaluate_vanishing_polynomial(alpha).is_zero()); end_timer!(check_vanish_poly_time); let message = SecondMessage { alpha, eta_b, eta_c }; @@ -149,10 +145,10 @@ impl AHPForR1CS { pub fn verifier_third_round>( mut state: State, fs_rng: &mut R, - ) -> Result<(ThirdMessage, State), AHPError> { + ) -> Result<(ThirdMessage, State)> { let elems = fs_rng.squeeze_nonnative_field_elements(1); let beta = elems[0]; - assert!(!state.max_variable_domain.evaluate_vanishing_polynomial(beta).is_zero()); + ensure!(!state.max_variable_domain.evaluate_vanishing_polynomial(beta).is_zero()); let message = ThirdMessage { beta }; state.third_round_message = Some(message); @@ -164,7 +160,7 @@ impl AHPForR1CS { pub fn verifier_fourth_round>( mut state: State, fs_rng: &mut R, - ) -> Result<(FourthMessage, State), AHPError> { + ) -> Result<(FourthMessage, State)> { let num_circuits = state.circuit_specific_states.len(); let mut delta_a = Vec::with_capacity(num_circuits); let mut delta_b = Vec::with_capacity(num_circuits); @@ -189,10 +185,10 @@ impl AHPForR1CS { pub fn verifier_fifth_round>( mut state: State, fs_rng: &mut R, - ) -> Result, AHPError> { + ) -> Result> { let elems = fs_rng.squeeze_nonnative_field_elements(1); let gamma = elems[0]; - assert!(!state.max_non_zero_domain.evaluate_vanishing_polynomial(gamma).is_zero()); + ensure!(!state.max_non_zero_domain.evaluate_vanishing_polynomial(gamma).is_zero()); state.gamma = Some(gamma); Ok(state) diff --git a/algorithms/src/snark/varuna/data_structures/circuit_proving_key.rs b/algorithms/src/snark/varuna/data_structures/circuit_proving_key.rs index 1adb86b2c8..5848743d0f 100644 --- 
a/algorithms/src/snark/varuna/data_structures/circuit_proving_key.rs +++ b/algorithms/src/snark/varuna/data_structures/circuit_proving_key.rs @@ -27,7 +27,7 @@ use snarkvm_utilities::{ use std::{cmp::Ordering, sync::Arc}; /// Proving key for a specific circuit (i.e., R1CS matrices). -#[derive(Clone, Debug, PartialEq, Eq)] +#[derive(Clone, Debug)] pub struct CircuitProvingKey { /// The circuit verifying key. pub circuit_verifying_key: CircuitVerifyingKey, @@ -58,6 +58,14 @@ impl FromBytes for CircuitProvingKey { } } +impl PartialEq for CircuitProvingKey { + fn eq(&self, other: &Self) -> bool { + self.circuit.id == other.circuit.id + } +} + +impl Eq for CircuitProvingKey {} + impl Ord for CircuitProvingKey { fn cmp(&self, other: &Self) -> Ordering { self.circuit.id.cmp(&other.circuit.id) diff --git a/algorithms/src/snark/varuna/data_structures/proof.rs b/algorithms/src/snark/varuna/data_structures/proof.rs index 196059b70f..1fc4962215 100644 --- a/algorithms/src/snark/varuna/data_structures/proof.rs +++ b/algorithms/src/snark/varuna/data_structures/proof.rs @@ -170,7 +170,7 @@ impl Evaluations { for (label, value) in map { if label == "g_1" { - break; + continue; } if label.contains("g_a") { @@ -424,9 +424,9 @@ mod test { fn rand_evaluations(rng: &mut TestRng, i: usize) -> Evaluations { Evaluations { g_1_eval: F::rand(rng), - g_a_evals: vec![F::rand(rng); i], - g_b_evals: vec![F::rand(rng); i], - g_c_evals: vec![F::rand(rng); i], + g_a_evals: (0..i).map(|_| F::rand(rng)).collect(), + g_b_evals: (0..i).map(|_| F::rand(rng)).collect(), + g_c_evals: (0..i).map(|_| F::rand(rng)).collect(), } } diff --git a/algorithms/src/snark/varuna/data_structures/test_circuit.rs b/algorithms/src/snark/varuna/data_structures/test_circuit.rs index 8e66d71f03..e269ef7d46 100644 --- a/algorithms/src/snark/varuna/data_structures/test_circuit.rs +++ b/algorithms/src/snark/varuna/data_structures/test_circuit.rs @@ -84,6 +84,7 @@ impl ConstraintSynthesizer for TestCircuit TestCircuit { + // 
Generate a test circuit with a random witness. pub fn gen_rand( mul_depth: usize, num_constraints: usize, @@ -104,4 +105,26 @@ impl TestCircuit { (TestCircuit { a: Some(a), b: Some(b), num_constraints, num_variables, mul_depth }, public_inputs) } + + // Generate a test circuit with a fixed witness. + pub fn generate_circuit_with_fixed_witness( + a: u128, + b: u128, + mul_depth: usize, + num_constraints: usize, + num_variables: usize, + ) -> (Self, Vec) { + let mut public_inputs: Vec = Vec::with_capacity(mul_depth); + let a = F::from(a); + let b = F::from(b); + for j in 1..(mul_depth + 1) { + let mut new_var = a; + for _ in 0..j { + new_var.mul_assign(&b); + } + public_inputs.push(new_var); + } + + (TestCircuit { a: Some(a), b: Some(b), num_constraints, num_variables, mul_depth }, public_inputs) + } } diff --git a/algorithms/src/snark/varuna/resources/circuit_0/challenges.input b/algorithms/src/snark/varuna/resources/circuit_0/challenges.input new file mode 100644 index 0000000000..c452bb9bab --- /dev/null +++ b/algorithms/src/snark/varuna/resources/circuit_0/challenges.input @@ -0,0 +1,9 @@ +3848747268438146429751199396409351181775389242768022193485885831738448017147 +1 +8197944265508088395536605774074305135172727109973647025295290482999689956740 +969057258436037177120044092706484307847087860293738150232755543560372962965 +1261454636320080423466301508402274008580035865105120100172548996301504688503 +1 +4987583518937978349829618221849930643691403053432331091973338029344238378359 +5292820491592383411924896857610185298390500160570506754003580089093852949536 +672738024541753390172108082983901395072703770443783662610485842877496432861 \ No newline at end of file diff --git a/algorithms/src/snark/varuna/resources/circuit_0/domain/C.txt b/algorithms/src/snark/varuna/resources/circuit_0/domain/C.txt new file mode 100644 index 0000000000..4fee833f10 --- /dev/null +++ b/algorithms/src/snark/varuna/resources/circuit_0/domain/C.txt @@ -0,0 +1 @@ +[1, 
3279917132858342911831074864712036382710139745724269329239664300762234227201, 880904806456922042258150504921383618666682042621506879489, 1973030855696769125460623085327505793054673234941098473458474059731617992635, 8444461749428370424248824938781546531375899335154063827935233455917409239040, 5164544616570027512417750074069510148665759589429794498695569155155175011840, 8444461749428370423367920132324624489117748830232680209268551413295902359552, 6471430893731601298788201853454040738321226100212965354476759396185791246406] \ No newline at end of file diff --git a/algorithms/src/snark/varuna/resources/circuit_0/domain/K.txt b/algorithms/src/snark/varuna/resources/circuit_0/domain/K.txt new file mode 100644 index 0000000000..4fee833f10 --- /dev/null +++ b/algorithms/src/snark/varuna/resources/circuit_0/domain/K.txt @@ -0,0 +1 @@ +[1, 3279917132858342911831074864712036382710139745724269329239664300762234227201, 880904806456922042258150504921383618666682042621506879489, 1973030855696769125460623085327505793054673234941098473458474059731617992635, 8444461749428370424248824938781546531375899335154063827935233455917409239040, 5164544616570027512417750074069510148665759589429794498695569155155175011840, 8444461749428370423367920132324624489117748830232680209268551413295902359552, 6471430893731601298788201853454040738321226100212965354476759396185791246406] \ No newline at end of file diff --git a/algorithms/src/snark/varuna/resources/circuit_0/domain/R.txt b/algorithms/src/snark/varuna/resources/circuit_0/domain/R.txt new file mode 100644 index 0000000000..4fee833f10 --- /dev/null +++ b/algorithms/src/snark/varuna/resources/circuit_0/domain/R.txt @@ -0,0 +1 @@ +[1, 3279917132858342911831074864712036382710139745724269329239664300762234227201, 880904806456922042258150504921383618666682042621506879489, 1973030855696769125460623085327505793054673234941098473458474059731617992635, 8444461749428370424248824938781546531375899335154063827935233455917409239040, 
5164544616570027512417750074069510148665759589429794498695569155155175011840, 8444461749428370423367920132324624489117748830232680209268551413295902359552, 6471430893731601298788201853454040738321226100212965354476759396185791246406] \ No newline at end of file diff --git a/algorithms/src/snark/varuna/resources/circuit_0/instance.input b/algorithms/src/snark/varuna/resources/circuit_0/instance.input new file mode 100644 index 0000000000..57534290ae --- /dev/null +++ b/algorithms/src/snark/varuna/resources/circuit_0/instance.input @@ -0,0 +1,24 @@ +A +0, 0, 0, 0, 1, 0, 0, +0, 0, 0, 0, 1, 0, 0, +0, 0, 0, 0, 1, 0, 0, +0, 0, 0, 0, 1, 0, 0, +0, 0, 0, 0, 1, 0, 0, +0, 1, 0, 0, 0, 0, 0, +0, 0, 1, 0, 0, 0, 0, +B +0, 0, 0, 0, 0, 1, 0, +0, 0, 0, 0, 0, 1, 0, +0, 0, 0, 0, 0, 1, 0, +0, 0, 0, 0, 0, 1, 0, +0, 0, 0, 0, 0, 1, 0, +0, 0, 0, 0, 0, 1, 0, +0, 0, 0, 0, 0, 1, 0, +C +0, 1, 0, 0, 0, 0, 0, +0, 1, 0, 0, 0, 0, 0, +0, 1, 0, 0, 0, 0, 0, +0, 1, 0, 0, 0, 0, 0, +0, 1, 0, 0, 0, 0, 0, +0, 0, 1, 0, 0, 0, 0, +0, 0, 0, 1, 0, 0, 0, \ No newline at end of file diff --git a/algorithms/src/snark/varuna/resources/circuit_0/polynomials/g_1.txt b/algorithms/src/snark/varuna/resources/circuit_0/polynomials/g_1.txt new file mode 100644 index 0000000000..66c9b89ece --- /dev/null +++ b/algorithms/src/snark/varuna/resources/circuit_0/polynomials/g_1.txt @@ -0,0 +1 @@ +[6619044933395879708506708233785160900848417926433900807753717609232615537930, 3821893577243645065467553327033258263806878911070937029265276300425404027087, 1076457615332399821357918023630894203218737643598338498309569170592118610105, 4055793519271390007054111087031631384766037777614854573354385584678647572635, 5151025438151686500811984829825775386132972581217168951169837107565191699213, 5451596424008550546888850265776737232625677836482903207071209344567449341848, 3448452754104072214340119903505567632967740660166690880356165129542564717469] \ No newline at end of file diff --git 
a/algorithms/src/snark/varuna/resources/circuit_0/polynomials/g_a.txt b/algorithms/src/snark/varuna/resources/circuit_0/polynomials/g_a.txt new file mode 100644 index 0000000000..406dfcb42a --- /dev/null +++ b/algorithms/src/snark/varuna/resources/circuit_0/polynomials/g_a.txt @@ -0,0 +1 @@ +[7298426509888201269891881417058316917443386443130089954044569917277532451124, 4513665418379767804370095812278055346820156274660773052289744107232412707613, 4475996609146606675867680993589252805148646238137939055092225188380378102122, 1164867027976035030197277870836287693729511372979478632421748407217397685680, 241156138388132028780023732378805911361134319610119168887483568462409314180, 5388924537180330478887903617303497159805558377912056989902396473207071789448, 4874868009316021445675218576593739719745472140358591754846406994220385278800] \ No newline at end of file diff --git a/algorithms/src/snark/varuna/resources/circuit_0/polynomials/g_b.txt b/algorithms/src/snark/varuna/resources/circuit_0/polynomials/g_b.txt new file mode 100644 index 0000000000..5bd1204a5c --- /dev/null +++ b/algorithms/src/snark/varuna/resources/circuit_0/polynomials/g_b.txt @@ -0,0 +1 @@ +[5156102875137215468349822857341787486070810620190600082313993541539039694040, 642243162042132842519243997058576263060505644548708158364282235038668235582, 4232914989795410755789500032792636846277428417456597259068424201075935215723, 5294805659491479092801660967840299543626893059046679165413663075966109761338, 4428888977080738940451109661040008630918481794519438117648033775895551925110, 4353953527706377329056342142048774285909447424786294362569762658050561552035, 3147342381590763655483856523901415730605847626281467108012225760335596205826] \ No newline at end of file diff --git a/algorithms/src/snark/varuna/resources/circuit_0/polynomials/g_c.txt b/algorithms/src/snark/varuna/resources/circuit_0/polynomials/g_c.txt new file mode 100644 index 0000000000..5bd1204a5c --- /dev/null +++ 
b/algorithms/src/snark/varuna/resources/circuit_0/polynomials/g_c.txt @@ -0,0 +1 @@ +[5156102875137215468349822857341787486070810620190600082313993541539039694040, 642243162042132842519243997058576263060505644548708158364282235038668235582, 4232914989795410755789500032792636846277428417456597259068424201075935215723, 5294805659491479092801660967840299543626893059046679165413663075966109761338, 4428888977080738940451109661040008630918481794519438117648033775895551925110, 4353953527706377329056342142048774285909447424786294362569762658050561552035, 3147342381590763655483856523901415730605847626281467108012225760335596205826] \ No newline at end of file diff --git a/algorithms/src/snark/varuna/resources/circuit_0/polynomials/h_0.txt b/algorithms/src/snark/varuna/resources/circuit_0/polynomials/h_0.txt new file mode 100644 index 0000000000..5b5fb13912 --- /dev/null +++ b/algorithms/src/snark/varuna/resources/circuit_0/polynomials/h_0.txt @@ -0,0 +1 @@ +[7388904030749824121217721821433853214953911918259805849443329273927733084164, 2135176220032946734943829824459106118384054561849250372330716048683975588842, 4754990573109988415472033418479071784044875994712276662893248150540227089791, 2758334532884618923758430413601802477377254380601541511107093225976782776983, 8095203738994452079955122826479915970380729551727226719303332837983196321727, 6177424990788360784192413675740409023496996883035955845846493361503576983134, 5094286905430846656542049153180259645877255793223574061499131126611824661186] \ No newline at end of file diff --git a/algorithms/src/snark/varuna/resources/circuit_0/polynomials/h_1.txt b/algorithms/src/snark/varuna/resources/circuit_0/polynomials/h_1.txt new file mode 100644 index 0000000000..b635e6289e --- /dev/null +++ b/algorithms/src/snark/varuna/resources/circuit_0/polynomials/h_1.txt @@ -0,0 +1 @@ +[165474815053007573527603177207806062734264344258107184951357675349729517680, 5801432311648519240757166384710257525281134619669773204356367264314316460174, 
5779294849724998706280834835495248447327865373185082571164164495999431007313, 6195154650395302844568965061674617811933630360198179606653410538752273578806, 1424571001001798030720515357589755476564998467791760803818317281629675760920, 644304059262549681272611563429424161652824970479283239535226429205883512483, 1985534495267414125495515420354543072112034540966649759533850695585151887015] \ No newline at end of file diff --git a/algorithms/src/snark/varuna/resources/circuit_0/polynomials/h_2.txt b/algorithms/src/snark/varuna/resources/circuit_0/polynomials/h_2.txt new file mode 100644 index 0000000000..ffe41016e9 --- /dev/null +++ b/algorithms/src/snark/varuna/resources/circuit_0/polynomials/h_2.txt @@ -0,0 +1 @@ +[8290113549992594314646287997621385217684845052221443536793209549506488920846, 4219277537425088937784490235792966206782906842687518132171996608740394894532, 590462121801222593314525094454279217199506852881783863984329676220226405050, 2657750100182903992679485191896472785743422123052387651366434608567501075462, 591064579761258100262799594047186364485022182696204209512789652131850886891, 3322216460403101876331086740648778039637725754343916626810493733358099221767, 8341638249347740785844637855796311718756490590925437333299426275491585624411] \ No newline at end of file diff --git a/algorithms/src/snark/varuna/resources/circuit_0/polynomials/w_lde.txt b/algorithms/src/snark/varuna/resources/circuit_0/polynomials/w_lde.txt new file mode 100644 index 0000000000..33e90cb354 --- /dev/null +++ b/algorithms/src/snark/varuna/resources/circuit_0/polynomials/w_lde.txt @@ -0,0 +1 @@ +[7388904030749824121217721821433853214953911918259805849443329273927733084181, 4806631722464810378222418881252928774493289697365662880095775335672748770572, 7388904030749824121217721821433853214953911918259805849443329273927733084148, 4153188583884023458610048797853002211921041294332568892204718936512234268619] \ No newline at end of file diff --git 
a/algorithms/src/snark/varuna/resources/circuit_0/polynomials/z_lde.txt b/algorithms/src/snark/varuna/resources/circuit_0/polynomials/z_lde.txt new file mode 100644 index 0000000000..89b0c81ec5 --- /dev/null +++ b/algorithms/src/snark/varuna/resources/circuit_0/polynomials/z_lde.txt @@ -0,0 +1 @@ +[7388904030749824121217721821433853214953911918259805849443329273927733084183, 1526714589606467466391344016540892391783149951641393550856111034910514543371, 7388904030749824121217721821433853214953911918259805849443329273927733084148, 2180157728187254333149425712525496418866368059391470418746244876780616275984, 7388904030749824121217721821433853214953911918259805849443329273927733084181, 4806631722464810378222418881252928774493289697365662880095775335672748770572, 7388904030749824121217721821433853214953911918259805849443329273927733084148, 4153188583884023458610048797853002211921041294332568892204718936512234268619] \ No newline at end of file diff --git a/algorithms/src/snark/varuna/resources/circuit_0/witness.input b/algorithms/src/snark/varuna/resources/circuit_0/witness.input new file mode 100644 index 0000000000..4e9e621149 --- /dev/null +++ b/algorithms/src/snark/varuna/resources/circuit_0/witness.input @@ -0,0 +1,2 @@ +[2, 4] +[1, 8, 32, 128, 2, 4, 2] \ No newline at end of file diff --git a/algorithms/src/snark/varuna/tests.rs b/algorithms/src/snark/varuna/tests.rs index 8c7edaa3b8..2d2f20bccb 100644 --- a/algorithms/src/snark/varuna/tests.rs +++ b/algorithms/src/snark/varuna/tests.rs @@ -129,7 +129,7 @@ mod varuna { for instance_input in vks_to_inputs.values() { let mut fake_instance_input = Vec::with_capacity(instance_input.len()); for input in instance_input.iter() { - let fake_input = vec![Fr::rand(rng); input.len()]; + let fake_input: Vec<_> = (0..input.len()).map(|_| Fr::rand(rng)).collect(); fake_instance_input.push(fake_input); } fake_instance_inputs.push(fake_instance_input); @@ -531,3 +531,280 @@ mod varuna_hiding { 
assert!(VarunaInst::verify(universal_verifier, &fs_parameters, &vk1, public_inputs1, &proof1).unwrap()); } } + +mod varuna_test_vectors { + use crate::{ + fft::EvaluationDomain, + snark::varuna::{ahp::verifier, AHPForR1CS, TestCircuit, VarunaNonHidingMode, VarunaSNARK}, + traits::snark::SNARK, + }; + use snarkvm_curves::bls12_377::{Bls12_377, Fq, Fr}; + use snarkvm_fields::One; + use std::{collections::BTreeMap, fs, ops::Deref, path::PathBuf, str::FromStr, sync::Arc}; + + type FS = crate::crypto_hash::PoseidonSponge; + type MM = VarunaNonHidingMode; + type VarunaSonicInst = VarunaSNARK; + + // Create the path for the `resources` folder. + fn resources_path(create_dir: bool) -> PathBuf { + let mut path = PathBuf::from(env!("CARGO_MANIFEST_DIR")); + path.push("src"); + path.push("snark"); + path.push("varuna"); + path.push("resources"); + + // Create the `resources` folder, if it does not exist. + if !path.exists() { + if create_dir { + fs::create_dir(&path).unwrap_or_else(|_| panic!("Failed to create resources folder: {path:?}")); + } else { + panic!("Resources folder does not exist: {path:?}"); + } + } + + path + } + + // Create the file path. + fn test_vector_path(folder: &str, file: &str, circuit: &str, create_dir: bool) -> PathBuf { + let mut path = resources_path(create_dir); + + // Construct the path where the test data lives. + path.push(circuit); + path.push(folder); + + // Create the test folder if it does not exist if specified, otherwise panic. + if !path.exists() { + if create_dir { + fs::create_dir(&path).unwrap_or_else(|_| panic!("Failed to create resources folder: {path:?}")); + } else { + panic!("Resources folder does not exist: {path:?}"); + } + } + + // Construct the path for the test file. + path.push(file); + path.set_extension("txt"); + + path + } + + // Loads the given `test_folder/test_file` and asserts the given `candidate` matches the expected values. 
+ #[track_caller] + fn assert_test_vector_equality(test_folder: &str, test_file: &str, candidate: &str, circuit: &str) { + // Get the path to the test file. + let path = test_vector_path(test_folder, test_file, circuit, false); + + // Assert the test file is equal to the expected value. + expect_test::expect_file![path].assert_eq(candidate); + } + + // Create a test vector from a trusted revision of Varuna. + fn create_test_vector(folder: &str, file: &str, data: &str, circuit: &str) { + // Get the path to the test file. + let path = test_vector_path(folder, file, circuit, true); + + // Write the test vector to file. + fs::write(&path, data).unwrap_or_else(|_| panic!("Failed to write to file: {:?}", path)); + } + + // Tests varuna against the test vectors in all circuits in the resources folder. + fn test_varuna_with_all_circuits(create_test_vectors: bool) { + let entries = fs::read_dir(resources_path(create_test_vectors)).expect("Failed to read resources folder"); + entries.into_iter().for_each(|entry| { + let path = entry.unwrap().path(); + if path.is_dir() { + let circuit = path.file_name().unwrap().to_str().unwrap(); + test_circuit_with_test_vectors(create_test_vectors, circuit); + } + }); + } + + // Test Varuna against test vectors for a specific circuit. + fn test_circuit_with_test_vectors(create_test_vectors: bool, circuit: &str) { + // Initialize the parts of the witness used in the multiplicative constraints. + let witness_path = format!("src/snark/varuna/resources/{}/witness.input", circuit); + let instance_file = fs::read_to_string(witness_path).expect("Could not read the file"); + let witness: Vec = serde_json::from_str(instance_file.lines().next().unwrap()).unwrap(); + let (a, b) = (witness[0], witness[1]); + + // Initialize challenges from file. 
+ let challenges_path = format!("src/snark/varuna/resources/{}/challenges.input", circuit); + let challenges_file = fs::read_to_string(challenges_path).expect("Could not read the file"); + let mut challenges = Vec::new(); + for line in challenges_file.lines() { + challenges.push(line) + } + let (alpha, _eta_a, eta_b, eta_c, beta, delta_a, delta_b, delta_c, _gamma) = ( + Fr::from_str(challenges[0]).unwrap(), + Fr::from_str(challenges[1]).unwrap(), + Fr::from_str(challenges[2]).unwrap(), + Fr::from_str(challenges[3]).unwrap(), + Fr::from_str(challenges[4]).unwrap(), + vec![Fr::from_str(challenges[5]).unwrap()], + vec![Fr::from_str(challenges[6]).unwrap()], + vec![Fr::from_str(challenges[7]).unwrap()], + Fr::from_str(challenges[8]).unwrap(), + ); + + let circuit_combiner = Fr::one(); + let instance_combiners = vec![Fr::one()]; + + // Create sample circuit which corresponds to instance.input file. + let mul_depth = 3; + let num_constraints = 7; + let num_variables = 7; + + // Create a fixed seed rng that matches those the test vectors were generated with. + let rng = &mut snarkvm_utilities::rand::TestRng::fixed(4730); + let max_degree = + AHPForR1CS::::max_degree(num_constraints, num_variables, num_variables * num_constraints).unwrap(); + let universal_srs = VarunaSonicInst::universal_setup(max_degree).unwrap(); + let (circ, _) = + TestCircuit::generate_circuit_with_fixed_witness(a, b, mul_depth, num_constraints, num_variables); + println!("Circuit: {:?}", circ); + let (index_pk, _index_vk) = VarunaSonicInst::circuit_setup(&universal_srs, &circ).unwrap(); + let mut keys_to_constraints = BTreeMap::new(); + keys_to_constraints.insert(index_pk.circuit.deref(), std::slice::from_ref(&circ)); + + // Begin the Varuna protocol execution. 
+ let prover_state = AHPForR1CS::<_, MM>::init_prover(&keys_to_constraints, rng).unwrap(); + let mut prover_state = AHPForR1CS::<_, MM>::prover_first_round(prover_state, rng).unwrap(); + let first_round_oracles = Arc::new(prover_state.first_round_oracles.as_ref().unwrap()); + + // Get private witness polynomial coefficients. + let (_, w_poly) = first_round_oracles.batches.iter().next().unwrap(); + let w_lde = format!("{:?}", w_poly[0].0.coeffs().map(|(_, coeff)| coeff).collect::>()); + if create_test_vectors { + create_test_vector("polynomials", "w_lde", &w_lde, circuit); + } + + // Generate test vectors from assignments. + let assignments = AHPForR1CS::<_, MM>::calculate_assignments(&mut prover_state).unwrap(); + + // Get full witness polynomial coefficients. + let (_, z_poly) = assignments.iter().next().unwrap(); + let z_lde = format!("{:?}", z_poly[0].coeffs().iter().collect::>()); + if create_test_vectors { + create_test_vector("polynomials", "z_lde", &z_lde, circuit); + } + + let combiners = verifier::BatchCombiners:: { circuit_combiner, instance_combiners }; + let batch_combiners = BTreeMap::from_iter([(index_pk.circuit.id, combiners)]); + let verifier_first_msg = verifier::FirstMessage:: { batch_combiners }; + + let (second_oracles, prover_state) = + AHPForR1CS::<_, MM>::prover_second_round::<_>(&verifier_first_msg, prover_state, rng).unwrap(); + + // Get round 2 rowcheck polynomial oracle coefficients. + let h_0 = format!("{:?}", second_oracles.h_0.coeffs().map(|(_, coeff)| coeff).collect::>()); + if create_test_vectors { + create_test_vector("polynomials", "h_0", &h_0, circuit); + } + + let verifier_second_msg = verifier::SecondMessage:: { alpha, eta_b, eta_c }; + let (_prover_third_message, third_oracles, prover_state) = + AHPForR1CS::<_, MM>::prover_third_round(&verifier_first_msg, &verifier_second_msg, prover_state, rng) + .unwrap(); + + // Get coefficients round 3 univariate rowcheck polynomial oracles. 
+ let g_1 = format!("{:?}", third_oracles.g_1.coeffs().map(|(_, coeff)| coeff).collect::>()); + if create_test_vectors { + create_test_vector("polynomials", "g_1", &g_1, circuit); + } + let h_1 = format!("{:?}", third_oracles.h_1.coeffs().map(|(_, coeff)| coeff).collect::>()); + if create_test_vectors { + create_test_vector("polynomials", "h_1", &h_1, circuit); + } + + let verifier_third_msg = verifier::ThirdMessage:: { beta }; + let (_prover_fourth_message, fourth_oracles, prover_state) = + AHPForR1CS::<_, MM>::prover_fourth_round(&verifier_second_msg, &verifier_third_msg, prover_state, rng) + .unwrap(); + + // Create round 4 rational sumcheck oracle polynomials. + let (_, gm_polys) = fourth_oracles.gs.iter().next().unwrap(); + let g_a = format!("{:?}", gm_polys.g_a.coeffs().map(|(_, coeff)| coeff).collect::>()); + let g_b = format!("{:?}", gm_polys.g_b.coeffs().map(|(_, coeff)| coeff).collect::>()); + let g_c = format!("{:?}", gm_polys.g_b.coeffs().map(|(_, coeff)| coeff).collect::>()); + if create_test_vectors { + create_test_vector("polynomials", "g_a", &g_a, circuit); + create_test_vector("polynomials", "g_b", &g_b, circuit); + create_test_vector("polynomials", "g_c", &g_c, circuit); + } + + // Create the verifier's fourth message. 
+ let verifier_fourth_msg = verifier::FourthMessage:: { delta_a, delta_b, delta_c }; + + let mut public_inputs = BTreeMap::new(); + let public_input = prover_state.public_inputs(&index_pk.circuit).unwrap(); + public_inputs.insert(index_pk.circuit.id, public_input); + let non_zero_a_domain = EvaluationDomain::::new(index_pk.circuit.index_info.num_non_zero_a).unwrap(); + let non_zero_b_domain = EvaluationDomain::::new(index_pk.circuit.index_info.num_non_zero_b).unwrap(); + let non_zero_c_domain = EvaluationDomain::::new(index_pk.circuit.index_info.num_non_zero_c).unwrap(); + let variable_domain = EvaluationDomain::::new(index_pk.circuit.index_info.num_variables).unwrap(); + let constraint_domain = EvaluationDomain::::new(index_pk.circuit.index_info.num_constraints).unwrap(); + let input_domain = EvaluationDomain::::new(index_pk.circuit.index_info.num_public_inputs).unwrap(); + + // Get constraint domain elements. + let mut constraint_domain_elements = Vec::with_capacity(constraint_domain.size()); + for el in constraint_domain.elements() { + constraint_domain_elements.push(el); + } + if create_test_vectors { + create_test_vector("domain", "R", &format!("{:?}", constraint_domain_elements), circuit); + } + + // Get non_zero_domain elements. + let non_zero_domain = *[&non_zero_a_domain, &non_zero_b_domain, &non_zero_c_domain] + .iter() + .max_by_key(|domain| domain.size) + .unwrap(); + let mut non_zero_domain_elements = Vec::with_capacity(non_zero_domain.size()); + for el in non_zero_domain.elements() { + non_zero_domain_elements.push(el); + } + if create_test_vectors { + create_test_vector("domain", "K", &format!("{:?}", non_zero_domain_elements), circuit); + } + + // Get variable domain elements. 
+ let mut variable_domain_elements = Vec::with_capacity(input_domain.size()); + for el in variable_domain.elements() { + variable_domain_elements.push(el); + } + if create_test_vectors { + create_test_vector("domain", "C", &format!("{:?}", variable_domain_elements), circuit); + } + + let fifth_oracles = AHPForR1CS::<_, MM>::prover_fifth_round(verifier_fourth_msg, prover_state, rng).unwrap(); + + // Get coefficients of final oracle polynomial from round 5. + let h_2 = format!("{:?}", fifth_oracles.h_2.coeffs().map(|(_, coeff)| coeff).collect::>()); + if create_test_vectors { + create_test_vector("polynomials", "h_2", &h_2, circuit); + } + + // Check the intermediate oracle polynomials against the test vectors. + assert_test_vector_equality("polynomials", "w_lde", &w_lde, circuit); + assert_test_vector_equality("polynomials", "z_lde", &z_lde, circuit); + assert_test_vector_equality("polynomials", "h_0", &h_0, circuit); + assert_test_vector_equality("polynomials", "h_1", &h_1, circuit); + assert_test_vector_equality("polynomials", "g_1", &g_1, circuit); + assert_test_vector_equality("polynomials", "h_2", &h_2, circuit); + assert_test_vector_equality("polynomials", "g_a", &g_a, circuit); + assert_test_vector_equality("polynomials", "g_b", &g_b, circuit); + assert_test_vector_equality("polynomials", "g_c", &g_c, circuit); + + // Check that the domains match the test vectors. 
+ assert_test_vector_equality("domain", "R", &format!("{:?}", constraint_domain_elements), circuit); + assert_test_vector_equality("domain", "K", &format!("{:?}", non_zero_domain_elements), circuit); + assert_test_vector_equality("domain", "C", &format!("{:?}", variable_domain_elements), circuit); + } + + #[test] + fn test_varuna_with_prover_test_vectors() { + test_varuna_with_all_circuits(false); + } +} diff --git a/algorithms/src/snark/varuna/varuna.rs b/algorithms/src/snark/varuna/varuna.rs index ac07edcee3..df01ee4425 100644 --- a/algorithms/src/snark/varuna/varuna.rs +++ b/algorithms/src/snark/varuna/varuna.rs @@ -46,7 +46,7 @@ use snarkvm_curves::PairingEngine; use snarkvm_fields::{One, PrimeField, ToConstraintField, Zero}; use snarkvm_utilities::{to_bytes_le, ToBytes}; -use anyhow::{anyhow, Result}; +use anyhow::{anyhow, bail, ensure, Result}; use core::marker::PhantomData; use itertools::Itertools; use rand::{CryptoRng, Rng}; @@ -82,17 +82,17 @@ impl, SM: SNARKMode> VarunaSNARK let mut indexed_circuit = AHPForR1CS::<_, SM>::index(*circuit)?; // TODO: Add check that c is in the correct mode. // Ensure the universal SRS supports the circuit size. - universal_srs - .download_powers_for(0..indexed_circuit.max_degree()) - .map_err(|e| anyhow!("Failed to download powers for degree {}: {e}", indexed_circuit.max_degree()))?; - let coefficient_support = AHPForR1CS::::get_degree_bounds(&indexed_circuit.index_info); + universal_srs.download_powers_for(0..indexed_circuit.max_degree()?).map_err(|e| { + anyhow!("Failed to download powers for degree {}: {e}", indexed_circuit.max_degree().unwrap()) + })?; + let coefficient_support = AHPForR1CS::::get_degree_bounds(&indexed_circuit.index_info)?; // Varuna only needs degree 2 random polynomials. 
let supported_hiding_bound = 1; let supported_lagrange_sizes = [].into_iter(); // TODO: consider removing lagrange_bases_at_beta_g from CommitterKey let (committer_key, _) = SonicKZG10::::trim( universal_srs, - indexed_circuit.max_degree(), + indexed_circuit.max_degree()?, supported_lagrange_sizes, supported_hiding_bound, Some(coefficient_support.as_slice()), @@ -106,11 +106,11 @@ impl, SM: SNARKMode> VarunaSNARK let (mut circuit_commitments, commitment_randomnesses): (_, _) = SonicKZG10::::commit( universal_prover, &ck, - indexed_circuit.interpolate_matrix_evals().map(Into::into), + indexed_circuit.interpolate_matrix_evals()?.map(Into::into), setup_rng, )?; let empty_randomness = Randomness::::empty(); - assert!(commitment_randomnesses.iter().all(|r| r == &empty_randomness)); + ensure!(commitment_randomnesses.iter().all(|r| r == &empty_randomness)); end_timer!(commit_time); circuit_commitments.sort_by(|c1, c2| c1.label().cmp(c2.label())); @@ -141,7 +141,7 @@ impl, SM: SNARKMode> VarunaSNARK let mut sponge = FS::new_with_parameters(fs_parameters); sponge.absorb_bytes(Self::PROTOCOL_NAME); for (batch_size, inputs) in inputs_and_batch_sizes.values() { - sponge.absorb_bytes(&(u64::try_from(*batch_size).unwrap()).to_le_bytes()); + sponge.absorb_bytes(&(*batch_size as u64).to_le_bytes()); for input in inputs.iter() { sponge.absorb_nonnative_field_elements(input.iter().copied()); } @@ -157,7 +157,7 @@ impl, SM: SNARKMode> VarunaSNARK verifying_key: &CircuitVerifyingKey, ) -> Result { let mut sponge = FS::new_with_parameters(fs_parameters); - sponge.absorb_bytes(&to_bytes_le![&Self::PROTOCOL_NAME].unwrap()); + sponge.absorb_bytes(&to_bytes_le![&Self::PROTOCOL_NAME]?); sponge.absorb_bytes(&verifying_key.circuit_info.to_bytes_le()?); sponge.absorb_native_field_elements(&verifying_key.circuit_commitments); sponge.absorb_bytes(&verifying_key.id.0); @@ -214,7 +214,7 @@ where type VerifierInput = [E::Fr]; type VerifyingKey = CircuitVerifyingKey; - fn universal_setup(max_degree: 
usize) -> Result { + fn universal_setup(max_degree: usize) -> Result { let setup_time = start_timer!(|| { format!("Varuna::UniversalSetup with max_degree {max_degree}",) }); let srs = SonicKZG10::::load_srs(max_degree).map_err(Into::into); end_timer!(setup_time); @@ -228,7 +228,7 @@ where circuit: &C, ) -> Result<(Self::ProvingKey, Self::VerifyingKey)> { let mut circuit_keys = Self::batch_circuit_setup::(universal_srs, &[circuit])?; - assert_eq!(circuit_keys.len(), 1); + ensure!(circuit_keys.len() == 1); Ok(circuit_keys.pop().unwrap()) } @@ -238,14 +238,14 @@ where fs_parameters: &Self::FSParameters, verifying_key: &Self::VerifyingKey, proving_key: &Self::ProvingKey, - ) -> Result { + ) -> Result { // Initialize sponge let mut sponge = Self::init_sponge_for_certificate(fs_parameters, verifying_key)?; // Compute challenges for linear combination, and the point to evaluate the polynomials at. // The linear combination requires `num_polynomials - 1` coefficients // (since the first coeff is 1), and so we squeeze out `num_polynomials` points. let mut challenges = sponge.squeeze_nonnative_field_elements(verifying_key.circuit_commitments.len()); - let point = challenges.pop().unwrap(); + let point = challenges.pop().ok_or(anyhow!("Failed to squeeze random element"))?; let one = E::Fr::one(); let linear_combination_challenges = core::iter::once(&one).chain(challenges.iter()); @@ -266,7 +266,7 @@ where universal_prover, &committer_key, &[lc], - proving_key.circuit.interpolate_matrix_evals(), + proving_key.circuit.interpolate_matrix_evals()?, &empty_randomness, &query_set, &mut sponge, @@ -283,15 +283,15 @@ where circuit: &C, verifying_key: &Self::VerifyingKey, certificate: &Self::Certificate, - ) -> Result { + ) -> Result { // Ensure the VerifyingKey encodes the expected circuit. 
let circuit_id = &verifying_key.id; let state = AHPForR1CS::::index_helper(circuit)?; if state.index_info != verifying_key.circuit_info { - return Err(SNARKError::CircuitNotFound); + bail!(SNARKError::CircuitNotFound); } if state.id != *circuit_id { - return Err(SNARKError::CircuitNotFound); + bail!(SNARKError::CircuitNotFound); } // Initialize sponge. @@ -301,26 +301,20 @@ where // The linear combination requires `num_polynomials - 1` coefficients // (since the first coeff is 1), and so we squeeze out `num_polynomials` points. let mut challenges = sponge.squeeze_nonnative_field_elements(verifying_key.circuit_commitments.len()); - let point = challenges.pop().unwrap(); - let one = E::Fr::one(); - let linear_combination_challenges = core::iter::once(&one).chain(challenges.iter()); + let point = challenges.pop().ok_or(anyhow!("Failed to squeeze random element"))?; + let combiners = core::iter::once(E::Fr::one()).chain(challenges.into_iter()); // We will construct a linear combination and provide a proof of evaluation of the lc at `point`. 
- let poly_info = AHPForR1CS::::index_polynomial_info(std::iter::once(circuit_id)); - let evaluations_at_point = AHPForR1CS::::evaluate_index_polynomials(state, circuit_id, point)?; - let mut lc = crate::polycommit::sonic_pc::LinearCombination::empty("circuit_check"); - let mut evaluation = E::Fr::zero(); - for ((label, &c), eval) in poly_info.keys().zip_eq(linear_combination_challenges).zip_eq(evaluations_at_point) { - lc.add(c, label.as_str()); - evaluation += c * eval; - } + let (lc, evaluation) = + AHPForR1CS::::evaluate_index_polynomials(state, circuit_id, point, combiners)?; + ensure!(verifying_key.circuit_commitments.len() == lc.terms.len()); let commitments = verifying_key .iter() .cloned() - .zip_eq(poly_info.values()) - .map(|(c, info)| LabeledCommitment::new_with_info(info, c)) - .collect::>(); + .zip_eq(lc.terms.keys()) + .map(|(c, label)| LabeledCommitment::new(format!("{label:?}"), c, None)) + .collect_vec(); let evaluations = Evaluations::from_iter([(("circuit_check".into(), point), evaluation)]); let query_set = QuerySet::from_iter([("circuit_check".into(), ("challenge".into(), point))]); @@ -338,16 +332,16 @@ where /// This is the main entrypoint for creating proofs. 
/// You can find a specification of the prover algorithm in: - /// https://github.com/AleoHQ/protocol-docs/tree/main/snark/varuna + /// https://github.com/AleoHQ/protocol-docs fn prove_batch, R: Rng + CryptoRng>( universal_prover: &Self::UniversalProver, fs_parameters: &Self::FSParameters, keys_to_constraints: &BTreeMap<&CircuitProvingKey, &[C]>, zk_rng: &mut R, - ) -> Result { + ) -> Result { let prover_time = start_timer!(|| "Varuna::Prover"); if keys_to_constraints.is_empty() { - return Err(SNARKError::EmptyBatch); + bail!(SNARKError::EmptyBatch); } let mut circuits_to_constraints = BTreeMap::new(); @@ -378,7 +372,7 @@ where circuit_ids.push(circuit_id); } - assert_eq!(prover_state.total_instances, total_instances); + ensure!(prover_state.total_instances == total_instances); let committer_key = CommitterUnionKey::union(keys_to_constraints.keys().map(|pk| pk.committer_key.deref())); @@ -522,7 +516,7 @@ where .chain(fourth_oracles.into_iter()) .chain(fifth_oracles.into_iter()) .collect(); - assert!( + ensure!( polynomials.len() == num_unique_circuits * 6 + // numerator and denominator for each matrix sumcheck AHPForR1CS::::num_first_round_oracles(total_instances) + @@ -570,9 +564,9 @@ where let empty_randomness = Randomness::::empty(); if SM::ZK { - assert!(commitment_randomnesses.iter().any(|r| r != &empty_randomness)); + ensure!(commitment_randomnesses.iter().any(|r| r != &empty_randomness)); } else { - assert!(commitment_randomnesses.iter().all(|r| r == &empty_randomness)); + ensure!(commitment_randomnesses.iter().all(|r| r == &empty_randomness)); } // Compute the AHP verifier's query set. @@ -619,7 +613,7 @@ where pc_proof, )?; proof.check_batch_sizes()?; - assert_eq!(proof.pc_proof.is_hiding(), SM::ZK); + ensure!(proof.pc_proof.is_hiding() == SM::ZK); end_timer!(prover_time); Ok(proof) @@ -627,15 +621,15 @@ where /// This is the main entrypoint for verifying proofs. 
/// You can find a specification of the verifier algorithm in: - /// https://github.com/AleoHQ/protocol-docs/tree/main/marlin + /// https://github.com/AleoHQ/protocol-docs fn verify_batch>( universal_verifier: &Self::UniversalVerifier, fs_parameters: &Self::FSParameters, keys_to_inputs: &BTreeMap<&Self::VerifyingKey, &[B]>, proof: &Self::Proof, - ) -> Result { + ) -> Result { if keys_to_inputs.is_empty() { - return Err(SNARKError::EmptyBatch); + bail!(SNARKError::EmptyBatch); } proof.check_batch_sizes()?; @@ -645,11 +639,11 @@ where batch_sizes.insert(vk.id, batch_sizes_vec[i]); if public_inputs_i.is_empty() { - return Err(SNARKError::EmptyBatch); + bail!(SNARKError::EmptyBatch); } if public_inputs_i.len() != batch_sizes_vec[i] { - return Err(SNARKError::BatchSizeMismatch); + bail!(SNARKError::BatchSizeMismatch); } } @@ -670,17 +664,22 @@ where let non_zero_domains = AHPForR1CS::<_, SM>::cmp_non_zero_domains(&vk.circuit_info, max_non_zero_domain)?; max_non_zero_domain = non_zero_domains.max_non_zero_domain; - let input_domain = EvaluationDomain::::new(vk.circuit_info.num_public_inputs).unwrap(); + let input_domain = EvaluationDomain::::new(vk.circuit_info.num_public_inputs) + .ok_or(anyhow!("Failed to create EvaluationDomain from num_public_inputs"))?; input_domains.insert(vk.id, input_domain); + let input_fields = public_inputs_i + .iter() + .map(|input| input.borrow().to_field_elements()) + .collect::, _>>()?; + let (padded_public_inputs_i, parsed_public_inputs_i): (Vec<_>, Vec<_>) = { - public_inputs_i + input_fields .iter() .map(|input| { - let input = input.borrow().to_field_elements().unwrap(); let mut new_input = Vec::with_capacity((1 + input.len()).max(input_domain.size())); new_input.push(E::Fr::one()); - new_input.extend_from_slice(&input); + new_input.extend_from_slice(input); new_input.resize(input.len().max(input_domain.size()), E::Fr::zero()); if cfg!(debug_assertions) { println!("Number of padded public variables: {}", new_input.len()); @@ -700,10 
+699,10 @@ where inputs_and_batch_sizes.insert(vk.id, (batch_size, padded_public_vec[i].as_slice())); } let max_constraint_domain = - EvaluationDomain::::new(max_num_constraints).ok_or(SynthesisError::PolynomialDegreeTooLarge)?; + EvaluationDomain::::new(max_num_constraints).ok_or(SynthesisError::PolyTooLarge)?; let max_variable_domain = - EvaluationDomain::::new(max_num_variables).ok_or(SynthesisError::PolynomialDegreeTooLarge)?; - let max_non_zero_domain = max_non_zero_domain.ok_or(SynthesisError::PolynomialDegreeTooLarge)?; + EvaluationDomain::::new(max_num_variables).ok_or(SynthesisError::PolyTooLarge)?; + let max_non_zero_domain = max_non_zero_domain.ok_or(SynthesisError::PolyTooLarge)?; let comms = &proof.commitments; let proof_has_correct_zk_mode = if SM::ZK { @@ -743,8 +742,8 @@ where if SM::ZK { first_commitments.push(LabeledCommitment::new_with_info( - first_round_info.get("mask_poly").unwrap(), - comms.mask_poly.unwrap(), + first_round_info.get("mask_poly").ok_or(anyhow!("Missing mask_poly"))?, + comms.mask_poly.ok_or(anyhow!("Missing mask_poly"))?, )); } @@ -837,7 +836,6 @@ where // degree bounds because we know the committed index polynomial has the // correct degree. - // Gather commitments in one vector. let commitments: Vec<_> = circuit_commitments .into_iter() .flatten() diff --git a/algorithms/src/traits/snark.rs b/algorithms/src/traits/snark.rs index ede71ad08a..a5f48c456b 100644 --- a/algorithms/src/traits/snark.rs +++ b/algorithms/src/traits/snark.rs @@ -12,7 +12,7 @@ // See the License for the specific language governing permissions and // limitations under the License. 
-use crate::{errors::SNARKError, r1cs::ConstraintSynthesizer, AlgebraicSponge}; +use crate::{r1cs::ConstraintSynthesizer, AlgebraicSponge}; use snarkvm_fields::PrimeField; use snarkvm_utilities::{CanonicalDeserialize, CanonicalSerialize, FromBytes, ToBytes}; @@ -54,7 +54,7 @@ pub trait SNARK { type FiatShamirRng: AlgebraicSponge; type FSParameters; - fn universal_setup(config: usize) -> Result; + fn universal_setup(config: usize) -> Result; fn circuit_setup>( srs: &Self::UniversalSRS, @@ -66,7 +66,7 @@ pub trait SNARK { fs_parameters: &Self::FSParameters, verifying_key: &Self::VerifyingKey, proving_key: &Self::ProvingKey, - ) -> Result; + ) -> Result; fn prove, R: Rng + CryptoRng>( universal_prover: &Self::UniversalProver, @@ -74,7 +74,7 @@ pub trait SNARK { proving_key: &Self::ProvingKey, constraints: &C, rng: &mut R, - ) -> Result { + ) -> Result { let mut keys_to_constraints = BTreeMap::new(); keys_to_constraints.insert(proving_key, std::slice::from_ref(constraints)); Self::prove_batch(universal_prover, fs_parameters, &keys_to_constraints, rng) @@ -85,7 +85,7 @@ pub trait SNARK { fs_parameters: &Self::FSParameters, keys_to_constraints: &BTreeMap<&Self::ProvingKey, &[C]>, rng: &mut R, - ) -> Result; + ) -> Result; fn verify_vk>( universal_verifier: &Self::UniversalVerifier, @@ -93,7 +93,7 @@ pub trait SNARK { circuit: &C, verifying_key: &Self::VerifyingKey, certificate: &Self::Certificate, - ) -> Result; + ) -> Result; fn verify>( universal_verifier: &Self::UniversalVerifier, @@ -101,7 +101,7 @@ pub trait SNARK { verifying_key: &Self::VerifyingKey, input: B, proof: &Self::Proof, - ) -> Result { + ) -> Result { let mut keys_to_inputs = BTreeMap::new(); let inputs = [input]; keys_to_inputs.insert(verifying_key, &inputs[..]); @@ -113,5 +113,5 @@ pub trait SNARK { fs_parameters: &Self::FSParameters, keys_to_inputs: &BTreeMap<&Self::VerifyingKey, &[B]>, proof: &Self::Proof, - ) -> Result; + ) -> Result; } diff --git a/circuit/Cargo.toml b/circuit/Cargo.toml index 
459911dd14..801a7d7160 100644 --- a/circuit/Cargo.toml +++ b/circuit/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "snarkvm-circuit" -version = "0.16.3" +version = "0.16.15" authors = [ "The Aleo Team " ] description = "Circuits for a decentralized virtual machine" homepage = "https://aleo.org" @@ -25,28 +25,28 @@ edition = "2021" [dependencies.snarkvm-circuit-account] path = "./account" -version = "=0.16.3" +version = "=0.16.15" [dependencies.snarkvm-circuit-algorithms] path = "./algorithms" -version = "=0.16.3" +version = "=0.16.15" [dependencies.snarkvm-circuit-collections] path = "./collections" -version = "=0.16.3" +version = "=0.16.15" [dependencies.snarkvm-circuit-environment] path = "./environment" -version = "=0.16.3" +version = "=0.16.15" [dependencies.snarkvm-circuit-network] path = "./network" -version = "=0.16.3" +version = "=0.16.15" [dependencies.snarkvm-circuit-program] path = "./program" -version = "=0.16.3" +version = "=0.16.15" [dependencies.snarkvm-circuit-types] path = "./types" -version = "=0.16.3" +version = "=0.16.15" diff --git a/circuit/account/Cargo.toml b/circuit/account/Cargo.toml index 57fff57d3f..99ab62b6c4 100644 --- a/circuit/account/Cargo.toml +++ b/circuit/account/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "snarkvm-circuit-account" -version = "0.16.3" +version = "0.16.15" authors = [ "The Aleo Team " ] description = "Account circuit library for a decentralized virtual machine" license = "Apache-2.0" @@ -9,20 +9,20 @@ edition = "2021" [dependencies.console] package = "snarkvm-console-account" path = "../../console/account" -version = "=0.16.3" +version = "=0.16.15" optional = true [dependencies.snarkvm-circuit-algorithms] path = "../algorithms" -version = "=0.16.3" +version = "=0.16.15" [dependencies.snarkvm-circuit-network] path = "../network" -version = "=0.16.3" +version = "=0.16.15" [dependencies.snarkvm-circuit-types] path = "../types" -version = "=0.16.3" +version = "=0.16.15" [dev-dependencies.snarkvm-utilities] path = 
"../../utilities" diff --git a/circuit/algorithms/Cargo.toml b/circuit/algorithms/Cargo.toml index f8490154d2..918caade59 100644 --- a/circuit/algorithms/Cargo.toml +++ b/circuit/algorithms/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "snarkvm-circuit-algorithms" -version = "0.16.3" +version = "0.16.15" authors = [ "The Aleo Team " ] description = "Algorithm circuit library for a decentralized virtual machine" license = "Apache-2.0" @@ -9,16 +9,16 @@ edition = "2021" [dependencies.console] package = "snarkvm-console-algorithms" path = "../../console/algorithms" -version = "=0.16.3" +version = "=0.16.15" optional = true [dependencies.snarkvm-circuit-types] path = "../types" -version = "=0.16.3" +version = "=0.16.15" [dependencies.snarkvm-fields] path = "../../fields" -version = "=0.16.3" +version = "=0.16.15" default-features = false [dev-dependencies.anyhow] diff --git a/circuit/algorithms/src/bhp/hash_uncompressed.rs b/circuit/algorithms/src/bhp/hash_uncompressed.rs index 54a2edee22..2bcac4d09a 100644 --- a/circuit/algorithms/src/bhp/hash_uncompressed.rs +++ b/circuit/algorithms/src/bhp/hash_uncompressed.rs @@ -38,31 +38,32 @@ impl HashUncompres // Initialize a variable to store the hash from the current iteration. let mut digest = Group::zero(); + // Prepare a reusable vector for the preimage. + let mut preimage = Vec::with_capacity(num_hasher_bits); + // Compute the hash of the input. for (i, input_bits) in input.chunks(max_input_bits_per_iteration).enumerate() { // Determine if this is the first iteration. - let preimage = match i == 0 { + match i == 0 { // Construct the first iteration as: [ 0...0 || DOMAIN || LENGTH(INPUT) || INPUT[0..BLOCK_SIZE] ]. true => { // Initialize a vector for the hash preimage. 
- let mut preimage = Vec::with_capacity(num_hasher_bits); preimage.extend(self.domain.clone()); U64::constant(console::U64::new(input.len() as u64)).write_bits_le(&mut preimage); preimage.extend_from_slice(input_bits); - preimage } // Construct the subsequent iterations as: [ PREVIOUS_HASH[0..DATA_BITS] || INPUT[I * BLOCK_SIZE..(I + 1) * BLOCK_SIZE] ]. false => { // Initialize a vector for the hash preimage. - let mut preimage = Vec::with_capacity(num_hasher_bits); digest.to_x_coordinate().write_bits_le(&mut preimage); preimage.truncate(num_data_bits); preimage.extend_from_slice(input_bits); - preimage } - }; + } // Hash the preimage for this iteration. digest = self.hasher.hash_uncompressed(&preimage); + // Clear the preimage vector for the next iteration. + preimage.clear(); } digest diff --git a/circuit/algorithms/src/bhp/hasher/hash_uncompressed.rs b/circuit/algorithms/src/bhp/hasher/hash_uncompressed.rs index 5812bf25ad..4783a24d7b 100644 --- a/circuit/algorithms/src/bhp/hasher/hash_uncompressed.rs +++ b/circuit/algorithms/src/bhp/hasher/hash_uncompressed.rs @@ -14,6 +14,8 @@ use super::*; +use std::borrow::Cow; + impl HashUncompressed for BHPHasher { @@ -31,18 +33,27 @@ impl HashUncompres } // Ensure the input size is within the parameter size. - let mut input = input.to_vec(); - match input.len() <= Self::MAX_BITS { + let input = match input.len() <= Self::MAX_BITS { true => { // Pad the input to a multiple of `BHP_CHUNK_SIZE` for hashing. if input.len() % BHP_CHUNK_SIZE != 0 { + // Compute the number of padding bits. let padding = BHP_CHUNK_SIZE - (input.len() % BHP_CHUNK_SIZE); - input.resize(input.len() + padding, Boolean::constant(false)); - assert_eq!(input.len() % BHP_CHUNK_SIZE, 0, "Input must be a multiple of {BHP_CHUNK_SIZE}"); + // Pad the input with `false` bits. 
+ let mut padded_input = Vec::with_capacity(input.len() + padding); + padded_input.extend_from_slice(input); + padded_input.resize(input.len() + padding, Boolean::constant(false)); + // Ensure the input is a multiple of `BHP_CHUNK_SIZE`. + assert_eq!(padded_input.len() % BHP_CHUNK_SIZE, 0, "Input must be a multiple of {BHP_CHUNK_SIZE}"); + // Return the padded input. + Cow::Owned(padded_input) + } else { + // Return the input as a borrowed slice. + Cow::Borrowed(input) } } false => E::halt(format!("Inputs to this BHP cannot exceed {} bits", Self::MAX_BITS)), - } + }; // Declare the 1 constant field element. let one = Field::one(); diff --git a/circuit/algorithms/src/elligator2/encode.rs b/circuit/algorithms/src/elligator2/encode.rs index 0a4e43a0b7..62db70c64d 100644 --- a/circuit/algorithms/src/elligator2/encode.rs +++ b/circuit/algorithms/src/elligator2/encode.rs @@ -22,7 +22,7 @@ impl Elligator2 { debug_assert!(console::Group::::EDWARDS_D.legendre().is_qnr()); // Ensure the input is nonzero. - E::assert_neq(input, &Field::::zero()); + E::assert_neq(input, Field::::zero()); // Define `1` as a constant. 
let one = Field::one(); diff --git a/circuit/collections/Cargo.toml b/circuit/collections/Cargo.toml index 66358e7124..7a24023f22 100644 --- a/circuit/collections/Cargo.toml +++ b/circuit/collections/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "snarkvm-circuit-collections" -version = "0.16.3" +version = "0.16.15" authors = [ "The Aleo Team " ] description = "Collections circuit library for a decentralized virtual machine" license = "Apache-2.0" @@ -9,16 +9,16 @@ edition = "2021" [dependencies.console] package = "snarkvm-console-collections" path = "../../console/collections" -version = "=0.16.3" +version = "=0.16.15" optional = true [dependencies.snarkvm-circuit-algorithms] path = "../algorithms" -version = "=0.16.3" +version = "=0.16.15" [dependencies.snarkvm-circuit-types] path = "../types" -version = "=0.16.3" +version = "=0.16.15" [dev-dependencies.snarkvm-circuit-network] path = "../network" diff --git a/circuit/environment/Cargo.toml b/circuit/environment/Cargo.toml index 60d66a8794..eda3378021 100644 --- a/circuit/environment/Cargo.toml +++ b/circuit/environment/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "snarkvm-circuit-environment" -version = "0.16.3" +version = "0.16.15" authors = [ "The Aleo Team " ] description = "Circuit environment for a decentralized virtual machine" license = "Apache-2.0" @@ -14,32 +14,32 @@ harness = false [dependencies.console] package = "snarkvm-console-network" path = "../../console/network" -version = "=0.16.3" +version = "=0.16.15" [dependencies.snarkvm-algorithms] path = "../../algorithms" -version = "=0.16.3" +version = "=0.16.15" default-features = false features = [ "r1cs" ] [dependencies.snarkvm-circuit-environment-witness] path = "./witness" -version = "=0.16.3" +version = "=0.16.15" default-features = false [dependencies.snarkvm-curves] path = "../../curves" -version = "=0.16.3" +version = "=0.16.15" default-features = false [dependencies.snarkvm-fields] path = "../../fields" -version = "=0.16.3" +version = "=0.16.15" 
default-features = false [dependencies.snarkvm-utilities] path = "../../utilities" -version = "=0.16.3" +version = "=0.16.15" default-features = false [dependencies.indexmap] diff --git a/circuit/environment/src/circuit.rs b/circuit/environment/src/circuit.rs index 4dd58fdc65..be60fe946b 100644 --- a/circuit/environment/src/circuit.rs +++ b/circuit/environment/src/circuit.rs @@ -14,14 +14,16 @@ use crate::{helpers::Constraint, Mode, *}; -use core::{cell::RefCell, fmt}; -use std::rc::Rc; +use core::{ + cell::{Cell, RefCell}, + fmt, +}; type Field = ::Field; thread_local! { - pub(super) static CIRCUIT: Rc>> = Rc::new(RefCell::new(R1CS::new())); - pub(super) static IN_WITNESS: Rc> = Rc::new(RefCell::new(false)); + pub(super) static CIRCUIT: RefCell> = RefCell::new(R1CS::new()); + pub(super) static IN_WITNESS: Cell = Cell::new(false); pub(super) static ZERO: LinearCombination = LinearCombination::zero(); pub(super) static ONE: LinearCombination = LinearCombination::one(); } @@ -49,11 +51,11 @@ impl Environment for Circuit { fn new_variable(mode: Mode, value: Self::BaseField) -> Variable { IN_WITNESS.with(|in_witness| { // Ensure we are not in witness mode. - if !(*(**in_witness).borrow()) { + if !in_witness.get() { CIRCUIT.with(|circuit| match mode { - Mode::Constant => (**circuit).borrow_mut().new_constant(value), - Mode::Public => (**circuit).borrow_mut().new_public(value), - Mode::Private => (**circuit).borrow_mut().new_private(value), + Mode::Constant => circuit.borrow_mut().new_constant(value), + Mode::Public => circuit.borrow_mut().new_public(value), + Mode::Private => circuit.borrow_mut().new_private(value), }) } else { Self::halt("Tried to initialize a new variable in witness mode") @@ -65,13 +67,13 @@ impl Environment for Circuit { fn new_witness Output::Primitive, Output: Inject>(mode: Mode, logic: Fn) -> Output { IN_WITNESS.with(|in_witness| { // Set the entire environment to witness mode. 
- *(**in_witness).borrow_mut() = true; + in_witness.replace(true); // Run the logic. let output = logic(); // Return the entire environment from witness mode. - *(**in_witness).borrow_mut() = false; + in_witness.replace(false); Inject::new(mode, output) }) @@ -108,11 +110,11 @@ impl Environment for Circuit { { IN_WITNESS.with(|in_witness| { // Ensure we are not in witness mode. - if !(*(**in_witness).borrow()) { + if !in_witness.get() { CIRCUIT.with(|circuit| { // Set the entire environment to the new scope. let name = name.into(); - if let Err(error) = (**circuit).borrow_mut().push_scope(&name) { + if let Err(error) = circuit.borrow_mut().push_scope(&name) { Self::halt(error) } @@ -120,7 +122,7 @@ impl Environment for Circuit { let output = logic(); // Return the entire environment to the previous scope. - if let Err(error) = (**circuit).borrow_mut().pop_scope(name) { + if let Err(error) = circuit.borrow_mut().pop_scope(name) { Self::halt(error) } @@ -142,7 +144,7 @@ impl Environment for Circuit { { IN_WITNESS.with(|in_witness| { // Ensure we are not in witness mode. - if !(*(**in_witness).borrow()) { + if !in_witness.get() { CIRCUIT.with(|circuit| { let (a, b, c) = constraint(); let (a, b, c) = (a.into(), b.into(), c.into()); @@ -167,9 +169,9 @@ impl Environment for Circuit { } false => { // Construct the constraint object. - let constraint = Constraint((**circuit).borrow().scope(), a, b, c); + let constraint = Constraint(circuit.borrow().scope(), a, b, c); // Append the constraint. - (**circuit).borrow_mut().enforce(constraint) + circuit.borrow_mut().enforce(constraint) } } }); @@ -181,62 +183,62 @@ impl Environment for Circuit { /// Returns `true` if all constraints in the environment are satisfied. fn is_satisfied() -> bool { - CIRCUIT.with(|circuit| (**circuit).borrow().is_satisfied()) + CIRCUIT.with(|circuit| circuit.borrow().is_satisfied()) } /// Returns `true` if all constraints in the current scope are satisfied. 
fn is_satisfied_in_scope() -> bool { - CIRCUIT.with(|circuit| (**circuit).borrow().is_satisfied_in_scope()) + CIRCUIT.with(|circuit| circuit.borrow().is_satisfied_in_scope()) } /// Returns the number of constants in the entire circuit. fn num_constants() -> u64 { - CIRCUIT.with(|circuit| (**circuit).borrow().num_constants()) + CIRCUIT.with(|circuit| circuit.borrow().num_constants()) } /// Returns the number of public variables in the entire circuit. fn num_public() -> u64 { - CIRCUIT.with(|circuit| (**circuit).borrow().num_public()) + CIRCUIT.with(|circuit| circuit.borrow().num_public()) } /// Returns the number of private variables in the entire circuit. fn num_private() -> u64 { - CIRCUIT.with(|circuit| (**circuit).borrow().num_private()) + CIRCUIT.with(|circuit| circuit.borrow().num_private()) } /// Returns the number of constraints in the entire circuit. fn num_constraints() -> u64 { - CIRCUIT.with(|circuit| (**circuit).borrow().num_constraints()) + CIRCUIT.with(|circuit| circuit.borrow().num_constraints()) } /// Returns the number of nonzeros in the entire circuit. fn num_nonzeros() -> (u64, u64, u64) { - CIRCUIT.with(|circuit| (**circuit).borrow().num_nonzeros()) + CIRCUIT.with(|circuit| circuit.borrow().num_nonzeros()) } /// Returns the number of constants for the current scope. fn num_constants_in_scope() -> u64 { - CIRCUIT.with(|circuit| (**circuit).borrow().num_constants_in_scope()) + CIRCUIT.with(|circuit| circuit.borrow().num_constants_in_scope()) } /// Returns the number of public variables for the current scope. fn num_public_in_scope() -> u64 { - CIRCUIT.with(|circuit| (**circuit).borrow().num_public_in_scope()) + CIRCUIT.with(|circuit| circuit.borrow().num_public_in_scope()) } /// Returns the number of private variables for the current scope. 
fn num_private_in_scope() -> u64 { - CIRCUIT.with(|circuit| (**circuit).borrow().num_private_in_scope()) + CIRCUIT.with(|circuit| circuit.borrow().num_private_in_scope()) } /// Returns the number of constraints for the current scope. fn num_constraints_in_scope() -> u64 { - CIRCUIT.with(|circuit| (**circuit).borrow().num_constraints_in_scope()) + CIRCUIT.with(|circuit| circuit.borrow().num_constraints_in_scope()) } /// Returns the number of nonzeros for the current scope. fn num_nonzeros_in_scope() -> (u64, u64, u64) { - CIRCUIT.with(|circuit| (**circuit).borrow().num_nonzeros_in_scope()) + CIRCUIT.with(|circuit| circuit.borrow().num_nonzeros_in_scope()) } /// Halts the program from further synthesis, evaluation, and execution in the current environment. @@ -252,10 +254,10 @@ impl Environment for Circuit { fn inject_r1cs(r1cs: R1CS) { CIRCUIT.with(|circuit| { // Ensure the circuit is empty before injecting. - assert_eq!(0, (**circuit).borrow().num_constants()); - assert_eq!(1, (**circuit).borrow().num_public()); - assert_eq!(0, (**circuit).borrow().num_private()); - assert_eq!(0, (**circuit).borrow().num_constraints()); + assert_eq!(0, circuit.borrow().num_constants()); + assert_eq!(1, circuit.borrow().num_public()); + assert_eq!(0, circuit.borrow().num_private()); + assert_eq!(0, circuit.borrow().num_constraints()); // Inject the R1CS instance. let r1cs = circuit.replace(r1cs); // Ensure the circuit that was replaced is empty. @@ -272,14 +274,14 @@ impl Environment for Circuit { fn eject_r1cs_and_reset() -> R1CS { CIRCUIT.with(|circuit| { // Reset the witness mode. - IN_WITNESS.with(|in_witness| *(**in_witness).borrow_mut() = false); + IN_WITNESS.with(|in_witness| in_witness.replace(false)); // Eject the R1CS instance. let r1cs = circuit.replace(R1CS::<::BaseField>::new()); // Ensure the circuit is now empty. 
- assert_eq!(0, (**circuit).borrow().num_constants()); - assert_eq!(1, (**circuit).borrow().num_public()); - assert_eq!(0, (**circuit).borrow().num_private()); - assert_eq!(0, (**circuit).borrow().num_constraints()); + assert_eq!(0, circuit.borrow().num_constants()); + assert_eq!(1, circuit.borrow().num_public()); + assert_eq!(0, circuit.borrow().num_private()); + assert_eq!(0, circuit.borrow().num_constraints()); // Return the R1CS instance. r1cs }) @@ -291,13 +293,13 @@ impl Environment for Circuit { fn eject_assignment_and_reset() -> Assignment<::Field> { CIRCUIT.with(|circuit| { // Reset the witness mode. - IN_WITNESS.with(|in_witness| *(**in_witness).borrow_mut() = false); + IN_WITNESS.with(|in_witness| in_witness.replace(false)); // Eject the R1CS instance. let r1cs = circuit.replace(R1CS::<::BaseField>::new()); - assert_eq!(0, (**circuit).borrow().num_constants()); - assert_eq!(1, (**circuit).borrow().num_public()); - assert_eq!(0, (**circuit).borrow().num_private()); - assert_eq!(0, (**circuit).borrow().num_constraints()); + assert_eq!(0, circuit.borrow().num_constants()); + assert_eq!(1, circuit.borrow().num_public()); + assert_eq!(0, circuit.borrow().num_private()); + assert_eq!(0, circuit.borrow().num_constraints()); // Convert the R1CS instance to an assignment. Assignment::from(r1cs) }) @@ -307,19 +309,19 @@ impl Environment for Circuit { fn reset() { CIRCUIT.with(|circuit| { // Reset the witness mode. 
- IN_WITNESS.with(|in_witness| *(**in_witness).borrow_mut() = false); - *(**circuit).borrow_mut() = R1CS::<::BaseField>::new(); - assert_eq!(0, (**circuit).borrow().num_constants()); - assert_eq!(1, (**circuit).borrow().num_public()); - assert_eq!(0, (**circuit).borrow().num_private()); - assert_eq!(0, (**circuit).borrow().num_constraints()); + IN_WITNESS.with(|in_witness| in_witness.replace(false)); + *circuit.borrow_mut() = R1CS::<::BaseField>::new(); + assert_eq!(0, circuit.borrow().num_constants()); + assert_eq!(1, circuit.borrow().num_public()); + assert_eq!(0, circuit.borrow().num_private()); + assert_eq!(0, circuit.borrow().num_constraints()); }); } } impl fmt::Display for Circuit { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - CIRCUIT.with(|circuit| write!(f, "{}", (**circuit).borrow())) + CIRCUIT.with(|circuit| write!(f, "{}", circuit.borrow())) } } diff --git a/circuit/environment/src/helpers/assignment.rs b/circuit/environment/src/helpers/assignment.rs index ec69502eec..acea1bc67e 100644 --- a/circuit/environment/src/helpers/assignment.rs +++ b/circuit/environment/src/helpers/assignment.rs @@ -29,8 +29,14 @@ impl From<&crate::Variable> for AssignmentVariable { fn from(variable: &crate::Variable) -> Self { match variable { crate::Variable::Constant(value) => Self::Constant(**value), - crate::Variable::Public(index, _) => Self::Public(*index), - crate::Variable::Private(index, _) => Self::Private(*index), + crate::Variable::Public(index_value) => { + let (index, _value) = index_value.as_ref(); + Self::Public(*index) + } + crate::Variable::Private(index_value) => { + let (index, _value) = index_value.as_ref(); + Self::Private(*index) + } } } } @@ -38,7 +44,7 @@ impl From<&crate::Variable> for AssignmentVariable { #[derive(Clone, Debug)] pub struct AssignmentLC { constant: F, - terms: IndexMap, F>, + terms: Vec<(AssignmentVariable, F)>, } impl From<&crate::LinearCombination> for AssignmentLC { @@ -60,7 +66,7 @@ impl AssignmentLC { } /// Returns 
the terms of the linear combination. - pub const fn terms(&self) -> &IndexMap, F> { + pub const fn terms(&self) -> &Vec<(AssignmentVariable, F)> { &self.terms } @@ -78,8 +84,8 @@ impl AssignmentLC { /// and constraint assignments. #[derive(Clone, Debug)] pub struct Assignment { - public: IndexMap, - private: IndexMap, + public: Vec<(Index, F)>, + private: Vec<(Index, F)>, constraints: Vec<(AssignmentLC, AssignmentLC, AssignmentLC)>, } @@ -103,12 +109,12 @@ impl From> for Assignment { impl Assignment { /// Returns the public inputs of the assignment. - pub const fn public_inputs(&self) -> &IndexMap { + pub const fn public_inputs(&self) -> &Vec<(Index, F)> { &self.public } /// Returns the private inputs of the assignment. - pub const fn private_inputs(&self) -> &IndexMap { + pub const fn private_inputs(&self) -> &Vec<(Index, F)> { &self.private } diff --git a/circuit/environment/src/helpers/converter.rs b/circuit/environment/src/helpers/converter.rs index 30e75b8601..2466c5954d 100644 --- a/circuit/environment/src/helpers/converter.rs +++ b/circuit/environment/src/helpers/converter.rs @@ -30,7 +30,7 @@ impl snarkvm_algorithms::r1cs::ConstraintSynthesizer for Circuit { &self, cs: &mut CS, ) -> Result<(), snarkvm_algorithms::r1cs::SynthesisError> { - crate::circuit::CIRCUIT.with(|circuit| (*(**circuit).borrow()).generate_constraints(cs)) + crate::circuit::CIRCUIT.with(|circuit| circuit.borrow().generate_constraints(cs)) } } @@ -50,13 +50,15 @@ impl R1CS { // Allocate the public variables. 
for (i, public) in self.to_public_variables().iter().enumerate() { match public { - Variable::Public(index, value) => { + Variable::Public(index_value) => { + let (index, value) = index_value.as_ref(); + assert_eq!( i as u64, *index, "Public variables in first system must be processed in lexicographic order" ); - let gadget = cs.alloc_input(|| format!("Public {i}"), || Ok(**value))?; + let gadget = cs.alloc_input(|| format!("Public {i}"), || Ok(*value))?; assert_eq!( snarkvm_algorithms::r1cs::Index::Public((index + 1) as usize), @@ -75,13 +77,15 @@ impl R1CS { // Allocate the private variables. for (i, private) in self.to_private_variables().iter().enumerate() { match private { - Variable::Private(index, value) => { + Variable::Private(index_value) => { + let (index, value) = index_value.as_ref(); + assert_eq!( i as u64, *index, "Private variables in first system must be processed in lexicographic order" ); - let gadget = cs.alloc(|| format!("Private {i}"), || Ok(**value))?; + let gadget = cs.alloc(|| format!("Private {i}"), || Ok(*value))?; assert_eq!( snarkvm_algorithms::r1cs::Index::Private(i), @@ -113,7 +117,8 @@ impl R1CS { "Failed during constraint translation. 
The first system by definition cannot have constant variables in the terms" ) } - Variable::Public(index, _) => { + Variable::Public(index_value) => { + let (index, _value) = index_value.as_ref(); let gadget = converter.public.get(index).unwrap(); assert_eq!( snarkvm_algorithms::r1cs::Index::Public((index + 1) as usize), @@ -122,7 +127,8 @@ impl R1CS { ); linear_combination += (*coefficient, *gadget); } - Variable::Private(index, _) => { + Variable::Private(index_value) => { + let (index, _value) = index_value.as_ref(); let gadget = converter.private.get(index).unwrap(); assert_eq!( snarkvm_algorithms::r1cs::Index::Private(*index as usize), diff --git a/circuit/environment/src/helpers/linear_combination.rs b/circuit/environment/src/helpers/linear_combination.rs index 27f23a7513..4d33d210ff 100644 --- a/circuit/environment/src/helpers/linear_combination.rs +++ b/circuit/environment/src/helpers/linear_combination.rs @@ -19,7 +19,6 @@ use core::{ fmt, ops::{Add, AddAssign, Mul, Neg, Sub}, }; -use indexmap::{map::Entry, IndexMap}; // Before high level program operations are converted into constraints, they are first tracked as linear combinations. // Each linear combination corresponds to a portion or all of a single row of an R1CS matrix, and consists of: @@ -34,7 +33,8 @@ use indexmap::{map::Entry, IndexMap}; #[derive(Clone)] pub struct LinearCombination { constant: F, - terms: IndexMap, F>, + /// The list of terms is kept sorted in order to speed up lookups. + terms: Vec<(Variable, F)>, /// The value of this linear combination, defined as the sum of the `terms` and `constant`. 
value: F, } @@ -60,7 +60,7 @@ impl LinearCombination { pub fn is_public(&self) -> bool { self.constant.is_zero() && self.terms.len() == 1 - && match self.terms.iter().next() { + && match self.terms.first() { Some((Variable::Public(..), coefficient)) => *coefficient == F::one(), _ => false, } @@ -130,7 +130,7 @@ impl LinearCombination { } /// Returns the terms (excluding the constant value) in the linear combination. - pub(super) fn to_terms(&self) -> &IndexMap, F> { + pub(super) fn to_terms(&self) -> &[(Variable, F)] { &self.terms } @@ -197,18 +197,18 @@ impl From<&[Variable]> for LinearCombination { match variable.is_constant() { true => output.constant += variable.value(), false => { - match output.terms.entry(variable.clone()) { - Entry::Occupied(mut entry) => { + match output.terms.binary_search_by(|(v, _)| v.cmp(variable)) { + Ok(idx) => { // Increment the existing coefficient by 1. - *entry.get_mut() += F::one(); + output.terms[idx].1 += F::one(); // If the coefficient of the term is now zero, remove the entry. - if entry.get().is_zero() { - entry.remove_entry(); + if output.terms[idx].1.is_zero() { + output.terms.remove(idx); } } - Entry::Vacant(entry) => { + Err(idx) => { // Insert the variable and a coefficient of 1 as a new term. - entry.insert(F::one()); + output.terms.insert(idx, (variable.clone(), F::one())); } } } @@ -336,18 +336,18 @@ impl AddAssign<&LinearCombination> for LinearCombination { match variable.is_constant() { true => panic!("Malformed linear combination found"), false => { - match self.terms.entry(variable.clone()) { - Entry::Occupied(mut entry) => { + match self.terms.binary_search_by(|(v, _)| v.cmp(variable)) { + Ok(idx) => { // Add the coefficient to the existing coefficient for this term. - *entry.get_mut() += *coefficient; + self.terms[idx].1 += *coefficient; // If the coefficient of the term is now zero, remove the entry. 
- if entry.get().is_zero() { - entry.remove_entry(); + if self.terms[idx].1.is_zero() { + self.terms.remove(idx); } } - Entry::Vacant(entry) => { + Err(idx) => { // Insert the variable and coefficient as a new term. - entry.insert(*coefficient); + self.terms.insert(idx, (variable.clone(), *coefficient)); } } } @@ -467,11 +467,7 @@ impl fmt::Debug for LinearCombination { fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { let mut output = format!("Constant({})", self.constant); - // Sort the terms. - let mut terms = self.terms.clone(); - terms.sort_keys(); - - for (variable, coefficient) in &terms { + for (variable, coefficient) in &self.terms { output += &match (variable.mode(), coefficient.is_one()) { (Mode::Constant, _) => panic!("Malformed linear combination at: ({coefficient} * {variable:?})"), (_, true) => format!(" + {variable:?}"), @@ -549,7 +545,7 @@ mod tests { let two = one + one; let four = two + two; - let start = LinearCombination::from(Variable::Public(1, Rc::new(one))); + let start = LinearCombination::from(Variable::Public(Rc::new((1, one)))); assert!(!start.is_constant()); assert_eq!(one, start.value()); @@ -559,7 +555,7 @@ mod tests { assert_eq!(zero, candidate.constant); assert_eq!(1, candidate.terms.len()); - let (candidate_variable, candidate_coefficient) = candidate.terms.iter().next().unwrap(); + let (candidate_variable, candidate_coefficient) = candidate.terms.first().unwrap(); assert!(candidate_variable.is_public()); assert_eq!(one, candidate_variable.value()); assert_eq!(four, *candidate_coefficient); diff --git a/circuit/environment/src/helpers/r1cs.rs b/circuit/environment/src/helpers/r1cs.rs index 7dc324b14a..d08adbdfdb 100644 --- a/circuit/environment/src/helpers/r1cs.rs +++ b/circuit/environment/src/helpers/r1cs.rs @@ -37,7 +37,7 @@ impl R1CS { pub(crate) fn new() -> Self { Self { constants: Default::default(), - public: vec![Variable::Public(0u64, Rc::new(F::one()))], + public: vec![Variable::Public(Rc::new((0u64, 
F::one())))], private: Default::default(), constraints: Default::default(), counter: Default::default(), @@ -65,7 +65,7 @@ impl R1CS { /// Returns a new public variable with the given value and scope. pub(crate) fn new_public(&mut self, value: F) -> Variable { - let variable = Variable::Public(self.public.len() as u64, Rc::new(value)); + let variable = Variable::Public(Rc::new((self.public.len() as u64, value))); self.public.push(variable.clone()); self.counter.increment_public(); variable @@ -73,7 +73,7 @@ impl R1CS { /// Returns a new private variable with the given value and scope. pub(crate) fn new_private(&mut self, value: F) -> Variable { - let variable = Variable::Private(self.private.len() as u64, Rc::new(value)); + let variable = Variable::Private(Rc::new((self.private.len() as u64, value))); self.private.push(variable.clone()); self.counter.increment_private(); variable @@ -91,9 +91,37 @@ impl R1CS { self.counter.add_constraint(constraint); } - /// Returns `true` if all constraints in the environment are satisfied. + /// Returns `true` if all of the constraints are satisfied. + /// + /// In addition, when in debug mode, this function also checks that + /// all constraints use variables corresponding to the declared variables. pub fn is_satisfied(&self) -> bool { - self.constraints.iter().all(|constraint| constraint.is_satisfied()) + // Ensure all constraints are satisfied. + let constraints_satisfied = self.constraints.iter().all(|constraint| constraint.is_satisfied()); + if !constraints_satisfied { + return false; + } + + // In debug mode, ensure all constraints use variables corresponding to the declared variables. 
+ #[cfg(not(debug_assertions))] + return true; + #[cfg(debug_assertions)] + self.constraints.iter().all(|constraint| { + let (a, b, c) = constraint.to_terms(); + [a, b, c].into_iter().all(|lc| { + lc.to_terms().iter().all(|(variable, _)| match variable { + Variable::Constant(_value) => false, // terms should not contain Constants + Variable::Private(private) => { + let (index, value) = private.as_ref(); + self.private.get(*index as usize).map_or_else(|| false, |v| v.value() == *value) + } + Variable::Public(public) => { + let (index, value) = public.as_ref(); + self.public.get(*index as usize).map_or_else(|| false, |v| v.value() == *value) + } + }) + }) + }) } /// Returns `true` if all constraints in the current scope are satisfied. diff --git a/circuit/environment/src/helpers/variable.rs b/circuit/environment/src/helpers/variable.rs index 6b130303a7..6dd7012af4 100644 --- a/circuit/environment/src/helpers/variable.rs +++ b/circuit/environment/src/helpers/variable.rs @@ -27,8 +27,8 @@ pub type Index = u64; #[derive(Clone, PartialEq, Eq, Hash)] pub enum Variable { Constant(Rc), - Public(Index, Rc), - Private(Index, Rc), + Public(Rc<(Index, F)>), + Private(Rc<(Index, F)>), } impl Variable { @@ -70,8 +70,10 @@ impl Variable { pub fn index(&self) -> Index { match self { Self::Constant(..) => 0, - Self::Public(index, ..) => *index, - Self::Private(index, ..) 
=> *index, + Self::Public(index_value) | Self::Private(index_value) => { + let (index, _value) = index_value.as_ref(); + *index + } } } @@ -81,8 +83,10 @@ impl Variable { pub fn value(&self) -> F { match self { Self::Constant(value) => **value, - Self::Public(_, value) => **value, - Self::Private(_, value) => **value, + Self::Public(index_value) | Self::Private(index_value) => { + let (_index, value) = index_value.as_ref(); + *value + } } } } @@ -259,8 +263,14 @@ impl fmt::Debug for Variable { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "{}", match self { Self::Constant(value) => format!("Constant({value})"), - Self::Public(index, value) => format!("Public({index}, {value})"), - Self::Private(index, value) => format!("Private({index}, {value})"), + Self::Public(index_value) => { + let (index, value) = index_value.as_ref(); + format!("Public({index}, {value})") + } + Self::Private(index_value) => { + let (index, value) = index_value.as_ref(); + format!("Private({index}, {value})") + } }) } } @@ -277,6 +287,6 @@ mod tests { #[test] fn test_size() { - assert_eq!(24, std::mem::size_of::::BaseField>>()); + assert_eq!(16, std::mem::size_of::::BaseField>>()); } } diff --git a/circuit/environment/witness/Cargo.toml b/circuit/environment/witness/Cargo.toml index b5f6d04202..64c87aca08 100644 --- a/circuit/environment/witness/Cargo.toml +++ b/circuit/environment/witness/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "snarkvm-circuit-environment-witness" -version = "0.16.3" +version = "0.16.15" authors = [ "The Aleo Team " ] description = "A procedural macro to construct a witness in an environment" license = "Apache-2.0" diff --git a/circuit/network/Cargo.toml b/circuit/network/Cargo.toml index 10d0a9e6b7..6d9e086b43 100644 --- a/circuit/network/Cargo.toml +++ b/circuit/network/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "snarkvm-circuit-network" -version = "0.16.3" +version = "0.16.15" authors = [ "The Aleo Team " ] description = "Network circuit library 
for a decentralized virtual machine" license = "Apache-2.0" @@ -9,20 +9,20 @@ edition = "2021" [dependencies.console] package = "snarkvm-console-network" path = "../../console/network" -version = "=0.16.3" +version = "=0.16.15" optional = true [dependencies.snarkvm-circuit-algorithms] path = "../algorithms" -version = "=0.16.3" +version = "=0.16.15" [dependencies.snarkvm-circuit-collections] path = "../collections" -version = "=0.16.3" +version = "=0.16.15" [dependencies.snarkvm-circuit-types] path = "../types" -version = "=0.16.3" +version = "=0.16.15" [dev-dependencies.snarkvm-console-types] path = "../../console/types" @@ -30,3 +30,4 @@ path = "../../console/types" [features] default = [ "enable_console" ] enable_console = [ "console" ] +wasm = [ "console/wasm" ] diff --git a/circuit/program/Cargo.toml b/circuit/program/Cargo.toml index 62bffe63c0..7b0e18ce2c 100644 --- a/circuit/program/Cargo.toml +++ b/circuit/program/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "snarkvm-circuit-program" -version = "0.16.3" +version = "0.16.15" authors = [ "The Aleo Team " ] description = "Program circuit library for a decentralized virtual machine" license = "Apache-2.0" @@ -9,32 +9,32 @@ edition = "2021" [dependencies.console] package = "snarkvm-console-program" path = "../../console/program" -version = "=0.16.3" +version = "=0.16.15" optional = true [dependencies.snarkvm-circuit-account] path = "../account" -version = "=0.16.3" +version = "=0.16.15" [dependencies.snarkvm-circuit-algorithms] path = "../algorithms" -version = "=0.16.3" +version = "=0.16.15" [dependencies.snarkvm-circuit-collections] path = "../collections" -version = "=0.16.3" +version = "=0.16.15" [dependencies.snarkvm-circuit-network] path = "../network" -version = "=0.16.3" +version = "=0.16.15" [dependencies.snarkvm-circuit-types] path = "../types" -version = "=0.16.3" +version = "=0.16.15" [dependencies.snarkvm-utilities] path = "../../utilities" -version = "=0.16.3" +version = "=0.16.15" 
[dependencies.paste] version = "1.0" diff --git a/circuit/program/src/state_path/mod.rs b/circuit/program/src/state_path/mod.rs index d8cf025aa6..6e8a30ed74 100644 --- a/circuit/program/src/state_path/mod.rs +++ b/circuit/program/src/state_path/mod.rs @@ -191,16 +191,16 @@ mod tests { #[test] fn test_state_path_new_constant() -> Result<()> { - check_new(Mode::Constant, 446, 1, 0, 0) + check_new(Mode::Constant, 450, 1, 0, 0) } #[test] fn test_state_path_new_public() -> Result<()> { - check_new(Mode::Public, 0, 447, 0, 376) + check_new(Mode::Public, 0, 451, 0, 376) } #[test] fn test_state_path_new_private() -> Result<()> { - check_new(Mode::Private, 0, 1, 446, 376) + check_new(Mode::Private, 0, 1, 450, 376) } } diff --git a/circuit/program/src/state_path/verify.rs b/circuit/program/src/state_path/verify.rs index f4c6e624cc..9101486ff9 100644 --- a/circuit/program/src/state_path/verify.rs +++ b/circuit/program/src/state_path/verify.rs @@ -225,43 +225,43 @@ mod tests { #[test] fn test_state_path_verify_global_constant() -> Result<()> { - check_verify_global(Mode::Constant, true, 106309, 1, 2, 2)?; - check_verify_global(Mode::Constant, false, 106309, 1, 2, 2) + check_verify_global(Mode::Constant, true, 112709, 1, 2, 2)?; + check_verify_global(Mode::Constant, false, 112709, 1, 2, 2) } #[test] fn test_state_path_verify_global_public() -> Result<()> { - check_verify_global(Mode::Public, true, 27814, 449, 123343, 123982)?; - check_verify_global(Mode::Public, false, 27814, 449, 123343, 123982) + check_verify_global(Mode::Public, true, 29450, 453, 130867, 131522)?; + check_verify_global(Mode::Public, false, 29450, 453, 130867, 131522) } #[test] fn test_state_path_verify_global_private() -> Result<()> { - check_verify_global(Mode::Private, true, 27814, 1, 123791, 123982)?; - check_verify_global(Mode::Private, false, 27814, 1, 123791, 123982) + check_verify_global(Mode::Private, true, 29450, 1, 131319, 131522)?; + check_verify_global(Mode::Private, false, 29450, 1, 131319, 
131522) } #[test] fn test_state_path_verify_local_constant() -> Result<()> { - check_verify_local(Mode::Constant, false, true, 106309, 1, 2, 2)?; - check_verify_local(Mode::Constant, false, false, 106309, 1, 2, 2)?; - check_verify_local(Mode::Constant, true, true, 106309, 1, 2, 2)?; - check_verify_local(Mode::Constant, true, false, 106309, 1, 2, 2) + check_verify_local(Mode::Constant, false, true, 112709, 1, 2, 2)?; + check_verify_local(Mode::Constant, false, false, 112709, 1, 2, 2)?; + check_verify_local(Mode::Constant, true, true, 112709, 1, 2, 2)?; + check_verify_local(Mode::Constant, true, false, 112709, 1, 2, 2) } #[test] fn test_state_path_verify_local_public() -> Result<()> { - check_verify_local(Mode::Public, false, true, 27814, 449, 123343, 123982)?; - check_verify_local(Mode::Public, false, false, 27814, 449, 123343, 123982)?; - check_verify_local(Mode::Public, true, true, 27814, 449, 123343, 123982)?; - check_verify_local(Mode::Public, true, false, 27814, 449, 123343, 123982) + check_verify_local(Mode::Public, false, true, 29450, 453, 130867, 131522)?; + check_verify_local(Mode::Public, false, false, 29450, 453, 130867, 131522)?; + check_verify_local(Mode::Public, true, true, 29450, 453, 130867, 131522)?; + check_verify_local(Mode::Public, true, false, 29450, 453, 130867, 131522) } #[test] fn test_state_path_verify_local_private() -> Result<()> { - check_verify_local(Mode::Private, false, true, 27814, 1, 123791, 123982)?; - check_verify_local(Mode::Private, false, false, 27814, 1, 123791, 123982)?; - check_verify_local(Mode::Private, true, true, 27814, 1, 123791, 123982)?; - check_verify_local(Mode::Private, true, false, 27814, 1, 123791, 123982) + check_verify_local(Mode::Private, false, true, 29450, 1, 131319, 131522)?; + check_verify_local(Mode::Private, false, false, 29450, 1, 131319, 131522)?; + check_verify_local(Mode::Private, true, true, 29450, 1, 131319, 131522)?; + check_verify_local(Mode::Private, true, false, 29450, 1, 131319, 131522) } } 
diff --git a/circuit/types/Cargo.toml b/circuit/types/Cargo.toml index fa86c8adf7..8650ccc513 100644 --- a/circuit/types/Cargo.toml +++ b/circuit/types/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "snarkvm-circuit-types" -version = "0.16.3" +version = "0.16.15" authors = [ "The Aleo Team " ] description = "Primitive circuit for a decentralized virtual machine" license = "Apache-2.0" @@ -8,35 +8,35 @@ edition = "2021" [dependencies.snarkvm-circuit-environment] path = "../environment" -version = "=0.16.3" +version = "=0.16.15" [dependencies.snarkvm-circuit-types-address] path = "./address" -version = "=0.16.3" +version = "=0.16.15" [dependencies.snarkvm-circuit-types-boolean] path = "./boolean" -version = "=0.16.3" +version = "=0.16.15" [dependencies.snarkvm-circuit-types-field] path = "./field" -version = "=0.16.3" +version = "=0.16.15" [dependencies.snarkvm-circuit-types-group] path = "./group" -version = "=0.16.3" +version = "=0.16.15" [dependencies.snarkvm-circuit-types-integers] path = "./integers" -version = "=0.16.3" +version = "=0.16.15" [dependencies.snarkvm-circuit-types-scalar] path = "./scalar" -version = "=0.16.3" +version = "=0.16.15" [dependencies.snarkvm-circuit-types-string] path = "./string" -version = "=0.16.3" +version = "=0.16.15" [dev-dependencies.console] package = "snarkvm-console" diff --git a/circuit/types/address/Cargo.toml b/circuit/types/address/Cargo.toml index 25c795f6a2..50e4b7c70b 100644 --- a/circuit/types/address/Cargo.toml +++ b/circuit/types/address/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "snarkvm-circuit-types-address" -version = "0.16.3" +version = "0.16.15" authors = [ "The Aleo Team " ] description = "Address circuit for a decentralized virtual machine" license = "Apache-2.0" @@ -9,28 +9,28 @@ edition = "2021" [dependencies.console] package = "snarkvm-console-types-address" path = "../../../console/types/address" -version = "=0.16.3" +version = "=0.16.15" optional = true [dependencies.snarkvm-circuit-environment] path = 
"../../environment" -version = "=0.16.3" +version = "=0.16.15" [dependencies.snarkvm-circuit-types-boolean] path = "../boolean" -version = "=0.16.3" +version = "=0.16.15" [dependencies.snarkvm-circuit-types-field] path = "../field" -version = "=0.16.3" +version = "=0.16.15" [dependencies.snarkvm-circuit-types-group] path = "../group" -version = "=0.16.3" +version = "=0.16.15" [dependencies.snarkvm-circuit-types-scalar] path = "../scalar" -version = "=0.16.3" +version = "=0.16.15" [features] default = [ "enable_console" ] diff --git a/circuit/types/boolean/Cargo.toml b/circuit/types/boolean/Cargo.toml index ecd072252e..d357cf45b0 100644 --- a/circuit/types/boolean/Cargo.toml +++ b/circuit/types/boolean/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "snarkvm-circuit-types-boolean" -version = "0.16.3" +version = "0.16.15" authors = [ "The Aleo Team " ] description = "Boolean circuit for a decentralized virtual machine" license = "Apache-2.0" @@ -14,12 +14,12 @@ harness = false [dependencies.console] package = "snarkvm-console-types-boolean" path = "../../../console/types/boolean" -version = "=0.16.3" +version = "=0.16.15" optional = true [dependencies.snarkvm-circuit-environment] path = "../../environment" -version = "=0.16.3" +version = "=0.16.15" [dev-dependencies.criterion] version = "0.5" diff --git a/circuit/types/boolean/src/not.rs b/circuit/types/boolean/src/not.rs index 491f7d1de2..b4aec24f37 100644 --- a/circuit/types/boolean/src/not.rs +++ b/circuit/types/boolean/src/not.rs @@ -38,7 +38,7 @@ impl Not for &Boolean { // Public and private cases. // Note: We directly instantiate a public variable to correctly represent a boolean in a linear combination. // For more information, see `LinearCombination::is_boolean_type`. 
- false => Boolean(Variable::Public(0, Rc::new(E::BaseField::one())) - &self.0), + false => Boolean(Variable::Public(Rc::new((0, E::BaseField::one()))) - &self.0), } } } diff --git a/circuit/types/field/Cargo.toml b/circuit/types/field/Cargo.toml index 8c434fe7ab..9f26acc523 100644 --- a/circuit/types/field/Cargo.toml +++ b/circuit/types/field/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "snarkvm-circuit-types-field" -version = "0.16.3" +version = "0.16.15" authors = [ "The Aleo Team " ] description = "Field circuit for a decentralized virtual machine" license = "Apache-2.0" @@ -9,16 +9,16 @@ edition = "2021" [dependencies.console] package = "snarkvm-console-types-field" path = "../../../console/types/field" -version = "=0.16.3" +version = "=0.16.15" optional = true [dependencies.snarkvm-circuit-environment] path = "../../environment" -version = "=0.16.3" +version = "=0.16.15" [dependencies.snarkvm-circuit-types-boolean] path = "../boolean" -version = "=0.16.3" +version = "=0.16.15" [features] default = [ "enable_console" ] diff --git a/circuit/types/group/Cargo.toml b/circuit/types/group/Cargo.toml index 2390463118..7138c5c1b9 100644 --- a/circuit/types/group/Cargo.toml +++ b/circuit/types/group/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "snarkvm-circuit-types-group" -version = "0.16.3" +version = "0.16.15" authors = [ "The Aleo Team " ] description = "Group circuit for a decentralized virtual machine" license = "Apache-2.0" @@ -9,24 +9,24 @@ edition = "2021" [dependencies.console] package = "snarkvm-console-types-group" path = "../../../console/types/group" -version = "=0.16.3" +version = "=0.16.15" optional = true [dependencies.snarkvm-circuit-environment] path = "../../environment" -version = "=0.16.3" +version = "=0.16.15" [dependencies.snarkvm-circuit-types-boolean] path = "../boolean" -version = "=0.16.3" +version = "=0.16.15" [dependencies.snarkvm-circuit-types-field] path = "../field" -version = "=0.16.3" +version = "=0.16.15" 
[dependencies.snarkvm-circuit-types-scalar] path = "../scalar" -version = "=0.16.3" +version = "=0.16.15" [dev-dependencies.snarkvm-utilities] path = "../../../utilities" diff --git a/circuit/types/integers/Cargo.toml b/circuit/types/integers/Cargo.toml index 36315b8dea..de9557a845 100644 --- a/circuit/types/integers/Cargo.toml +++ b/circuit/types/integers/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "snarkvm-circuit-types-integers" -version = "0.16.3" +version = "0.16.15" authors = [ "The Aleo Team " ] description = "Integer circuit for a decentralized virtual machine" license = "Apache-2.0" @@ -9,24 +9,24 @@ edition = "2021" [dependencies.console] package = "snarkvm-console-types-integers" path = "../../../console/types/integers" -version = "=0.16.3" +version = "=0.16.15" optional = true [dependencies.snarkvm-circuit-environment] path = "../../environment" -version = "=0.16.3" +version = "=0.16.15" [dependencies.snarkvm-circuit-types-boolean] path = "../boolean" -version = "=0.16.3" +version = "=0.16.15" [dependencies.snarkvm-circuit-types-field] path = "../field" -version = "=0.16.3" +version = "=0.16.15" [dependencies.snarkvm-circuit-types-scalar] path = "../scalar" -version = "=0.16.3" +version = "=0.16.15" [dev-dependencies.snarkvm-utilities] path = "../../../utilities" diff --git a/circuit/types/integers/src/mul_checked.rs b/circuit/types/integers/src/mul_checked.rs index e3cc8d7f61..1cf4588fff 100644 --- a/circuit/types/integers/src/mul_checked.rs +++ b/circuit/types/integers/src/mul_checked.rs @@ -144,7 +144,7 @@ impl Integer { Boolean::assert_bits_are_zero(&z_1_upper_bits); // Check that `z2` is zero. - E::assert_eq(&z2, E::zero()); + E::assert_eq(z2, E::zero()); // Return the product of `self` and `other`. 
product diff --git a/circuit/types/scalar/Cargo.toml b/circuit/types/scalar/Cargo.toml index 514c76becf..a9e64ddb22 100644 --- a/circuit/types/scalar/Cargo.toml +++ b/circuit/types/scalar/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "snarkvm-circuit-types-scalar" -version = "0.16.3" +version = "0.16.15" authors = [ "The Aleo Team " ] description = "Scalar circuit for a decentralized virtual machine" license = "Apache-2.0" @@ -9,20 +9,20 @@ edition = "2021" [dependencies.console] package = "snarkvm-console-types-scalar" path = "../../../console/types/scalar" -version = "=0.16.3" +version = "=0.16.15" optional = true [dependencies.snarkvm-circuit-environment] path = "../../environment" -version = "=0.16.3" +version = "=0.16.15" [dependencies.snarkvm-circuit-types-boolean] path = "../boolean" -version = "=0.16.3" +version = "=0.16.15" [dependencies.snarkvm-circuit-types-field] path = "../field" -version = "=0.16.3" +version = "=0.16.15" [features] default = [ "enable_console" ] diff --git a/circuit/types/string/Cargo.toml b/circuit/types/string/Cargo.toml index a76085470a..1cebb5b626 100644 --- a/circuit/types/string/Cargo.toml +++ b/circuit/types/string/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "snarkvm-circuit-types-string" -version = "0.16.3" +version = "0.16.15" authors = [ "The Aleo Team " ] description = "String circuit for a decentralized virtual machine" license = "Apache-2.0" @@ -9,24 +9,24 @@ edition = "2021" [dependencies.console] package = "snarkvm-console-types-string" path = "../../../console/types/string" -version = "=0.16.3" +version = "=0.16.15" optional = true [dependencies.snarkvm-circuit-environment] path = "../../environment" -version = "=0.16.3" +version = "=0.16.15" [dependencies.snarkvm-circuit-types-boolean] path = "../boolean" -version = "=0.16.3" +version = "=0.16.15" [dependencies.snarkvm-circuit-types-field] path = "../field" -version = "=0.16.3" +version = "=0.16.15" [dependencies.snarkvm-circuit-types-integers] path = "../integers" 
-version = "=0.16.3" +version = "=0.16.15" [dev-dependencies.snarkvm-utilities] path = "../../../utilities" diff --git a/console/Cargo.toml b/console/Cargo.toml index 17e5c68c1d..a33a64fb29 100644 --- a/console/Cargo.toml +++ b/console/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "snarkvm-console" -version = "0.16.3" +version = "0.16.15" authors = [ "The Aleo Team " ] description = "Console environment for a decentralized virtual machine" license = "Apache-2.0" @@ -8,32 +8,32 @@ edition = "2021" [dependencies.snarkvm-console-account] path = "./account" -version = "=0.16.3" +version = "=0.16.15" optional = true [dependencies.snarkvm-console-algorithms] path = "./algorithms" -version = "=0.16.3" +version = "=0.16.15" optional = true [dependencies.snarkvm-console-collections] path = "./collections" -version = "=0.16.3" +version = "=0.16.15" optional = true [dependencies.snarkvm-console-network] path = "./network" -version = "=0.16.3" +version = "=0.16.15" optional = true [dependencies.snarkvm-console-program] path = "./program" -version = "=0.16.3" +version = "=0.16.15" optional = true [dependencies.snarkvm-console-types] path = "./types" -version = "=0.16.3" +version = "=0.16.15" optional = true [features] diff --git a/console/account/Cargo.toml b/console/account/Cargo.toml index e2874e0f50..bb41a00122 100644 --- a/console/account/Cargo.toml +++ b/console/account/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "snarkvm-console-account" -version = "0.16.3" +version = "0.16.15" authors = [ "The Aleo Team " ] description = "Account operations for a decentralized virtual machine" license = "Apache-2.0" @@ -13,11 +13,11 @@ harness = false [dependencies.snarkvm-console-network] path = "../network" -version = "=0.16.3" +version = "=0.16.15" [dependencies.snarkvm-console-types] path = "../types" -version = "=0.16.3" +version = "=0.16.15" default-features = false features = [ "address", "boolean", "field", "group", "scalar" ] diff --git a/console/account/src/compute_key/mod.rs 
b/console/account/src/compute_key/mod.rs index 1fadf472be..5b7e827f7d 100644 --- a/console/account/src/compute_key/mod.rs +++ b/console/account/src/compute_key/mod.rs @@ -17,6 +17,7 @@ mod bytes; mod from_bits; mod serialize; mod size_in_bits; +mod size_in_bytes; mod to_address; mod to_bits; mod to_fields; diff --git a/console/account/src/compute_key/size_in_bytes.rs b/console/account/src/compute_key/size_in_bytes.rs new file mode 100644 index 0000000000..68bd892bef --- /dev/null +++ b/console/account/src/compute_key/size_in_bytes.rs @@ -0,0 +1,23 @@ +// Copyright (C) 2019-2023 Aleo Systems Inc. +// This file is part of the snarkVM library. + +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at: +// http://www.apache.org/licenses/LICENSE-2.0 + +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +use super::*; + +impl SizeInBytes for ComputeKey { + /// Returns the compute key size in bytes. 
+ #[inline] + fn size_in_bytes() -> usize { + Group::::size_in_bytes() + Group::::size_in_bytes() + } +} diff --git a/console/account/src/signature/mod.rs b/console/account/src/signature/mod.rs index 6b2c0b7db1..1225af3b38 100644 --- a/console/account/src/signature/mod.rs +++ b/console/account/src/signature/mod.rs @@ -18,6 +18,7 @@ mod from_bits; mod parse; mod serialize; mod size_in_bits; +mod size_in_bytes; mod to_bits; mod to_fields; mod verify; diff --git a/console/account/src/signature/size_in_bytes.rs b/console/account/src/signature/size_in_bytes.rs new file mode 100644 index 0000000000..7f1e9dced8 --- /dev/null +++ b/console/account/src/signature/size_in_bytes.rs @@ -0,0 +1,23 @@ +// Copyright (C) 2019-2023 Aleo Systems Inc. +// This file is part of the snarkVM library. + +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at: +// http://www.apache.org/licenses/LICENSE-2.0 + +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +use super::*; + +impl SizeInBytes for Signature { + /// Returns the signature size in bytes. 
+ #[inline] + fn size_in_bytes() -> usize { + Scalar::::size_in_bytes() + Scalar::::size_in_bytes() + ComputeKey::::size_in_bytes() + } +} diff --git a/console/algorithms/Cargo.toml b/console/algorithms/Cargo.toml index b2829c49ff..99291b7d20 100644 --- a/console/algorithms/Cargo.toml +++ b/console/algorithms/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "snarkvm-console-algorithms" -version = "0.16.3" +version = "0.16.15" authors = [ "The Aleo Team " ] description = "Console algorithms for a decentralized virtual machine" license = "Apache-2.0" @@ -23,18 +23,18 @@ harness = false [dependencies.snarkvm-console-types] path = "../types" -version = "=0.16.3" +version = "=0.16.15" default-features = false features = [ "field", "group", "integers", "scalar" ] [dependencies.snarkvm-fields] path = "../../fields" -version = "=0.16.3" +version = "=0.16.15" default-features = false [dependencies.snarkvm-utilities] path = "../../utilities" -version = "=0.16.3" +version = "=0.16.15" [dependencies.blake2s_simd] version = "1.0" diff --git a/console/algorithms/benches/poseidon.rs b/console/algorithms/benches/poseidon.rs index 6d03ba5cb1..be7a74b426 100644 --- a/console/algorithms/benches/poseidon.rs +++ b/console/algorithms/benches/poseidon.rs @@ -30,7 +30,7 @@ fn poseidon2(c: &mut Criterion) { c.bench_function("Poseidon2 Hash 4 -> 1", |b| b.iter(|| hash.hash(&input))); c.bench_function("Poseidon2 Hash 4 -> 2", |b| b.iter(|| hash.hash_many(&input, 2))); - let input = [F::rand(rng); 10]; + let input: Vec<_> = (0..10).map(|_| F::rand(rng)).collect(); c.bench_function("Poseidon2 Hash 10 -> 1", |b| b.iter(|| hash.hash(&input))); c.bench_function("Poseidon2 Hash 10 -> 4", |b| b.iter(|| hash.hash_many(&input, 4))); c.bench_function("Poseidon2 Hash 10 -> 8", |b| b.iter(|| hash.hash_many(&input, 8))); @@ -44,7 +44,7 @@ fn poseidon4(c: &mut Criterion) { c.bench_function("Poseidon4 Hash 4 -> 1", |b| b.iter(|| hash.hash(&input))); c.bench_function("Poseidon4 Hash 4 -> 2", |b| b.iter(|| 
hash.hash_many(&input, 2))); - let input = [F::rand(rng); 10]; + let input: Vec<_> = (0..10).map(|_| F::rand(rng)).collect(); c.bench_function("Poseidon4 Hash 10 -> 1", |b| b.iter(|| hash.hash(&input))); c.bench_function("Poseidon4 Hash 10 -> 4", |b| b.iter(|| hash.hash_many(&input, 4))); c.bench_function("Poseidon4 Hash 10 -> 8", |b| b.iter(|| hash.hash_many(&input, 8))); @@ -58,7 +58,7 @@ fn poseidon8(c: &mut Criterion) { c.bench_function("Poseidon8 Hash 4 -> 1", |b| b.iter(|| hash.hash(&input))); c.bench_function("Poseidon8 Hash 4 -> 2", |b| b.iter(|| hash.hash_many(&input, 2))); - let input = [F::rand(rng); 10]; + let input: Vec<_> = (0..10).map(|_| F::rand(rng)).collect(); c.bench_function("Poseidon8 Hash 10 -> 1", |b| b.iter(|| hash.hash(&input))); c.bench_function("Poseidon8 Hash 10 -> 4", |b| b.iter(|| hash.hash_many(&input, 4))); c.bench_function("Poseidon8 Hash 10 -> 8", |b| b.iter(|| hash.hash_many(&input, 8))); diff --git a/console/algorithms/src/bhp/hash_uncompressed.rs b/console/algorithms/src/bhp/hash_uncompressed.rs index 8d4ee3283d..85d16ec591 100644 --- a/console/algorithms/src/bhp/hash_uncompressed.rs +++ b/console/algorithms/src/bhp/hash_uncompressed.rs @@ -38,31 +38,31 @@ impl HashUncompres // Initialize a variable to store the hash from the current iteration. let mut digest = Group::::zero(); + // Prepare a reusable vector for the preimage. + let mut preimage = Vec::with_capacity(num_hasher_bits); + // Compute the hash of the input. for (i, input_bits) in input.chunks(max_input_bits_per_iteration).enumerate() { // Determine if this is the first iteration. - let preimage = match i == 0 { + match i == 0 { // Construct the first iteration as: [ 0...0 || DOMAIN || LENGTH(INPUT) || INPUT[0..BLOCK_SIZE] ]. true => { // Initialize a vector for the hash preimage. 
- let mut preimage = Vec::with_capacity(num_hasher_bits); preimage.extend(&self.domain); (input.len() as u64).write_bits_le(&mut preimage); preimage.extend(input_bits); - preimage } // Construct the subsequent iterations as: [ PREVIOUS_HASH[0..DATA_BITS] || INPUT[I * BLOCK_SIZE..(I + 1) * BLOCK_SIZE] ]. false => { // Initialize a vector for the hash preimage. - let mut preimage = Vec::with_capacity(num_hasher_bits); digest.to_x_coordinate().write_bits_le(&mut preimage); preimage.truncate(num_data_bits); preimage.extend(input_bits); - preimage } - }; + } // Hash the preimage for this iteration. digest = self.hasher.hash_uncompressed(&preimage)?; + preimage.clear(); } Ok(digest) diff --git a/console/algorithms/src/bhp/hasher/hash_uncompressed.rs b/console/algorithms/src/bhp/hasher/hash_uncompressed.rs index a8103ae41a..afad77765a 100644 --- a/console/algorithms/src/bhp/hasher/hash_uncompressed.rs +++ b/console/algorithms/src/bhp/hasher/hash_uncompressed.rs @@ -14,6 +14,8 @@ use super::*; +use std::borrow::Cow; + impl HashUncompressed for BHPHasher { @@ -36,12 +38,15 @@ impl HashUncompres ); // Pad the input to a multiple of `BHP_CHUNK_SIZE` for hashing. - let mut input = input.to_vec(); - if input.len() % BHP_CHUNK_SIZE != 0 { + let input = if input.len() % BHP_CHUNK_SIZE != 0 { let padding = BHP_CHUNK_SIZE - (input.len() % BHP_CHUNK_SIZE); - input.resize(input.len() + padding, false); - ensure!((input.len() % BHP_CHUNK_SIZE) == 0, "Input must be a multiple of {BHP_CHUNK_SIZE}"); - } + let mut padded_input = vec![false; input.len() + padding]; + padded_input[..input.len()].copy_from_slice(input); + ensure!((padded_input.len() % BHP_CHUNK_SIZE) == 0, "Input must be a multiple of {BHP_CHUNK_SIZE}"); + Cow::Owned(padded_input) + } else { + Cow::Borrowed(input) + }; // Compute sum of h_i^{sum of (1-2*c_{i,j,2})*(1+c_{i,j,0}+2*c_{i,j,1})*2^{4*(j-1)} for all j in segment} // for all i. Described in section 5.4.1.7 in the Zcash protocol specification. 
diff --git a/console/collections/Cargo.toml b/console/collections/Cargo.toml index 117f53d72e..2566972c39 100644 --- a/console/collections/Cargo.toml +++ b/console/collections/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "snarkvm-console-collections" -version = "0.16.3" +version = "0.16.15" authors = [ "The Aleo Team " ] description = "Collections for a decentralized virtual machine" license = "Apache-2.0" @@ -18,11 +18,11 @@ harness = false [dependencies.snarkvm-console-algorithms] path = "../algorithms" -version = "=0.16.3" +version = "=0.16.15" [dependencies.snarkvm-console-types] path = "../types" -version = "=0.16.3" +version = "=0.16.15" default-features = false features = [ "field", "integers" ] diff --git a/console/network/Cargo.toml b/console/network/Cargo.toml index 66f3d3869d..c86f68cca3 100644 --- a/console/network/Cargo.toml +++ b/console/network/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "snarkvm-console-network" -version = "0.16.3" +version = "0.16.15" authors = [ "The Aleo Team " ] description = "Network console library for a decentralized virtual machine" license = "Apache-2.0" @@ -15,45 +15,45 @@ wasm = [ [dependencies.snarkvm-algorithms] path = "../../algorithms" -version = "=0.16.3" +version = "=0.16.15" default-features = false features = [ "snark" ] [dependencies.snarkvm-console-algorithms] path = "../algorithms" -version = "=0.16.3" +version = "=0.16.15" [dependencies.snarkvm-console-collections] path = "../collections" -version = "=0.16.3" +version = "=0.16.15" [dependencies.snarkvm-console-network-environment] path = "./environment" -version = "=0.16.3" +version = "=0.16.15" [dependencies.snarkvm-console-types] path = "../types" -version = "=0.16.3" +version = "=0.16.15" default-features = false features = [ "field", "group", "scalar" ] [dependencies.snarkvm-curves] path = "../../curves" -version = "=0.16.3" +version = "=0.16.15" default-features = false [dependencies.snarkvm-fields] path = "../../fields" -version = "=0.16.3" +version = 
"=0.16.15" default-features = false [dependencies.snarkvm-parameters] path = "../../parameters" -version = "=0.16.3" +version = "=0.16.15" [dependencies.snarkvm-utilities] path = "../../utilities" -version = "=0.16.3" +version = "=0.16.15" [dependencies.anyhow] version = "1.0.73" diff --git a/console/network/environment/Cargo.toml b/console/network/environment/Cargo.toml index d23c0f2e37..279581b885 100644 --- a/console/network/environment/Cargo.toml +++ b/console/network/environment/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "snarkvm-console-network-environment" -version = "0.16.3" +version = "0.16.15" authors = [ "The Aleo Team " ] description = "Environment console library for a decentralized virtual machine" license = "Apache-2.0" @@ -8,17 +8,17 @@ edition = "2021" [dependencies.snarkvm-curves] path = "../../../curves" -version = "=0.16.3" +version = "=0.16.15" default-features = false [dependencies.snarkvm-fields] path = "../../../fields" -version = "=0.16.3" +version = "=0.16.15" default-features = false [dependencies.snarkvm-utilities] path = "../../../utilities" -version = "=0.16.3" +version = "=0.16.15" [dependencies.anyhow] version = "1.0.73" diff --git a/console/network/environment/src/lib.rs b/console/network/environment/src/lib.rs index f6d28304b3..bfd495d509 100644 --- a/console/network/environment/src/lib.rs +++ b/console/network/environment/src/lib.rs @@ -30,6 +30,7 @@ pub mod prelude { pub use snarkvm_curves::{AffineCurve, MontgomeryParameters, ProjectiveCurve, TwistedEdwardsParameters}; pub use snarkvm_fields::{Field as _, PrimeField as _, SquareRootField as _, Zero as _}; pub use snarkvm_utilities::{ + cfg_chunks, cfg_find, cfg_find_map, cfg_into_iter, diff --git a/console/network/src/lib.rs b/console/network/src/lib.rs index 9179b3c36a..717946cba2 100644 --- a/console/network/src/lib.rs +++ b/console/network/src/lib.rs @@ -109,8 +109,8 @@ pub trait Network: const BLOCK_TIME: u16 = 10; /// The coinbase puzzle degree. 
const COINBASE_PUZZLE_DEGREE: u32 = (1 << 13) - 1; // 8,191 - /// The maximum number of prover solutions that can be included per block. - const MAX_PROVER_SOLUTIONS: usize = 1 << 8; // 256 prover solutions + /// The maximum number of solutions that can be included per block. + const MAX_SOLUTIONS: usize = 1 << 8; // 256 solutions /// The number of blocks per epoch. const NUM_BLOCKS_PER_EPOCH: u32 = 3600 / Self::BLOCK_TIME as u32; // 360 blocks == ~1 hour diff --git a/console/program/Cargo.toml b/console/program/Cargo.toml index 16e5cd3923..ba81988fc7 100644 --- a/console/program/Cargo.toml +++ b/console/program/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "snarkvm-console-program" -version = "0.16.3" +version = "0.16.15" authors = [ "The Aleo Team " ] description = "Program operations for a decentralized virtual machine" license = "Apache-2.0" @@ -12,27 +12,27 @@ test = [ ] [dependencies.snarkvm-console-account] path = "../account" -version = "=0.16.3" +version = "=0.16.15" [dependencies.snarkvm-console-algorithms] path = "../algorithms" -version = "=0.16.3" +version = "=0.16.15" [dependencies.snarkvm-console-collections] path = "../collections" -version = "=0.16.3" +version = "=0.16.15" [dependencies.snarkvm-console-network] path = "../network" -version = "=0.16.3" +version = "=0.16.15" [dependencies.snarkvm-console-types] path = "../types" -version = "=0.16.3" +version = "=0.16.15" [dependencies.snarkvm-utilities] path = "../../utilities" -version = "=0.16.3" +version = "=0.16.15" [dependencies.enum_index] version = "0.2" diff --git a/console/program/src/data/ciphertext/bytes.rs b/console/program/src/data/ciphertext/bytes.rs index 212a6e84d5..d983d9bb71 100644 --- a/console/program/src/data/ciphertext/bytes.rs +++ b/console/program/src/data/ciphertext/bytes.rs @@ -66,7 +66,6 @@ mod tests { // Check the byte representation. 
let expected_bytes = expected.to_bytes_le()?; assert_eq!(expected, Ciphertext::read_le(&expected_bytes[..])?); - // assert!(Ciphertext::::read_le(&expected_bytes[1..]).is_err()); } Ok(()) } diff --git a/console/program/src/data/ciphertext/equal.rs b/console/program/src/data/ciphertext/equal.rs index 0dd57ac917..31bbd5fffc 100644 --- a/console/program/src/data/ciphertext/equal.rs +++ b/console/program/src/data/ciphertext/equal.rs @@ -32,25 +32,17 @@ impl Equal for Ciphertext { if self.0.len() != other.0.len() { return Boolean::new(false); } + // Check each field element for equality. - let mut equal = Boolean::new(true); - for (a, b) in self.0.iter().zip_eq(other.0.iter()) { - equal &= a.is_equal(b); + if self.0.iter().zip_eq(other.0.iter()).all(|(a, b)| *a.is_equal(b)) { + Boolean::new(true) + } else { + Boolean::new(false) } - equal } /// Returns `true` if `self` and `other` are *not* equal. fn is_not_equal(&self, other: &Self) -> Self::Output { - // Check if the ciphertexts have different numbers of field elements. - if self.0.len() != other.0.len() { - return Boolean::new(true); - } - // Recursively check each member for inequality. - let mut not_equal = Boolean::new(false); - for (a, b) in self.0.iter().zip_eq(other.0.iter()) { - not_equal |= a.is_not_equal(b); - } - not_equal + !self.is_equal(other) } } diff --git a/console/program/src/data/future/bytes.rs b/console/program/src/data/future/bytes.rs index f4e492bdd8..95878055f1 100644 --- a/console/program/src/data/future/bytes.rs +++ b/console/program/src/data/future/bytes.rs @@ -84,7 +84,6 @@ mod tests { // Check the byte representation. 
let expected_bytes = expected.to_bytes_le()?; assert_eq!(expected, Future::read_le(&expected_bytes[..])?); - assert!(Future::::read_le(&expected_bytes[1..]).is_err()); Ok(()) } diff --git a/console/program/src/data/identifier/bytes.rs b/console/program/src/data/identifier/bytes.rs index e90d7fbae3..757f27ba40 100644 --- a/console/program/src/data/identifier/bytes.rs +++ b/console/program/src/data/identifier/bytes.rs @@ -25,6 +25,7 @@ impl FromBytes for Identifier { reader.read_exact(&mut buffer)?; // from_str the identifier. + // Note: `Self::from_str` ensures that the identifier string is not empty. Self::from_str(&String::from_utf8(buffer).map_err(|e| error(format!("Failed to decode identifier: {e}")))?) .map_err(|e| error(format!("{e}"))) } @@ -72,8 +73,12 @@ mod tests { // Check the byte representation. let expected_bytes = expected.to_bytes_le()?; assert_eq!(expected, Identifier::read_le(&expected_bytes[..])?); - assert!(Identifier::::read_le(&expected_bytes[1..]).is_err()); } Ok(()) } + + #[test] + fn test_zero_identifier_fails() { + assert!(Identifier::::read_le(&[0u8; 1][..]).is_err()) + } } diff --git a/console/program/src/data/literal/bytes.rs b/console/program/src/data/literal/bytes.rs index d52f43045b..1f77bf3430 100644 --- a/console/program/src/data/literal/bytes.rs +++ b/console/program/src/data/literal/bytes.rs @@ -132,8 +132,6 @@ mod tests { // Check the byte representation. 
let expected_bytes = expected.to_bytes_le()?; assert_eq!(expected, Literal::read_le(&expected_bytes[..])?); - assert!(Literal::::read_le(&expected_bytes[1..]).is_err()); - // assert!(Literal::::read_le(&expected_bytes[2..]).is_err()); Ok(()) } diff --git a/console/program/src/data/literal/mod.rs b/console/program/src/data/literal/mod.rs index 23f085e504..5b904165ca 100644 --- a/console/program/src/data/literal/mod.rs +++ b/console/program/src/data/literal/mod.rs @@ -28,7 +28,7 @@ mod to_bits; mod to_type; mod variant; -use crate::LiteralType; +use crate::{LiteralType, ProgramID}; use snarkvm_console_account::{ComputeKey, PrivateKey, Signature}; use snarkvm_console_network::Network; use snarkvm_console_types::{prelude::*, Boolean}; diff --git a/console/program/src/data/literal/parse.rs b/console/program/src/data/literal/parse.rs index 330914363d..0a02b7200d 100644 --- a/console/program/src/data/literal/parse.rs +++ b/console/program/src/data/literal/parse.rs @@ -36,6 +36,8 @@ impl Parser for Literal { map(Scalar::::parse, |literal| Self::Scalar(literal)), map(Signature::::parse, |literal| Self::Signature(Box::new(literal))), map(StringType::::parse, |literal| Self::String(literal)), + // This allows users to implicitly declare program IDs as literals. 
+ map_res(ProgramID::::parse, |program_id| Ok::(Self::Address(program_id.to_address()?))), ))(string) } } @@ -87,3 +89,27 @@ impl Display for Literal { } } } + +#[cfg(test)] +mod tests { + use super::*; + use snarkvm_console_network::Testnet3; + + type CurrentNetwork = Testnet3; + + #[test] + fn test_parse_program_id() -> Result<()> { + let (remainder, candidate) = Literal::::parse("credits.aleo")?; + assert!(matches!(candidate, Literal::Address(_))); + assert_eq!(candidate.to_string(), "aleo1lqmly7ez2k48ajf5hs92ulphaqr05qm4n8qwzj8v0yprmasgpqgsez59gg"); + assert_eq!("", remainder); + + let result = Literal::::parse("credits.ale"); + assert!(result.is_err()); + + let result = Literal::::parse("credits.aleo1"); + assert!(result.is_err()); + + Ok(()) + } +} diff --git a/console/program/src/data/plaintext/bytes.rs b/console/program/src/data/plaintext/bytes.rs index a6c4867bc4..99cdd82ed4 100644 --- a/console/program/src/data/plaintext/bytes.rs +++ b/console/program/src/data/plaintext/bytes.rs @@ -131,9 +131,6 @@ mod tests { // Check the byte representation. let expected_bytes = expected.to_bytes_le()?; assert_eq!(expected, Plaintext::read_le(&expected_bytes[..])?); - // assert!(Plaintext::::read_le(&expected_bytes[1..]).is_err()); - // assert!(Plaintext::::read_le(&expected_bytes[2..]).is_err()); - // assert!(Plaintext::::read_le(&expected_bytes[3..]).is_err()); Ok(()) } @@ -225,7 +222,6 @@ mod tests { // Check the byte representation. let expected_bytes = expected.to_bytes_le()?; assert_eq!(expected, Plaintext::read_le(&expected_bytes[..])?); - assert!(Plaintext::::read_le(&expected_bytes[1..]).is_err()); // Check the array manually. let expected = Plaintext::::from_str("[ 1u8, 2u8, 3u8, 4u8, 5u8, 6u8, 7u8, 8u8, 9u8, 10u8 ]")?; @@ -233,7 +229,6 @@ mod tests { // Check the byte representation. 
let expected_bytes = expected.to_bytes_le()?; assert_eq!(expected, Plaintext::read_le(&expected_bytes[..])?); - assert!(Plaintext::::read_le(&expected_bytes[1..]).is_err()); Ok(()) } diff --git a/console/program/src/data/record/bytes.rs b/console/program/src/data/record/bytes.rs index 6c74f0b1ad..a787040499 100644 --- a/console/program/src/data/record/bytes.rs +++ b/console/program/src/data/record/bytes.rs @@ -95,7 +95,6 @@ mod tests { // Check the byte representation. let expected_bytes = expected.to_bytes_le()?; assert_eq!(expected, Record::read_le(&expected_bytes[..])?); - assert!(Record::>::read_le(&expected_bytes[1..]).is_err()); Ok(()) } } diff --git a/console/program/src/data/record/equal.rs b/console/program/src/data/record/equal.rs index 0be7c4fc2f..61bf58b714 100644 --- a/console/program/src/data/record/equal.rs +++ b/console/program/src/data/record/equal.rs @@ -27,41 +27,33 @@ impl>> Equal for Reco type Output = Boolean; /// Returns `true` if `self` and `other` are equal. - /// - /// Note: This method does **not** check the `nonce` equality. fn is_equal(&self, other: &Self) -> Self::Output { // Ensure the `data` lengths are equal. if self.data.len() != other.data.len() { return Boolean::new(false); } - // Recursively check each entry for equality. - let mut equal = Boolean::new(true); - for ((name_a, entry_a), (name_b, entry_b)) in self.data.iter().zip_eq(other.data.iter()) { - equal = equal & name_a.is_equal(name_b) & entry_a.is_equal(entry_b); + // Check the `owner`, and `nonce`. + if !(*self.owner.is_equal(&other.owner) && *self.nonce.is_equal(&other.nonce)) { + return Boolean::new(false); } - // Check the `owner`, `data`, and `nonce`. - self.owner.is_equal(&other.owner) & equal & self.nonce.is_equal(&other.nonce) + // Recursively check each entry for equality. 
+ if self + .data + .iter() + .zip_eq(other.data.iter()) + .all(|((name_a, entry_a), (name_b, entry_b))| *name_a.is_equal(name_b) && *entry_a.is_equal(entry_b)) + { + Boolean::new(true) + } else { + Boolean::new(false) + } } /// Returns `true` if `self` and `other` are *not* equal. - /// - /// Note: This method does **not** check the `nonce` equality. fn is_not_equal(&self, other: &Self) -> Self::Output { - // Check the `data` lengths. - if self.data.len() != other.data.len() { - return Boolean::new(true); - } - - // Recursively check each entry for inequality. - let mut not_equal = Boolean::new(false); - for ((name_a, entry_a), (name_b, entry_b)) in self.data.iter().zip_eq(other.data.iter()) { - not_equal = not_equal | name_a.is_not_equal(name_b) | entry_a.is_not_equal(entry_b); - } - - // Check the `owner`, `data`, and `nonce`. - self.owner.is_not_equal(&other.owner) | not_equal | self.nonce.is_not_equal(&other.nonce) + !self.is_equal(other) } } diff --git a/console/program/src/data/value/bytes.rs b/console/program/src/data/value/bytes.rs index 8bfa9fcade..50e8567f05 100644 --- a/console/program/src/data/value/bytes.rs +++ b/console/program/src/data/value/bytes.rs @@ -67,7 +67,6 @@ mod tests { // Check the byte representation. let expected_bytes = expected.to_bytes_le()?; assert_eq!(expected, Value::read_le(&expected_bytes[..])?); - assert!(Value::::read_le(&expected_bytes[1..]).is_err()); Ok(()) } @@ -81,7 +80,6 @@ mod tests { // Check the byte representation. 
let expected_bytes = expected.to_bytes_le()?; assert_eq!(expected, Value::read_le(&expected_bytes[..])?); - assert!(Value::::read_le(&expected_bytes[1..]).is_err()); Ok(()) } } diff --git a/console/program/src/request/bytes.rs b/console/program/src/request/bytes.rs index 761b6b1340..ec664b9c48 100644 --- a/console/program/src/request/bytes.rs +++ b/console/program/src/request/bytes.rs @@ -100,9 +100,6 @@ impl ToBytes for Request { #[cfg(test)] mod tests { use super::*; - use snarkvm_console_network::Testnet3; - - type CurrentNetwork = Testnet3; #[test] fn test_bytes() { @@ -112,7 +109,6 @@ mod tests { // Check the byte representation. let expected_bytes = expected.to_bytes_le().unwrap(); assert_eq!(expected, Request::read_le(&expected_bytes[..]).unwrap()); - assert!(Request::::read_le(&expected_bytes[1..]).is_err()); } } } diff --git a/console/program/src/request/serialize.rs b/console/program/src/request/serialize.rs index 650279e492..905b67d991 100644 --- a/console/program/src/request/serialize.rs +++ b/console/program/src/request/serialize.rs @@ -21,7 +21,7 @@ impl Serialize for Request { fn serialize(&self, serializer: S) -> Result { match serializer.is_human_readable() { true => { - let mut transition = serializer.serialize_struct("Request", 9)?; + let mut transition = serializer.serialize_struct("Request", 10)?; transition.serialize_field("signer", &self.signer)?; transition.serialize_field("network", &self.network_id)?; transition.serialize_field("program", &self.program_id)?; diff --git a/console/program/src/state_path/configuration/mod.rs b/console/program/src/state_path/configuration/mod.rs index b8ddef30cf..fac6225f8f 100644 --- a/console/program/src/state_path/configuration/mod.rs +++ b/console/program/src/state_path/configuration/mod.rs @@ -19,14 +19,18 @@ use snarkvm_console_network::BHPMerkleTree; pub const BLOCKS_DEPTH: u8 = 32; /// The depth of the Merkle tree for the block header. 
pub const HEADER_DEPTH: u8 = 3; +/// The depth of the Merkle tree for finalize operations in a transaction. +pub const FINALIZE_ID_DEPTH: u8 = TRANSACTION_DEPTH + 4; // '+ 4' is to support 16 finalize operations per transition. /// The depth of the Merkle tree for finalize operations in a block. -pub const FINALIZE_OPERATIONS_DEPTH: u8 = 20; +pub const FINALIZE_OPERATIONS_DEPTH: u8 = TRANSACTIONS_DEPTH; /// The depth of the Merkle tree for the ratifications in a block. pub const RATIFICATIONS_DEPTH: u8 = 16; /// The depth the Merkle tree for the subdag certificates in a block. pub const SUBDAG_CERTIFICATES_DEPTH: u8 = 16; /// The depth of the Merkle tree for transactions in a block. -pub const TRANSACTIONS_DEPTH: u8 = 16; +/// Note: The technical limit is 2^20 - 1 transactions, to allow compatibility with the +/// finalize operations tree, which requires 1 leaf for the ratified finalize ID. +pub const TRANSACTIONS_DEPTH: u8 = 20; /// The depth of the Merkle tree for the transaction. pub const TRANSACTION_DEPTH: u8 = 5; /// The depth of the Merkle tree for the transition. diff --git a/console/program/src/state_path/header_leaf/bytes.rs b/console/program/src/state_path/header_leaf/bytes.rs index cde4608989..16f2b03bca 100644 --- a/console/program/src/state_path/header_leaf/bytes.rs +++ b/console/program/src/state_path/header_leaf/bytes.rs @@ -39,9 +39,6 @@ impl ToBytes for HeaderLeaf { #[cfg(test)] mod tests { use super::*; - use snarkvm_console_network::Testnet3; - - type CurrentNetwork = Testnet3; const ITERATIONS: u64 = 1000; @@ -56,7 +53,6 @@ mod tests { // Check the byte representation. 
let expected_bytes = expected.to_bytes_le()?; assert_eq!(expected, HeaderLeaf::read_le(&expected_bytes[..])?); - assert!(HeaderLeaf::::read_le(&expected_bytes[1..]).is_err()); } Ok(()) } diff --git a/console/program/src/state_path/transaction_leaf/bytes.rs b/console/program/src/state_path/transaction_leaf/bytes.rs index 602c66689c..db10cf1f90 100644 --- a/console/program/src/state_path/transaction_leaf/bytes.rs +++ b/console/program/src/state_path/transaction_leaf/bytes.rs @@ -43,9 +43,6 @@ impl ToBytes for TransactionLeaf { #[cfg(test)] mod tests { use super::*; - use snarkvm_console_network::Testnet3; - - type CurrentNetwork = Testnet3; const ITERATIONS: u64 = 1000; @@ -60,7 +57,6 @@ mod tests { // Check the byte representation. let expected_bytes = expected.to_bytes_le()?; assert_eq!(expected, TransactionLeaf::read_le(&expected_bytes[..])?); - assert!(TransactionLeaf::::read_le(&expected_bytes[1..]).is_err()); } Ok(()) } diff --git a/console/program/src/state_path/transition_leaf/bytes.rs b/console/program/src/state_path/transition_leaf/bytes.rs index 90a6ac9beb..b06f9def4e 100644 --- a/console/program/src/state_path/transition_leaf/bytes.rs +++ b/console/program/src/state_path/transition_leaf/bytes.rs @@ -51,9 +51,6 @@ impl ToBytes for TransitionLeaf { #[cfg(test)] mod tests { use super::*; - use snarkvm_console_network::Testnet3; - - type CurrentNetwork = Testnet3; const ITERATIONS: u64 = 1000; @@ -68,7 +65,6 @@ mod tests { // Check the byte representation. 
let expected_bytes = expected.to_bytes_le()?; assert_eq!(expected, TransitionLeaf::read_le(&expected_bytes[..])?); - assert!(TransitionLeaf::::read_le(&expected_bytes[1..]).is_err()); } Ok(()) } diff --git a/console/types/Cargo.toml b/console/types/Cargo.toml index a86ac4c7a7..17dc90de05 100644 --- a/console/types/Cargo.toml +++ b/console/types/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "snarkvm-console-types" -version = "0.16.3" +version = "0.16.15" authors = [ "The Aleo Team " ] description = "Console types for a decentralized virtual machine" license = "Apache-2.0" @@ -8,41 +8,41 @@ edition = "2021" [dependencies.snarkvm-console-network-environment] path = "../network/environment" -version = "=0.16.3" +version = "=0.16.15" [dependencies.snarkvm-console-types-address] path = "./address" -version = "=0.16.3" +version = "=0.16.15" optional = true [dependencies.snarkvm-console-types-boolean] path = "./boolean" -version = "=0.16.3" +version = "=0.16.15" optional = true [dependencies.snarkvm-console-types-field] path = "./field" -version = "=0.16.3" +version = "=0.16.15" optional = true [dependencies.snarkvm-console-types-group] path = "./group" -version = "=0.16.3" +version = "=0.16.15" optional = true [dependencies.snarkvm-console-types-integers] path = "./integers" -version = "=0.16.3" +version = "=0.16.15" optional = true [dependencies.snarkvm-console-types-scalar] path = "./scalar" -version = "=0.16.3" +version = "=0.16.15" optional = true [dependencies.snarkvm-console-types-string] path = "./string" -version = "=0.16.3" +version = "=0.16.15" optional = true [features] diff --git a/console/types/address/Cargo.toml b/console/types/address/Cargo.toml index 20f753d0c3..2c17e364a5 100644 --- a/console/types/address/Cargo.toml +++ b/console/types/address/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "snarkvm-console-types-address" -version = "0.16.3" +version = "0.16.15" authors = [ "The Aleo Team " ] description = "Type operations for a decentralized virtual machine" 
license = "Apache-2.0" @@ -8,19 +8,19 @@ edition = "2021" [dependencies.snarkvm-console-network-environment] path = "../../network/environment" -version = "=0.16.3" +version = "=0.16.15" [dependencies.snarkvm-console-types-boolean] path = "../boolean" -version = "=0.16.3" +version = "=0.16.15" [dependencies.snarkvm-console-types-field] path = "../field" -version = "=0.16.3" +version = "=0.16.15" [dependencies.snarkvm-console-types-group] path = "../group" -version = "=0.16.3" +version = "=0.16.15" [dev-dependencies.bincode] version = "1.3" diff --git a/console/types/boolean/Cargo.toml b/console/types/boolean/Cargo.toml index 66ef99cc09..be2c33dbee 100644 --- a/console/types/boolean/Cargo.toml +++ b/console/types/boolean/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "snarkvm-console-types-boolean" -version = "0.16.3" +version = "0.16.15" authors = [ "The Aleo Team " ] description = "Type operations for a decentralized virtual machine" license = "Apache-2.0" @@ -8,7 +8,7 @@ edition = "2021" [dependencies.snarkvm-console-network-environment] path = "../../network/environment" -version = "=0.16.3" +version = "=0.16.15" [dev-dependencies.bincode] version = "1.3" diff --git a/console/types/field/Cargo.toml b/console/types/field/Cargo.toml index 28f964f61d..bc29ee8fde 100644 --- a/console/types/field/Cargo.toml +++ b/console/types/field/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "snarkvm-console-types-field" -version = "0.16.3" +version = "0.16.15" authors = [ "The Aleo Team " ] description = "Type operations for a decentralized virtual machine" license = "Apache-2.0" @@ -8,11 +8,11 @@ edition = "2021" [dependencies.snarkvm-console-network-environment] path = "../../network/environment" -version = "=0.16.3" +version = "=0.16.15" [dependencies.snarkvm-console-types-boolean] path = "../boolean" -version = "=0.16.3" +version = "=0.16.15" [dependencies.zeroize] version = "1" diff --git a/console/types/group/Cargo.toml b/console/types/group/Cargo.toml index 
b4b66705bf..4fc8365c8f 100644 --- a/console/types/group/Cargo.toml +++ b/console/types/group/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "snarkvm-console-types-group" -version = "0.16.3" +version = "0.16.15" authors = [ "The Aleo Team " ] description = "Type operations for a decentralized virtual machine" license = "Apache-2.0" @@ -8,19 +8,19 @@ edition = "2021" [dependencies.snarkvm-console-network-environment] path = "../../network/environment" -version = "=0.16.3" +version = "=0.16.15" [dependencies.snarkvm-console-types-boolean] path = "../boolean" -version = "=0.16.3" +version = "=0.16.15" [dependencies.snarkvm-console-types-field] path = "../field" -version = "=0.16.3" +version = "=0.16.15" [dependencies.snarkvm-console-types-scalar] path = "../scalar" -version = "=0.16.3" +version = "=0.16.15" [dev-dependencies.bincode] version = "1.3" diff --git a/console/types/integers/Cargo.toml b/console/types/integers/Cargo.toml index d65bc5d143..0b35f6f1a8 100644 --- a/console/types/integers/Cargo.toml +++ b/console/types/integers/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "snarkvm-console-types-integers" -version = "0.16.3" +version = "0.16.15" authors = [ "The Aleo Team " ] description = "Type operations for a decentralized virtual machine" license = "Apache-2.0" @@ -8,19 +8,19 @@ edition = "2021" [dependencies.snarkvm-console-network-environment] path = "../../network/environment" -version = "=0.16.3" +version = "=0.16.15" [dependencies.snarkvm-console-types-boolean] path = "../boolean" -version = "=0.16.3" +version = "=0.16.15" [dependencies.snarkvm-console-types-field] path = "../field" -version = "=0.16.3" +version = "=0.16.15" [dependencies.snarkvm-console-types-scalar] path = "../scalar" -version = "=0.16.3" +version = "=0.16.15" [dev-dependencies.bincode] version = "1.3" diff --git a/console/types/scalar/Cargo.toml b/console/types/scalar/Cargo.toml index 905263266f..03db6c53ec 100644 --- a/console/types/scalar/Cargo.toml +++ b/console/types/scalar/Cargo.toml 
@@ -1,6 +1,6 @@ [package] name = "snarkvm-console-types-scalar" -version = "0.16.3" +version = "0.16.15" authors = [ "The Aleo Team " ] description = "Type operations for a decentralized virtual machine" license = "Apache-2.0" @@ -8,15 +8,15 @@ edition = "2021" [dependencies.snarkvm-console-network-environment] path = "../../network/environment" -version = "=0.16.3" +version = "=0.16.15" [dependencies.snarkvm-console-types-boolean] path = "../boolean" -version = "=0.16.3" +version = "=0.16.15" [dependencies.snarkvm-console-types-field] path = "../field" -version = "=0.16.3" +version = "=0.16.15" [dependencies.zeroize] version = "1" diff --git a/console/types/scalar/src/from_bits.rs b/console/types/scalar/src/from_bits.rs index 7384a5c481..25b98715b7 100644 --- a/console/types/scalar/src/from_bits.rs +++ b/console/types/scalar/src/from_bits.rs @@ -34,9 +34,8 @@ impl FromBits for Scalar { // If `num_bits` is greater than `size_in_data_bits`, check it is less than `Scalar::MODULUS`. if num_bits > size_in_data_bits { - // Retrieve the modulus & subtract by 1 as we'll check `bits_le` is less than or *equal* to this value. - // (For advanced users) Scalar::MODULUS - 1 is equivalent to -1 in the field. - let modulus_minus_one = E::Scalar::modulus(); + // Retrieve the modulus as we'll check `bits_le` is less than this value. + let modulus = E::Scalar::modulus(); // Recover the scalar as a `BigInteger` for comparison. // As `bits_le[size_in_bits..]` is guaranteed to be zero from the above logic, @@ -44,7 +43,7 @@ impl FromBits for Scalar { let scalar = E::BigInteger::from_bits_le(&bits_le[..size_in_bits])?; // Ensure the scalar is less than `Scalar::MODULUS`. - ensure!(scalar < modulus_minus_one, "The scalar is greater than or equal to the modulus."); + ensure!(scalar < modulus, "The scalar is greater than or equal to the modulus."); // Return the scalar. Ok(Scalar { scalar: E::Scalar::from_bigint(scalar).ok_or_else(|| anyhow!("Invalid scalar from bits"))? 
}) diff --git a/console/types/string/Cargo.toml b/console/types/string/Cargo.toml index ccebdec9c6..4e98ffd9e0 100644 --- a/console/types/string/Cargo.toml +++ b/console/types/string/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "snarkvm-console-types-string" -version = "0.16.3" +version = "0.16.15" authors = [ "The Aleo Team " ] description = "Type operations for a decentralized virtual machine" license = "Apache-2.0" @@ -8,19 +8,19 @@ edition = "2021" [dependencies.snarkvm-console-network-environment] path = "../../network/environment" -version = "=0.16.3" +version = "=0.16.15" [dependencies.snarkvm-console-types-boolean] path = "../boolean" -version = "=0.16.3" +version = "=0.16.15" [dependencies.snarkvm-console-types-field] path = "../field" -version = "=0.16.3" +version = "=0.16.15" [dependencies.snarkvm-console-types-integers] path = "../integers" -version = "=0.16.3" +version = "=0.16.15" [dev-dependencies.bincode] version = "1.3" diff --git a/console/types/string/src/bytes.rs b/console/types/string/src/bytes.rs index 137dd5f0ac..46fd7fcf33 100644 --- a/console/types/string/src/bytes.rs +++ b/console/types/string/src/bytes.rs @@ -67,7 +67,6 @@ mod tests { // Check the byte representation. 
let expected_bytes = expected.to_bytes_le()?; assert_eq!(expected, StringType::read_le(&expected_bytes[..])?); - assert!(StringType::::read_le(&expected_bytes[1..]).is_err()); } Ok(()) } diff --git a/curves/Cargo.toml b/curves/Cargo.toml index 7492d77a8f..369622d10e 100644 --- a/curves/Cargo.toml +++ b/curves/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "snarkvm-curves" -version = "0.16.3" +version = "0.16.15" authors = [ "The Aleo Team " ] description = "Curves for a decentralized virtual machine" homepage = "https://aleo.org" @@ -36,12 +36,12 @@ harness = false [dependencies.snarkvm-fields] path = "../fields" -version = "=0.16.3" +version = "=0.16.15" default-features = false [dependencies.snarkvm-utilities] path = "../utilities" -version = "=0.16.3" +version = "=0.16.15" default-features = false [dependencies.rand] diff --git a/curves/src/templates/bls12/bls12.rs b/curves/src/templates/bls12/bls12.rs index 4870b3d629..71a57c22e9 100644 --- a/curves/src/templates/bls12/bls12.rs +++ b/curves/src/templates/bls12/bls12.rs @@ -193,7 +193,7 @@ where // r = f^((p^6 - 1)(p^2 + 1)) r *= &f2; - // Hard part of the final exponentation is below: + // Hard part of the final exponentiation is below: // From https://eprint.iacr.org/2016/130.pdf, Table 1 let mut y0 = r.cyclotomic_square(); y0.conjugate(); diff --git a/curves/src/templates/bls12/g2.rs b/curves/src/templates/bls12/g2.rs index af3a62ff63..5c27d12909 100644 --- a/curves/src/templates/bls12/g2.rs +++ b/curves/src/templates/bls12/g2.rs @@ -65,7 +65,7 @@ impl ToBytes for G2Prepared

{ impl FromBytes for G2Prepared

{ fn read_le(mut reader: R) -> IoResult { let ell_coeffs_len: u32 = FromBytes::read_le(&mut reader)?; - let mut ell_coeffs = Vec::with_capacity(ell_coeffs_len as usize); + let mut ell_coeffs = Vec::new(); for _ in 0..ell_coeffs_len { let coeff_1: Fp2 = FromBytes::read_le(&mut reader)?; let coeff_2: Fp2 = FromBytes::read_le(&mut reader)?; @@ -92,7 +92,7 @@ impl G2Prepared

{ let mut r = G2HomProjective { x: q.x, y: q.y, z: Fp2::one() }; let bit_iterator = BitIteratorBE::new(P::X); - let mut ell_coeffs = Vec::with_capacity(bit_iterator.len()); + let mut ell_coeffs = Vec::with_capacity(bit_iterator.len() * 3 / 2); // `one_half` = 1/2 in the field. let one_half = P::Fp::half(); diff --git a/fields/Cargo.toml b/fields/Cargo.toml index f0deb47d5b..9f15b2f15c 100644 --- a/fields/Cargo.toml +++ b/fields/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "snarkvm-fields" -version = "0.16.3" +version = "0.16.15" authors = [ "The Aleo Team " ] description = "Fields for a decentralized virtual machine" homepage = "https://aleo.org" @@ -25,7 +25,7 @@ edition = "2021" [dependencies.snarkvm-utilities] path = "../utilities" -version = "=0.16.3" +version = "=0.16.15" default-features = false [dependencies.aleo-std] diff --git a/fields/src/fp_256.rs b/fields/src/fp_256.rs index 02760186e3..15af78a1e1 100644 --- a/fields/src/fp_256.rs +++ b/fields/src/fp_256.rs @@ -583,6 +583,10 @@ impl ToBits for Fp256

{ self.write_bits_le(vec); vec[initial_len..].reverse(); } + + fn num_bits() -> Option { + Some(256) + } } impl ToBytes for Fp256

{ diff --git a/fields/src/fp_384.rs b/fields/src/fp_384.rs index 02e8791fa6..596ef055c2 100644 --- a/fields/src/fp_384.rs +++ b/fields/src/fp_384.rs @@ -612,6 +612,10 @@ impl ToBits for Fp384

{ self.write_bits_le(vec); vec[initial_len..].reverse(); } + + fn num_bits() -> Option { + Some(384) + } } impl ToBytes for Fp384

{ diff --git a/fields/src/traits/poseidon_grain_lfsr.rs b/fields/src/traits/poseidon_grain_lfsr.rs index c8931f4f68..bcefe30fa6 100644 --- a/fields/src/traits/poseidon_grain_lfsr.rs +++ b/fields/src/traits/poseidon_grain_lfsr.rs @@ -117,17 +117,25 @@ impl PoseidonGrainLFSR { pub fn get_field_elements_mod_p(&mut self, num_elems: usize) -> Result> { // Ensure the number of bits matches the modulus. - if self.field_size_in_bits != F::Parameters::MODULUS_BITS as u64 { + let num_bits = self.field_size_in_bits; + if num_bits != F::Parameters::MODULUS_BITS as u64 { bail!("The number of bits in the field must match the modulus"); } + // Prepare reusable vectors for the intermediate bits and bytes. + let mut bits = Vec::with_capacity(num_bits as usize); + let mut bytes = Vec::with_capacity((num_bits as usize + 7) / 8); + let mut output = Vec::with_capacity(num_elems); for _ in 0..num_elems { // Obtain `n` bits and make it most-significant-bit first. - let mut bits = self.get_bits(self.field_size_in_bits as usize).collect::>(); + let bits_iter = self.get_bits(num_bits as usize); + for bit in bits_iter { + bits.push(bit); + } bits.reverse(); - let bytes = bits + for byte in bits .chunks(8) .map(|chunk| { let mut sum = chunk[0] as u8; @@ -139,9 +147,16 @@ impl PoseidonGrainLFSR { sum }) .rev() - .collect::>(); + { + bytes.push(byte); + } output.push(F::from_bytes_be_mod_order(&bytes)); + + // Clear the vectors of bits and bytes so they can be reused + // in the next iteration. 
+ bits.clear(); + bytes.clear(); } Ok(output) } @@ -199,3 +214,9 @@ impl<'a> Iterator for LFSRIter<'a> { } } } + +impl<'a> ExactSizeIterator for LFSRIter<'a> { + fn len(&self) -> usize { + self.num_bits + } +} diff --git a/ledger/Cargo.toml b/ledger/Cargo.toml index a175b2c95c..8258be5fc0 100644 --- a/ledger/Cargo.toml +++ b/ledger/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "snarkvm-ledger" -version = "0.16.3" +version = "0.16.15" authors = [ "The Aleo Team " ] description = "A node ledger for a decentralized virtual machine" homepage = "https://aleo.org" @@ -33,6 +33,7 @@ async = [ "ledger-query/async", "synthesizer/async" ] +metrics = [ "ledger-committee/metrics" ] rocks = [ "ledger-store/rocks" ] serial = [ "console/serial", @@ -45,7 +46,7 @@ serial = [ "ledger-store/serial", "synthesizer/serial" ] -test = [ "ledger-block/test" ] +test = [ "ledger-block/test", "ledger-store/test" ] test-helpers = [ "ledger-test-helpers", "ledger-committee/test-helpers", @@ -56,54 +57,54 @@ timer = [ "aleo-std/timer" ] [dependencies.console] package = "snarkvm-console" path = "../console" -version = "=0.16.3" +version = "=0.16.15" [dependencies.ledger-authority] package = "snarkvm-ledger-authority" path = "./authority" -version = "=0.16.3" +version = "=0.16.15" [dependencies.ledger-block] package = "snarkvm-ledger-block" path = "./block" -version = "=0.16.3" +version = "=0.16.15" [dependencies.ledger-coinbase] package = "snarkvm-ledger-coinbase" path = "./coinbase" -version = "=0.16.3" +version = "=0.16.15" [dependencies.ledger-committee] package = "snarkvm-ledger-committee" path = "./committee" -version = "=0.16.3" +version = "=0.16.15" [dependencies.ledger-narwhal] package = "snarkvm-ledger-narwhal" path = "./narwhal" -version = "=0.16.3" +version = "=0.16.15" [dependencies.ledger-query] package = "snarkvm-ledger-query" path = "./query" -version = "=0.16.3" +version = "=0.16.15" features = [ "query" ] [dependencies.ledger-store] package = "snarkvm-ledger-store" path = 
"./store" -version = "=0.16.3" +version = "=0.16.15" [dependencies.ledger-test-helpers] package = "snarkvm-ledger-test-helpers" path = "./test-helpers" -version = "=0.16.3" +version = "=0.16.15" optional = true [dependencies.synthesizer] package = "snarkvm-synthesizer" path = "../synthesizer" -version = "=0.16.3" +version = "=0.16.15" [dependencies.aleo-std] version = "0.1.18" diff --git a/ledger/authority/Cargo.toml b/ledger/authority/Cargo.toml index 5dcfd955f1..2be5317994 100644 --- a/ledger/authority/Cargo.toml +++ b/ledger/authority/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "snarkvm-ledger-authority" -version = "0.16.3" +version = "0.16.15" authors = [ "The Aleo Team " ] description = "Data structures for a block authority in a decentralized virtual machine" homepage = "https://aleo.org" @@ -32,12 +32,12 @@ test-helpers = [ "narwhal-subdag/test-helpers" ] [dependencies.console] package = "snarkvm-console" path = "../../console" -version = "=0.16.3" +version = "=0.16.15" [dependencies.narwhal-subdag] package = "snarkvm-ledger-narwhal-subdag" path = "../narwhal/subdag" -version = "=0.16.3" +version = "=0.16.15" [dependencies.anyhow] version = "1" diff --git a/ledger/authority/src/bytes.rs b/ledger/authority/src/bytes.rs index 45ccbe65b4..1d710be1c6 100644 --- a/ledger/authority/src/bytes.rs +++ b/ledger/authority/src/bytes.rs @@ -52,9 +52,7 @@ impl ToBytes for Authority { #[cfg(test)] mod tests { use super::*; - use console::{network::Testnet3, prelude::TestRng}; - - type CurrentNetwork = Testnet3; + use console::prelude::TestRng; #[test] fn test_bytes() { @@ -64,7 +62,6 @@ mod tests { // Check the byte representation. 
let expected_bytes = expected.to_bytes_le().unwrap(); assert_eq!(expected, Authority::read_le(&expected_bytes[..]).unwrap()); - assert!(Authority::::read_le(&expected_bytes[1..]).is_err()); } } } diff --git a/ledger/benches/transaction.rs b/ledger/benches/transaction.rs index c37267d923..233e15aa37 100644 --- a/ledger/benches/transaction.rs +++ b/ledger/benches/transaction.rs @@ -80,7 +80,7 @@ function hello: c.bench_function("Transaction::Deploy - verify", |b| { let transaction = vm.deploy(&private_key, &program, Some(records[0].clone()), 600000, None, rng).unwrap(); - b.iter(|| vm.check_transaction(&transaction, None).unwrap()) + b.iter(|| vm.check_transaction(&transaction, None, rng).unwrap()) }); } @@ -92,34 +92,79 @@ fn execute(c: &mut Criterion) { let address = Address::try_from(&private_key).unwrap(); // Initialize the VM. - let (vm, _records) = initialize_vm(&private_key, rng); - - // Prepare the inputs. - let inputs = - [Value::::from_str(&address.to_string()).unwrap(), Value::::from_str("1u64").unwrap()] - .into_iter(); - - // Authorize the execution. - let execute_authorization = vm.authorize(&private_key, "credits.aleo", "transfer_public", inputs, rng).unwrap(); - // Retrieve the execution ID. - let execution_id = execute_authorization.to_execution_id().unwrap(); - // Authorize the fee. - let fee_authorization = vm.authorize_fee_public(&private_key, 100000, 1000, execution_id, rng).unwrap(); - - c.bench_function("Transaction::Execute(transfer_public)", |b| { - b.iter(|| { - vm.execute_authorization(execute_authorization.replicate(), Some(fee_authorization.replicate()), None, rng) + let (vm, records) = initialize_vm(&private_key, rng); + + { + // Prepare the inputs. + let inputs = + [Value::::from_str(&address.to_string()).unwrap(), Value::::from_str("1u64").unwrap()] + .into_iter(); + + // Authorize the execution. 
+ let execute_authorization = vm.authorize(&private_key, "credits.aleo", "transfer_public", inputs, rng).unwrap(); + // Retrieve the execution ID. + let execution_id = execute_authorization.to_execution_id().unwrap(); + // Authorize the fee. + let fee_authorization = vm.authorize_fee_public(&private_key, 300000, 1000, execution_id, rng).unwrap(); + + c.bench_function("Transaction::Execute(transfer_public)", |b| { + b.iter(|| { + vm.execute_authorization( + execute_authorization.replicate(), + Some(fee_authorization.replicate()), + None, + rng, + ) .unwrap(); - }) - }); + }) + }); + + let transaction = vm + .execute_authorization(execute_authorization.replicate(), Some(fee_authorization.replicate()), None, rng) + .unwrap(); + + c.bench_function("Transaction::Execute(transfer_public) - verify", |b| { + b.iter(|| vm.check_transaction(&transaction, None, rng).unwrap()) + }); + } + + { + // Prepare the inputs. + let inputs = [ + Value::::Record(records[0].clone()), + Value::::from_str(&address.to_string()).unwrap(), + Value::::from_str("1u64").unwrap(), + ] + .into_iter(); + + // Authorize the execution. + let execute_authorization = + vm.authorize(&private_key, "credits.aleo", "transfer_private", inputs, rng).unwrap(); + // Retrieve the execution ID. + let execution_id = execute_authorization.to_execution_id().unwrap(); + // Authorize the fee. 
+ let fee_authorization = vm.authorize_fee_public(&private_key, 300000, 1000, execution_id, rng).unwrap(); + + c.bench_function("Transaction::Execute(transfer_private)", |b| { + b.iter(|| { + vm.execute_authorization( + execute_authorization.replicate(), + Some(fee_authorization.replicate()), + None, + rng, + ) + .unwrap(); + }) + }); - let transaction = vm - .execute_authorization(execute_authorization.replicate(), Some(fee_authorization.replicate()), None, rng) - .unwrap(); + let transaction = vm + .execute_authorization(execute_authorization.replicate(), Some(fee_authorization.replicate()), None, rng) + .unwrap(); - c.bench_function("Transaction::Execute(transfer_public) - verify", |b| { - b.iter(|| vm.check_transaction(&transaction, None).unwrap()) - }); + c.bench_function("Transaction::Execute(transfer_private) - verify", |b| { + b.iter(|| vm.check_transaction(&transaction, None, rng).unwrap()) + }); + } } criterion_group! { diff --git a/ledger/block/Cargo.toml b/ledger/block/Cargo.toml index 5741edcd3a..c3dff51f0e 100644 --- a/ledger/block/Cargo.toml +++ b/ledger/block/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "snarkvm-ledger-block" -version = "0.16.3" +version = "0.16.15" authors = [ "The Aleo Team " ] description = "A block for a decentralized virtual machine" homepage = "https://aleo.org" @@ -39,42 +39,42 @@ test = [ ] [dependencies.console] package = "snarkvm-console" path = "../../console" -version = "=0.16.3" +version = "=0.16.15" [dependencies.ledger-authority] package = "snarkvm-ledger-authority" path = "../authority" -version = "=0.16.3" +version = "=0.16.15" [dependencies.ledger-coinbase] package = "snarkvm-ledger-coinbase" path = "../../ledger/coinbase" -version = "=0.16.3" +version = "=0.16.15" [dependencies.ledger-committee] package = "snarkvm-ledger-committee" path = "../../ledger/committee" -version = "=0.16.3" +version = "=0.16.15" [dependencies.ledger-narwhal-subdag] package = "snarkvm-ledger-narwhal-subdag" path = "../narwhal/subdag" 
-version = "=0.16.3" +version = "=0.16.15" [dependencies.ledger-narwhal-transmission-id] package = "snarkvm-ledger-narwhal-transmission-id" path = "../narwhal/transmission-id" -version = "=0.16.3" +version = "=0.16.15" [dependencies.synthesizer-program] package = "snarkvm-synthesizer-program" path = "../../synthesizer/program" -version = "=0.16.3" +version = "=0.16.15" [dependencies.synthesizer-snark] package = "snarkvm-synthesizer-snark" path = "../../synthesizer/snark" -version = "=0.16.3" +version = "=0.16.15" [dependencies.indexmap] version = "2.0" @@ -100,6 +100,10 @@ package = "snarkvm-ledger-committee" path = "../../ledger/committee" features = [ "test-helpers" ] +[dev-dependencies.ledger-narwhal-batch-header] +package = "snarkvm-ledger-narwhal-batch-header" +path = "../narwhal/batch-header" + [dev-dependencies.ledger-query] package = "snarkvm-ledger-query" path = "../query" diff --git a/ledger/block/src/bytes.rs b/ledger/block/src/bytes.rs index 7c72e849f3..448c24cc3d 100644 --- a/ledger/block/src/bytes.rs +++ b/ledger/block/src/bytes.rs @@ -134,7 +134,6 @@ mod tests { // Check the byte representation. let expected_bytes = expected.to_bytes_le()?; assert_eq!(expected, Block::read_le(&expected_bytes[..])?); - assert!(Block::::read_le(&expected_bytes[1..]).is_err()); } Ok(()) } @@ -147,7 +146,6 @@ mod tests { // Check the byte representation. let expected_bytes = genesis_block.to_bytes_le()?; assert_eq!(genesis_block, Block::read_le(&expected_bytes[..])?); - assert!(Block::::read_le(&expected_bytes[1..]).is_err()); Ok(()) } diff --git a/ledger/block/src/helpers/target.rs b/ledger/block/src/helpers/target.rs index b396234257..ffb6265bd2 100644 --- a/ledger/block/src/helpers/target.rs +++ b/ledger/block/src/helpers/target.rs @@ -27,7 +27,7 @@ pub const fn block_reward(total_supply: u64, block_time: u16, coinbase_reward: u // Compute the expected block height at year 1. 
let block_height_at_year_1 = block_height_at_year(block_time, 1); // Compute the annual reward: (0.05 * S). - let annual_reward = (total_supply / 1000) * 50; + let annual_reward = total_supply / 20; // Compute the block reward: (0.05 * S) / H_Y1. let block_reward = annual_reward / block_height_at_year_1 as u64; // Return the sum of the block reward, coinbase reward, and transaction fees. diff --git a/ledger/block/src/lib.rs b/ledger/block/src/lib.rs index 7aa9595c2c..ff495b2ade 100644 --- a/ledger/block/src/lib.rs +++ b/ledger/block/src/lib.rs @@ -129,8 +129,19 @@ impl Block { ensure!(!transactions.is_empty(), "Cannot create a block with zero transactions"); // Ensure the number of transactions is within the allowed range. - if transactions.len() + aborted_transaction_ids.len() > Transactions::::MAX_TRANSACTIONS { - bail!("Cannot initialize a block with {} transactions (w/ aborted)", Transactions::::MAX_TRANSACTIONS); + if transactions.len() > Transactions::::MAX_TRANSACTIONS { + bail!( + "Cannot initialize a block with more than {} confirmed transactions", + Transactions::::MAX_TRANSACTIONS + ); + } + + // Ensure the number of aborted transaction IDs is within the allowed range. + if aborted_transaction_ids.len() > Transactions::::MAX_TRANSACTIONS { + bail!( + "Cannot initialize a block with more than {} aborted transaction IDs", + Transactions::::MAX_TRANSACTIONS + ); } // Compute the block hash. @@ -607,7 +618,7 @@ pub mod test_helpers { let authorization = process.authorize::(&private_key, locator.0, locator.1, inputs.iter(), rng).unwrap(); // Execute the function. - let (_, mut trace) = process.execute::(authorization).unwrap(); + let (_, mut trace) = process.execute::(authorization, rng).unwrap(); // Initialize a new block store. 
let block_store = BlockStore::>::open(None).unwrap(); diff --git a/ledger/block/src/ratifications/bytes.rs b/ledger/block/src/ratifications/bytes.rs index 0db7fd3311..aa8d6f4a9a 100644 --- a/ledger/block/src/ratifications/bytes.rs +++ b/ledger/block/src/ratifications/bytes.rs @@ -53,9 +53,6 @@ impl ToBytes for Ratifications { #[cfg(test)] mod tests { use super::*; - use console::network::Testnet3; - - type CurrentNetwork = Testnet3; const ITERATIONS: u32 = 100; @@ -68,7 +65,6 @@ mod tests { // Check the byte representation. let expected_bytes = expected.to_bytes_le()?; assert_eq!(expected, Ratifications::read_le(&expected_bytes[..])?); - assert!(Ratifications::::read_le(&expected_bytes[1..]).is_err()); } Ok(()) } diff --git a/ledger/block/src/ratify/bytes.rs b/ledger/block/src/ratify/bytes.rs index 2cd5cad850..bab8503907 100644 --- a/ledger/block/src/ratify/bytes.rs +++ b/ledger/block/src/ratify/bytes.rs @@ -94,9 +94,6 @@ impl ToBytes for Ratify { #[cfg(test)] mod tests { use super::*; - use console::network::Testnet3; - - type CurrentNetwork = Testnet3; #[test] fn test_bytes() { @@ -106,7 +103,6 @@ mod tests { // Check the byte representation. 
let expected_bytes = expected.to_bytes_le().unwrap(); assert_eq!(expected, Ratify::read_le(&expected_bytes[..]).unwrap()); - assert!(Ratify::::read_le(&expected_bytes[1..]).is_err()); } } } diff --git a/ledger/block/src/serialize.rs b/ledger/block/src/serialize.rs index b50a82c808..24edfa8687 100644 --- a/ledger/block/src/serialize.rs +++ b/ledger/block/src/serialize.rs @@ -19,7 +19,7 @@ impl Serialize for Block { fn serialize(&self, serializer: S) -> Result { match serializer.is_human_readable() { true => { - let mut block = serializer.serialize_struct("Block", 6)?; + let mut block = serializer.serialize_struct("Block", 7 + self.solutions.is_some() as usize)?; block.serialize_field("block_hash", &self.block_hash)?; block.serialize_field("previous_hash", &self.previous_hash)?; block.serialize_field("header", &self.header)?; diff --git a/ledger/block/src/transaction/bytes.rs b/ledger/block/src/transaction/bytes.rs index ee6c194dcc..299c27be99 100644 --- a/ledger/block/src/transaction/bytes.rs +++ b/ledger/block/src/transaction/bytes.rs @@ -139,9 +139,6 @@ impl ToBytes for Transaction { #[cfg(test)] mod tests { use super::*; - use console::network::Testnet3; - - type CurrentNetwork = Testnet3; #[test] fn test_bytes() -> Result<()> { @@ -158,7 +155,6 @@ mod tests { // Check the byte representation. 
let expected_bytes = expected.to_bytes_le()?; assert_eq!(expected, Transaction::read_le(&expected_bytes[..])?); - assert!(Transaction::::read_le(&expected_bytes[1..]).is_err()); } Ok(()) } diff --git a/ledger/block/src/transaction/deployment/bytes.rs b/ledger/block/src/transaction/deployment/bytes.rs index 09a8e90966..e8e97757a6 100644 --- a/ledger/block/src/transaction/deployment/bytes.rs +++ b/ledger/block/src/transaction/deployment/bytes.rs @@ -76,9 +76,6 @@ impl ToBytes for Deployment { #[cfg(test)] mod tests { use super::*; - use console::network::Testnet3; - - type CurrentNetwork = Testnet3; #[test] fn test_bytes() -> Result<()> { @@ -90,7 +87,6 @@ mod tests { // Check the byte representation. let expected_bytes = expected.to_bytes_le()?; assert_eq!(expected, Deployment::read_le(&expected_bytes[..])?); - assert!(Deployment::::read_le(&expected_bytes[1..]).is_err()); Ok(()) } } diff --git a/ledger/block/src/transaction/execution/bytes.rs b/ledger/block/src/transaction/execution/bytes.rs index 417530360b..c10a8c8fda 100644 --- a/ledger/block/src/transaction/execution/bytes.rs +++ b/ledger/block/src/transaction/execution/bytes.rs @@ -75,9 +75,6 @@ impl ToBytes for Execution { #[cfg(test)] mod tests { use super::*; - use console::network::Testnet3; - - type CurrentNetwork = Testnet3; #[test] fn test_bytes() -> Result<()> { @@ -89,7 +86,6 @@ mod tests { // Check the byte representation. 
let expected_bytes = expected.to_bytes_le()?; assert_eq!(expected, Execution::read_le(&expected_bytes[..])?); - assert!(Execution::::read_le(&expected_bytes[1..]).is_err()); Ok(()) } } diff --git a/ledger/block/src/transaction/execution/serialize.rs b/ledger/block/src/transaction/execution/serialize.rs index 08724acbbb..b432dd8e72 100644 --- a/ledger/block/src/transaction/execution/serialize.rs +++ b/ledger/block/src/transaction/execution/serialize.rs @@ -19,7 +19,7 @@ impl Serialize for Execution { fn serialize(&self, serializer: S) -> Result { match serializer.is_human_readable() { true => { - let mut execution = serializer.serialize_struct("Execution", 3)?; + let mut execution = serializer.serialize_struct("Execution", 2 + self.proof.is_some() as usize)?; execution .serialize_field("transitions", &self.transitions.values().collect::>>())?; execution.serialize_field("global_state_root", &self.global_state_root)?; diff --git a/ledger/block/src/transaction/fee/bytes.rs b/ledger/block/src/transaction/fee/bytes.rs index 42443a9981..1f156f622f 100644 --- a/ledger/block/src/transaction/fee/bytes.rs +++ b/ledger/block/src/transaction/fee/bytes.rs @@ -64,9 +64,6 @@ impl ToBytes for Fee { #[cfg(test)] mod tests { use super::*; - use console::network::Testnet3; - - type CurrentNetwork = Testnet3; #[test] fn test_bytes() -> Result<()> { @@ -78,7 +75,6 @@ mod tests { // Check the byte representation. let expected_bytes = expected.to_bytes_le()?; assert_eq!(expected, Fee::read_le(&expected_bytes[..])?); - assert!(Fee::::read_le(&expected_bytes[1..]).is_err()); // Construct a new public fee. let expected = crate::transaction::fee::test_helpers::sample_fee_public_hardcoded(rng); @@ -86,7 +82,6 @@ mod tests { // Check the byte representation. 
let expected_bytes = expected.to_bytes_le()?; assert_eq!(expected, Fee::read_le(&expected_bytes[..])?); - assert!(Fee::::read_le(&expected_bytes[1..]).is_err()); Ok(()) } diff --git a/ledger/block/src/transaction/fee/mod.rs b/ledger/block/src/transaction/fee/mod.rs index 8ee356c9fb..233704fca4 100644 --- a/ledger/block/src/transaction/fee/mod.rs +++ b/ledger/block/src/transaction/fee/mod.rs @@ -253,7 +253,7 @@ pub mod test_helpers { ) .unwrap(); // Construct the fee trace. - let (_, mut trace) = process.execute::(authorization).unwrap(); + let (_, mut trace) = process.execute::(authorization, rng).unwrap(); // Initialize a new block store. let block_store = BlockStore::>::open(None).unwrap(); @@ -309,7 +309,7 @@ pub mod test_helpers { ) .unwrap(); // Construct the fee trace. - let (_, mut trace) = process.execute::(authorization).unwrap(); + let (_, mut trace) = process.execute::(authorization, rng).unwrap(); // Initialize a new block store. let block_store = BlockStore::>::open(None).unwrap(); diff --git a/ledger/block/src/transaction/fee/serialize.rs b/ledger/block/src/transaction/fee/serialize.rs index 97c1f08f64..b73ec5e037 100644 --- a/ledger/block/src/transaction/fee/serialize.rs +++ b/ledger/block/src/transaction/fee/serialize.rs @@ -19,7 +19,7 @@ impl Serialize for Fee { fn serialize(&self, serializer: S) -> Result { match serializer.is_human_readable() { true => { - let mut fee = serializer.serialize_struct("Fee", 3)?; + let mut fee = serializer.serialize_struct("Fee", 2 + self.proof.is_some() as usize)?; fee.serialize_field("transition", &self.transition)?; fee.serialize_field("global_state_root", &self.global_state_root)?; if let Some(proof) = &self.proof { diff --git a/ledger/block/src/transaction/mod.rs b/ledger/block/src/transaction/mod.rs index 10fa27de2e..fe1ace4c76 100644 --- a/ledger/block/src/transaction/mod.rs +++ b/ledger/block/src/transaction/mod.rs @@ -82,19 +82,19 @@ impl Transaction { impl Transaction { /// Returns `true` if the transaction 
is a deploy transaction. #[inline] - pub fn is_deploy(&self) -> bool { + pub const fn is_deploy(&self) -> bool { matches!(self, Self::Deploy(..)) } /// Returns `true` if the transaction is an execute transaction. #[inline] - pub fn is_execute(&self) -> bool { + pub const fn is_execute(&self) -> bool { matches!(self, Self::Execute(..)) } /// Returns `true` if the transaction is a fee transaction. #[inline] - pub fn is_fee(&self) -> bool { + pub const fn is_fee(&self) -> bool { matches!(self, Self::Fee(..)) } } diff --git a/ledger/block/src/transaction/serialize.rs b/ledger/block/src/transaction/serialize.rs index 9e9b7f941e..ec6aae1448 100644 --- a/ledger/block/src/transaction/serialize.rs +++ b/ledger/block/src/transaction/serialize.rs @@ -29,7 +29,7 @@ impl Serialize for Transaction { transaction.end() } Self::Execute(id, execution, fee) => { - let mut transaction = serializer.serialize_struct("Transaction", 4)?; + let mut transaction = serializer.serialize_struct("Transaction", 3 + fee.is_some() as usize)?; transaction.serialize_field("type", "execute")?; transaction.serialize_field("id", &id)?; transaction.serialize_field("execution", &execution)?; diff --git a/ledger/block/src/transactions/bytes.rs b/ledger/block/src/transactions/bytes.rs index 99df0668db..d76e8625ba 100644 --- a/ledger/block/src/transactions/bytes.rs +++ b/ledger/block/src/transactions/bytes.rs @@ -53,9 +53,6 @@ impl ToBytes for Transactions { #[cfg(test)] mod tests { use super::*; - use console::network::Testnet3; - - type CurrentNetwork = Testnet3; #[test] fn test_bytes() -> Result<()> { @@ -65,7 +62,6 @@ mod tests { // Check the byte representation. 
let expected_bytes = expected.to_bytes_le()?; assert_eq!(expected, Transactions::read_le(&expected_bytes[..])?); - assert!(Transactions::::read_le(&expected_bytes[1..]).is_err()); } Ok(()) } diff --git a/ledger/block/src/transactions/confirmed/bytes.rs b/ledger/block/src/transactions/confirmed/bytes.rs index ff7ceb4772..16f929215c 100644 --- a/ledger/block/src/transactions/confirmed/bytes.rs +++ b/ledger/block/src/transactions/confirmed/bytes.rs @@ -143,9 +143,6 @@ impl ToBytes for ConfirmedTransaction { #[cfg(test)] mod tests { use super::*; - use console::network::Testnet3; - - type CurrentNetwork = Testnet3; #[test] fn test_bytes() { @@ -153,7 +150,6 @@ mod tests { // Check the byte representation. let expected_bytes = expected.to_bytes_le().unwrap(); assert_eq!(expected, ConfirmedTransaction::read_le(&expected_bytes[..]).unwrap()); - assert!(ConfirmedTransaction::::read_le(&expected_bytes[1..]).is_err()); } } } diff --git a/ledger/block/src/transactions/confirmed/mod.rs b/ledger/block/src/transactions/confirmed/mod.rs index 9d16b7915d..c617c1b3f4 100644 --- a/ledger/block/src/transactions/confirmed/mod.rs +++ b/ledger/block/src/transactions/confirmed/mod.rs @@ -17,7 +17,7 @@ mod serialize; mod string; use crate::{rejected::Rejected, Transaction}; -use console::{network::prelude::*, types::Field}; +use console::{network::prelude::*, program::FINALIZE_ID_DEPTH, types::Field}; use synthesizer_program::FinalizeOperation; pub type NumFinalizeSize = u16; @@ -261,6 +261,16 @@ impl ConfirmedTransaction { } } + /// Returns the finalize ID, by computing the root of a (small) Merkle tree comprised of + /// the ordered finalize operations for the transaction. + pub fn to_finalize_id(&self) -> Result> { + // Prepare the leaves. + let leaves = self.finalize_operations().iter().map(ToBits::to_bits_le).collect::>(); + // Compute the finalize ID. + // Note: This call will ensure the number of finalize operations is within the size of the Merkle tree. 
+ Ok(*N::merkle_tree_bhp::(&leaves)?.root()) + } + /// Returns the rejected ID, if the confirmed transaction is rejected. pub fn to_rejected_id(&self) -> Result>> { match self { diff --git a/ledger/block/src/transactions/merkle.rs b/ledger/block/src/transactions/merkle.rs index 4efc914ba5..7f666f4ff5 100644 --- a/ledger/block/src/transactions/merkle.rs +++ b/ledger/block/src/transactions/merkle.rs @@ -17,13 +17,23 @@ use super::*; impl Transactions { /// Returns the finalize root of the transactions. pub fn to_finalize_root(&self, ratified_finalize_operations: Vec>) -> Result> { - // Prepare the leaves. - let leaves = self.finalize_operations().chain(&ratified_finalize_operations).map(ToBits::to_bits_le); - // Compute the finalize tree. - // Note: This call will check the number of finalize operations is within the size of the Merkle tree. - let tree = N::merkle_tree_bhp::(&leaves.collect::>())?; - // Return the finalize root. - Ok(*tree.root()) + // Prepare the ratified finalize ID - a Merkle tree composed of the ratified finalize operations. + let ratified_finalize_id = *N::merkle_tree_bhp::( + &ratified_finalize_operations.iter().map(ToBits::to_bits_le).collect::>(), + )? + .root(); + + // Prepare the leaves, composed of: + // | transaction_0 finalize ID, ..., transaction_n finalize ID | ratified finalize ID | + let leaves = self + .iter() + .map(|tx| tx.to_finalize_id().map(|id| id.to_bits_le())) + .chain(std::iter::once(Ok(ratified_finalize_id.to_bits_le()))) + .collect::>>()?; + + // Compute the finalize root. + // Note: This call will ensure the number of finalize operations is within the size of the Merkle tree. + Ok(*N::merkle_tree_bhp::(&leaves)?.root()) } } @@ -74,6 +84,10 @@ mod tests { #[test] fn test_transactions_depth() { // Ensure the log2 relationship between depth and the maximum number of transactions. 
- assert_eq!(2usize.pow(TRANSACTIONS_DEPTH as u32), Transactions::::MAX_TRANSACTIONS); + // Note: This test uses 'checked_sub' to ensure the depth is not zero. + assert_eq!( + 2usize.pow(TRANSACTIONS_DEPTH as u32).checked_sub(1).expect("Invalid depth"), + Transactions::::MAX_TRANSACTIONS + ); } } diff --git a/ledger/block/src/transactions/mod.rs b/ledger/block/src/transactions/mod.rs index be1a1f6b70..98ee5f02ab 100644 --- a/ledger/block/src/transactions/mod.rs +++ b/ledger/block/src/transactions/mod.rs @@ -32,6 +32,7 @@ use console::{ Record, TransactionsPath, TransactionsTree, + FINALIZE_ID_DEPTH, FINALIZE_OPERATIONS_DEPTH, TRANSACTIONS_DEPTH, }, @@ -167,7 +168,7 @@ impl Transactions { impl Transactions { /// The maximum number of transactions allowed in a block. - pub const MAX_TRANSACTIONS: usize = usize::pow(2, TRANSACTIONS_DEPTH as u32); + pub const MAX_TRANSACTIONS: usize = usize::pow(2, TRANSACTIONS_DEPTH as u32).saturating_sub(1); /// Returns an iterator over all transactions, for all transactions in `self`. pub fn iter(&self) -> impl '_ + ExactSizeIterator> { @@ -339,3 +340,27 @@ pub mod test_helpers { crate::test_helpers::sample_genesis_block(rng).transactions().clone() } } + +#[cfg(test)] +mod tests { + use super::*; + use ledger_narwhal_batch_header::BatchHeader; + + type CurrentNetwork = console::network::Testnet3; + + #[test] + fn test_max_transmissions() { + // Determine the maximum number of transmissions in a block. + let max_transmissions_per_block = BatchHeader::::MAX_TRANSMISSIONS_PER_BATCH + * usize::try_from(BatchHeader::::MAX_GC_ROUNDS).unwrap() + * BatchHeader::::MAX_CERTIFICATES as usize; + + // Note: The maximum number of *transmissions* in a block cannot exceed the maximum number of *transactions* in a block. + // If you intended to change the number of 'MAX_TRANSACTIONS', note that this will break the inclusion proof, + // and you will need to migrate all users to a new circuit for the inclusion proof. 
+ assert!( + max_transmissions_per_block <= Transactions::::MAX_TRANSACTIONS, + "The maximum number of transmissions in a block is too large" + ); + } +} diff --git a/ledger/block/src/transactions/rejected/bytes.rs b/ledger/block/src/transactions/rejected/bytes.rs index 402b2daeff..c5cff5fe43 100644 --- a/ledger/block/src/transactions/rejected/bytes.rs +++ b/ledger/block/src/transactions/rejected/bytes.rs @@ -63,9 +63,6 @@ impl ToBytes for Rejected { #[cfg(test)] mod tests { use super::*; - use console::network::Testnet3; - - type CurrentNetwork = Testnet3; #[test] fn test_bytes() { @@ -73,7 +70,6 @@ mod tests { // Check the byte representation. let expected_bytes = expected.to_bytes_le().unwrap(); assert_eq!(expected, Rejected::read_le(&expected_bytes[..]).unwrap()); - assert!(Rejected::::read_le(&expected_bytes[1..]).is_err()); } } } diff --git a/ledger/block/src/transition/bytes.rs b/ledger/block/src/transition/bytes.rs index e59403fead..dacc875df8 100644 --- a/ledger/block/src/transition/bytes.rs +++ b/ledger/block/src/transition/bytes.rs @@ -98,9 +98,6 @@ impl ToBytes for Transition { #[cfg(test)] mod tests { use super::*; - use console::network::Testnet3; - - type CurrentNetwork = Testnet3; #[test] fn test_bytes() -> Result<()> { @@ -112,7 +109,6 @@ mod tests { // Check the byte representation. let expected_bytes = expected.to_bytes_le()?; assert_eq!(expected, Transition::read_le(&expected_bytes[..])?); - assert!(Transition::::read_le(&expected_bytes[1..]).is_err()); Ok(()) } diff --git a/ledger/block/src/transition/input/bytes.rs b/ledger/block/src/transition/input/bytes.rs index e5d53ba277..faceae3a05 100644 --- a/ledger/block/src/transition/input/bytes.rs +++ b/ledger/block/src/transition/input/bytes.rs @@ -115,9 +115,6 @@ impl ToBytes for Input { #[cfg(test)] mod tests { use super::*; - use console::network::Testnet3; - - type CurrentNetwork = Testnet3; #[test] fn test_bytes() { @@ -125,7 +122,6 @@ mod tests { // Check the byte representation. 
let expected_bytes = expected.to_bytes_le().unwrap(); assert_eq!(expected, Input::read_le(&expected_bytes[..]).unwrap()); - assert!(Input::::read_le(&expected_bytes[1..]).is_err()); } } } diff --git a/ledger/block/src/transition/input/mod.rs b/ledger/block/src/transition/input/mod.rs index 2ef959be24..284af88f76 100644 --- a/ledger/block/src/transition/input/mod.rs +++ b/ledger/block/src/transition/input/mod.rs @@ -200,7 +200,8 @@ pub(crate) mod test_helpers { let plaintext = Plaintext::Literal(Literal::Field(Uniform::rand(rng)), Default::default()); let plaintext_hash = CurrentNetwork::hash_bhp1024(&plaintext.to_bits_le()).unwrap(); // Sample a random ciphertext. - let ciphertext = Ciphertext::from_fields(&vec![Uniform::rand(rng); 10]).unwrap(); + let fields: Vec<_> = (0..10).map(|_| Uniform::rand(rng)).collect(); + let ciphertext = Ciphertext::from_fields(&fields).unwrap(); let ciphertext_hash = CurrentNetwork::hash_bhp1024(&ciphertext.to_bits_le()).unwrap(); vec![ diff --git a/ledger/block/src/transition/input/serialize.rs b/ledger/block/src/transition/input/serialize.rs index 281d47cfff..3a959b7c40 100644 --- a/ledger/block/src/transition/input/serialize.rs +++ b/ledger/block/src/transition/input/serialize.rs @@ -20,7 +20,7 @@ impl Serialize for Input { match serializer.is_human_readable() { true => match self { Self::Constant(id, value) => { - let mut input = serializer.serialize_struct("Input", 3)?; + let mut input = serializer.serialize_struct("Input", 2 + value.is_some() as usize)?; input.serialize_field("type", "constant")?; input.serialize_field("id", &id)?; if let Some(value) = value { @@ -29,7 +29,7 @@ impl Serialize for Input { input.end() } Self::Public(id, value) => { - let mut input = serializer.serialize_struct("Input", 3)?; + let mut input = serializer.serialize_struct("Input", 2 + value.is_some() as usize)?; input.serialize_field("type", "public")?; input.serialize_field("id", &id)?; if let Some(value) = value { @@ -38,7 +38,7 @@ impl Serialize 
for Input { input.end() } Self::Private(id, value) => { - let mut input = serializer.serialize_struct("Input", 3)?; + let mut input = serializer.serialize_struct("Input", 2 + value.is_some() as usize)?; input.serialize_field("type", "private")?; input.serialize_field("id", &id)?; if let Some(value) = value { diff --git a/ledger/block/src/transition/merkle.rs b/ledger/block/src/transition/merkle.rs index d5a8fb33b1..e2f05de05b 100644 --- a/ledger/block/src/transition/merkle.rs +++ b/ledger/block/src/transition/merkle.rs @@ -36,7 +36,7 @@ impl Transition { // Check if the input ID matches the given ID. if id == input.id() { // Return the transition leaf. - return Ok(input.to_transition_leaf(index as u8)); + return Ok(input.to_transition_leaf(u8::try_from(index)?)); } } // Error if the input ID was not found. @@ -48,7 +48,7 @@ impl Transition { // Check if the output ID matches the given ID. if id == output.id() { // Return the transition leaf. - return Ok(output.to_transition_leaf((self.inputs.len() + index) as u8)); + return Ok(output.to_transition_leaf(u8::try_from(self.inputs.len() + index)?)); } } // Error if the output ID was not found. diff --git a/ledger/block/src/transition/output/bytes.rs b/ledger/block/src/transition/output/bytes.rs index 0ef6003f7e..867b6600d6 100644 --- a/ledger/block/src/transition/output/bytes.rs +++ b/ledger/block/src/transition/output/bytes.rs @@ -148,9 +148,6 @@ impl ToBytes for Output { #[cfg(test)] mod tests { use super::*; - use console::network::Testnet3; - - type CurrentNetwork = Testnet3; #[test] fn test_bytes() { @@ -158,7 +155,6 @@ mod tests { // Check the byte representation. 
let expected_bytes = expected.to_bytes_le().unwrap(); assert_eq!(expected, Output::read_le(&expected_bytes[..]).unwrap()); - assert!(Output::::read_le(&expected_bytes[1..]).is_err()); } } } diff --git a/ledger/block/src/transition/output/mod.rs b/ledger/block/src/transition/output/mod.rs index 2e4e066552..3f0f5ff7a4 100644 --- a/ledger/block/src/transition/output/mod.rs +++ b/ledger/block/src/transition/output/mod.rs @@ -277,7 +277,8 @@ pub(crate) mod test_helpers { let plaintext = Plaintext::Literal(Literal::Field(Uniform::rand(rng)), Default::default()); let plaintext_hash = CurrentNetwork::hash_bhp1024(&plaintext.to_bits_le()).unwrap(); // Sample a random ciphertext. - let ciphertext = Ciphertext::from_fields(&vec![Uniform::rand(rng); 10]).unwrap(); + let fields: Vec<_> = (0..10).map(|_| Uniform::rand(rng)).collect(); + let ciphertext = Ciphertext::from_fields(&fields).unwrap(); let ciphertext_hash = CurrentNetwork::hash_bhp1024(&ciphertext.to_bits_le()).unwrap(); // Sample a random record. 
let randomizer = Uniform::rand(rng); diff --git a/ledger/block/src/transition/output/serialize.rs b/ledger/block/src/transition/output/serialize.rs index bdf49c4baa..47041735a0 100644 --- a/ledger/block/src/transition/output/serialize.rs +++ b/ledger/block/src/transition/output/serialize.rs @@ -20,7 +20,7 @@ impl Serialize for Output { match serializer.is_human_readable() { true => match self { Self::Constant(id, value) => { - let mut output = serializer.serialize_struct("Output", 3)?; + let mut output = serializer.serialize_struct("Output", 2 + value.is_some() as usize)?; output.serialize_field("type", "constant")?; output.serialize_field("id", &id)?; if let Some(value) = value { @@ -29,7 +29,7 @@ impl Serialize for Output { output.end() } Self::Public(id, value) => { - let mut output = serializer.serialize_struct("Output", 3)?; + let mut output = serializer.serialize_struct("Output", 2 + value.is_some() as usize)?; output.serialize_field("type", "public")?; output.serialize_field("id", &id)?; if let Some(value) = value { @@ -38,7 +38,7 @@ impl Serialize for Output { output.end() } Self::Private(id, value) => { - let mut output = serializer.serialize_struct("Output", 3)?; + let mut output = serializer.serialize_struct("Output", 2 + value.is_some() as usize)?; output.serialize_field("type", "private")?; output.serialize_field("id", &id)?; if let Some(value) = value { @@ -47,7 +47,7 @@ impl Serialize for Output { output.end() } Self::Record(id, checksum, value) => { - let mut output = serializer.serialize_struct("Output", 5)?; + let mut output = serializer.serialize_struct("Output", 3 + value.is_some() as usize)?; output.serialize_field("type", "record")?; output.serialize_field("id", &id)?; output.serialize_field("checksum", &checksum)?; @@ -63,7 +63,7 @@ impl Serialize for Output { output.end() } Self::Future(id, value) => { - let mut output = serializer.serialize_struct("Output", 3)?; + let mut output = serializer.serialize_struct("Output", 2 + value.is_some() as 
usize)?; output.serialize_field("type", "future")?; output.serialize_field("id", &id)?; if let Some(value) = value { diff --git a/ledger/block/src/verify.rs b/ledger/block/src/verify.rs index 057ff2b562..22931ef5e0 100644 --- a/ledger/block/src/verify.rs +++ b/ledger/block/src/verify.rs @@ -137,16 +137,12 @@ impl Block { previous_height: u32, current_committee: &Committee, ) -> Result<(u64, u32, i64)> { + // Note: Do not remove this. This ensures that all blocks after genesis are quorum blocks. + #[cfg(not(any(test, feature = "test")))] + ensure!(self.authority.is_quorum(), "The next block must be a quorum block"); + // Determine the expected height. let expected_height = previous_height.saturating_add(1); - // Ensure the block type is correct. - match expected_height == 0 { - true => ensure!(self.authority.is_beacon(), "The genesis block must be a beacon block"), - false => { - #[cfg(not(any(test, feature = "test")))] - ensure!(self.authority.is_quorum(), "The next block must be a quorum block"); - } - } // Determine the expected round. let expected_round = match &self.authority { @@ -226,7 +222,7 @@ impl Block { let height = self.height(); // Ensure there are sufficient ratifications. - ensure!(!self.ratifications.len() >= 2, "Block {height} must contain at least 2 ratifications"); + ensure!(self.ratifications.len() >= 2, "Block {height} must contain at least 2 ratifications"); // Initialize a ratifications iterator. let mut ratifications_iter = self.ratifications.iter(); @@ -271,10 +267,10 @@ impl Block { Some(coinbase) => { // Ensure the number of solutions is within the allowed range. ensure!( - coinbase.len() <= N::MAX_PROVER_SOLUTIONS, + coinbase.len() <= N::MAX_SOLUTIONS, "Block {height} contains too many prover solutions (found '{}', expected '{}')", coinbase.len(), - N::MAX_PROVER_SOLUTIONS + N::MAX_SOLUTIONS ); // Ensure the solutions are not accepted after the block height at year 10. 
if height > block_height_at_year(N::BLOCK_TIME, 10) { @@ -390,8 +386,19 @@ impl Block { ensure!(!self.transactions.is_empty(), "Block {height} must contain at least 1 transaction"); // Ensure the number of transactions is within the allowed range. - if self.transactions.len() + self.aborted_transaction_ids.len() > Transactions::::MAX_TRANSACTIONS { - bail!("Cannot validate a block with more than {} transactions", Transactions::::MAX_TRANSACTIONS); + if self.transactions.len() > Transactions::::MAX_TRANSACTIONS { + bail!( + "Cannot validate a block with more than {} confirmed transactions", + Transactions::::MAX_TRANSACTIONS + ); + } + + // Ensure the number of aborted transaction IDs is within the allowed range. + if self.aborted_transaction_ids.len() > Transactions::::MAX_TRANSACTIONS { + bail!( + "Cannot validate a block with more than {} aborted transaction IDs", + Transactions::::MAX_TRANSACTIONS + ); } // Ensure there are no duplicate transaction IDs. @@ -404,6 +411,13 @@ impl Block { bail!("Found a duplicate transition in block {height}"); } + // Ensure there are no duplicate program IDs. + if has_duplicates( + self.transactions().iter().filter_map(|tx| tx.transaction().deployment().map(|d| d.program_id())), + ) { + bail!("Found a duplicate program ID in block {height}"); + } + /* Input */ // Ensure there are no duplicate input IDs. 
diff --git a/ledger/coinbase/Cargo.toml b/ledger/coinbase/Cargo.toml index ba32e58a4f..4af319fcec 100644 --- a/ledger/coinbase/Cargo.toml +++ b/ledger/coinbase/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "snarkvm-ledger-coinbase" -version = "0.16.3" +version = "0.16.15" authors = [ "The Aleo Team " ] description = "Coinbase puzzle for a decentralized virtual machine" homepage = "https://aleo.org" @@ -50,27 +50,27 @@ wasm = [ [dependencies.console] package = "snarkvm-console" path = "../../console" -version = "=0.16.3" +version = "=0.16.15" [dependencies.snarkvm-algorithms] path = "../../algorithms" -version = "=0.16.3" +version = "=0.16.15" [dependencies.snarkvm-curves] path = "../../curves" -version = "=0.16.3" +version = "=0.16.15" [dependencies.snarkvm-fields] path = "../../fields" -version = "=0.16.3" +version = "=0.16.15" [dependencies.snarkvm-synthesizer-snark] path = "../../synthesizer/snark" -version = "=0.16.3" +version = "=0.16.15" [dependencies.snarkvm-utilities] path = "../../utilities" -version = "=0.16.3" +version = "=0.16.15" default-features = false [dependencies.aleo-std] diff --git a/ledger/coinbase/benches/coinbase_puzzle.rs b/ledger/coinbase/benches/coinbase_puzzle.rs index 418466b67f..fe8c86c4ca 100644 --- a/ledger/coinbase/benches/coinbase_puzzle.rs +++ b/ledger/coinbase/benches/coinbase_puzzle.rs @@ -95,7 +95,7 @@ fn coinbase_puzzle_verify(c: &mut Criterion) { let puzzle = CoinbasePuzzleInst::trim(&universal_srs, config).unwrap(); let epoch_challenge = sample_epoch_challenge(degree, rng); - for batch_size in [10, 100, ::MAX_PROVER_SOLUTIONS] { + for batch_size in [10, 100, ::MAX_SOLUTIONS] { let solutions = (0..batch_size) .map(|_| { let (address, nonce) = sample_address_and_nonce(rng); diff --git a/ledger/coinbase/src/helpers/coinbase_solution/mod.rs b/ledger/coinbase/src/helpers/coinbase_solution/mod.rs index bfdacbace4..a735607a53 100644 --- a/ledger/coinbase/src/helpers/coinbase_solution/mod.rs +++ 
b/ledger/coinbase/src/helpers/coinbase_solution/mod.rs @@ -33,11 +33,11 @@ impl CoinbaseSolution { // Ensure the solutions are not empty. ensure!(!solutions.is_empty(), "There are no solutions to verify for the coinbase puzzle"); // Ensure the number of partial solutions does not exceed `MAX_PROVER_SOLUTIONS`. - if solutions.len() > N::MAX_PROVER_SOLUTIONS { + if solutions.len() > N::MAX_SOLUTIONS { bail!( "The solutions exceed the allowed number of partial solutions. ({} > {})", solutions.len(), - N::MAX_PROVER_SOLUTIONS + N::MAX_SOLUTIONS ); } // Ensure the puzzle commitments are unique. diff --git a/ledger/coinbase/src/helpers/prover_solution/serialize.rs b/ledger/coinbase/src/helpers/prover_solution/serialize.rs index 32ce1b22dd..564cba8bed 100644 --- a/ledger/coinbase/src/helpers/prover_solution/serialize.rs +++ b/ledger/coinbase/src/helpers/prover_solution/serialize.rs @@ -21,7 +21,8 @@ impl Serialize for ProverSolution { fn serialize(&self, serializer: S) -> Result { match serializer.is_human_readable() { true => { - let mut prover_solution = serializer.serialize_struct("ProverSolution", 3)?; + let mut prover_solution = + serializer.serialize_struct("ProverSolution", 2 + self.proof.random_v.is_some() as usize)?; prover_solution.serialize_field("partial_solution", &self.partial_solution)?; prover_solution.serialize_field("proof.w", &self.proof.w)?; if let Some(random_v) = &self.proof.random_v { diff --git a/ledger/coinbase/src/lib.rs b/ledger/coinbase/src/lib.rs index d07e0109e7..c3016e150b 100644 --- a/ledger/coinbase/src/lib.rs +++ b/ledger/coinbase/src/lib.rs @@ -122,11 +122,10 @@ impl CoinbasePuzzle { let product_evaluations = { let polynomial_evaluations = pk.product_domain.in_order_fft_with_pc(&polynomial, &pk.fft_precomputation); - let product_evaluations = pk.product_domain.mul_polynomials_in_evaluation_domain( + pk.product_domain.mul_polynomials_in_evaluation_domain( polynomial_evaluations, &epoch_challenge.epoch_polynomial_evaluations().evaluations, 
- ); - product_evaluations + )? }; let (commitment, _rand) = KZG10::commit_lagrange(&pk.lagrange_basis(), &product_evaluations, None, None)?; @@ -171,11 +170,11 @@ impl CoinbasePuzzle { ensure!(!solutions.is_empty(), "There are no solutions to verify for the coinbase puzzle"); // Ensure the number of partial solutions does not exceed `MAX_PROVER_SOLUTIONS`. - if solutions.len() > N::MAX_PROVER_SOLUTIONS { + if solutions.len() > N::MAX_SOLUTIONS { bail!( "The solutions exceed the allowed number of partial solutions. ({} > {})", solutions.len(), - N::MAX_PROVER_SOLUTIONS + N::MAX_SOLUTIONS ); } diff --git a/ledger/coinbase/src/tests.rs b/ledger/coinbase/src/tests.rs index 5c5546ce26..15a39fc6a3 100644 --- a/ledger/coinbase/src/tests.rs +++ b/ledger/coinbase/src/tests.rs @@ -133,7 +133,7 @@ fn test_profiler() -> Result<()> { // Generate proof inputs let epoch_challenge = EpochChallenge::new(rng.next_u32(), Default::default(), degree).unwrap(); - for batch_size in [10, 100, ::MAX_PROVER_SOLUTIONS] { + for batch_size in [10, 100, ::MAX_SOLUTIONS] { // Generate the solutions. 
let solutions = (0..batch_size) .map(|_| { diff --git a/ledger/committee/Cargo.toml b/ledger/committee/Cargo.toml index 5b92ac673e..544fbe97da 100644 --- a/ledger/committee/Cargo.toml +++ b/ledger/committee/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "snarkvm-ledger-committee" -version = "0.16.3" +version = "0.16.15" authors = [ "The Aleo Team " ] description = "A committee for a decentralized virtual machine" homepage = "https://aleo.org" @@ -27,18 +27,29 @@ edition = "2021" default = [ ] serial = [ "console/serial" ] wasm = [ "console/wasm" ] +metrics = [ "dep:metrics" ] prop-tests = [ "rand", "rand_chacha", "test-strategy", "proptest", "anyhow" ] test-helpers = [ "prop-tests", "rand_distr" ] [dependencies.console] package = "snarkvm-console" path = "../../console" -version = "=0.16.3" +version = "=0.16.15" + +[dependencies.ledger-narwhal-batch-header] +package = "snarkvm-ledger-narwhal-batch-header" +path = "../narwhal/batch-header" [dependencies.indexmap] version = "2.0" features = [ "serde", "rayon" ] +[dependencies.metrics] +package = "snarkvm-metrics" +path = "../../metrics" +version = "=0.16.15" +optional = true + [dependencies.serde_json] version = "1.0" features = [ "preserve_order" ] @@ -79,6 +90,10 @@ version = "0.4" [dev-dependencies.rayon] version = "1" +[dev-dependencies.ledger-narwhal-batch-header] +package = "snarkvm-ledger-narwhal-batch-header" +path = "../narwhal/batch-header" + [dev-dependencies.snarkvm-ledger-committee] path = "." features = [ "prop-tests" ] diff --git a/ledger/committee/src/bytes.rs b/ledger/committee/src/bytes.rs index 9ae159af1f..2c3eba9771 100644 --- a/ledger/committee/src/bytes.rs +++ b/ledger/committee/src/bytes.rs @@ -28,6 +28,13 @@ impl FromBytes for Committee { let starting_round = u64::read_le(&mut reader)?; // Read the number of members. let num_members = u16::read_le(&mut reader)?; + // Ensure the number of members is within the allowed limit. 
+ if num_members > Self::MAX_COMMITTEE_SIZE { + return Err(error(format!( + "Committee cannot exceed {} members, found {num_members}", + Self::MAX_COMMITTEE_SIZE, + ))); + } // Read the members. let mut members = IndexMap::with_capacity(num_members as usize); for _ in 0..num_members { @@ -78,9 +85,6 @@ impl ToBytes for Committee { #[cfg(test)] mod tests { use super::*; - use console::network::Testnet3; - - type CurrentNetwork = Testnet3; #[test] fn test_bytes() { @@ -90,7 +94,6 @@ mod tests { // Check the byte representation. let expected_bytes = expected.to_bytes_le().unwrap(); assert_eq!(expected, Committee::read_le(&expected_bytes[..]).unwrap()); - assert!(Committee::::read_le(&expected_bytes[1..]).is_err()); } } } diff --git a/ledger/committee/src/lib.rs b/ledger/committee/src/lib.rs index 6a08e72b51..fd67bf9983 100644 --- a/ledger/committee/src/lib.rs +++ b/ledger/committee/src/lib.rs @@ -12,6 +12,9 @@ // See the License for the specific language governing permissions and // limitations under the License. +#![forbid(unsafe_code)] +#![warn(clippy::cast_possible_truncation)] + mod bytes; mod serialize; mod string; @@ -26,6 +29,7 @@ use console::{ }; use indexmap::IndexMap; +use ledger_narwhal_batch_header::BatchHeader; use std::collections::HashSet; /// The minimum amount of stake required for a validator to bond. @@ -45,20 +49,18 @@ pub struct Committee { impl Committee { /// The maximum number of members that may be in a committee. - pub const MAX_COMMITTEE_SIZE: u16 = 200; + pub const MAX_COMMITTEE_SIZE: u16 = BatchHeader::::MAX_CERTIFICATES; /// Initializes a new `Committee` instance. pub fn new_genesis(members: IndexMap, (u64, bool)>) -> Result { - // Ensure there are exactly 4 members. - ensure!(members.len() == 4, "Genesis committee must have 4 members"); // Return the new committee. Self::new(0u64, members) } /// Initializes a new `Committee` instance. 
pub fn new(starting_round: u64, members: IndexMap, (u64, bool)>) -> Result { - // Ensure there are at least 4 members. - ensure!(members.len() >= 4, "Committee must have at least 4 members"); + // Ensure there are at least 3 members. + ensure!(members.len() >= 3, "Committee must have at least 3 members"); // Ensure there are no more than the maximum number of members. ensure!( members.len() <= Self::MAX_COMMITTEE_SIZE as usize, @@ -72,6 +74,8 @@ impl Committee { ); // Compute the total stake of the committee for this round. let total_stake = Self::compute_total_stake(&members)?; + #[cfg(feature = "metrics")] + metrics::gauge(metrics::committee::TOTAL_STAKE, total_stake as f64); // Return the new committee. Ok(Self { starting_round, members, total_stake }) } @@ -198,7 +202,8 @@ impl Committee { /// Note: This ensures the method returns a deterministic result that is SNARK-friendly. fn sorted_members(&self) -> indexmap::map::IntoIter, (u64, bool)> { let members = self.members.clone(); - members.sorted_unstable_by(|address1, stake1, address2, stake2| { + // Note: The use of 'sorted_unstable_by' is safe here because the addresses are guaranteed to be unique. + members.sorted_unstable_by(|address1, (stake1, _), address2, (stake2, _)| { // Sort by stake in decreasing order. let cmp = stake2.cmp(stake1); // If the stakes are equal, sort by x-coordinate in decreasing order. @@ -282,14 +287,31 @@ pub mod test_helpers { Committee::::new(round, committee_members).unwrap() } + /// Samples a committee where all validators have the same stake. + pub fn sample_committee_equal_stake_committee(num_members: u16, rng: &mut TestRng) -> Committee { + assert!(num_members >= 4); + // Sample the members. + let mut members = IndexMap::new(); + // Add in the minimum and maximum staked nodes. 
+ members.insert(Address::::new(rng.gen()), (MIN_VALIDATOR_STAKE, false)); + while members.len() < num_members as usize - 1 { + let stake = MIN_VALIDATOR_STAKE; + let is_open = rng.gen(); + members.insert(Address::::new(rng.gen()), (stake, is_open)); + } + // Return the committee. + Committee::::new(1, members).unwrap() + } + /// Samples a random committee. + #[allow(clippy::cast_possible_truncation)] pub fn sample_committee_custom(num_members: u16, rng: &mut TestRng) -> Committee { - assert!(num_members >= 4); + assert!(num_members >= 3); // Set the maximum amount staked in the node. const MAX_STAKE: u64 = 100_000_000_000_000; // Initialize the Exponential distribution. let distribution = Exp::new(2.0).unwrap(); - // Initialize an RNG for the stake. + // Initialize maximum stake range. let range = (MAX_STAKE - MIN_VALIDATOR_STAKE) as f64; // Sample the members. let mut members = IndexMap::new(); @@ -376,7 +398,7 @@ mod tests { // Set the number of rounds. const NUM_ROUNDS: u64 = 256 * 2_000; // Sample the number of members. - let num_members = rng.gen_range(4..50); + let num_members = rng.gen_range(3..50); // Sample a committee. let committee = crate::test_helpers::sample_committee_custom(num_members, rng); // Check the leader distribution. @@ -405,4 +427,31 @@ mod tests { } } } + + #[test] + fn test_sorted_members_with_equal_stake() { + // Initialize the RNG. + let rng = &mut TestRng::default(); + // Sample a committee. + let committee = crate::test_helpers::sample_committee_equal_stake_committee(200, rng); + // Start a timer. + let timer = std::time::Instant::now(); + // Sort the members. + let sorted_members = committee.sorted_members().collect::>(); + println!("sorted_members: {}ms", timer.elapsed().as_millis()); + // Check that the members are sorted based on our sorting criteria. 
+ for i in 0..sorted_members.len() - 1 { + let (address1, (stake1, _)) = sorted_members[i]; + let (address2, (stake2, _)) = sorted_members[i + 1]; + assert!(stake1 >= stake2); + if stake1 == stake2 { + assert!(address1.to_x_coordinate() > address2.to_x_coordinate()); + } + } + } + + #[test] + fn test_maximum_committee_size() { + assert_eq!(Committee::::MAX_COMMITTEE_SIZE, BatchHeader::::MAX_CERTIFICATES); + } } diff --git a/ledger/committee/src/prop_tests.rs b/ledger/committee/src/prop_tests.rs index c66769fa98..f0aa2e7bb2 100644 --- a/ledger/committee/src/prop_tests.rs +++ b/ledger/committee/src/prop_tests.rs @@ -125,7 +125,7 @@ impl Arbitrary for ValidatorSet { fn arbitrary_with(_: Self::Parameters) -> Self::Strategy { // use minimal validator set to speed up tests that require signing from the committee members - validator_set(any_valid_validator(), size_range(4..=4usize)).boxed() + validator_set(any_valid_validator(), size_range(3..=4usize)).boxed() } } @@ -149,7 +149,7 @@ pub fn any_valid_private_key() -> BoxedStrategy> { #[allow(dead_code)] fn too_small_committee() -> BoxedStrategy>> { - (1u64.., validator_set(any_valid_validator(), 0..4)).prop_map(to_committee).boxed() + (1u64.., validator_set(any_valid_validator(), 0..3)).prop_map(to_committee).boxed() } #[allow(dead_code)] @@ -199,5 +199,5 @@ fn invalid_stakes(#[strategy(too_low_stake_committee())] committee: Result>) { - assert!(matches!(committee, Err(e) if e.to_string().as_str() == "Committee must have at least 4 members")) + assert!(matches!(committee, Err(e) if e.to_string().as_str() == "Committee must have at least 3 members")) } diff --git a/ledger/narwhal/Cargo.toml b/ledger/narwhal/Cargo.toml index 08f2fea265..014a7a0599 100644 --- a/ledger/narwhal/Cargo.toml +++ b/ledger/narwhal/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "snarkvm-ledger-narwhal" -version = "0.16.3" +version = "0.16.15" authors = [ "The Aleo Team " ] description = "Data structures for a Narwhal-style memory pool in a 
decentralized virtual machine" homepage = "https://aleo.org" @@ -64,37 +64,37 @@ transmission-id = [ "narwhal-transmission-id" ] [dependencies.narwhal-batch-certificate] package = "snarkvm-ledger-narwhal-batch-certificate" path = "./batch-certificate" -version = "=0.16.3" +version = "=0.16.15" optional = true [dependencies.narwhal-batch-header] package = "snarkvm-ledger-narwhal-batch-header" path = "./batch-header" -version = "=0.16.3" +version = "=0.16.15" optional = true [dependencies.narwhal-data] package = "snarkvm-ledger-narwhal-data" path = "./data" -version = "=0.16.3" +version = "=0.16.15" optional = true [dependencies.narwhal-subdag] package = "snarkvm-ledger-narwhal-subdag" path = "./subdag" -version = "=0.16.3" +version = "=0.16.15" optional = true [dependencies.narwhal-transmission] package = "snarkvm-ledger-narwhal-transmission" path = "./transmission" -version = "=0.16.3" +version = "=0.16.15" optional = true [dependencies.narwhal-transmission-id] package = "snarkvm-ledger-narwhal-transmission-id" path = "./transmission-id" -version = "=0.16.3" +version = "=0.16.15" optional = true [dev-dependencies.snarkvm-ledger-narwhal] diff --git a/ledger/narwhal/batch-certificate/Cargo.toml b/ledger/narwhal/batch-certificate/Cargo.toml index f494ee91e5..0e91b4f2e5 100644 --- a/ledger/narwhal/batch-certificate/Cargo.toml +++ b/ledger/narwhal/batch-certificate/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "snarkvm-ledger-narwhal-batch-certificate" -version = "0.16.3" +version = "0.16.15" authors = [ "The Aleo Team " ] description = "A batch certificate for a Narwhal-style memory pool in a decentralized virtual machine" homepage = "https://aleo.org" @@ -24,38 +24,38 @@ license = "Apache-2.0" edition = "2021" [features] -default = [ ] +default = [ "rayon" ] serial = [ "console/serial" ] wasm = [ "console/wasm" ] -test-helpers = [ "narwhal-batch-header/test-helpers", "time" ] +test-helpers = [ "narwhal-batch-header/test-helpers" ] [dependencies.console] package = 
"snarkvm-console" path = "../../../console" -version = "=0.16.3" +version = "=0.16.15" [dependencies.narwhal-batch-header] package = "snarkvm-ledger-narwhal-batch-header" path = "../batch-header" -version = "=0.16.3" +version = "=0.16.15" [dependencies.narwhal-transmission-id] package = "snarkvm-ledger-narwhal-transmission-id" path = "../transmission-id" -version = "=0.16.3" +version = "=0.16.15" [dependencies.indexmap] version = "2.0" features = [ "serde" ] +[dependencies.rayon] +version = "1" +optional = true + [dependencies.serde_json] version = "1.0" features = [ "preserve_order" ] -[dependencies.time] -version = "0.3" -optional = true - [dev-dependencies.bincode] version = "1.3" diff --git a/ledger/narwhal/batch-certificate/src/bytes.rs b/ledger/narwhal/batch-certificate/src/bytes.rs index d7dbd25dc2..baa793c1ff 100644 --- a/ledger/narwhal/batch-certificate/src/bytes.rs +++ b/ledger/narwhal/batch-certificate/src/bytes.rs @@ -20,48 +20,97 @@ impl FromBytes for BatchCertificate { // Read the version. let version = u8::read_le(&mut reader)?; // Ensure the version is valid. - if version != 1 { - return Err(error("Invalid batch version")); + if version != 1 && version != 2 { + return Err(error("Invalid batch certificate version")); } - // Read the certificate ID. - let certificate_id = Field::read_le(&mut reader)?; - // Read the batch header. - let batch_header = BatchHeader::read_le(&mut reader)?; - // Read the number of signatures. - let num_signatures = u32::read_le(&mut reader)?; - // Read the signatures. - let mut signatures = IndexMap::with_capacity(num_signatures as usize); - for _ in 0..num_signatures { - // Read the signature. - let signature = Signature::read_le(&mut reader)?; - // Read the timestamp. - let timestamp = i64::read_le(&mut reader)?; - // Insert the signature and timestamp. - signatures.insert(signature, timestamp); + if version == 1 { + // Read the certificate ID. 
+ let certificate_id = Field::read_le(&mut reader)?; + // Read the batch header. + let batch_header = BatchHeader::read_le(&mut reader)?; + // Read the number of signatures. + let num_signatures = u32::read_le(&mut reader)?; + // Ensure the number of signatures is within bounds. + if num_signatures > Self::MAX_SIGNATURES as u32 { + return Err(error(format!( + "Number of signatures ({num_signatures}) exceeds the maximum ({})", + Self::MAX_SIGNATURES + ))); + } + // Read the signatures. + let mut signatures = IndexMap::with_capacity(num_signatures as usize); + for _ in 0..num_signatures { + // Read the signature. + let signature = Signature::read_le(&mut reader)?; + // Read the timestamp. + let timestamp = i64::read_le(&mut reader)?; + // Insert the signature and timestamp. + signatures.insert(signature, timestamp); + } + // Return the batch certificate. + Self::from_v1_deprecated(certificate_id, batch_header, signatures).map_err(error) + } else if version == 2 { + // Read the batch header. + let batch_header = BatchHeader::read_le(&mut reader)?; + // Read the number of signatures. + let num_signatures = u16::read_le(&mut reader)?; + // Ensure the number of signatures is within bounds. + if num_signatures > Self::MAX_SIGNATURES { + return Err(error(format!( + "Number of signatures ({num_signatures}) exceeds the maximum ({})", + Self::MAX_SIGNATURES + ))); + } + // Read the signature bytes. + let mut signature_bytes = vec![0u8; num_signatures as usize * Signature::::size_in_bytes()]; + reader.read_exact(&mut signature_bytes)?; + // Read the signatures. + let signatures = cfg_chunks!(signature_bytes, Signature::::size_in_bytes()) + .map(Signature::read_le) + .collect::, _>>()?; + // Return the batch certificate. + Self::from(batch_header, signatures).map_err(error) + } else { + unreachable!("Invalid batch certificate version") } - // Return the batch certificate. 
- Self::from(certificate_id, batch_header, signatures).map_err(|e| error(e.to_string())) } } impl ToBytes for BatchCertificate { /// Writes the batch certificate to the buffer. fn write_le(&self, mut writer: W) -> IoResult<()> { - // Write the version. - 1u8.write_le(&mut writer)?; - // Write the certificate ID. - self.certificate_id.write_le(&mut writer)?; - // Write the batch header. - self.batch_header.write_le(&mut writer)?; - // Write the number of signatures. - u32::try_from(self.signatures.len()).map_err(|e| error(e.to_string()))?.write_le(&mut writer)?; - // Write the signatures. - for (signature, timestamp) in &self.signatures { - // Write the signature. - signature.write_le(&mut writer)?; - // Write the timestamp. - timestamp.write_le(&mut writer)?; + match self { + Self::V1 { certificate_id, batch_header, signatures } => { + // Write the version. + 1u8.write_le(&mut writer)?; + // Write the certificate ID. + certificate_id.write_le(&mut writer)?; + // Write the batch header. + batch_header.write_le(&mut writer)?; + // Write the number of signatures. + u32::try_from(signatures.len()).map_err(error)?.write_le(&mut writer)?; + // Write the signatures. + for (signature, timestamp) in signatures.iter() { + // Write the signature. + signature.write_le(&mut writer)?; + // Write the timestamp. + timestamp.write_le(&mut writer)?; + } + } + Self::V2 { batch_header, signatures } => { + // Write the version. + 2u8.write_le(&mut writer)?; + // Write the batch header. + batch_header.write_le(&mut writer)?; + // Write the number of signatures. + u16::try_from(signatures.len()).map_err(error)?.write_le(&mut writer)?; + // Write the signatures. + for signature in signatures.iter() { + // Write the signature. 
+ signature.write_le(&mut writer)?; + } + } } Ok(()) } @@ -70,9 +119,6 @@ impl ToBytes for BatchCertificate { #[cfg(test)] mod tests { use super::*; - use console::network::Testnet3; - - type CurrentNetwork = Testnet3; #[test] fn test_bytes() { @@ -82,7 +128,6 @@ mod tests { // Check the byte representation. let expected_bytes = expected.to_bytes_le().unwrap(); assert_eq!(expected, BatchCertificate::read_le(&expected_bytes[..]).unwrap()); - assert!(BatchCertificate::::read_le(&expected_bytes[1..]).is_err()); } } } diff --git a/ledger/narwhal/batch-certificate/src/lib.rs b/ledger/narwhal/batch-certificate/src/lib.rs index a71f640ce0..c053433e5c 100644 --- a/ledger/narwhal/batch-certificate/src/lib.rs +++ b/ledger/narwhal/batch-certificate/src/lib.rs @@ -30,33 +30,63 @@ use narwhal_transmission_id::TransmissionID; use core::hash::{Hash, Hasher}; use indexmap::{IndexMap, IndexSet}; -#[derive(Clone, PartialEq, Eq)] -pub struct BatchCertificate { - /// The certificate ID. - certificate_id: Field, - /// The batch header. - batch_header: BatchHeader, - /// The `(signature, timestamp)` pairs for the batch ID from the committee. - signatures: IndexMap, i64>, +#[cfg(not(feature = "serial"))] +use rayon::prelude::*; + +#[derive(Clone)] +pub enum BatchCertificate { + // TODO (howardwu): For mainnet - Delete V1 and switch everyone to V2 as the default. + V1 { + /// The certificate ID. + certificate_id: Field, + /// The batch header. + batch_header: BatchHeader, + /// The `(signature, timestamp)` pairs for the batch ID from the committee. + signatures: IndexMap, i64>, + }, + V2 { + /// The batch header. + batch_header: BatchHeader, + /// The signatures for the batch ID from the committee. + signatures: IndexSet>, + }, } impl BatchCertificate { - /// Initializes a new batch certificate. - pub fn new(batch_header: BatchHeader, signatures: IndexMap, i64>) -> Result { - // Compute the certificate ID. 
- let certificate_id = Self::compute_certificate_id(batch_header.batch_id(), &signatures)?; - // Return the batch certificate. - Self::from(certificate_id, batch_header, signatures) - } + /// The maximum number of signatures in a batch certificate. + pub const MAX_SIGNATURES: u16 = BatchHeader::::MAX_CERTIFICATES; +} - /// Initializes a new batch certificate. - pub fn from( +impl BatchCertificate { + // TODO (howardwu): For mainnet - Delete V1 and switch everyone to V2 as the default. + /// Initializes a (deprecated) V1 batch certificate. + pub fn from_v1_deprecated( certificate_id: Field, batch_header: BatchHeader, signatures: IndexMap, i64>, ) -> Result { + /// Returns the certificate ID. + fn compute_certificate_id( + batch_id: Field, + signatures: &IndexMap, i64>, + ) -> Result> { + let mut preimage = Vec::new(); + // Insert the batch ID. + batch_id.write_le(&mut preimage)?; + // Insert the signatures. + for (signature, timestamp) in signatures { + // Insert the signature. + signature.write_le(&mut preimage)?; + // Insert the timestamp. + timestamp.write_le(&mut preimage)?; + } + // Hash the preimage. + N::hash_bhp1024(&preimage.to_bits_le()) + } + // Ensure that the number of signatures is within bounds. + ensure!(signatures.len() <= Self::MAX_SIGNATURES as usize, "Invalid number of signatures"); // Compute the certificate ID. - if certificate_id != Self::compute_certificate_id(batch_header.batch_id(), &signatures)? { + if certificate_id != compute_certificate_id(batch_header.batch_id(), &signatures)? { bail!("Invalid batch certificate ID") } // Verify the signatures are valid. @@ -66,102 +96,122 @@ impl BatchCertificate { bail!("Invalid batch certificate signature") } } + // Return the V1 batch certificate. + Ok(Self::V1 { certificate_id, batch_header, signatures }) + } + + /// Initializes a new batch certificate. + pub fn from(batch_header: BatchHeader, signatures: IndexSet>) -> Result { + // Ensure that the number of signatures is within bounds. 
+ ensure!(signatures.len() <= Self::MAX_SIGNATURES as usize, "Invalid number of signatures"); + + // Verify the signatures are valid. + cfg_iter!(signatures).try_for_each(|signature| { + if !signature.verify(&signature.to_address(), &[batch_header.batch_id()]) { + bail!("Invalid batch certificate signature") + } + Ok(()) + })?; // Return the batch certificate. - Self::from_unchecked(certificate_id, batch_header, signatures) + Self::from_unchecked(batch_header, signatures) } /// Initializes a new batch certificate. - pub fn from_unchecked( - certificate_id: Field, - batch_header: BatchHeader, - signatures: IndexMap, i64>, - ) -> Result { + pub fn from_unchecked(batch_header: BatchHeader, signatures: IndexSet>) -> Result { // Ensure the signatures are not empty. ensure!(!signatures.is_empty(), "Batch certificate must contain signatures"); // Return the batch certificate. - Ok(Self { certificate_id, batch_header, signatures }) + Ok(Self::V2 { batch_header, signatures }) + } +} + +impl PartialEq for BatchCertificate { + fn eq(&self, other: &Self) -> bool { + self.batch_id() == other.batch_id() + } +} + +impl Eq for BatchCertificate {} + +impl Hash for BatchCertificate { + fn hash(&self, state: &mut H) { + match self { + Self::V1 { batch_header, signatures, .. } => { + batch_header.batch_id().hash(state); + (signatures.len() as u64).hash(state); + for signature in signatures.iter() { + signature.hash(state); + } + } + Self::V2 { batch_header, .. } => { + batch_header.batch_id().hash(state); + } + } } } impl BatchCertificate { /// Returns the certificate ID. - pub const fn certificate_id(&self) -> Field { - self.certificate_id + pub const fn id(&self) -> Field { + match self { + Self::V1 { certificate_id, .. } => *certificate_id, + Self::V2 { batch_header, .. } => batch_header.batch_id(), + } } /// Returns the batch header. pub const fn batch_header(&self) -> &BatchHeader { - &self.batch_header + match self { + Self::V1 { batch_header, .. 
} => batch_header, + Self::V2 { batch_header, .. } => batch_header, + } } /// Returns the batch ID. pub const fn batch_id(&self) -> Field { - self.batch_header.batch_id() + self.batch_header().batch_id() } /// Returns the author. pub const fn author(&self) -> Address { - self.batch_header.author() + self.batch_header().author() } /// Returns the round. pub const fn round(&self) -> u64 { - self.batch_header.round() + self.batch_header().round() } /// Returns the transmission IDs. pub const fn transmission_ids(&self) -> &IndexSet> { - self.batch_header.transmission_ids() + self.batch_header().transmission_ids() } /// Returns the batch certificate IDs for the previous round. pub const fn previous_certificate_ids(&self) -> &IndexSet> { - self.batch_header.previous_certificate_ids() + self.batch_header().previous_certificate_ids() } - /// Returns the median timestamp of the batch ID from the committee. - pub fn median_timestamp(&self) -> i64 { - let mut timestamps = self.timestamps().chain([self.batch_header.timestamp()].into_iter()).collect::>(); - timestamps.sort_unstable(); - timestamps[timestamps.len() / 2] - } - - /// Returns the timestamps of the batch ID from the committee. - pub fn timestamps(&self) -> impl '_ + ExactSizeIterator { - self.signatures.values().copied() - } - - /// Returns the signatures of the batch ID from the committee. - pub fn signatures(&self) -> impl ExactSizeIterator> { - self.signatures.keys() - } -} - -impl Hash for BatchCertificate { - fn hash(&self, state: &mut H) { - self.batch_header.batch_id().hash(state); - (self.signatures.len() as u64).hash(state); - for signature in &self.signatures { - signature.hash(state); + /// Returns the timestamp of the batch header. + pub fn timestamp(&self) -> i64 { + match self { + Self::V1 { batch_header, signatures, .. } => { + // Return the median timestamp. 
+ let mut timestamps = + signatures.values().copied().chain([batch_header.timestamp()].into_iter()).collect::>(); + timestamps.sort_unstable(); + timestamps[timestamps.len() / 2] + } + Self::V2 { batch_header, .. } => batch_header.timestamp(), } } -} -impl BatchCertificate { - /// Returns the certificate ID. - pub fn compute_certificate_id(batch_id: Field, signatures: &IndexMap, i64>) -> Result> { - let mut preimage = Vec::new(); - // Insert the batch ID. - batch_id.write_le(&mut preimage)?; - // Insert the signatures. - for (signature, timestamp) in signatures { - // Insert the signature. - signature.write_le(&mut preimage)?; - // Insert the timestamp. - timestamp.write_le(&mut preimage)?; + /// Returns the signatures of the batch ID from the committee. + pub fn signatures(&self) -> Box>> { + match self { + Self::V1 { signatures, .. } => Box::new(signatures.keys()), + Self::V2 { signatures, .. } => Box::new(signatures.iter()), } - // Hash the preimage. - N::hash_bhp1024(&preimage.to_bits_le()) } } @@ -201,15 +251,13 @@ pub mod test_helpers { rng, ); // Sample a list of signatures. - let mut signatures = IndexMap::with_capacity(5); + let mut signatures = IndexSet::with_capacity(5); for _ in 0..5 { let private_key = PrivateKey::new(rng).unwrap(); - let timestamp = time::OffsetDateTime::now_utc().unix_timestamp(); - let timestamp_field = Field::from_u64(timestamp as u64); - signatures.insert(private_key.sign(&[batch_header.batch_id(), timestamp_field], rng).unwrap(), timestamp); + signatures.insert(private_key.sign(&[batch_header.batch_id()], rng).unwrap()); } // Return the batch certificate. - BatchCertificate::new(batch_header, signatures).unwrap() + BatchCertificate::from(batch_header, signatures).unwrap() } /// Returns a list of sample batch certificates, sampled at random. @@ -245,7 +293,7 @@ pub mod test_helpers { sample_batch_certificate_for_round(previous_round, rng), ]; // Construct the previous certificate IDs. 
- let previous_certificate_ids: IndexSet<_> = previous_certificates.iter().map(|c| c.certificate_id()).collect(); + let previous_certificate_ids: IndexSet<_> = previous_certificates.iter().map(|c| c.id()).collect(); // Sample the leader certificate. let certificate = sample_batch_certificate_for_round_with_previous_certificate_ids( current_round, @@ -256,3 +304,15 @@ pub mod test_helpers { (certificate, previous_certificates) } } + +#[cfg(test)] +mod tests { + use super::*; + + type CurrentNetwork = console::network::Testnet3; + + #[test] + fn test_maximum_signatures() { + assert_eq!(BatchHeader::::MAX_CERTIFICATES, BatchCertificate::::MAX_SIGNATURES); + } +} diff --git a/ledger/narwhal/batch-certificate/src/serialize.rs b/ledger/narwhal/batch-certificate/src/serialize.rs index 9035b4330b..1e06e50823 100644 --- a/ledger/narwhal/batch-certificate/src/serialize.rs +++ b/ledger/narwhal/batch-certificate/src/serialize.rs @@ -18,13 +18,21 @@ impl Serialize for BatchCertificate { /// Serializes the batch certificate to a JSON-string or buffer. 
fn serialize(&self, serializer: S) -> Result { match serializer.is_human_readable() { - true => { - let mut certificate = serializer.serialize_struct("BatchCertificate", 3)?; - certificate.serialize_field("certificate_id", &self.certificate_id)?; - certificate.serialize_field("batch_header", &self.batch_header)?; - certificate.serialize_field("signatures", &self.signatures)?; - certificate.end() - } + true => match self { + Self::V1 { certificate_id, batch_header, signatures } => { + let mut state = serializer.serialize_struct("BatchCertificate", 3)?; + state.serialize_field("certificate_id", certificate_id)?; + state.serialize_field("batch_header", batch_header)?; + state.serialize_field("signatures", signatures)?; + state.end() + } + Self::V2 { batch_header, signatures } => { + let mut state = serializer.serialize_struct("BatchCertificate", 2)?; + state.serialize_field("batch_header", batch_header)?; + state.serialize_field("signatures", signatures)?; + state.end() + } + }, false => ToBytesSerializer::serialize_with_size_encoding(self, serializer), } } @@ -36,12 +44,28 @@ impl<'de, N: Network> Deserialize<'de> for BatchCertificate { match deserializer.is_human_readable() { true => { let mut value = serde_json::Value::deserialize(deserializer)?; - Ok(Self::from( - DeserializeExt::take_from_value::(&mut value, "certificate_id")?, - DeserializeExt::take_from_value::(&mut value, "batch_header")?, - DeserializeExt::take_from_value::(&mut value, "signatures")?, - ) - .map_err(de::Error::custom)?) + + // Check if a certificate ID field is present. + let certificate_id = match value.get("certificate_id") { + Some(..) => Some(DeserializeExt::take_from_value::(&mut value, "certificate_id")?), + None => None, + }; + + // Parse for V1 and V2 batch certificates. 
+ if let Some(certificate_id) = certificate_id { + Self::from_v1_deprecated( + certificate_id, + DeserializeExt::take_from_value::(&mut value, "batch_header")?, + DeserializeExt::take_from_value::(&mut value, "signatures")?, + ) + .map_err(de::Error::custom) + } else { + Self::from( + DeserializeExt::take_from_value::(&mut value, "batch_header")?, + DeserializeExt::take_from_value::(&mut value, "signatures")?, + ) + .map_err(de::Error::custom) + } } false => FromBytesDeserializer::::deserialize_with_size_encoding(deserializer, "batch certificate"), } diff --git a/ledger/narwhal/batch-header/Cargo.toml b/ledger/narwhal/batch-header/Cargo.toml index cd382be768..2469093f4f 100644 --- a/ledger/narwhal/batch-header/Cargo.toml +++ b/ledger/narwhal/batch-header/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "snarkvm-ledger-narwhal-batch-header" -version = "0.16.3" +version = "0.16.15" authors = [ "The Aleo Team " ] description = "A batch header for a Narwhal-style memory pool in a decentralized virtual machine" homepage = "https://aleo.org" @@ -32,12 +32,12 @@ test-helpers = [ "narwhal-transmission-id/test-helpers", "time" ] [dependencies.console] package = "snarkvm-console" path = "../../../console" -version = "=0.16.3" +version = "=0.16.15" [dependencies.narwhal-transmission-id] package = "snarkvm-ledger-narwhal-transmission-id" path = "../transmission-id" -version = "=0.16.3" +version = "=0.16.15" [dependencies.indexmap] version = "2.0" diff --git a/ledger/narwhal/batch-header/src/bytes.rs b/ledger/narwhal/batch-header/src/bytes.rs index 30f7a2cb83..a59e9cd89f 100644 --- a/ledger/narwhal/batch-header/src/bytes.rs +++ b/ledger/narwhal/batch-header/src/bytes.rs @@ -20,7 +20,8 @@ impl FromBytes for BatchHeader { // Read the version. let version = u8::read_le(&mut reader)?; // Ensure the version is valid. - if version != 1 { + // TODO (howardwu): For mainnet - Change the version back to 1. 
+ if version != 1 && version != 2 { return Err(error("Invalid batch header version")); } @@ -34,29 +35,75 @@ impl FromBytes for BatchHeader { let timestamp = i64::read_le(&mut reader)?; // Read the number of transmission IDs. - let num_transmissions = u32::read_le(&mut reader)?; + let num_transmission_ids = u32::read_le(&mut reader)?; + // Ensure the number of transmission IDs is within bounds. + if num_transmission_ids as usize > Self::MAX_TRANSMISSIONS_PER_BATCH { + return Err(error(format!( + "Number of transmission IDs ({num_transmission_ids}) exceeds the maximum ({})", + Self::MAX_TRANSMISSIONS_PER_BATCH, + ))); + } // Read the transmission IDs. let mut transmission_ids = IndexSet::new(); - for _ in 0..num_transmissions { + for _ in 0..num_transmission_ids { // Insert the transmission ID. transmission_ids.insert(TransmissionID::read_le(&mut reader)?); } // Read the number of previous certificate IDs. - let num_previous_certificate_ids = u32::read_le(&mut reader)?; + let num_previous_certificate_ids = u16::read_le(&mut reader)?; + // Ensure the number of previous certificate IDs is within bounds. + if num_previous_certificate_ids > Self::MAX_CERTIFICATES { + return Err(error(format!( + "Number of previous certificate IDs ({num_previous_certificate_ids}) exceeds the maximum ({})", + Self::MAX_CERTIFICATES + ))); + } // Read the previous certificate IDs. - let mut previous_certificate_ids = IndexSet::with_capacity(num_previous_certificate_ids as usize); + let mut previous_certificate_ids = IndexSet::new(); for _ in 0..num_previous_certificate_ids { // Read the certificate ID. previous_certificate_ids.insert(Field::read_le(&mut reader)?); } + // TODO (howardwu): For mainnet - Change this to always encode the number of committed certificate IDs. + // We currently only encode the size and certificates in the new version, for backwards compatibility. + let num_last_election_certificate_ids = if version == 2 { + // Read the number of last election certificate IDs. 
+ u16::read_le(&mut reader)? + } else { + // Set the number of last election certificate IDs to zero. + 0 + }; + // Ensure the number of last election certificate IDs is within bounds. + if num_last_election_certificate_ids > Self::MAX_CERTIFICATES { + return Err(error(format!( + "Number of last election certificate IDs ({num_last_election_certificate_ids}) exceeds the maximum ({})", + Self::MAX_CERTIFICATES + ))); + } + // Read the last election certificate IDs. + let mut last_election_certificate_ids = IndexSet::new(); + for _ in 0..num_last_election_certificate_ids { + // Read the certificate ID. + last_election_certificate_ids.insert(Field::read_le(&mut reader)?); + } + // Read the signature. let signature = Signature::read_le(&mut reader)?; // Construct the batch. - let batch = Self::from(author, round, timestamp, transmission_ids, previous_certificate_ids, signature) - .map_err(|e| error(e.to_string()))?; + let batch = Self::from( + version, + author, + round, + timestamp, + transmission_ids, + previous_certificate_ids, + last_election_certificate_ids, + signature, + ) + .map_err(|e| error(e.to_string()))?; // Return the batch. match batch.batch_id == batch_id { @@ -70,7 +117,8 @@ impl ToBytes for BatchHeader { /// Writes the batch header to the buffer. fn write_le(&self, mut writer: W) -> IoResult<()> { // Write the version. - 1u8.write_le(&mut writer)?; + // TODO (howardwu): For mainnet - Change this back to '1u8.write_le(&mut writer)?'; + self.version.write_le(&mut writer)?; // Write the batch ID. self.batch_id.write_le(&mut writer)?; // Write the author. @@ -87,12 +135,25 @@ impl ToBytes for BatchHeader { transmission_id.write_le(&mut writer)?; } // Write the number of previous certificate IDs. - u32::try_from(self.previous_certificate_ids.len()).map_err(|e| error(e.to_string()))?.write_le(&mut writer)?; + u16::try_from(self.previous_certificate_ids.len()).map_err(|e| error(e.to_string()))?.write_le(&mut writer)?; // Write the previous certificate IDs. 
for certificate_id in &self.previous_certificate_ids { // Write the certificate ID. certificate_id.write_le(&mut writer)?; } + // TODO (howardwu): For mainnet - Change this to always encode the number of committed certificate IDs. + // We currently only encode the size and certificates in the new version, for backwards compatibility. + if self.version != 1 { + // Write the number of last election certificate IDs. + u16::try_from(self.last_election_certificate_ids.len()) + .map_err(|e| error(e.to_string()))? + .write_le(&mut writer)?; + // Write the last election certificate IDs. + for certificate_id in &self.last_election_certificate_ids { + // Write the certificate ID. + certificate_id.write_le(&mut writer)?; + } + } // Write the signature. self.signature.write_le(&mut writer) } @@ -101,9 +162,6 @@ impl ToBytes for BatchHeader { #[cfg(test)] mod tests { use super::*; - use console::network::Testnet3; - - type CurrentNetwork = Testnet3; #[test] fn test_bytes() { @@ -113,7 +171,6 @@ mod tests { // Check the byte representation. let expected_bytes = expected.to_bytes_le().unwrap(); assert_eq!(expected, BatchHeader::read_le(&expected_bytes[..]).unwrap()); - assert!(BatchHeader::::read_le(&expected_bytes[1..]).is_err()); } } } diff --git a/ledger/narwhal/batch-header/src/lib.rs b/ledger/narwhal/batch-header/src/lib.rs index abe3948722..237406a4ec 100644 --- a/ledger/narwhal/batch-header/src/lib.rs +++ b/ledger/narwhal/batch-header/src/lib.rs @@ -14,6 +14,7 @@ #![forbid(unsafe_code)] #![warn(clippy::cast_possible_truncation)] +#![allow(clippy::too_many_arguments)] mod bytes; mod serialize; @@ -30,7 +31,12 @@ use narwhal_transmission_id::TransmissionID; #[derive(Clone, PartialEq, Eq)] pub struct BatchHeader { - /// The batch ID, defined as the hash of the round number, timestamp, transmission IDs, and previous batch certificate IDs. + /// The version of the batch header. 
+ /// TODO (howardwu): For mainnet - Remove this version from the struct, we only use it here for backwards compatibility. + /// NOTE: You must keep the version encoding in the byte serialization, just remove it from the struct in memory. + version: u8, + /// The batch ID, defined as the hash of the author, round number, timestamp, transmission IDs, + /// previous batch certificate IDs, and last election certificate IDs. batch_id: Field, /// The author of the batch. author: Address, @@ -42,10 +48,24 @@ pub struct BatchHeader { transmission_ids: IndexSet>, /// The batch certificate IDs of the previous round. previous_certificate_ids: IndexSet>, + /// The last election batch certificate IDs. + last_election_certificate_ids: IndexSet>, /// The signature of the batch ID from the creator. signature: Signature, } +impl BatchHeader { + /// The maximum number of certificates in a batch. + pub const MAX_CERTIFICATES: u16 = 200; + /// The maximum number of rounds to store before garbage collecting. + pub const MAX_GC_ROUNDS: u64 = 100; + /// The maximum number of transmissions in a batch. + /// Note: This limit is set to 50 as part of safety measures to prevent DoS attacks. + /// This limit can be increased in the future as performance improves. Alternatively, + /// the rate of block production can be sped up to compensate for the limit set here. + pub const MAX_TRANSMISSIONS_PER_BATCH: usize = 50; +} + impl BatchHeader { /// Initializes a new batch header. pub fn new( @@ -54,47 +74,139 @@ impl BatchHeader { timestamp: i64, transmission_ids: IndexSet>, previous_certificate_ids: IndexSet>, + last_election_certificate_ids: IndexSet>, rng: &mut R, ) -> Result { + // Set the version. + // TODO (howardwu): For mainnet - Remove this version from the struct, we only use it here for backwards compatibility. + // NOTE: You must keep the version encoding in the byte serialization, just remove it from the struct in memory. 
+ let version = 2u8; + match round { - // If the round is zero or one, then there should be no previous certificate IDs. - 0 | 1 => ensure!(previous_certificate_ids.is_empty(), "Invalid round number, must not have certificates"), + 0 | 1 => { + // If the round is zero or one, then there should be no previous certificate IDs. + ensure!(previous_certificate_ids.is_empty(), "Invalid round number, must not have certificates"); + // If the round is zero or one, then there should be no last election certificate IDs. + ensure!(last_election_certificate_ids.is_empty(), "Invalid batch, contains election certificates"); + } // If the round is not zero and not one, then there should be at least one previous certificate ID. _ => ensure!(!previous_certificate_ids.is_empty(), "Invalid round number, must have certificates"), } + + // Ensure that the number of transmissions is within bounds. + ensure!( + transmission_ids.len() <= Self::MAX_TRANSMISSIONS_PER_BATCH, + "Invalid number of transmission ids ({})", + transmission_ids.len() + ); + // Ensure that the number of previous certificate IDs is within bounds. + ensure!( + previous_certificate_ids.len() <= Self::MAX_CERTIFICATES as usize, + "Invalid number of previous certificate IDs ({})", + previous_certificate_ids.len() + ); + // Ensure the number of last election certificate IDs is within bounds. + ensure!( + last_election_certificate_ids.len() <= Self::MAX_CERTIFICATES as usize, + "Invalid number of last election certificate IDs ({})", + last_election_certificate_ids.len() + ); + // Retrieve the address. let author = Address::try_from(private_key)?; // Compute the batch ID. - let batch_id = Self::compute_batch_id(author, round, timestamp, &transmission_ids, &previous_certificate_ids)?; + let batch_id = Self::compute_batch_id( + version, + author, + round, + timestamp, + &transmission_ids, + &previous_certificate_ids, + &last_election_certificate_ids, + )?; // Sign the preimage. 
let signature = private_key.sign(&[batch_id], rng)?; // Return the batch header. - Ok(Self { author, batch_id, round, timestamp, transmission_ids, previous_certificate_ids, signature }) + Ok(Self { + version, + author, + batch_id, + round, + timestamp, + transmission_ids, + previous_certificate_ids, + last_election_certificate_ids, + signature, + }) } /// Initializes a new batch header. pub fn from( + version: u8, author: Address, round: u64, timestamp: i64, transmission_ids: IndexSet>, previous_certificate_ids: IndexSet>, + last_election_certificate_ids: IndexSet>, signature: Signature, ) -> Result { match round { - // If the round is zero or one, then there should be no previous certificate IDs. - 0 | 1 => ensure!(previous_certificate_ids.is_empty(), "Invalid round number, must not have certificates"), + 0 | 1 => { + // If the round is zero or one, then there should be no previous certificate IDs. + ensure!(previous_certificate_ids.is_empty(), "Invalid round number, must not have certificates"); + // If the round is zero or one, then there should be no last election certificate IDs. + ensure!(last_election_certificate_ids.is_empty(), "Invalid batch, contains election certificates"); + } // If the round is not zero and not one, then there should be at least one previous certificate ID. _ => ensure!(!previous_certificate_ids.is_empty(), "Invalid round number, must have certificates"), } + + // Ensure that the number of transmissions is within bounds. + ensure!( + transmission_ids.len() <= Self::MAX_TRANSMISSIONS_PER_BATCH, + "Invalid number of transmission ids ({})", + transmission_ids.len() + ); + // Ensure that the number of previous certificate IDs is within bounds. + ensure!( + previous_certificate_ids.len() <= Self::MAX_CERTIFICATES as usize, + "Invalid number of previous certificate IDs ({})", + previous_certificate_ids.len() + ); + // Ensure the number of last election certificate IDs is within bounds. 
+ ensure!( + last_election_certificate_ids.len() <= Self::MAX_CERTIFICATES as usize, + "Invalid number of last election certificate IDs ({})", + last_election_certificate_ids.len() + ); + // Compute the batch ID. - let batch_id = Self::compute_batch_id(author, round, timestamp, &transmission_ids, &previous_certificate_ids)?; + let batch_id = Self::compute_batch_id( + version, + author, + round, + timestamp, + &transmission_ids, + &previous_certificate_ids, + &last_election_certificate_ids, + )?; // Verify the signature. if !signature.verify(&author, &[batch_id]) { bail!("Invalid signature for the batch header"); } // Return the batch header. - Ok(Self { author, batch_id, round, timestamp, transmission_ids, previous_certificate_ids, signature }) + Ok(Self { + version, + author, + batch_id, + round, + timestamp, + transmission_ids, + previous_certificate_ids, + last_election_certificate_ids, + signature, + }) } } @@ -129,6 +241,11 @@ impl BatchHeader { &self.previous_certificate_ids } + /// Returns the last election batch certificate IDs. + pub const fn last_election_certificate_ids(&self) -> &IndexSet> { + &self.last_election_certificate_ids + } + /// Returns the signature. pub const fn signature(&self) -> &Signature { &self.signature @@ -187,8 +304,19 @@ pub mod test_helpers { narwhal_transmission_id::test_helpers::sample_transmission_ids(rng).into_iter().collect::>(); // Checkpoint the timestamp for the batch. let timestamp = OffsetDateTime::now_utc().unix_timestamp(); + // Sample the last election certificate IDs. + let last_election_certificate_ids = (0..5).map(|_| Field::::rand(rng)).collect::>(); // Return the batch header. - BatchHeader::new(&private_key, round, timestamp, transmission_ids, previous_certificate_ids, rng).unwrap() + BatchHeader::new( + &private_key, + round, + timestamp, + transmission_ids, + previous_certificate_ids, + last_election_certificate_ids, + rng, + ) + .unwrap() } /// Returns a list of sample batch headers, sampled at random. 
diff --git a/ledger/narwhal/batch-header/src/serialize.rs b/ledger/narwhal/batch-header/src/serialize.rs index 6acddaead2..15cefd3a70 100644 --- a/ledger/narwhal/batch-header/src/serialize.rs +++ b/ledger/narwhal/batch-header/src/serialize.rs @@ -19,13 +19,16 @@ impl Serialize for BatchHeader { fn serialize(&self, serializer: S) -> Result { match serializer.is_human_readable() { true => { - let mut header = serializer.serialize_struct("BatchHeader", 7)?; + let mut header = serializer.serialize_struct("BatchHeader", 9)?; + // TODO (howardwu): For mainnet - Remove the version field, and update the 'len' above to 8. + header.serialize_field("version", &self.version)?; header.serialize_field("batch_id", &self.batch_id)?; header.serialize_field("author", &self.author)?; header.serialize_field("round", &self.round)?; header.serialize_field("timestamp", &self.timestamp)?; header.serialize_field("transmission_ids", &self.transmission_ids)?; header.serialize_field("previous_certificate_ids", &self.previous_certificate_ids)?; + header.serialize_field("last_election_certificate_ids", &self.last_election_certificate_ids)?; header.serialize_field("signature", &self.signature)?; header.end() } @@ -42,13 +45,31 @@ impl<'de, N: Network> Deserialize<'de> for BatchHeader { let mut header = serde_json::Value::deserialize(deserializer)?; let batch_id: Field = DeserializeExt::take_from_value::(&mut header, "batch_id")?; + // TODO (howardwu): For mainnet - Remove the version parsing. + // If the version field is present, then parse the version. + let version = DeserializeExt::take_from_value::(&mut header, "version").unwrap_or(1); + // TODO (howardwu): For mainnet - Remove the version checking. + // Ensure the version is valid. + if version != 1 && version != 2 { + return Err(error("Invalid batch header version")).map_err(de::Error::custom); + } + // TODO (howardwu): For mainnet - Always take from the 'header', no need to use this match case anymore. 
+ // If the version is not 1, then parse the last election certificate IDs. + let last_election_certificate_ids = match version { + 1 => IndexSet::new(), + 2 => DeserializeExt::take_from_value::(&mut header, "last_election_certificate_ids")?, + _ => unreachable!(), + }; + // Recover the header. let batch_header = Self::from( + version, DeserializeExt::take_from_value::(&mut header, "author")?, DeserializeExt::take_from_value::(&mut header, "round")?, DeserializeExt::take_from_value::(&mut header, "timestamp")?, DeserializeExt::take_from_value::(&mut header, "transmission_ids")?, DeserializeExt::take_from_value::(&mut header, "previous_certificate_ids")?, + last_election_certificate_ids, DeserializeExt::take_from_value::(&mut header, "signature")?, ) .map_err(de::Error::custom)?; diff --git a/ledger/narwhal/batch-header/src/to_id.rs b/ledger/narwhal/batch-header/src/to_id.rs index 1d99cbda77..1d3eea042b 100644 --- a/ledger/narwhal/batch-header/src/to_id.rs +++ b/ledger/narwhal/batch-header/src/to_id.rs @@ -18,11 +18,13 @@ impl BatchHeader { /// Returns the batch ID. pub fn to_id(&self) -> Result> { Self::compute_batch_id( + self.version, self.author, self.round, self.timestamp, &self.transmission_ids, &self.previous_certificate_ids, + &self.last_election_certificate_ids, ) } } @@ -30,11 +32,13 @@ impl BatchHeader { impl BatchHeader { /// Returns the batch ID. pub fn compute_batch_id( + version: u8, author: Address, round: u64, timestamp: i64, transmission_ids: &IndexSet>, previous_certificate_ids: &IndexSet>, + last_election_certificate_ids: &IndexSet>, ) -> Result> { let mut preimage = Vec::new(); // Insert the author. @@ -56,6 +60,17 @@ impl BatchHeader { // Insert the certificate ID. certificate_id.write_le(&mut preimage)?; } + // TODO (howardwu): For mainnet - Change this to always encode the number of committed certificate IDs. + // We currently only encode the size and certificates only in the new version, for backwards compatibility. 
+ if version != 1 { + // Insert the number of last election certificate IDs. + u32::try_from(last_election_certificate_ids.len())?.write_le(&mut preimage)?; + // Insert the last election certificate IDs. + for certificate_id in last_election_certificate_ids { + // Insert the certificate ID. + certificate_id.write_le(&mut preimage)?; + } + } // Hash the preimage. N::hash_bhp1024(&preimage.to_bits_le()) } diff --git a/ledger/narwhal/data/Cargo.toml b/ledger/narwhal/data/Cargo.toml index bc1b03c985..0605db8cd8 100644 --- a/ledger/narwhal/data/Cargo.toml +++ b/ledger/narwhal/data/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "snarkvm-ledger-narwhal-data" -version = "0.16.3" +version = "0.16.15" authors = [ "The Aleo Team " ] description = "A batch certificate for a Narwhal-style memory pool in a decentralized virtual machine" homepage = "https://aleo.org" @@ -29,7 +29,7 @@ async = [ "tokio" ] [dependencies.console] package = "snarkvm-console" path = "../../../console" -version = "=0.16.3" +version = "=0.16.15" [dependencies.bytes] version = "1" diff --git a/ledger/narwhal/subdag/Cargo.toml b/ledger/narwhal/subdag/Cargo.toml index fb8e24576d..34c72d5888 100644 --- a/ledger/narwhal/subdag/Cargo.toml +++ b/ledger/narwhal/subdag/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "snarkvm-ledger-narwhal-subdag" -version = "0.16.3" +version = "0.16.15" authors = [ "The Aleo Team " ] description = "A subdag for a Narwhal-style memory pool in a decentralized virtual machine" homepage = "https://aleo.org" @@ -32,17 +32,22 @@ test-helpers = [ "narwhal-batch-certificate/test-helpers" ] [dependencies.console] package = "snarkvm-console" path = "../../../console" -version = "=0.16.3" +version = "=0.16.15" [dependencies.narwhal-batch-certificate] package = "snarkvm-ledger-narwhal-batch-certificate" path = "../batch-certificate" -version = "=0.16.3" +version = "=0.16.15" + +[dependencies.narwhal-batch-header] +package = "snarkvm-ledger-narwhal-batch-header" +path = "../batch-header" +version 
= "=0.16.15" [dependencies.narwhal-transmission-id] package = "snarkvm-ledger-narwhal-transmission-id" path = "../transmission-id" -version = "=0.16.3" +version = "=0.16.15" [dependencies.indexmap] version = "2.0" diff --git a/ledger/narwhal/subdag/src/bytes.rs b/ledger/narwhal/subdag/src/bytes.rs index a5cc0e5dc9..703f1c5ead 100644 --- a/ledger/narwhal/subdag/src/bytes.rs +++ b/ledger/narwhal/subdag/src/bytes.rs @@ -20,21 +20,33 @@ impl FromBytes for Subdag { // Read the version. let version = u8::read_le(&mut reader)?; // Ensure the version is valid. - if version != 1 { - return Err(error("Invalid batch version")); + // TODO (howardwu): For mainnet - Change the version back to 1. + if version != 1 && version != 2 { + return Err(error(format!("Invalid subdag version ({version})"))); } // Read the number of rounds. let num_rounds = u32::read_le(&mut reader)?; + // Ensure the number of rounds is within bounds. + if num_rounds as u64 > Self::MAX_ROUNDS { + return Err(error(format!("Number of rounds ({num_rounds}) exceeds the maximum ({})", Self::MAX_ROUNDS))); + } // Read the round certificates. let mut subdag = BTreeMap::new(); for _ in 0..num_rounds { // Read the round. let round = u64::read_le(&mut reader)?; // Read the number of certificates. - let num_certificates = u32::read_le(&mut reader)?; + let num_certificates = u16::read_le(&mut reader)?; + // Ensure the number of certificates is within bounds. + if num_certificates > BatchHeader::::MAX_CERTIFICATES { + return Err(error(format!( + "Number of certificates ({num_certificates}) exceeds the maximum ({})", + BatchHeader::::MAX_CERTIFICATES + ))); + } // Read the certificates. - let mut certificates = IndexSet::with_capacity(num_certificates as usize); + let mut certificates = IndexSet::new(); for _ in 0..num_certificates { // Read the certificate. certificates.insert(BatchCertificate::read_le(&mut reader)?); @@ -42,8 +54,28 @@ impl FromBytes for Subdag { // Insert the round and certificates. 
subdag.insert(round, certificates); } + + // Read the election certificate IDs. + let mut election_certificate_ids = IndexSet::new(); + // TODO (howardwu): For mainnet - Always attempt to deserialize the election certificate IDs. + if version != 1 { + // Read the number of election certificate IDs. + let num_election_certificate_ids = u16::read_le(&mut reader)?; + // Ensure the number of election certificate IDs is within bounds. + if num_election_certificate_ids > BatchHeader::::MAX_CERTIFICATES { + return Err(error(format!( + "Number of election certificate IDs ({num_election_certificate_ids}) exceeds the maximum ({})", + BatchHeader::::MAX_CERTIFICATES + ))); + } + for _ in 0..num_election_certificate_ids { + // Read the election certificate ID. + election_certificate_ids.insert(Field::read_le(&mut reader)?); + } + } + // Return the subdag. - Self::from(subdag).map_err(|e| error(e.to_string())) + Self::from(subdag, election_certificate_ids).map_err(error) } } @@ -51,21 +83,29 @@ impl ToBytes for Subdag { /// Writes the subdag to the buffer. fn write_le(&self, mut writer: W) -> IoResult<()> { // Write the version. - 1u8.write_le(&mut writer)?; + // TODO (howardwu): For mainnet - Change the version back to 1. + 2u8.write_le(&mut writer)?; // Write the number of rounds. - u32::try_from(self.subdag.len()).map_err(|e| error(e.to_string()))?.write_le(&mut writer)?; + u32::try_from(self.subdag.len()).map_err(error)?.write_le(&mut writer)?; // Write the round certificates. for (round, certificates) in &self.subdag { // Write the round. round.write_le(&mut writer)?; // Write the number of certificates. - u32::try_from(certificates.len()).map_err(|e| error(e.to_string()))?.write_le(&mut writer)?; + u16::try_from(certificates.len()).map_err(error)?.write_le(&mut writer)?; // Write the certificates. for certificate in certificates { // Write the certificate. certificate.write_le(&mut writer)?; } } + // Write the number of election certificate IDs. 
+ u16::try_from(self.election_certificate_ids.len()).map_err(error)?.write_le(&mut writer)?; + // Write the election certificate IDs. + for election_certificate_id in &self.election_certificate_ids { + // Write the election certificate ID. + election_certificate_id.write_le(&mut writer)?; + } Ok(()) } } @@ -73,9 +113,6 @@ impl ToBytes for Subdag { #[cfg(test)] mod tests { use super::*; - use console::network::Testnet3; - - type CurrentNetwork = Testnet3; #[test] fn test_bytes() { @@ -85,7 +122,6 @@ mod tests { // Check the byte representation. let expected_bytes = expected.to_bytes_le().unwrap(); assert_eq!(expected, Subdag::read_le(&expected_bytes[..]).unwrap()); - assert!(Subdag::::read_le(&expected_bytes[1..]).is_err()); } } } diff --git a/ledger/narwhal/subdag/src/lib.rs b/ledger/narwhal/subdag/src/lib.rs index a5f46e2914..2e55377b2f 100644 --- a/ledger/narwhal/subdag/src/lib.rs +++ b/ledger/narwhal/subdag/src/lib.rs @@ -21,6 +21,7 @@ mod string; use console::{account::Address, prelude::*, program::SUBDAG_CERTIFICATES_DEPTH, types::Field}; use narwhal_batch_certificate::BatchCertificate; +use narwhal_batch_header::BatchHeader; use narwhal_transmission_id::TransmissionID; use indexmap::IndexSet; @@ -57,14 +58,15 @@ fn sanity_check_subdag_with_dfs(subdag: &BTreeMap(subdag: &BTreeMap { /// The subdag of round certificates. subdag: BTreeMap>>, + /// The election certificate IDs. + election_certificate_ids: IndexSet>, } +impl PartialEq for Subdag { + fn eq(&self, other: &Self) -> bool { + // Note: We do not check equality on `election_certificate_ids` as it would cause `Block::eq` to trigger false-positives. + self.subdag == other.subdag + } +} + +impl Eq for Subdag {} + impl Subdag { /// Initializes a new subdag. - pub fn from(subdag: BTreeMap>>) -> Result { + pub fn from( + subdag: BTreeMap>>, + election_certificate_ids: IndexSet>, + ) -> Result { // Ensure the subdag is not empty. 
ensure!(!subdag.is_empty(), "Subdag cannot be empty"); + // Ensure the subdag does not exceed the maximum number of rounds. + ensure!( + subdag.len() <= usize::try_from(Self::MAX_ROUNDS)?, + "Subdag cannot exceed the maximum number of rounds" + ); // Ensure the anchor round is even. ensure!(subdag.iter().next_back().map_or(0, |(r, _)| *r) % 2 == 0, "Anchor round must be even"); // Ensure there is only one leader certificate. ensure!(subdag.iter().next_back().map_or(0, |(_, c)| c.len()) == 1, "Subdag cannot have multiple leaders"); + // Ensure the number of election certificate IDs is within bounds. + ensure!( + election_certificate_ids.len() <= usize::try_from(BatchHeader::::MAX_CERTIFICATES)?, + "Number of election certificate IDs exceeds the maximum" + ); // Ensure the rounds are sequential. ensure!(is_sequential(&subdag), "Subdag rounds must be sequential"); // Ensure the subdag structure matches the commit. ensure!(sanity_check_subdag_with_dfs(&subdag), "Subdag structure does not match commit"); // Ensure the leader certificate is an even round. - Ok(Self { subdag }) + Ok(Self { subdag, election_certificate_ids }) } } +impl Subdag { + /// The maximum number of rounds in a subdag (bounded up to GC depth). + pub const MAX_ROUNDS: u64 = BatchHeader::::MAX_GC_ROUNDS; +} + impl Subdag { /// Returns the anchor round. pub fn anchor_round(&self) -> u64 { @@ -108,7 +139,7 @@ impl Subdag { /// Returns the certificate IDs of the subdag (from earliest round to latest round). pub fn certificate_ids(&self) -> impl Iterator> + '_ { - self.values().flatten().map(BatchCertificate::certificate_id) + self.values().flatten().map(BatchCertificate::id) } /// Returns the leader certificate. @@ -134,28 +165,40 @@ impl Subdag { self.values().flatten().flat_map(BatchCertificate::transmission_ids) } - /// Returns the timestamp of the anchor round, defined as the median timestamp of the leader certificate. 
+ /// Returns the timestamp of the anchor round, defined as the median timestamp of the subdag. pub fn timestamp(&self) -> i64 { - // Retrieve the median timestamp from the leader certificate. - self.leader_certificate().median_timestamp() + match self.leader_certificate() { + BatchCertificate::V1 { .. } => self.leader_certificate().timestamp(), + BatchCertificate::V2 { .. } => { + // Retrieve the timestamps of the certificates. + let mut timestamps = self.values().flatten().map(BatchCertificate::timestamp).collect::>(); + // Sort the timestamps. + #[cfg(not(feature = "serial"))] + timestamps.par_sort_unstable(); + #[cfg(feature = "serial")] + timestamps.sort_unstable(); + // Return the median timestamp. + timestamps[timestamps.len() / 2] + } + } + } + + /// Returns the election certificate IDs. + pub fn election_certificate_ids(&self) -> &IndexSet> { + &self.election_certificate_ids } - /// Returns the subdag root of the transactions. + /// Returns the subdag root of the certificates. pub fn to_subdag_root(&self) -> Result> { // Prepare the leaves. let leaves = cfg_iter!(self.subdag) .map(|(_, certificates)| { - certificates - .iter() - .flat_map(|certificate| certificate.certificate_id().to_bits_le()) - .collect::>() + certificates.iter().flat_map(|certificate| certificate.id().to_bits_le()).collect::>() }) .collect::>(); - // Compute the subdag tree. - let tree = N::merkle_tree_bhp::(&leaves)?; - // Return the subdag root. - Ok(*tree.root()) + // Compute the subdag root. 
+ Ok(*N::merkle_tree_bhp::(&leaves)?.root()) } } @@ -201,7 +244,7 @@ pub mod test_helpers { for _ in 0..AVAILABILITY_THRESHOLD { let certificate = narwhal_batch_certificate::test_helpers::sample_batch_certificate_for_round(starting_round, rng); - previous_certificate_ids.insert(certificate.certificate_id()); + previous_certificate_ids.insert(certificate.id()); subdag.entry(starting_round).or_default().insert(certificate); } @@ -210,7 +253,7 @@ pub mod test_helpers { for _ in 0..QUORUM_THRESHOLD { let certificate = narwhal_batch_certificate::test_helpers::sample_batch_certificate_for_round_with_previous_certificate_ids(starting_round + 1, previous_certificate_ids.clone(), rng); - previous_certificate_ids_2.insert(certificate.certificate_id()); + previous_certificate_ids_2.insert(certificate.id()); subdag.entry(starting_round + 1).or_default().insert(certificate); } @@ -223,8 +266,14 @@ pub mod test_helpers { ); subdag.insert(starting_round + 2, indexset![certificate]); + // Initialize the election certificate IDs. + let mut election_certificate_ids = IndexSet::new(); + for _ in 0..AVAILABILITY_THRESHOLD { + election_certificate_ids.insert(rng.gen()); + } + // Return the subdag. - Subdag::from(subdag).unwrap() + Subdag::from(subdag, election_certificate_ids).unwrap() } /// Returns a list of sample subdags, sampled at random. @@ -239,3 +288,24 @@ pub mod test_helpers { sample } } + +#[cfg(test)] +mod tests { + use super::*; + use narwhal_batch_header::BatchHeader; + + type CurrentNetwork = console::network::Testnet3; + + #[test] + fn test_max_certificates() { + // Determine the maximum number of certificates in a block. + let max_certificates_per_block = usize::try_from(BatchHeader::::MAX_GC_ROUNDS).unwrap() + * BatchHeader::::MAX_CERTIFICATES as usize; + + // Note: The maximum number of certificates in a block must be able to be Merklized. 
+ assert!( + max_certificates_per_block <= 2u32.checked_pow(SUBDAG_CERTIFICATES_DEPTH as u32).unwrap() as usize, + "The maximum number of certificates in a block is too large" + ); + } +} diff --git a/ledger/narwhal/subdag/src/serialize.rs b/ledger/narwhal/subdag/src/serialize.rs index 961d91d286..fb73011d91 100644 --- a/ledger/narwhal/subdag/src/serialize.rs +++ b/ledger/narwhal/subdag/src/serialize.rs @@ -19,8 +19,9 @@ impl Serialize for Subdag { fn serialize(&self, serializer: S) -> Result { match serializer.is_human_readable() { true => { - let mut certificate = serializer.serialize_struct("Subdag", 1)?; + let mut certificate = serializer.serialize_struct("Subdag", 2)?; certificate.serialize_field("subdag", &self.subdag)?; + certificate.serialize_field("election_certificate_ids", &self.election_certificate_ids)?; certificate.end() } false => ToBytesSerializer::serialize_with_size_encoding(self, serializer), @@ -34,7 +35,12 @@ impl<'de, N: Network> Deserialize<'de> for Subdag { match deserializer.is_human_readable() { true => { let mut value = serde_json::Value::deserialize(deserializer)?; - Ok(Self::from(DeserializeExt::take_from_value::(&mut value, "subdag")?) + + // TODO (howardwu): For mainnet - Directly take the value, do not check if its missing. + let election_certificate_ids = + DeserializeExt::take_from_value::(&mut value, "election_certificate_ids").unwrap_or_default(); + + Ok(Self::from(DeserializeExt::take_from_value::(&mut value, "subdag")?, election_certificate_ids) .map_err(de::Error::custom)?) 
} false => FromBytesDeserializer::::deserialize_with_size_encoding(deserializer, "subdag"), diff --git a/ledger/narwhal/transmission-id/Cargo.toml b/ledger/narwhal/transmission-id/Cargo.toml index 6b105746ca..337d55df0a 100644 --- a/ledger/narwhal/transmission-id/Cargo.toml +++ b/ledger/narwhal/transmission-id/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "snarkvm-ledger-narwhal-transmission-id" -version = "0.16.3" +version = "0.16.15" authors = [ "The Aleo Team " ] description = "A transmission ID for a Narwhal-style memory pool in a decentralized virtual machine" homepage = "https://aleo.org" @@ -32,12 +32,12 @@ test-helpers = [ ] [dependencies.console] package = "snarkvm-console" path = "../../../console" -version = "=0.16.3" +version = "=0.16.15" [dependencies.ledger-coinbase] package = "snarkvm-ledger-coinbase" path = "../../coinbase" -version = "=0.16.3" +version = "=0.16.15" [dev-dependencies.bincode] version = "1.3" diff --git a/ledger/narwhal/transmission-id/src/bytes.rs b/ledger/narwhal/transmission-id/src/bytes.rs index ed46201b1f..a8a49a3d7b 100644 --- a/ledger/narwhal/transmission-id/src/bytes.rs +++ b/ledger/narwhal/transmission-id/src/bytes.rs @@ -50,9 +50,6 @@ impl ToBytes for TransmissionID { #[cfg(test)] mod tests { use super::*; - use console::network::Testnet3; - - type CurrentNetwork = Testnet3; #[test] fn test_bytes() { @@ -62,7 +59,6 @@ mod tests { // Check the byte representation. 
let expected_bytes = expected.to_bytes_le().unwrap(); assert_eq!(expected, TransmissionID::read_le(&expected_bytes[..]).unwrap()); - assert!(TransmissionID::::read_le(&expected_bytes[1..]).is_err()); } } } diff --git a/ledger/narwhal/transmission-id/src/lib.rs b/ledger/narwhal/transmission-id/src/lib.rs index a2d911ccfc..43ffea0191 100644 --- a/ledger/narwhal/transmission-id/src/lib.rs +++ b/ledger/narwhal/transmission-id/src/lib.rs @@ -33,14 +33,14 @@ pub enum TransmissionID { } impl From> for TransmissionID { - /// Converts the puzzle commitment into an transmission ID. + /// Converts the puzzle commitment into a transmission ID. fn from(puzzle_commitment: PuzzleCommitment) -> Self { Self::Solution(puzzle_commitment) } } impl From<&N::TransactionID> for TransmissionID { - /// Converts the transaction ID into an transmission ID. + /// Converts the transaction ID into a transmission ID. fn from(transaction_id: &N::TransactionID) -> Self { Self::Transaction(*transaction_id) } diff --git a/ledger/narwhal/transmission/Cargo.toml b/ledger/narwhal/transmission/Cargo.toml index d4fdffa89d..e50f7952c5 100644 --- a/ledger/narwhal/transmission/Cargo.toml +++ b/ledger/narwhal/transmission/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "snarkvm-ledger-narwhal-transmission" -version = "0.16.3" +version = "0.16.15" authors = [ "The Aleo Team " ] description = "A transmission for a Narwhal-style memory pool in a decentralized virtual machine" homepage = "https://aleo.org" @@ -32,22 +32,22 @@ test-helpers = [ ] [dependencies.console] package = "snarkvm-console" path = "../../../console" -version = "=0.16.3" +version = "=0.16.15" [dependencies.ledger-block] package = "snarkvm-ledger-block" path = "../../block" -version = "=0.16.3" +version = "=0.16.15" [dependencies.ledger-coinbase] package = "snarkvm-ledger-coinbase" path = "../../coinbase" -version = "=0.16.3" +version = "=0.16.15" [dependencies.ledger-narwhal-data] package = "snarkvm-ledger-narwhal-data" path = "../data" -version 
= "=0.16.3" +version = "=0.16.15" [dependencies.bytes] version = "1" diff --git a/ledger/narwhal/transmission/src/bytes.rs b/ledger/narwhal/transmission/src/bytes.rs index f7349fbac3..7d06ef6077 100644 --- a/ledger/narwhal/transmission/src/bytes.rs +++ b/ledger/narwhal/transmission/src/bytes.rs @@ -59,9 +59,6 @@ impl ToBytes for Transmission { #[cfg(test)] mod tests { use super::*; - use console::network::Testnet3; - - type CurrentNetwork = Testnet3; #[test] fn test_bytes() { @@ -71,7 +68,6 @@ mod tests { // Check the byte representation. let expected_bytes = expected.to_bytes_le().unwrap(); assert_eq!(expected, Transmission::read_le(&expected_bytes[..]).unwrap()); - assert!(Transmission::::read_le(&expected_bytes[1..]).is_err()); } } } diff --git a/ledger/narwhal/transmission/src/lib.rs b/ledger/narwhal/transmission/src/lib.rs index bb7aa80695..a18d096241 100644 --- a/ledger/narwhal/transmission/src/lib.rs +++ b/ledger/narwhal/transmission/src/lib.rs @@ -37,28 +37,28 @@ pub enum Transmission { } impl From> for Transmission { - /// Converts the prover solution into an transmission. + /// Converts the prover solution into a transmission. fn from(solution: ProverSolution) -> Self { Self::Solution(Data::Object(solution)) } } impl From> for Transmission { - /// Converts the transaction into an transmission. + /// Converts the transaction into a transmission. fn from(transaction: Transaction) -> Self { Self::Transaction(Data::Object(transaction)) } } impl From>> for Transmission { - /// Converts the prover solution into an transmission. + /// Converts the prover solution into a transmission. fn from(solution: Data>) -> Self { Self::Solution(solution) } } impl From>> for Transmission { - /// Converts the transaction into an transmission. + /// Converts the transaction into a transmission. 
fn from(transaction: Data>) -> Self { Self::Transaction(transaction) } diff --git a/ledger/narwhal/transmission/src/serialize.rs b/ledger/narwhal/transmission/src/serialize.rs index 2acd037507..4657a7ff76 100644 --- a/ledger/narwhal/transmission/src/serialize.rs +++ b/ledger/narwhal/transmission/src/serialize.rs @@ -18,23 +18,25 @@ impl Serialize for Transmission { #[inline] fn serialize(&self, serializer: S) -> Result { match serializer.is_human_readable() { - true => { - let mut transmission = serializer.serialize_struct("Transmission", 2)?; - match self { - Self::Ratification => { - transmission.serialize_field("type", "ratification")?; - } - Self::Solution(solution) => { - transmission.serialize_field("type", "solution")?; - transmission.serialize_field("transmission", solution)?; - } - Self::Transaction(transaction) => { - transmission.serialize_field("type", "transaction")?; - transmission.serialize_field("transmission", transaction)?; - } + true => match self { + Self::Ratification => { + let mut transmission = serializer.serialize_struct("Transmission", 1)?; + transmission.serialize_field("type", "ratification")?; + transmission.end() } - transmission.end() - } + Self::Solution(solution) => { + let mut transmission = serializer.serialize_struct("Transmission", 2)?; + transmission.serialize_field("type", "solution")?; + transmission.serialize_field("transmission", solution)?; + transmission.end() + } + Self::Transaction(transaction) => { + let mut transmission = serializer.serialize_struct("Transmission", 2)?; + transmission.serialize_field("type", "transaction")?; + transmission.serialize_field("transmission", transaction)?; + transmission.end() + } + }, false => ToBytesSerializer::serialize_with_size_encoding(self, serializer), } } diff --git a/ledger/query/Cargo.toml b/ledger/query/Cargo.toml index 5b7e129e01..5feb1fba04 100644 --- a/ledger/query/Cargo.toml +++ b/ledger/query/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "snarkvm-ledger-query" -version = 
"0.16.3" +version = "0.16.15" authors = [ "The Aleo Team " ] description = "A query for a decentralized virtual machine" homepage = "https://aleo.org" @@ -24,24 +24,28 @@ serial = [ "ledger-store/serial", "synthesizer-program/serial" ] -wasm = [ "console/wasm", "synthesizer-program/wasm" ] +wasm = [ + "console/wasm", + "ledger-store/wasm", + "synthesizer-program/wasm" +] query = [ "ledger-store", "synthesizer-program", "ureq" ] [dependencies.console] package = "snarkvm-console" path = "../../console" -version = "=0.16.3" +version = "=0.16.15" [dependencies.ledger-store] package = "snarkvm-ledger-store" path = "../store" -version = "=0.16.3" +version = "=0.16.15" optional = true [dependencies.synthesizer-program] package = "snarkvm-synthesizer-program" path = "../../synthesizer/program" -version = "=0.16.3" +version = "=0.16.15" optional = true [dependencies.async-trait] diff --git a/ledger/src/advance.rs b/ledger/src/advance.rs index b8896e7611..ee49488d93 100644 --- a/ledger/src/advance.rs +++ b/ledger/src/advance.rs @@ -127,14 +127,29 @@ impl> Ledger { let coinbase_verifying_key = self.coinbase_puzzle.coinbase_verifying_key(); // Retrieve the latest epoch challenge. let latest_epoch_challenge = self.latest_epoch_challenge()?; + // TODO: For mainnet - Add `aborted_solution_ids` to the block. And optimize this logic. + // Verify the candidate solutions. + let verification_results: Vec<_> = cfg_into_iter!(candidate_solutions) + .map(|solution| { + ( + solution, + solution + .verify(coinbase_verifying_key, &latest_epoch_challenge, self.latest_proof_target()) + .unwrap_or(false), + ) + }) + .collect(); // Separate the candidate solutions into valid and aborted solutions. - // TODO: Add `aborted_solution_ids` to the block. 
- let (valid_candidate_solutions, _aborted_candidate_solutions): (Vec<_>, Vec<_>) = - cfg_into_iter!(candidate_solutions).partition(|solution| { - solution - .verify(coinbase_verifying_key, &latest_epoch_challenge, self.latest_proof_target()) - .unwrap_or(false) - }); + let mut valid_candidate_solutions = Vec::with_capacity(N::MAX_SOLUTIONS); + let mut aborted_candidate_solutions = Vec::new(); + for (solution, is_valid) in verification_results.into_iter() { + if is_valid && valid_candidate_solutions.len() < N::MAX_SOLUTIONS { + valid_candidate_solutions.push(solution); + } else { + aborted_candidate_solutions.push(solution); + } + } + // Check if there are any valid solutions. match valid_candidate_solutions.is_empty() { true => (None, Field::::zero(), 0u128), diff --git a/ledger/src/check_next_block.rs b/ledger/src/check_next_block.rs index e4dafb81e9..18a1ee2544 100644 --- a/ledger/src/check_next_block.rs +++ b/ledger/src/check_next_block.rs @@ -14,9 +14,11 @@ use super::*; +use rand::{rngs::StdRng, SeedableRng}; + impl> Ledger { /// Checks the given block is valid next block. - pub fn check_next_block(&self, block: &Block) -> Result<()> { + pub fn check_next_block(&self, block: &Block, rng: &mut R) -> Result<()> { let height = block.height(); // Ensure the block hash does not already exist. @@ -41,8 +43,9 @@ impl> Ledger { // Ensure each transaction is well-formed and unique. // TODO: this intermediate allocation shouldn't be necessary; this is most likely https://github.com/rust-lang/rust/issues/89418. let transactions = block.transactions().iter().collect::>(); - cfg_iter!(transactions).try_for_each(|transaction| { - self.check_transaction_basic(*transaction, transaction.to_rejected_id()?) 
+ let rngs = (0..transactions.len()).map(|_| StdRng::from_seed(rng.gen())).collect::>(); + cfg_iter!(transactions).zip(rngs).try_for_each(|(transaction, mut rng)| { + self.check_transaction_basic(*transaction, transaction.to_rejected_id()?, &mut rng) .map_err(|e| anyhow!("Invalid transaction found in the transactions list: {e}")) })?; diff --git a/ledger/src/check_transaction_basic.rs b/ledger/src/check_transaction_basic.rs index 0d2218272f..cfdb6b7a76 100644 --- a/ledger/src/check_transaction_basic.rs +++ b/ledger/src/check_transaction_basic.rs @@ -16,7 +16,12 @@ use super::*; impl> Ledger { /// Checks the given transaction is well-formed and unique. - pub fn check_transaction_basic(&self, transaction: &Transaction, rejected_id: Option>) -> Result<()> { - self.vm().check_transaction(transaction, rejected_id) + pub fn check_transaction_basic( + &self, + transaction: &Transaction, + rejected_id: Option>, + rng: &mut R, + ) -> Result<()> { + self.vm().check_transaction(transaction, rejected_id, rng) } } diff --git a/ledger/src/contains.rs b/ledger/src/contains.rs index d7d5310c87..64a52bbda5 100644 --- a/ledger/src/contains.rs +++ b/ledger/src/contains.rs @@ -58,10 +58,7 @@ impl> Ledger { /// Returns `true` if the given transaction ID exists. pub fn contains_transaction_id(&self, transaction_id: &N::TransactionID) -> Result { - self.vm - .transaction_store() - .contains_transaction_id(transaction_id) - .or(self.vm.block_store().contains_rejected_or_aborted_transaction_id(transaction_id)) + self.vm.block_store().contains_transaction_id(transaction_id) } /* Transition */ diff --git a/ledger/src/lib.rs b/ledger/src/lib.rs index 568d501ac2..ecb5c9c59d 100644 --- a/ledger/src/lib.rs +++ b/ledger/src/lib.rs @@ -144,9 +144,11 @@ impl> Ledger { pub fn load_unchecked(genesis_block: Block, dev: Option) -> Result { let timer = timer!("Ledger::load_unchecked"); + info!("Loading the ledger from storage..."); // Initialize the consensus store. 
- let Ok(store) = ConsensusStore::::open(dev) else { - bail!("Failed to load ledger (run 'snarkos clean' and try again)"); + let store = match ConsensusStore::::open(dev) { + Ok(store) => store, + Err(e) => bail!("Failed to load ledger (run 'snarkos clean' and try again)\n\n{e}\n"), }; lap!(timer, "Load consensus store"); @@ -319,21 +321,19 @@ impl> Ledger { /// Creates a deploy transaction. /// /// The `priority_fee_in_microcredits` is an additional fee **on top** of the deployment fee. - pub fn create_deploy( + pub fn create_deploy( &self, private_key: &PrivateKey, program: &Program, priority_fee_in_microcredits: u64, query: Option>, + rng: &mut R, ) -> Result> { // Fetch the unspent records. let records = self.find_unspent_credits_records(&ViewKey::try_from(private_key)?)?; ensure!(!records.len().is_zero(), "The Aleo account has no records to spend."); let mut records = records.values(); - // Initialize an RNG. - let rng = &mut ::rand::thread_rng(); - // Prepare the fee record. let fee_record = Some(records.next().unwrap().clone()); @@ -344,22 +344,20 @@ impl> Ledger { /// Creates a transfer transaction. /// /// The `priority_fee_in_microcredits` is an additional fee **on top** of the execution fee. - pub fn create_transfer( + pub fn create_transfer( &self, private_key: &PrivateKey, to: Address, amount_in_microcredits: u64, priority_fee_in_microcredits: u64, query: Option>, + rng: &mut R, ) -> Result> { // Fetch the unspent records. let records = self.find_unspent_credits_records(&ViewKey::try_from(private_key)?)?; ensure!(!records.len().is_zero(), "The Aleo account has no records to spend."); let mut records = records.values(); - // Initialize an RNG. - let rng = &mut rand::thread_rng(); - // Prepare the inputs. 
let inputs = [ Value::Record(records.next().unwrap().clone()), @@ -392,11 +390,23 @@ pub(crate) mod test_helpers { prelude::*, }; use ledger_block::Block; - use ledger_store::{helpers::memory::ConsensusMemory, ConsensusStore}; + use ledger_store::ConsensusStore; use synthesizer::vm::VM; pub(crate) type CurrentNetwork = Testnet3; - pub(crate) type CurrentLedger = Ledger>; + + #[cfg(not(feature = "rocks"))] + pub(crate) type CurrentLedger = + Ledger>; + #[cfg(feature = "rocks")] + pub(crate) type CurrentLedger = Ledger>; + + #[cfg(not(feature = "rocks"))] + pub(crate) type CurrentConsensusStore = + ConsensusStore>; + #[cfg(feature = "rocks")] + pub(crate) type CurrentConsensusStore = + ConsensusStore>; #[allow(dead_code)] pub(crate) struct TestEnv { @@ -426,7 +436,7 @@ pub(crate) mod test_helpers { rng: &mut (impl Rng + CryptoRng), ) -> CurrentLedger { // Initialize the store. - let store = ConsensusStore::<_, ConsensusMemory<_>>::open(None).unwrap(); + let store = CurrentConsensusStore::open(None).unwrap(); // Create a genesis block. let genesis = VM::from(store).unwrap().genesis_beacon(&private_key, rng).unwrap(); // Initialize the ledger with the genesis block. diff --git a/ledger/src/tests.rs b/ledger/src/tests.rs index d033edef82..273394ac95 100644 --- a/ledger/src/tests.rs +++ b/ledger/src/tests.rs @@ -81,7 +81,7 @@ fn test_state_path() { } #[test] -fn test_insufficient_fees() { +fn test_insufficient_private_fees() { let rng = &mut TestRng::default(); // Initialize the test environment. 
@@ -113,7 +113,7 @@ fn test_insufficient_fees() { let authorization = ledger.vm.authorize(&private_key, "credits.aleo", "split", inputs.into_iter(), rng).unwrap(); let split_transaction_without_fee = ledger.vm.execute_authorization(authorization, None, None, rng).unwrap(); - assert!(ledger.check_transaction_basic(&split_transaction_without_fee, None).is_ok()); + assert!(ledger.check_transaction_basic(&split_transaction_without_fee, None, rng).is_ok()); } // Check fee amount requirements for executions. @@ -143,7 +143,7 @@ fn test_insufficient_fees() { .unwrap(); let fee = ledger.vm.execute_fee_authorization(fee_authorization, None, rng).unwrap(); let sufficient_fee_transaction = Transaction::from_execution(execution.clone(), Some(fee)).unwrap(); - assert!(ledger.check_transaction_basic(&sufficient_fee_transaction, None).is_ok()); + assert!(ledger.check_transaction_basic(&sufficient_fee_transaction, None, rng).is_ok()); // Check that a transaction with insufficient fee will fail. let insufficient_fee_authorization = ledger @@ -153,7 +153,7 @@ fn test_insufficient_fees() { let insufficient_fee = ledger.vm.execute_fee_authorization(insufficient_fee_authorization, None, rng).unwrap(); let insufficient_fee_transaction = Transaction::from_execution(execution.clone(), Some(insufficient_fee)).unwrap(); - assert!(ledger.check_transaction_basic(&insufficient_fee_transaction, None).is_err()); + assert!(ledger.check_transaction_basic(&insufficient_fee_transaction, None, rng).is_err()); } // Check fee amount requirements for deployment. @@ -175,7 +175,7 @@ finalize foo: // Check that a deployment transaction with sufficient fee will succeed. let transaction = ledger.vm.deploy(&private_key, &program, Some(record_2.clone()), 0, None, rng).unwrap(); - assert!(ledger.check_transaction_basic(&transaction, None).is_ok()); + assert!(ledger.check_transaction_basic(&transaction, None, rng).is_ok()); // Check that a deployment transaction with insufficient fee will fail. 
let deployment = transaction.deployment().unwrap(); @@ -186,7 +186,57 @@ finalize foo: let insufficient_fee = ledger.vm.execute_fee_authorization(insufficient_fee_authorization, None, rng).unwrap(); let insufficient_fee_transaction = Transaction::from_deployment(*transaction.owner().unwrap(), deployment.clone(), insufficient_fee).unwrap(); - assert!(ledger.check_transaction_basic(&insufficient_fee_transaction, None).is_err()); + assert!(ledger.check_transaction_basic(&insufficient_fee_transaction, None, rng).is_err()); + } +} + +#[test] +fn test_insufficient_public_fees() { + let rng = &mut TestRng::default(); + + // Initialize the test environment. + let crate::test_helpers::TestEnv { ledger, private_key, .. } = crate::test_helpers::sample_test_env(rng); + + // Sample recipient. + let recipient_private_key = PrivateKey::new(rng).unwrap(); + let recipient_address = Address::try_from(&recipient_private_key).unwrap(); + + // Fund the recipient with 1 million credits. + { + let inputs = + [Value::from_str(&format!("{recipient_address}")).unwrap(), Value::from_str("1000000000000u64").unwrap()]; + let transaction = ledger + .vm + .execute(&private_key, ("credits.aleo", "transfer_public"), inputs.into_iter(), None, 0, None, rng) + .unwrap(); + + let block = + ledger.prepare_advance_to_next_beacon_block(&private_key, vec![], vec![], vec![transaction], rng).unwrap(); + + // Check that the next block is valid. + ledger.check_next_block(&block, rng).unwrap(); + // Add the deployment block to the ledger. + ledger.advance_to_next_block(&block).unwrap(); + } + + println!("-----------"); + + // Attempt to bond the node with insufficient public fees. 
+ { + let inputs = + [Value::from_str(&format!("{recipient_address}")).unwrap(), Value::from_str("1000000000000u64").unwrap()]; + let transaction = ledger + .vm + .execute(&recipient_private_key, ("credits.aleo", "bond_public"), inputs.into_iter(), None, 0, None, rng) + .unwrap(); + + let block = + ledger.prepare_advance_to_next_beacon_block(&private_key, vec![], vec![], vec![transaction], rng).unwrap(); + + // Check that the next block is valid. + ledger.check_next_block(&block, rng).unwrap(); + // Add the deployment block to the ledger. + ledger.advance_to_next_block(&block).unwrap(); } } @@ -233,7 +283,7 @@ finalize foo: // Deploy. let transaction = ledger.vm.deploy(&private_key, &program, credits, 0, None, rng).unwrap(); // Verify. - ledger.vm().check_transaction(&transaction, None).unwrap(); + ledger.vm().check_transaction(&transaction, None, rng).unwrap(); // Construct the next block. let block = @@ -244,7 +294,7 @@ finalize foo: assert_eq!(ledger.latest_hash(), block.hash()); // Create a transfer transaction to produce a record with insufficient balance to pay for fees. - let transfer_transaction = ledger.create_transfer(&private_key, address, 100, 0, None).unwrap(); + let transfer_transaction = ledger.create_transfer(&private_key, address, 100, 0, None, rng).unwrap(); // Construct the next block. let block = ledger @@ -287,9 +337,9 @@ finalize foo: let transaction = ledger.vm.execute(&private_key, ("dummy.aleo", "foo"), inputs, Some(sufficient_record), 0, None, rng).unwrap(); // Verify. - ledger.vm.check_transaction(&transaction, None).unwrap(); + ledger.vm.check_transaction(&transaction, None, rng).unwrap(); // Ensure that the ledger deems the transaction valid. - assert!(ledger.check_transaction_basic(&transaction, None).is_ok()); + assert!(ledger.check_transaction_basic(&transaction, None, rng).is_ok()); } #[test] @@ -341,7 +391,7 @@ finalize failed_assert: .unwrap(); // Check that the next block is valid. 
- ledger.check_next_block(&deployment_block).unwrap(); + ledger.check_next_block(&deployment_block, rng).unwrap(); // Add the deployment block to the ledger. ledger.advance_to_next_block(&deployment_block).unwrap(); @@ -388,7 +438,7 @@ finalize failed_assert: assert_eq!(confirmed_transaction.to_unconfirmed_transaction_id().unwrap(), failed_assert_transaction_id); // Check that the next block is valid. - ledger.check_next_block(&next_block).unwrap(); + ledger.check_next_block(&next_block, rng).unwrap(); // Add the block with the rejected transaction to the ledger. ledger.advance_to_next_block(&next_block).unwrap(); @@ -419,7 +469,7 @@ finalize foo: // Deploy. let transaction = ledger.vm.deploy(&private_key, &program, None, 0, None, rng).unwrap(); // Verify. - ledger.vm().check_transaction(&transaction, None).unwrap(); + ledger.vm().check_transaction(&transaction, None, rng).unwrap(); // Construct the next block. let block = @@ -459,7 +509,7 @@ fn test_bond_and_unbond_validator() { .unwrap(); // Check that the next block is valid. - ledger.check_next_block(&transfer_block).unwrap(); + ledger.check_next_block(&transfer_block, rng).unwrap(); // Add the deployment block to the ledger. ledger.advance_to_next_block(&transfer_block).unwrap(); @@ -485,7 +535,7 @@ fn test_bond_and_unbond_validator() { assert!(!committee.is_committee_member(new_member_address)); // Check that the next block is valid. - ledger.check_next_block(&bond_public_block).unwrap(); + ledger.check_next_block(&bond_public_block, rng).unwrap(); // Add the bond public block to the ledger. ledger.advance_to_next_block(&bond_public_block).unwrap(); @@ -508,7 +558,7 @@ fn test_bond_and_unbond_validator() { .unwrap(); // Check that the next block is valid. - ledger.check_next_block(&unbond_public_block).unwrap(); + ledger.check_next_block(&unbond_public_block, rng).unwrap(); // Add the bond public block to the ledger. 
ledger.advance_to_next_block(&unbond_public_block).unwrap(); @@ -517,3 +567,264 @@ fn test_bond_and_unbond_validator() { let committee = ledger.latest_committee().unwrap(); assert!(!committee.is_committee_member(new_member_address)); } + +#[test] +fn test_aborted_transaction_indexing() { + let rng = &mut TestRng::default(); + + // Initialize the test environment. + let crate::test_helpers::TestEnv { ledger, private_key, .. } = crate::test_helpers::sample_test_env(rng); + + // Sample a recipient account. + let recipient_private_key = PrivateKey::::new(rng).unwrap(); + let recipient_address = Address::try_from(&recipient_private_key).unwrap(); + + // Sample another recipient account. + let recipient_private_key_2 = PrivateKey::::new(rng).unwrap(); + let recipient_address_2 = Address::try_from(&recipient_private_key_2).unwrap(); + + // Fund a new address. + let inputs = [Value::from_str(&format!("{recipient_address}")).unwrap(), Value::from_str("185000u64").unwrap()]; + let transfer_transaction = ledger + .vm + .execute(&private_key, ("credits.aleo", "transfer_public"), inputs.iter(), None, 0, None, rng) + .unwrap(); + + // Construct the next block. + let transfer_block = ledger + .prepare_advance_to_next_beacon_block(&private_key, vec![], vec![], vec![transfer_transaction], rng) + .unwrap(); + + // Check that the next block is valid. + ledger.check_next_block(&transfer_block, rng).unwrap(); + + // Add the deployment block to the ledger. + ledger.advance_to_next_block(&transfer_block).unwrap(); + + // Send a transaction that will be aborted due to insufficient fee. + let inputs = [Value::from_str(&format!("{recipient_address_2}")).unwrap(), Value::from_str("1u64").unwrap()]; + let transfer_transaction = ledger + .vm + .execute(&recipient_private_key_2, ("credits.aleo", "transfer_public"), inputs.iter(), None, 0, None, rng) + .unwrap(); + let aborted_transaction_id = transfer_transaction.id(); + + // Create another arbitrary transaction. 
+ let inputs = [Value::from_str(&format!("{recipient_address_2}")).unwrap(), Value::from_str("1u64").unwrap()]; + let transfer_transaction_2 = ledger + .vm + .execute(&private_key, ("credits.aleo", "transfer_public"), inputs.iter(), None, 0, None, rng) + .unwrap(); + + // Create a block. + let block = ledger + .prepare_advance_to_next_beacon_block( + &private_key, + vec![], + vec![], + vec![transfer_transaction, transfer_transaction_2], + rng, + ) + .unwrap(); + + // Check that the block contains the aborted transaction. + assert_eq!(block.aborted_transaction_ids(), &[aborted_transaction_id]); + + // Check that the next block is valid. + ledger.check_next_block(&block, rng).unwrap(); + + // Add the deployment block to the ledger. + ledger.advance_to_next_block(&block).unwrap(); +} + +#[test] +fn test_execute_duplicate_input_ids() { + let rng = &mut TestRng::default(); + + // Initialize the test environment. + let crate::test_helpers::TestEnv { ledger, private_key, view_key, address, .. } = + crate::test_helpers::sample_test_env(rng); + + // A helper function to find records. + let find_records = || { + let microcredits = Identifier::from_str("microcredits").unwrap(); + ledger + .find_records(&view_key, RecordsFilter::SlowUnspent(private_key)) + .unwrap() + .filter(|(_, record)| match record.data().get(µcredits) { + Some(Entry::Private(Plaintext::Literal(Literal::U64(amount), _))) => !amount.is_zero(), + _ => false, + }) + .collect::>() + }; + + // Fetch the unspent records. + let records = find_records(); + let record_1 = records[0].clone(); + + // Prepare a transfer that spends the record. 
+ let inputs = [ + Value::Record(record_1.clone()), + Value::from_str(&format!("{address}")).unwrap(), + Value::from_str("100u64").unwrap(), + ]; + let transfer_1 = ledger + .vm + .execute(&private_key, ("credits.aleo", "transfer_private"), inputs.into_iter(), None, 0, None, rng) + .unwrap(); + let transfer_1_id = transfer_1.id(); + + // Prepare a transfer that attempts to spend the same record. + let inputs = [ + Value::Record(record_1.clone()), + Value::from_str(&format!("{address}")).unwrap(), + Value::from_str("1000u64").unwrap(), + ]; + let transfer_2 = ledger + .vm + .execute(&private_key, ("credits.aleo", "transfer_private"), inputs.into_iter(), None, 0, None, rng) + .unwrap(); + let transfer_2_id = transfer_2.id(); + + // Prepare a transfer that attempts to spend the same record in the fee. + let inputs = [Value::from_str(&format!("{address}")).unwrap(), Value::from_str("100u64").unwrap()]; + let transfer_3 = ledger + .vm + .execute( + &private_key, + ("credits.aleo", "transfer_public"), + inputs.into_iter(), + Some(record_1.clone()), + 0, + None, + rng, + ) + .unwrap(); + let transfer_3_id = transfer_3.id(); + + // Prepare a transfer that attempts to spend the same record for the subsequent block. + let inputs = + [Value::Record(record_1), Value::from_str(&format!("{address}")).unwrap(), Value::from_str("1000u64").unwrap()]; + let transfer_4 = ledger + .vm + .execute(&private_key, ("credits.aleo", "transfer_private"), inputs.into_iter(), None, 0, None, rng) + .unwrap(); + let transfer_4_id = transfer_4.id(); + + // Create a block. + let block = ledger + .prepare_advance_to_next_beacon_block( + &private_key, + vec![], + vec![], + vec![transfer_1, transfer_2, transfer_3], + rng, + ) + .unwrap(); + + // Check that the next block is valid. + ledger.check_next_block(&block, rng).unwrap(); + + // Add the block to the ledger. + ledger.advance_to_next_block(&block).unwrap(); + + // Enforce that the block transactions were correct. 
+ assert_eq!(block.transactions().num_accepted(), 1); + assert_eq!(block.transactions().transaction_ids().collect::>(), vec![&transfer_1_id]); + assert_eq!(block.aborted_transaction_ids(), &vec![transfer_2_id, transfer_3_id]); + + // Prepare a transfer that will succeed for the subsequent block. + let inputs = [Value::from_str(&format!("{address}")).unwrap(), Value::from_str("1000u64").unwrap()]; + let transfer_5 = ledger + .vm + .execute(&private_key, ("credits.aleo", "transfer_public"), inputs.into_iter(), None, 0, None, rng) + .unwrap(); + let transfer_5_id = transfer_5.id(); + + // Create a block. + let block = ledger + .prepare_advance_to_next_beacon_block(&private_key, vec![], vec![], vec![transfer_4, transfer_5], rng) + .unwrap(); + + // Check that the next block is valid. + ledger.check_next_block(&block, rng).unwrap(); + + // Add the block to the ledger. + ledger.advance_to_next_block(&block).unwrap(); + + // Enforce that the block transactions were correct. + assert_eq!(block.transactions().num_accepted(), 1); + assert_eq!(block.transactions().transaction_ids().collect::>(), vec![&transfer_5_id]); + assert_eq!(block.aborted_transaction_ids(), &vec![transfer_4_id]); +} + +#[test] +fn test_deployment_duplicate_program_id() { + let rng = &mut TestRng::default(); + + // Initialize the test environment. + let crate::test_helpers::TestEnv { ledger, private_key, .. 
} = crate::test_helpers::sample_test_env(rng); + + // Create two programs with a duplicate program ID but different mappings + let program_1 = Program::::from_str( + r" +program dummy_program.aleo; +mapping abcd1: + key as address.public; + value as u64.public; +function foo: + input r0 as u8.private; + async foo r0 into r1; + output r1 as dummy_program.aleo/foo.future; +finalize foo: + input r0 as u8.public; + add r0 r0 into r1;", + ) + .unwrap(); + + let program_2 = Program::::from_str( + r" +program dummy_program.aleo; +mapping abcd2: + key as address.public; + value as u64.public; +function foo2: + input r0 as u8.private; + async foo2 r0 into r1; + output r1 as dummy_program.aleo/foo2.future; +finalize foo2: + input r0 as u8.public; + add r0 r0 into r1;", + ) + .unwrap(); + + // Create a deployment transaction for the first program. + let deployment_1 = ledger.vm.deploy(&private_key, &program_1, None, 0, None, rng).unwrap(); + let deployment_1_id = deployment_1.id(); + assert!(ledger.check_transaction_basic(&deployment_1, None, rng).is_ok()); + + // Create a deployment transaction for the second program. + let deployment_2 = ledger.vm.deploy(&private_key, &program_2, None, 0, None, rng).unwrap(); + let deployment_2_id = deployment_2.id(); + assert!(ledger.check_transaction_basic(&deployment_2, None, rng).is_ok()); + + // Create a block. + let block = ledger + .prepare_advance_to_next_beacon_block(&private_key, vec![], vec![], vec![deployment_1, deployment_2], rng) + .unwrap(); + + // Check that the next block is valid. + ledger.check_next_block(&block, rng).unwrap(); + + // Add the block to the ledger. + ledger.advance_to_next_block(&block).unwrap(); + + // Enforce that the block transactions were correct. + assert_eq!(block.transactions().num_accepted(), 1); + assert_eq!(block.transactions().num_rejected(), 1); + + // Enforce that the first program was deployed and the second was rejected. 
+ assert_eq!(ledger.get_program(*program_1.id()).unwrap(), program_1); + assert!(ledger.vm.transaction_store().contains_transaction_id(&deployment_1_id).unwrap()); + assert!(ledger.vm.block_store().contains_rejected_or_aborted_transaction_id(&deployment_2_id).unwrap()); +} diff --git a/ledger/store/Cargo.toml b/ledger/store/Cargo.toml index fea8ba949f..e17fa0900f 100644 --- a/ledger/store/Cargo.toml +++ b/ledger/store/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "snarkvm-ledger-store" -version = "0.16.3" +version = "0.16.15" authors = [ "The Aleo Team " ] description = "A data store for a decentralized virtual machine" homepage = "https://aleo.org" @@ -27,47 +27,57 @@ serial = [ "synthesizer-program/serial", "synthesizer-snark/serial" ] +wasm = [ + "console/wasm", + "ledger-authority/wasm", + "ledger-block/wasm", + "ledger-coinbase/wasm", + "ledger-committee/wasm", + "ledger-narwhal-batch-certificate/wasm", + "synthesizer-program/wasm", + "synthesizer-snark/wasm" +] test = [ ] [dependencies.console] package = "snarkvm-console" path = "../../console" -version = "=0.16.3" +version = "=0.16.15" [dependencies.ledger-authority] package = "snarkvm-ledger-authority" path = "../authority" -version = "=0.16.3" +version = "=0.16.15" [dependencies.ledger-block] package = "snarkvm-ledger-block" path = "../block" -version = "=0.16.3" +version = "=0.16.15" [dependencies.ledger-coinbase] package = "snarkvm-ledger-coinbase" path = "../coinbase" -version = "=0.16.3" +version = "=0.16.15" [dependencies.ledger-committee] package = "snarkvm-ledger-committee" path = "../committee" -version = "=0.16.3" +version = "=0.16.15" [dependencies.ledger-narwhal-batch-certificate] package = "snarkvm-ledger-narwhal-batch-certificate" path = "../narwhal/batch-certificate" -version = "=0.16.3" +version = "=0.16.15" [dependencies.synthesizer-program] package = "snarkvm-synthesizer-program" path = "../../synthesizer/program" -version = "=0.16.3" +version = "=0.16.15" [dependencies.synthesizer-snark] 
package = "snarkvm-synthesizer-snark" path = "../../synthesizer/snark" -version = "=0.16.3" +version = "=0.16.15" [dependencies.aleo-std] version = "0.1.18" diff --git a/ledger/store/src/block/mod.rs b/ledger/store/src/block/mod.rs index f44dd30f14..4ffb292fec 100644 --- a/ledger/store/src/block/mod.rs +++ b/ledger/store/src/block/mod.rs @@ -62,21 +62,22 @@ pub enum ConfirmedTxType { } /// Separates the confirmed transaction into a tuple. +#[allow(clippy::type_complexity)] fn to_confirmed_tuple( confirmed: ConfirmedTransaction, -) -> Result<(ConfirmedTxType, Transaction, Vec)> { +) -> Result<(ConfirmedTxType, Transaction, Vec, Option>)> { match confirmed { ConfirmedTransaction::AcceptedDeploy(index, tx, finalize) => { // Retrieve the number of finalize operations. let num_finalize = NumFinalizeSize::try_from(finalize.len())?; // Return the confirmed tuple. - Ok((ConfirmedTxType::AcceptedDeploy(index), tx, (num_finalize, finalize).to_bytes_le()?)) + Ok((ConfirmedTxType::AcceptedDeploy(index), tx, (num_finalize, finalize).to_bytes_le()?, None)) } ConfirmedTransaction::AcceptedExecute(index, tx, finalize) => { // Retrieve the number of finalize operations. let num_finalize = NumFinalizeSize::try_from(finalize.len())?; // Return the confirmed tuple. - Ok((ConfirmedTxType::AcceptedExecute(index), tx, (num_finalize, finalize).to_bytes_le()?)) + Ok((ConfirmedTxType::AcceptedExecute(index), tx, (num_finalize, finalize).to_bytes_le()?, None)) } ConfirmedTransaction::RejectedDeploy(index, tx, rejected, finalize) => { // Retrieve the number of finalize operations. @@ -92,7 +93,7 @@ fn to_confirmed_tuple( finalize.write_le(&mut blob)?; // Return the confirmed tuple. - Ok((ConfirmedTxType::RejectedDeploy(index), tx, blob)) + Ok((ConfirmedTxType::RejectedDeploy(index), tx, blob, Some(rejected))) } ConfirmedTransaction::RejectedExecute(index, tx, rejected, finalize) => { // Retrieve the number of finalize operations. 
@@ -108,7 +109,7 @@ fn to_confirmed_tuple( finalize.write_le(&mut blob)?; // Return the confirmed tuple. - Ok((ConfirmedTxType::RejectedExecute(index), tx, blob)) + Ok((ConfirmedTxType::RejectedExecute(index), tx, blob, Some(rejected))) } } } @@ -195,7 +196,11 @@ pub trait BlockStorage: 'static + Clone + Send + Sync { /// The mapping of rejected or aborted `transaction ID` to `block hash`. type RejectedOrAbortedTransactionIDMap: for<'a> Map<'a, N::TransactionID, N::BlockHash>; /// The mapping of `transaction ID` to `(block hash, confirmed tx type, confirmed blob)`. + /// TODO (howardwu): For mainnet - With recent DB changes, to prevent breaking compatibility, + /// include rejected (d or e) ID into `ConfirmedTxType`, and change from `Vec` to `Vec`. type ConfirmedTransactionsMap: for<'a> Map<'a, N::TransactionID, (N::BlockHash, ConfirmedTxType, Vec)>; + /// The rejected deployment or execution map. + type RejectedDeploymentOrExecutionMap: for<'a> Map<'a, Field, Rejected>; /// The transaction storage. type TransactionStorage: TransactionStorage; /// The transition storage. @@ -232,6 +237,8 @@ pub trait BlockStorage: 'static + Clone + Send + Sync { fn rejected_or_aborted_transaction_id_map(&self) -> &Self::RejectedOrAbortedTransactionIDMap; /// Returns the confirmed transactions map. fn confirmed_transactions_map(&self) -> &Self::ConfirmedTransactionsMap; + /// Returns the rejected deployment or execution map. + fn rejected_deployment_or_execution_map(&self) -> &Self::RejectedDeploymentOrExecutionMap; /// Returns the transaction store. 
fn transaction_store(&self) -> &TransactionStore; @@ -261,6 +268,7 @@ pub trait BlockStorage: 'static + Clone + Send + Sync { self.aborted_transaction_ids_map().start_atomic(); self.rejected_or_aborted_transaction_id_map().start_atomic(); self.confirmed_transactions_map().start_atomic(); + self.rejected_deployment_or_execution_map().start_atomic(); self.transaction_store().start_atomic(); } @@ -280,6 +288,7 @@ pub trait BlockStorage: 'static + Clone + Send + Sync { || self.aborted_transaction_ids_map().is_atomic_in_progress() || self.rejected_or_aborted_transaction_id_map().is_atomic_in_progress() || self.confirmed_transactions_map().is_atomic_in_progress() + || self.rejected_deployment_or_execution_map().is_atomic_in_progress() || self.transaction_store().is_atomic_in_progress() } @@ -299,6 +308,7 @@ pub trait BlockStorage: 'static + Clone + Send + Sync { self.aborted_transaction_ids_map().atomic_checkpoint(); self.rejected_or_aborted_transaction_id_map().atomic_checkpoint(); self.confirmed_transactions_map().atomic_checkpoint(); + self.rejected_deployment_or_execution_map().atomic_checkpoint(); self.transaction_store().atomic_checkpoint(); } @@ -318,6 +328,7 @@ pub trait BlockStorage: 'static + Clone + Send + Sync { self.aborted_transaction_ids_map().clear_latest_checkpoint(); self.rejected_or_aborted_transaction_id_map().clear_latest_checkpoint(); self.confirmed_transactions_map().clear_latest_checkpoint(); + self.rejected_deployment_or_execution_map().clear_latest_checkpoint(); self.transaction_store().clear_latest_checkpoint(); } @@ -337,6 +348,7 @@ pub trait BlockStorage: 'static + Clone + Send + Sync { self.aborted_transaction_ids_map().atomic_rewind(); self.rejected_or_aborted_transaction_id_map().atomic_rewind(); self.confirmed_transactions_map().atomic_rewind(); + self.rejected_deployment_or_execution_map().atomic_rewind(); self.transaction_store().atomic_rewind(); } @@ -356,6 +368,7 @@ pub trait BlockStorage: 'static + Clone + Send + Sync { 
self.aborted_transaction_ids_map().abort_atomic(); self.rejected_or_aborted_transaction_id_map().abort_atomic(); self.confirmed_transactions_map().abort_atomic(); + self.rejected_deployment_or_execution_map().abort_atomic(); self.transaction_store().abort_atomic(); } @@ -375,6 +388,7 @@ pub trait BlockStorage: 'static + Clone + Send + Sync { self.aborted_transaction_ids_map().finish_atomic()?; self.rejected_or_aborted_transaction_id_map().finish_atomic()?; self.confirmed_transactions_map().finish_atomic()?; + self.rejected_deployment_or_execution_map().finish_atomic()?; self.transaction_store().finish_atomic() } @@ -391,10 +405,9 @@ pub trait BlockStorage: 'static + Clone + Send + Sync { // Retrieve the certificate IDs to store. let certificates_to_store = match block.authority() { Authority::Beacon(_) => Vec::new(), - Authority::Quorum(subdag) => subdag - .iter() - .flat_map(|(round, certificates)| certificates.iter().map(|c| (c.certificate_id(), *round))) - .collect(), + Authority::Quorum(subdag) => { + subdag.iter().flat_map(|(round, certificates)| certificates.iter().map(|c| (c.id(), *round))).collect() + } }; // Prepare the rejected transaction IDs and their corresponding unconfirmed transaction IDs. @@ -454,9 +467,13 @@ pub trait BlockStorage: 'static + Clone + Send + Sync { } // Store the confirmed transactions. - for (confirmed_type, transaction, blob) in confirmed { + for (confirmed_type, transaction, blob, rejected) in confirmed { // Store the block hash and confirmed transaction data. self.confirmed_transactions_map().insert(transaction.id(), (block.hash(), confirmed_type, blob))?; + // Store the rejected deployment or execution. + if let Some(rejected) = rejected { + self.rejected_deployment_or_execution_map().insert(rejected.to_id()?, rejected)?; + } // Store the transaction. 
self.transaction_store().insert(&transaction)?; } @@ -496,12 +513,12 @@ pub trait BlockStorage: 'static + Clone + Send + Sync { None => Vec::new(), }; - // Retrieve the rejected transaction IDs. - let rejected_transaction_ids = match self.get_block_transactions(block_hash)? { + // Retrieve the rejected transaction IDs, and the deployment or execution ID. + let rejected_transaction_ids_and_deployment_or_execution_id = match self.get_block_transactions(block_hash)? { Some(transactions) => transactions .iter() .filter(|tx| tx.is_rejected()) - .map(|tx| tx.to_unconfirmed_transaction_id()) + .map(|tx| Ok((tx.to_unconfirmed_transaction_id()?, tx.to_rejected_id()?))) .collect::>>()?, None => Vec::new(), }; @@ -511,7 +528,7 @@ pub trait BlockStorage: 'static + Clone + Send + Sync { Some(authority) => match authority { Cow::Owned(Authority::Beacon(_)) | Cow::Borrowed(Authority::Beacon(_)) => Vec::new(), Cow::Owned(Authority::Quorum(ref subdag)) | Cow::Borrowed(Authority::Quorum(ref subdag)) => { - subdag.values().flatten().map(|c| c.certificate_id()).collect() + subdag.values().flatten().map(|c| c.id()).collect() } }, None => bail!("Failed to remove block: missing authority for block '{block_height}' ('{block_hash}')"), @@ -560,9 +577,14 @@ pub trait BlockStorage: 'static + Clone + Send + Sync { self.rejected_or_aborted_transaction_id_map().remove(&aborted_transaction_id)?; } - // Remove the rejected transaction IDs. - for rejected_transaction_id in rejected_transaction_ids { + // Remove the rejected state. + for (rejected_transaction_id, rejected_id) in rejected_transaction_ids_and_deployment_or_execution_id { + // Remove the rejected transaction ID. self.rejected_or_aborted_transaction_id_map().remove(&rejected_transaction_id)?; + // Remove the rejected deployment or execution. + if let Some(rejected_id) = rejected_id { + self.rejected_deployment_or_execution_map().remove(&rejected_id)?; + } } // Remove the block transactions. 
@@ -577,6 +599,22 @@ pub trait BlockStorage: 'static + Clone + Send + Sync { }) } + /// Returns `true` if the given transaction ID exists. + fn contains_transaction_id(&self, transaction_id: &N::TransactionID) -> Result { + Ok(self.transaction_store().contains_transaction_id(transaction_id)? + || self.contains_rejected_or_aborted_transaction_id(transaction_id)?) + } + + /// Returns `true` if the given rejected transaction ID or aborted transaction ID exists. + fn contains_rejected_or_aborted_transaction_id(&self, transaction_id: &N::TransactionID) -> Result { + self.rejected_or_aborted_transaction_id_map().contains_key_confirmed(transaction_id) + } + + /// Returns `true` if the given rejected deployment or execution ID. + fn contains_rejected_deployment_or_execution_id(&self, rejected_id: &Field) -> Result { + self.rejected_deployment_or_execution_map().contains_key_confirmed(rejected_id) + } + /// Returns the block height that contains the given `state root`. fn find_block_height_from_state_root(&self, state_root: N::StateRoot) -> Result> { match self.reverse_state_root_map().get_confirmed(&state_root)? { @@ -590,7 +628,11 @@ pub trait BlockStorage: 'static + Clone + Send + Sync { match self.confirmed_transactions_map().get_confirmed(transaction_id)? { Some(Cow::Borrowed((block_hash, _, _))) => Ok(Some(*block_hash)), Some(Cow::Owned((block_hash, _, _))) => Ok(Some(block_hash)), - None => Ok(None), + None => match self.rejected_or_aborted_transaction_id_map().get_confirmed(transaction_id)? { + Some(Cow::Borrowed(block_hash)) => Ok(Some(*block_hash)), + Some(Cow::Owned(block_hash)) => Ok(Some(block_hash)), + None => Ok(None), + }, } } @@ -759,7 +801,7 @@ pub trait BlockStorage: 'static + Clone + Send + Sync { match subdag.get(&round) { Some(certificates) => { // Retrieve the certificate for the given certificate ID. 
- match certificates.iter().find(|certificate| &certificate.certificate_id() == certificate_id) { + match certificates.iter().find(|certificate| &certificate.id() == certificate_id) { Some(certificate) => Ok(Some(certificate.clone())), None => bail!("The certificate '{certificate_id}' is missing in block storage"), } @@ -847,7 +889,15 @@ pub trait BlockStorage: 'static + Clone + Send + Sync { Some(transactions) => { match transactions.find_confirmed_transaction_for_unconfirmed_transaction_id(transaction_id) { Some(confirmed) => Ok(Some(confirmed.transaction().clone())), - None => bail!("Missing transaction '{transaction_id}' in block storage"), + None => { + // Check if the transaction was aborted. + if let Some(aborted_ids) = self.get_block_aborted_transaction_ids(&block_hash)? { + if aborted_ids.contains(transaction_id) { + bail!("Transaction '{transaction_id}' was aborted in block '{block_hash}'"); + } + } + bail!("Missing transaction '{transaction_id}' in block storage"); + } } } None => bail!("Missing transactions for block '{block_hash}' in block storage"), @@ -1238,9 +1288,19 @@ impl> BlockStore { self.storage.reverse_id_map().contains_key_confirmed(block_hash) } - /// Returns `true` if the given rejected or aborted transaction ID exists. + /// Returns `true` if the given transaction ID exists. + pub fn contains_transaction_id(&self, transaction_id: &N::TransactionID) -> Result { + self.storage.contains_transaction_id(transaction_id) + } + + /// Returns `true` if the given rejected transaction ID or aborted transaction ID exists. pub fn contains_rejected_or_aborted_transaction_id(&self, transaction_id: &N::TransactionID) -> Result { - self.storage.rejected_or_aborted_transaction_id_map().contains_key_confirmed(transaction_id) + self.storage.contains_rejected_or_aborted_transaction_id(transaction_id) + } + + /// Returns `true` if the given rejected deployment or execution ID. 
+ pub fn contains_rejected_deployment_or_execution_id(&self, rejected_id: &Field) -> Result { + self.storage.contains_rejected_deployment_or_execution_id(rejected_id) } /// Returns `true` if the given certificate ID exists. diff --git a/ledger/store/src/helpers/memory/block.rs b/ledger/store/src/helpers/memory/block.rs index b64af3a484..c09fa09f2e 100644 --- a/ledger/store/src/helpers/memory/block.rs +++ b/ledger/store/src/helpers/memory/block.rs @@ -21,7 +21,7 @@ use crate::{ }; use console::{prelude::*, types::Field}; use ledger_authority::Authority; -use ledger_block::{Header, Ratifications}; +use ledger_block::{Header, Ratifications, Rejected}; use ledger_coinbase::{CoinbaseSolution, PuzzleCommitment}; /// An in-memory block storage. @@ -51,10 +51,12 @@ pub struct BlockMemory { transactions_map: MemoryMap>, /// The aborted transaction IDs map. aborted_transaction_ids_map: MemoryMap>, - /// The rejected or aborted transaction ID map. + /// The rejected transaction ID or aborted transaction ID map. rejected_or_aborted_transaction_id_map: MemoryMap, /// The confirmed transactions map. confirmed_transactions_map: MemoryMap)>, + /// The rejected deployment or execution map. + rejected_deployment_or_execution_map: MemoryMap, Rejected>, /// The transaction store. 
transaction_store: TransactionStore>, } @@ -75,6 +77,7 @@ impl BlockStorage for BlockMemory { type AbortedTransactionIDsMap = MemoryMap>; type RejectedOrAbortedTransactionIDMap = MemoryMap; type ConfirmedTransactionsMap = MemoryMap)>; + type RejectedDeploymentOrExecutionMap = MemoryMap, Rejected>; type TransactionStorage = TransactionMemory; type TransitionStorage = TransitionMemory; @@ -100,6 +103,7 @@ impl BlockStorage for BlockMemory { aborted_transaction_ids_map: MemoryMap::default(), rejected_or_aborted_transaction_id_map: MemoryMap::default(), confirmed_transactions_map: MemoryMap::default(), + rejected_deployment_or_execution_map: MemoryMap::default(), transaction_store, }) } @@ -164,7 +168,7 @@ impl BlockStorage for BlockMemory { &self.aborted_transaction_ids_map } - /// Returns the rejected or aborted transaction ID map. + /// Returns the rejected transaction ID or aborted transaction ID map. fn rejected_or_aborted_transaction_id_map(&self) -> &Self::RejectedOrAbortedTransactionIDMap { &self.rejected_or_aborted_transaction_id_map } @@ -174,6 +178,11 @@ impl BlockStorage for BlockMemory { &self.confirmed_transactions_map } + /// Returns the rejected deployment or execution map. + fn rejected_deployment_or_execution_map(&self) -> &Self::RejectedDeploymentOrExecutionMap { + &self.rejected_deployment_or_execution_map + } + /// Returns the transaction store. fn transaction_store(&self) -> &TransactionStore { &self.transaction_store diff --git a/ledger/store/src/helpers/rocksdb/block.rs b/ledger/store/src/helpers/rocksdb/block.rs index dbf9975755..121ae2a994 100644 --- a/ledger/store/src/helpers/rocksdb/block.rs +++ b/ledger/store/src/helpers/rocksdb/block.rs @@ -27,7 +27,7 @@ use crate::{ }; use console::{prelude::*, types::Field}; use ledger_authority::Authority; -use ledger_block::{Header, Ratifications}; +use ledger_block::{Header, Ratifications, Rejected}; use ledger_coinbase::{CoinbaseSolution, PuzzleCommitment}; /// A RocksDB block storage. 
@@ -61,6 +61,8 @@ pub struct BlockDB { rejected_or_aborted_transaction_id_map: DataMap, /// The confirmed transactions map. confirmed_transactions_map: DataMap)>, + /// The rejected deployment or execution map. + rejected_deployment_or_execution_map: DataMap, Rejected>, /// The transaction store. transaction_store: TransactionStore>, } @@ -81,6 +83,7 @@ impl BlockStorage for BlockDB { type AbortedTransactionIDsMap = DataMap>; type RejectedOrAbortedTransactionIDMap = DataMap; type ConfirmedTransactionsMap = DataMap)>; + type RejectedDeploymentOrExecutionMap = DataMap, Rejected>; type TransactionStorage = TransactionDB; type TransitionStorage = TransitionDB; @@ -106,6 +109,7 @@ impl BlockStorage for BlockDB { aborted_transaction_ids_map: internal::RocksDB::open_map(N::ID, dev, MapID::Block(BlockMap::AbortedTransactionIDs))?, rejected_or_aborted_transaction_id_map: internal::RocksDB::open_map(N::ID, dev, MapID::Block(BlockMap::RejectedOrAbortedTransactionID))?, confirmed_transactions_map: internal::RocksDB::open_map(N::ID, dev, MapID::Block(BlockMap::ConfirmedTransactions))?, + rejected_deployment_or_execution_map: internal::RocksDB::open_map(N::ID, dev, MapID::Block(BlockMap::RejectedDeploymentOrExecution))?, transaction_store, }) } @@ -170,7 +174,7 @@ impl BlockStorage for BlockDB { &self.aborted_transaction_ids_map } - /// Returns the rejected or aborted transaction ID map. + /// Returns the rejected transaction ID or aborted transaction ID map. fn rejected_or_aborted_transaction_id_map(&self) -> &Self::RejectedOrAbortedTransactionIDMap { &self.rejected_or_aborted_transaction_id_map } @@ -180,6 +184,11 @@ impl BlockStorage for BlockDB { &self.confirmed_transactions_map } + /// Returns the rejected deployment or execution map. + fn rejected_deployment_or_execution_map(&self) -> &Self::RejectedDeploymentOrExecutionMap { + &self.rejected_deployment_or_execution_map + } + /// Returns the transaction store. 
fn transaction_store(&self) -> &TransactionStore { &self.transaction_store diff --git a/ledger/store/src/helpers/rocksdb/internal/id.rs b/ledger/store/src/helpers/rocksdb/internal/id.rs index c8562c6cc7..4a8dd7d3ad 100644 --- a/ledger/store/src/helpers/rocksdb/internal/id.rs +++ b/ledger/store/src/helpers/rocksdb/internal/id.rs @@ -17,6 +17,7 @@ #[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)] #[repr(u16)] pub enum MapID { + BFT(BFTMap), Block(BlockMap), Committee(CommitteeMap), Deployment(DeploymentMap), @@ -34,6 +35,7 @@ pub enum MapID { impl From for u16 { fn from(id: MapID) -> u16 { match id { + MapID::BFT(id) => id as u16, MapID::Block(id) => id as u16, MapID::Committee(id) => id as u16, MapID::Deployment(id) => id as u16, @@ -50,6 +52,15 @@ impl From for u16 { } } +/// The RocksDB map prefix for BFT-related entries. +// Note: the order of these variants can be changed at any point in time, +// as long as the corresponding DataID values remain the same. +#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)] +#[repr(u16)] +pub enum BFTMap { + Transmissions = DataID::BFTTransmissionsMap as u16, +} + /// The RocksDB map prefix for block-related entries. // Note: the order of these variants can be changed at any point in time, // as long as the corresponding DataID values remain the same. @@ -70,6 +81,7 @@ pub enum BlockMap { AbortedTransactionIDs = DataID::BlockAbortedTransactionIDsMap as u16, RejectedOrAbortedTransactionID = DataID::BlockRejectedOrAbortedTransactionIDMap as u16, ConfirmedTransactions = DataID::BlockConfirmedTransactionsMap as u16, + RejectedDeploymentOrExecution = DataID::BlockRejectedDeploymentOrExecutionMap as u16, } /// The RocksDB map prefix for committee-related entries. @@ -194,6 +206,7 @@ pub enum TestMap { Test2 = DataID::Test2 as u16, Test3 = DataID::Test3 as u16, Test4 = DataID::Test4 as u16, + Test5 = DataID::Test5 as u16, } /// The RocksDB map prefix. 
@@ -269,6 +282,10 @@ enum DataID { ProgramIDMap, KeyValueMap, + // TODO (howardwu): For mainnet - Reorder this up above. + BlockRejectedDeploymentOrExecutionMap, + BFTTransmissionsMap, + // Testing #[cfg(test)] Test, @@ -278,4 +295,6 @@ enum DataID { Test3, #[cfg(test)] Test4, + #[cfg(test)] + Test5, } diff --git a/ledger/store/src/helpers/rocksdb/internal/nested_map.rs b/ledger/store/src/helpers/rocksdb/internal/nested_map.rs index 73bf49bee0..58b795730f 100644 --- a/ledger/store/src/helpers/rocksdb/internal/nested_map.rs +++ b/ledger/store/src/helpers/rocksdb/internal/nested_map.rs @@ -16,7 +16,7 @@ use super::*; use crate::helpers::{NestedMap, NestedMapRead}; -use console::prelude::FromBytes; +use console::prelude::{anyhow, FromBytes}; use core::{fmt, fmt::Debug, hash::Hash, mem}; use std::{borrow::Cow, sync::atomic::Ordering}; @@ -80,10 +80,20 @@ impl Result<(&[u8], &[u8])> { - let map_len = u32::from_bytes_le(&map_key[PREFIX_LEN..][..4])? as usize; - let map = &map_key[PREFIX_LEN + 4..][..map_len]; - let key = &map_key[PREFIX_LEN + 4 + map_len..]; + // Retrieve the map length. + let map_len = u32::from_bytes_le( + map_key.get(PREFIX_LEN..PREFIX_LEN + 4).ok_or_else(|| anyhow!("NestedMap map_len index out of range"))?, + )? as usize; + // Retrieve the map bytes. + let map = map_key + .get(PREFIX_LEN + 4..PREFIX_LEN + 4 + map_len) + .ok_or_else(|| anyhow!("NestedMap map index out of range"))?; + + // Retrieve the key bytes. + let key = map_key.get(PREFIX_LEN + 4 + map_len..).ok_or_else(|| anyhow!("NestedMap key index out of range"))?; + + // Return the map and key bytes. Ok((map, key)) } @@ -130,17 +140,16 @@ impl< let mut batch = rocksdb::WriteBatch::default(); // Construct an iterator over the DB with the specified prefix. 
- let iterator = self.database.iterator(rocksdb::IteratorMode::From( - &self.create_prefixed_map(map)?, - rocksdb::Direction::Forward, - )); + let iterator = self.database.prefix_iterator(&self.create_prefixed_map(map)?); // Iterate over the entries in the DB with the specified prefix. for entry in iterator { let (map_key, _) = entry?; // Extract the bytes belonging to the map and the key. - let (entry_map, _) = get_map_and_key(&map_key)?; + let Ok((entry_map, _)) = get_map_and_key(&map_key) else { + break; + }; // If the 'entry_map' matches 'serialized_map', delete the key. if entry_map == serialized_map { @@ -272,17 +281,16 @@ impl< let serialized_map = bincode::serialize(&map)?; // Construct an iterator over the DB with the specified prefix. - let iterator = self.database.iterator(rocksdb::IteratorMode::From( - &self.create_prefixed_map(&map)?, - rocksdb::Direction::Forward, - )); + let iterator = self.database.prefix_iterator(&self.create_prefixed_map(&map)?); // Iterate over the entries in the DB with the specified prefix. for entry in iterator { let (map_key, _) = entry?; // Extract the bytes belonging to the map and the key. - let (entry_map, _) = get_map_and_key(&map_key)?; + let Ok((entry_map, _)) = get_map_and_key(&map_key) else { + break; + }; // If the 'entry_map' matches 'serialized_map', delete the key. if entry_map == serialized_map { @@ -388,16 +396,16 @@ impl< let mut entries = Vec::new(); // Construct an iterator over the DB with the specified prefix. - let iterator = self - .database - .iterator(rocksdb::IteratorMode::From(&self.create_prefixed_map(map)?, rocksdb::Direction::Forward)); + let iterator = self.database.prefix_iterator(&self.create_prefixed_map(map)?); // Iterate over the entries in the DB with the specified prefix. for entry in iterator { let (map_key, value) = entry?; // Extract the bytes belonging to the map and the key. 
- let (entry_map, entry_key) = get_map_and_key(&map_key)?; + let Ok((entry_map, entry_key)) = get_map_and_key(&map_key) else { + break; + }; // If the 'entry_map' matches 'serialized_map', deserialize the key and value. if entry_map == serialized_map { @@ -708,7 +716,10 @@ mod tests { use crate::{ atomic_batch_scope, atomic_finalize, - helpers::rocksdb::{internal::tests::temp_dir, MapID, TestMap}, + helpers::{ + rocksdb::{internal::tests::temp_dir, MapID, TestMap}, + traits::Map, + }, FinalizeMode, }; use console::{ @@ -748,6 +759,28 @@ mod tests { } } + fn open_non_nested_map_testing_from_db< + K: Serialize + DeserializeOwned, + V: Serialize + DeserializeOwned, + T: Into, + >( + database: RocksDB, + map_id: T, + ) -> DataMap { + // Combine contexts to create a new scope. + let mut context = database.network_id.to_le_bytes().to_vec(); + context.extend_from_slice(&(map_id.into()).to_le_bytes()); + + // Return the DataMap. + DataMap(Arc::new(InnerDataMap { + database, + context, + atomic_batch: Default::default(), + batch_in_progress: Default::default(), + checkpoints: Default::default(), + })) + } + struct TestStorage { own_map: NestedDataMap, extra_maps: TestStorage2, @@ -798,7 +831,7 @@ mod tests { self.own_map.is_atomic_in_progress() && self.extra_maps.own_map1.is_atomic_in_progress() && self.extra_maps.own_map1.is_atomic_in_progress() - && self.extra_maps.extra_maps.own_map.is_atomic_in_progress() + && self.extra_maps.extra_maps.own_nested_map.is_atomic_in_progress() } } @@ -855,35 +888,44 @@ mod tests { } struct TestStorage3 { - own_map: NestedDataMap, + own_nested_map: NestedDataMap, + own_map: DataMap, } impl TestStorage3 { fn open(database: RocksDB) -> Self { - Self { own_map: open_map_testing_from_db(database, MapID::Test(TestMap::Test4)) } + Self { + own_nested_map: open_map_testing_from_db(database.clone(), MapID::Test(TestMap::Test4)), + own_map: open_non_nested_map_testing_from_db(database, MapID::Test(TestMap::Test5)), + } } fn start_atomic(&self) { + 
self.own_nested_map.start_atomic(); self.own_map.start_atomic(); } fn is_atomic_in_progress(&self) -> bool { - self.own_map.is_atomic_in_progress() + self.own_nested_map.is_atomic_in_progress() || self.own_map.is_atomic_in_progress() } fn atomic_checkpoint(&self) { + self.own_nested_map.atomic_checkpoint(); self.own_map.atomic_checkpoint(); } fn clear_latest_checkpoint(&self) { + self.own_nested_map.clear_latest_checkpoint(); self.own_map.clear_latest_checkpoint(); } fn atomic_rewind(&self) { + self.own_nested_map.atomic_rewind(); self.own_map.atomic_rewind(); } fn finish_atomic(&self) -> Result<()> { + self.own_nested_map.finish_atomic()?; self.own_map.finish_atomic() } } @@ -966,6 +1008,29 @@ mod tests { crate::helpers::test_helpers::nested_map::check_iterators_match(map); } + #[test] + #[serial] + #[traced_test] + fn test_iter_from_nested_to_non_nested() { + // Open a storage with a DataMap right after a NestedDataMap. + let database = RocksDB::open_testing(temp_dir(), None).expect("Failed to open a test database"); + let test_storage = TestStorage3::open(database); + + // Insert 5 (confirmed) records into a nested map 77. + for i in 0..5 { + test_storage.own_nested_map.insert(77, i, i.to_string()).expect("Failed to insert"); + } + + // Insert 5 (confirmed) records into the neighboring data map; the keys are large on purpose. + for i in 0..5 { + test_storage.own_map.insert(usize::MAX - i, (usize::MAX - i).to_string()).expect("Failed to insert"); + } + + // We should be able to collect the 5 records from the nested data map. 
+ let confirmed = test_storage.own_nested_map.get_map_confirmed(&77).unwrap(); + assert_eq!(confirmed.len(), 5); + } + #[test] #[serial] #[traced_test] @@ -1574,7 +1639,7 @@ mod tests { assert!(test_storage.own_map.iter_confirmed().next().is_none()); assert!(test_storage.extra_maps.own_map1.iter_confirmed().next().is_none()); assert!(test_storage.extra_maps.own_map2.iter_confirmed().next().is_none()); - assert!(test_storage.extra_maps.extra_maps.own_map.iter_confirmed().next().is_none()); + assert!(test_storage.extra_maps.extra_maps.own_nested_map.iter_confirmed().next().is_none()); assert_eq!(test_storage.own_map.checkpoints.lock().last(), None); @@ -1603,11 +1668,11 @@ mod tests { test_storage.extra_maps.own_map2.insert(2, 2, 2.to_string()).unwrap(); // Start another atomic write batch. - atomic_batch_scope!(test_storage.extra_maps.extra_maps.own_map, { - assert!(test_storage.extra_maps.extra_maps.own_map.is_atomic_in_progress()); + atomic_batch_scope!(test_storage.extra_maps.extra_maps.own_nested_map, { + assert!(test_storage.extra_maps.extra_maps.own_nested_map.is_atomic_in_progress()); // Write an item into the fourth map. - test_storage.extra_maps.extra_maps.own_map.insert(3, 3, 3.to_string()).unwrap(); + test_storage.extra_maps.extra_maps.own_nested_map.insert(3, 3, 3.to_string()).unwrap(); Ok(()) })?; @@ -1628,7 +1693,7 @@ mod tests { assert_eq!(test_storage.own_map.iter_confirmed().count(), 1); assert_eq!(test_storage.extra_maps.own_map1.iter_confirmed().count(), 1); assert_eq!(test_storage.extra_maps.own_map2.iter_confirmed().count(), 1); - assert_eq!(test_storage.extra_maps.extra_maps.own_map.iter_confirmed().count(), 1); + assert_eq!(test_storage.extra_maps.extra_maps.own_nested_map.iter_confirmed().count(), 1); // The atomic_write_batch macro uses ?, so the test returns a Result for simplicity. Ok(()) @@ -1659,11 +1724,11 @@ mod tests { test_storage.extra_maps.own_map2.insert(2, 2, 2.to_string()).unwrap(); // Start another atomic write batch. 
- let result: Result<()> = atomic_batch_scope!(test_storage.extra_maps.extra_maps.own_map, { + let result: Result<()> = atomic_batch_scope!(test_storage.extra_maps.extra_maps.own_nested_map, { assert!(test_storage.is_atomic_in_progress_everywhere()); // Write an item into the fourth map. - test_storage.extra_maps.extra_maps.own_map.insert(3, 3, 3.to_string()).unwrap(); + test_storage.extra_maps.extra_maps.own_nested_map.insert(3, 3, 3.to_string()).unwrap(); // Rewind the atomic batch via a simulated error. bail!("An error that will trigger a single rewind."); @@ -1690,7 +1755,7 @@ mod tests { assert!(test_storage.own_map.iter_confirmed().next().is_none()); assert!(test_storage.extra_maps.own_map1.iter_confirmed().next().is_none()); assert!(test_storage.extra_maps.own_map2.iter_confirmed().next().is_none()); - assert!(test_storage.extra_maps.extra_maps.own_map.iter_confirmed().next().is_none()); + assert!(test_storage.extra_maps.extra_maps.own_nested_map.iter_confirmed().next().is_none()); // Note: all the checks going through .database can be performed on any one // of the objects, as all of them share the same instance of the database. @@ -1706,6 +1771,6 @@ mod tests { assert_eq!(test_storage.own_map.iter_confirmed().count(), 1); assert_eq!(test_storage.extra_maps.own_map1.iter_confirmed().count(), 1); assert_eq!(test_storage.extra_maps.own_map2.iter_confirmed().count(), 1); - assert_eq!(test_storage.extra_maps.extra_maps.own_map.iter_confirmed().count(), 0); + assert_eq!(test_storage.extra_maps.extra_maps.own_nested_map.iter_confirmed().count(), 0); } } diff --git a/ledger/store/src/program/committee.rs b/ledger/store/src/program/committee.rs index 71c764d51a..62cd6e0bbb 100644 --- a/ledger/store/src/program/committee.rs +++ b/ledger/store/src/program/committee.rs @@ -114,7 +114,10 @@ pub trait CommitteeStorage: 'static + Clone + Send + Sync { // If the current round is 0, ensure the next round is 0. Err(..) 
=> ensure!(next_round == 0, "Next round must be block round 0"), // Otherwise, ensure the next round sequentially follows the current round. - Ok(current_round) => ensure!(next_round > current_round, "Next round must be greater than current round"), + Ok(current_round) => ensure!( + next_round > current_round, + "Next round {next_round} must be greater than current round {current_round}" + ), } // Check the next height. diff --git a/ledger/test-helpers/Cargo.toml b/ledger/test-helpers/Cargo.toml index 487f73f0a9..a9889f8940 100644 --- a/ledger/test-helpers/Cargo.toml +++ b/ledger/test-helpers/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "snarkvm-ledger-test-helpers" -version = "0.16.3" +version = "0.16.15" authors = [ "The Aleo Team " ] description = "Test helpers for a decentralized virtual machine" homepage = "https://aleo.org" @@ -19,39 +19,39 @@ edition = "2021" [dependencies.console] package = "snarkvm-console" path = "../../console" -version = "=0.16.3" +version = "=0.16.15" [dependencies.circuit] package = "snarkvm-circuit" path = "../../circuit" -version = "=0.16.3" +version = "=0.16.15" [dependencies.ledger-block] package = "snarkvm-ledger-block" path = "../block" -version = "=0.16.3" +version = "=0.16.15" [dependencies.ledger-query] package = "snarkvm-ledger-query" path = "../query" -version = "=0.16.3" +version = "=0.16.15" default-features = false features = [ "query" ] [dependencies.ledger-store] package = "snarkvm-ledger-store" path = "../store" -version = "=0.16.3" +version = "=0.16.15" [dependencies.synthesizer-program] package = "snarkvm-synthesizer-program" path = "../../synthesizer/program" -version = "=0.16.3" +version = "=0.16.15" [dependencies.synthesizer-process] package = "snarkvm-synthesizer-process" path = "../../synthesizer/process" -version = "=0.16.3" +version = "=0.16.15" [dependencies.once_cell] version = "1.18" diff --git a/ledger/test-helpers/src/lib.rs b/ledger/test-helpers/src/lib.rs index 81afab9fda..813fb8cb0b 100644 --- 
a/ledger/test-helpers/src/lib.rs +++ b/ledger/test-helpers/src/lib.rs @@ -65,7 +65,8 @@ pub fn sample_inputs() -> Vec<(::TransitionID, Input< let plaintext = Plaintext::Literal(Literal::Field(Uniform::rand(rng)), Default::default()); let plaintext_hash = CurrentNetwork::hash_bhp1024(&plaintext.to_bits_le()).unwrap(); // Sample a random ciphertext. - let ciphertext = Ciphertext::from_fields(&vec![Uniform::rand(rng); 10]).unwrap(); + let fields: Vec<_> = (0..10).map(|_| Uniform::rand(rng)).collect(); + let ciphertext = Ciphertext::from_fields(&fields).unwrap(); let ciphertext_hash = CurrentNetwork::hash_bhp1024(&ciphertext.to_bits_le()).unwrap(); vec![ @@ -97,7 +98,8 @@ pub fn sample_outputs() -> Vec<(::TransitionID, Outpu let plaintext = Plaintext::Literal(Literal::Field(Uniform::rand(rng)), Default::default()); let plaintext_hash = CurrentNetwork::hash_bhp1024(&plaintext.to_bits_le()).unwrap(); // Sample a random ciphertext. - let ciphertext = Ciphertext::from_fields(&vec![Uniform::rand(rng); 10]).unwrap(); + let fields: Vec<_> = (0..10).map(|_| Uniform::rand(rng)).collect(); + let ciphertext = Ciphertext::from_fields(&fields).unwrap(); let ciphertext_hash = CurrentNetwork::hash_bhp1024(&ciphertext.to_bits_le()).unwrap(); // Sample a random record. let randomizer = Uniform::rand(rng); @@ -210,7 +212,7 @@ pub fn sample_fee_private(deployment_or_execution_id: Field, rng ) .unwrap(); // Construct the fee trace. - let (_, mut trace) = process.execute::(authorization).unwrap(); + let (_, mut trace) = process.execute::(authorization, rng).unwrap(); // Initialize a new block store. let block_store = BlockStore::>::open(None).unwrap(); @@ -263,7 +265,7 @@ pub fn sample_fee_public(deployment_or_execution_id: Field, rng: ) .unwrap(); // Construct the fee trace. - let (_, mut trace) = process.execute::(authorization).unwrap(); + let (_, mut trace) = process.execute::(authorization, rng).unwrap(); // Initialize a new block store. 
let block_store = BlockStore::>::open(None).unwrap(); @@ -393,7 +395,7 @@ fn sample_genesis_block_and_components_raw( let authorization = process.authorize::(&private_key, locator.0, locator.1, inputs.iter(), rng).unwrap(); // Execute the function. - let (_, mut trace) = process.execute::(authorization).unwrap(); + let (_, mut trace) = process.execute::(authorization, rng).unwrap(); // Initialize a new block store. let block_store = BlockStore::>::open(None).unwrap(); diff --git a/metrics/Cargo.toml b/metrics/Cargo.toml new file mode 100644 index 0000000000..04dc3a620d --- /dev/null +++ b/metrics/Cargo.toml @@ -0,0 +1,30 @@ +[package] +name = "snarkvm-metrics" +version = "0.16.15" +authors = [ "The Aleo Team " ] +description = "Metrics for a decentralized virtual machine" +homepage = "https://aleo.org" +repository = "https://github.com/AleoHQ/snarkVM" +keywords = [ + "aleo", + "cryptography", + "blockchain", + "decentralized", + "zero-knowledge" +] +categories = [ + "compilers", + "cryptography", + "mathematics", + "wasm", + "web-programming" +] +include = [ "Cargo.toml", "src", "README.md", "LICENSE.md" ] +license = "Apache-2.0" +edition = "2021" + +[dependencies.metrics] +version = "0.21" + +[dependencies.metrics-exporter-prometheus] +version = "0.12" diff --git a/metrics/LICENSE.md b/metrics/LICENSE.md new file mode 100644 index 0000000000..d0af96c393 --- /dev/null +++ b/metrics/LICENSE.md @@ -0,0 +1,194 @@ +Apache License +============== + +_Version 2.0, January 2004_ +_<>_ + +### Terms and Conditions for use, reproduction, and distribution + +#### 1. Definitions + +β€œLicense” shall mean the terms and conditions for use, reproduction, and +distribution as defined by Sections 1 through 9 of this document. + +β€œLicensor” shall mean the copyright owner or entity authorized by the copyright +owner that is granting the License. 
+ +β€œLegal Entity” shall mean the union of the acting entity and all other entities +that control, are controlled by, or are under common control with that entity. +For the purposes of this definition, β€œcontrol” means **(i)** the power, direct or +indirect, to cause the direction or management of such entity, whether by +contract or otherwise, or **(ii)** ownership of fifty percent (50%) or more of the +outstanding shares, or **(iii)** beneficial ownership of such entity. + +β€œYou” (or β€œYour”) shall mean an individual or Legal Entity exercising +permissions granted by this License. + +β€œSource” form shall mean the preferred form for making modifications, including +but not limited to software source code, documentation source, and configuration +files. + +β€œObject” form shall mean any form resulting from mechanical transformation or +translation of a Source form, including but not limited to compiled object code, +generated documentation, and conversions to other media types. + +β€œWork” shall mean the work of authorship, whether in Source or Object form, made +available under the License, as indicated by a copyright notice that is included +in or attached to the work (an example is provided in the Appendix below). + +β€œDerivative Works” shall mean any work, whether in Source or Object form, that +is based on (or derived from) the Work and for which the editorial revisions, +annotations, elaborations, or other modifications represent, as a whole, an +original work of authorship. For the purposes of this License, Derivative Works +shall not include works that remain separable from, or merely link (or bind by +name) to the interfaces of, the Work and Derivative Works thereof. 
+ +β€œContribution” shall mean any work of authorship, including the original version +of the Work and any modifications or additions to that Work or Derivative Works +thereof, that is intentionally submitted to Licensor for inclusion in the Work +by the copyright owner or by an individual or Legal Entity authorized to submit +on behalf of the copyright owner. For the purposes of this definition, +β€œsubmitted” means any form of electronic, verbal, or written communication sent +to the Licensor or its representatives, including but not limited to +communication on electronic mailing lists, source code control systems, and +issue tracking systems that are managed by, or on behalf of, the Licensor for +the purpose of discussing and improving the Work, but excluding communication +that is conspicuously marked or otherwise designated in writing by the copyright +owner as β€œNot a Contribution.” + +β€œContributor” shall mean Licensor and any individual or Legal Entity on behalf +of whom a Contribution has been received by Licensor and subsequently +incorporated within the Work. + +#### 2. Grant of Copyright License + +Subject to the terms and conditions of this License, each Contributor hereby +grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, +irrevocable copyright license to reproduce, prepare Derivative Works of, +publicly display, publicly perform, sublicense, and distribute the Work and such +Derivative Works in Source or Object form. + +#### 3. 
Grant of Patent License + +Subject to the terms and conditions of this License, each Contributor hereby +grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, +irrevocable (except as stated in this section) patent license to make, have +made, use, offer to sell, sell, import, and otherwise transfer the Work, where +such license applies only to those patent claims licensable by such Contributor +that are necessarily infringed by their Contribution(s) alone or by combination +of their Contribution(s) with the Work to which such Contribution(s) was +submitted. If You institute patent litigation against any entity (including a +cross-claim or counterclaim in a lawsuit) alleging that the Work or a +Contribution incorporated within the Work constitutes direct or contributory +patent infringement, then any patent licenses granted to You under this License +for that Work shall terminate as of the date such litigation is filed. + +#### 4. Redistribution + +You may reproduce and distribute copies of the Work or Derivative Works thereof +in any medium, with or without modifications, and in Source or Object form, +provided that You meet the following conditions: + +* **(a)** You must give any other recipients of the Work or Derivative Works a copy of +this License; and +* **(b)** You must cause any modified files to carry prominent notices stating that You +changed the files; and +* **(c)** You must retain, in the Source form of any Derivative Works that You distribute, +all copyright, patent, trademark, and attribution notices from the Source form +of the Work, excluding those notices that do not pertain to any part of the +Derivative Works; and +* **(d)** If the Work includes a β€œNOTICE” text file as part of its distribution, then any +Derivative Works that You distribute must include a readable copy of the +attribution notices contained within such NOTICE file, excluding those notices +that do not pertain to any part of the Derivative Works, in at 
least one of the +following places: within a NOTICE text file distributed as part of the +Derivative Works; within the Source form or documentation, if provided along +with the Derivative Works; or, within a display generated by the Derivative +Works, if and wherever such third-party notices normally appear. The contents of +the NOTICE file are for informational purposes only and do not modify the +License. You may add Your own attribution notices within Derivative Works that +You distribute, alongside or as an addendum to the NOTICE text from the Work, +provided that such additional attribution notices cannot be construed as +modifying the License. + +You may add Your own copyright statement to Your modifications and may provide +additional or different license terms and conditions for use, reproduction, or +distribution of Your modifications, or for any such Derivative Works as a whole, +provided Your use, reproduction, and distribution of the Work otherwise complies +with the conditions stated in this License. + +#### 5. Submission of Contributions + +Unless You explicitly state otherwise, any Contribution intentionally submitted +for inclusion in the Work by You to the Licensor shall be under the terms and +conditions of this License, without any additional terms or conditions. +Notwithstanding the above, nothing herein shall supersede or modify the terms of +any separate license agreement you may have executed with Licensor regarding +such Contributions. + +#### 6. Trademarks + +This License does not grant permission to use the trade names, trademarks, +service marks, or product names of the Licensor, except as required for +reasonable and customary use in describing the origin of the Work and +reproducing the content of the NOTICE file. + +#### 7. 
Disclaimer of Warranty + +Unless required by applicable law or agreed to in writing, Licensor provides the +Work (and each Contributor provides its Contributions) on an β€œAS IS” BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, +including, without limitation, any warranties or conditions of TITLE, +NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are +solely responsible for determining the appropriateness of using or +redistributing the Work and assume any risks associated with Your exercise of +permissions under this License. + +#### 8. Limitation of Liability + +In no event and under no legal theory, whether in tort (including negligence), +contract, or otherwise, unless required by applicable law (such as deliberate +and grossly negligent acts) or agreed to in writing, shall any Contributor be +liable to You for damages, including any direct, indirect, special, incidental, +or consequential damages of any character arising as a result of this License or +out of the use or inability to use the Work (including but not limited to +damages for loss of goodwill, work stoppage, computer failure or malfunction, or +any and all other commercial damages or losses), even if such Contributor has +been advised of the possibility of such damages. + +#### 9. Accepting Warranty or Additional Liability + +While redistributing the Work or Derivative Works thereof, You may choose to +offer, and charge a fee for, acceptance of support, warranty, indemnity, or +other liability obligations and/or rights consistent with this License. However, +in accepting such obligations, You may act only on Your own behalf and on Your +sole responsibility, not on behalf of any other Contributor, and only if You +agree to indemnify, defend, and hold each Contributor harmless for any liability +incurred by, or claims asserted against, such Contributor by reason of your +accepting any such warranty or additional liability. 
+ +_END OF TERMS AND CONDITIONS_ + +### APPENDIX: How to apply the Apache License to your work + +To apply the Apache License to your work, attach the following boilerplate +notice, with the fields enclosed by brackets `[]` replaced with your own +identifying information. (Don't include the brackets!) The text should be +enclosed in the appropriate comment syntax for the file format. We also +recommend that a file or class name and description of purpose be included on +the same β€œprinted page” as the copyright notice for easier identification within +third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/metrics/README.md b/metrics/README.md new file mode 100644 index 0000000000..cab86375b3 --- /dev/null +++ b/metrics/README.md @@ -0,0 +1,5 @@ +# snarkvm-metrics + +[![Crates.io](https://img.shields.io/crates/v/snarkvm-metrics.svg?color=neon)](https://crates.io/crates/snarkvm-metrics) +[![Authors](https://img.shields.io/badge/authors-Aleo-orange.svg)](https://aleo.org) +[![License](https://img.shields.io/badge/License-Apache%202.0-blue.svg)](./LICENSE.md) diff --git a/metrics/src/lib.rs b/metrics/src/lib.rs new file mode 100644 index 0000000000..4108fb9a86 --- /dev/null +++ b/metrics/src/lib.rs @@ -0,0 +1,94 @@ +// Copyright (C) 2019-2023 Aleo Systems Inc. +// This file is part of the snarkVM library. 
+
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at:
+// http://www.apache.org/licenses/LICENSE-2.0
+
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#![forbid(unsafe_code)]
+
+const GAUGE_NAMES: [&str; 1] = [committee::TOTAL_STAKE];
+
+pub mod committee {
+    pub const TOTAL_STAKE: &str = "snarkvm_ledger_committee_total_stake";
+}
+
+/// Registers all snarkVM metrics.
+pub fn register_metrics() {
+    for name in GAUGE_NAMES {
+        register_gauge(name);
+    }
+}
+
+/******** Counter ********/
+
+/// Registers a counter with the given name.
+pub fn register_counter(name: &'static str) {
+    ::metrics::register_counter!(name);
+}
+
+/// Increments a counter with the given name by the given value.
+///
+/// Counters represent a single monotonic value, which means the value can only be incremented,
+/// not decremented, and always starts out with an initial value of zero.
+pub fn counter<V: Into<u64>>(name: &'static str, value: V) {
+    ::metrics::counter!(name, value.into());
+}
+
+/// Increments a counter with the given name by one.
+///
+/// Counters represent a single monotonic value, which means the value can only be incremented,
+/// not decremented, and always starts out with an initial value of zero.
+pub fn increment_counter(name: &'static str) {
+    ::metrics::increment_counter!(name);
+}
+
+/******** Gauge ********/
+
+/// Registers a gauge with the given name.
+pub fn register_gauge(name: &'static str) {
+    ::metrics::register_gauge!(name);
+}
+
+/// Updates a gauge with the given name to the given value.
+/// +/// Gauges represent a single value that can go up or down over time, +/// and always starts out with an initial value of zero. +pub fn gauge>(name: &'static str, value: V) { + ::metrics::gauge!(name, value.into()); +} + +/// Increments a gauge with the given name by the given value. +/// +/// Gauges represent a single value that can go up or down over time, +/// and always starts out with an initial value of zero. +pub fn increment_gauge>(name: &'static str, value: V) { + ::metrics::increment_gauge!(name, value.into()); +} + +/// Decrements a gauge with the given name by the given value. +/// +/// Gauges represent a single value that can go up or down over time, +/// and always starts out with an initial value of zero. +pub fn decrement_gauge>(name: &'static str, value: V) { + ::metrics::decrement_gauge!(name, value.into()); +} + +/******** Histogram ********/ + +/// Registers a histogram with the given name. +pub fn register_histogram(name: &'static str) { + ::metrics::register_histogram!(name); +} + +/// Updates a histogram with the given name to the given value. 
+pub fn histogram>(name: &'static str, value: V) { + ::metrics::histogram!(name, value.into()); +} diff --git a/parameters/Cargo.toml b/parameters/Cargo.toml index 4b3e2e7a80..932f9ba0e2 100644 --- a/parameters/Cargo.toml +++ b/parameters/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "snarkvm-parameters" -version = "0.16.3" +version = "0.16.15" authors = [ "The Aleo Team " ] description = "Parameters for a decentralized virtual machine" homepage = "https://aleo.org" @@ -31,12 +31,12 @@ wasm = [ "encoding", "js-sys", "web-sys" ] [dependencies.snarkvm-curves] path = "../curves" -version = "=0.16.3" +version = "=0.16.15" default-features = false [dependencies.snarkvm-utilities] path = "../utilities" -version = "=0.16.3" +version = "=0.16.15" [dependencies.aleo-std] version = "0.1.18" @@ -76,6 +76,9 @@ optional = true [dependencies.lazy_static] version = "1.4" +[dependencies.parking_lot] +version = "0.12" + [dependencies.paste] version = "1" diff --git a/parameters/src/testnet3/mod.rs b/parameters/src/testnet3/mod.rs index 4039577458..5233242d83 100644 --- a/parameters/src/testnet3/mod.rs +++ b/parameters/src/testnet3/mod.rs @@ -21,8 +21,14 @@ pub use powers::*; const REMOTE_URL: &str = "https://s3-us-west-1.amazonaws.com/testnet3.parameters"; // Degrees +#[cfg(not(feature = "wasm"))] impl_local!(Degree15, "resources/", "powers-of-beta-15", "usrs"); +#[cfg(feature = "wasm")] +impl_remote!(Degree15, REMOTE_URL, "resources/", "powers-of-beta-15", "usrs"); +#[cfg(not(feature = "wasm"))] impl_local!(Degree16, "resources/", "powers-of-beta-16", "usrs"); +#[cfg(feature = "wasm")] +impl_remote!(Degree16, REMOTE_URL, "resources/", "powers-of-beta-16", "usrs"); impl_remote!(Degree17, REMOTE_URL, "resources/", "powers-of-beta-17", "usrs"); impl_remote!(Degree18, REMOTE_URL, "resources/", "powers-of-beta-18", "usrs"); impl_remote!(Degree19, REMOTE_URL, "resources/", "powers-of-beta-19", "usrs"); @@ -37,7 +43,13 @@ impl_remote!(Degree27, REMOTE_URL, "resources/", 
"powers-of-beta-27", "usrs"); impl_remote!(Degree28, REMOTE_URL, "resources/", "powers-of-beta-28", "usrs"); // Shifted Degrees +#[cfg(not(feature = "wasm"))] impl_local!(ShiftedDegree15, "resources/", "shifted-powers-of-beta-15", "usrs"); +#[cfg(feature = "wasm")] +impl_remote!(ShiftedDegree15, REMOTE_URL, "resources/", "shifted-powers-of-beta-15", "usrs"); +#[cfg(not(feature = "wasm"))] +impl_local!(ShiftedDegree16, "resources/", "shifted-powers-of-beta-16", "usrs"); +#[cfg(feature = "wasm")] impl_remote!(ShiftedDegree16, REMOTE_URL, "resources/", "shifted-powers-of-beta-16", "usrs"); impl_remote!(ShiftedDegree17, REMOTE_URL, "resources/", "shifted-powers-of-beta-17", "usrs"); impl_remote!(ShiftedDegree18, REMOTE_URL, "resources/", "shifted-powers-of-beta-18", "usrs"); diff --git a/parameters/src/testnet3/powers.rs b/parameters/src/testnet3/powers.rs index 6559a18c83..577c8dc01d 100644 --- a/parameters/src/testnet3/powers.rs +++ b/parameters/src/testnet3/powers.rs @@ -28,6 +28,7 @@ use snarkvm_utilities::{ }; use anyhow::{anyhow, bail, ensure, Result}; +use parking_lot::RwLock; use std::{collections::BTreeMap, ops::Range, sync::Arc}; const NUM_POWERS_15: usize = 1 << 15; @@ -57,16 +58,16 @@ lazy_static::lazy_static! { } /// A vector of powers of beta G. -#[derive(Debug, Clone)] +#[derive(Debug)] pub struct PowersOfG { /// The powers of beta G. - powers_of_beta_g: PowersOfBetaG, + powers_of_beta_g: RwLock>, /// Group elements of form `{ \beta^i \gamma G }`, where `i` is from 0 to `degree`, /// This is used for hiding. - powers_of_beta_times_gamma_g: Arc>, + powers_of_beta_times_gamma_g: BTreeMap, /// Group elements of form `{ \beta^{max_degree - i} H }`, where `i` /// is of the form `2^k - 1` for `k` in `1` to `log_2(max_degree)`. - negative_powers_of_beta_h: Arc>, + negative_powers_of_beta_h: BTreeMap, /// Information required to enforce degree bounds. Each pair is of the form `(degree_bound, shifting_advice)`. 
/// Each pair is in the form `(degree_bound, \beta^{max_degree - i} H),` where `H` is the generator of G2, /// and `i` is of the form `2^k - 1` for `k` in `1` to `log_2(max_degree)`. @@ -78,15 +79,14 @@ pub struct PowersOfG { impl PowersOfG { /// Initializes the hard-coded instance of the powers. pub fn load() -> Result { - let powers_of_beta_g = PowersOfBetaG::load()?; + let powers_of_beta_g = RwLock::new(PowersOfBetaG::load()?); // Reconstruct powers of beta_times_gamma_g. - let powers_of_beta_times_gamma_g = - Arc::new(BTreeMap::deserialize_uncompressed_unchecked(&**POWERS_OF_BETA_GAMMA_G)?); + let powers_of_beta_times_gamma_g = BTreeMap::deserialize_uncompressed_unchecked(&**POWERS_OF_BETA_GAMMA_G)?; // Reconstruct negative powers of beta_h. - let negative_powers_of_beta_h: Arc> = - Arc::new(BTreeMap::deserialize_uncompressed_unchecked(&**NEG_POWERS_OF_BETA_H)?); + let negative_powers_of_beta_h: BTreeMap = + BTreeMap::deserialize_uncompressed_unchecked(&**NEG_POWERS_OF_BETA_H)?; // Compute the prepared negative powers of beta_h. let prepared_negative_powers_of_beta_h: Arc::Prepared>> = @@ -105,13 +105,13 @@ impl PowersOfG { } /// Download the powers of beta G specified by `range`. - pub fn download_powers_for(&mut self, range: Range) -> Result<()> { - self.powers_of_beta_g.download_powers_for(&range) + pub fn download_powers_for(&self, range: Range) -> Result<()> { + self.powers_of_beta_g.write().download_powers_for(&range) } /// Returns the number of contiguous powers of beta G starting from the 0-th power. pub fn num_powers(&self) -> usize { - self.powers_of_beta_g.num_powers() + self.powers_of_beta_g.read().num_powers() } /// Returns the maximum possible number of contiguous powers of beta G starting from the 0-th power. @@ -120,22 +120,22 @@ impl PowersOfG { } /// Returns the powers of beta * gamma G. 
- pub fn powers_of_beta_gamma_g(&self) -> Arc> { - self.powers_of_beta_times_gamma_g.clone() + pub fn powers_of_beta_gamma_g(&self) -> &BTreeMap { + &self.powers_of_beta_times_gamma_g } /// Returns the `index`-th power of beta * G. - pub fn power_of_beta_g(&mut self, index: usize) -> Result { - self.powers_of_beta_g.power(index) + pub fn power_of_beta_g(&self, index: usize) -> Result { + self.powers_of_beta_g.write().power(index) } /// Returns the powers of `beta * G` that lie within `range`. - pub fn powers_of_beta_g(&mut self, range: Range) -> Result<&[E::G1Affine]> { - self.powers_of_beta_g.powers(range) + pub fn powers_of_beta_g(&self, range: Range) -> Result> { + Ok(self.powers_of_beta_g.write().powers(range)?.to_vec()) } - pub fn negative_powers_of_beta_h(&self) -> Arc> { - self.negative_powers_of_beta_h.clone() + pub fn negative_powers_of_beta_h(&self) -> &BTreeMap { + &self.negative_powers_of_beta_h } pub fn prepared_negative_powers_of_beta_h(&self) -> Arc::Prepared>> { @@ -149,7 +149,7 @@ impl PowersOfG { impl CanonicalSerialize for PowersOfG { fn serialize_with_mode(&self, mut writer: W, mode: Compress) -> Result<(), SerializationError> { - self.powers_of_beta_g.serialize_with_mode(&mut writer, mode)?; + self.powers_of_beta_g.read().serialize_with_mode(&mut writer, mode)?; self.powers_of_beta_times_gamma_g.serialize_with_mode(&mut writer, mode)?; self.negative_powers_of_beta_h.serialize_with_mode(&mut writer, mode)?; self.beta_h.serialize_with_mode(&mut writer, mode)?; @@ -157,7 +157,7 @@ impl CanonicalSerialize for PowersOfG { } fn serialized_size(&self, mode: Compress) -> usize { - self.powers_of_beta_g.serialized_size(mode) + self.powers_of_beta_g.read().serialized_size(mode) + self.powers_of_beta_times_gamma_g.serialized_size(mode) + self.negative_powers_of_beta_h.serialized_size(mode) + self.beta_h.serialized_size(mode) @@ -170,15 +170,14 @@ impl CanonicalDeserialize for PowersOfG { compress: Compress, validate: Validate, ) -> Result { - let 
powers_of_beta_g = PowersOfBetaG::deserialize_with_mode(&mut reader, compress, Validate::No)?; + let powers_of_beta_g = RwLock::new(PowersOfBetaG::deserialize_with_mode(&mut reader, compress, Validate::No)?); // Reconstruct powers of beta_times_gamma_g. - let powers_of_beta_times_gamma_g = - Arc::new(BTreeMap::deserialize_with_mode(&mut reader, compress, Validate::No)?); + let powers_of_beta_times_gamma_g = BTreeMap::deserialize_with_mode(&mut reader, compress, Validate::No)?; // Reconstruct negative powers of beta_h. - let negative_powers_of_beta_h: Arc> = - Arc::new(BTreeMap::deserialize_with_mode(&mut reader, compress, Validate::No)?); + let negative_powers_of_beta_h: BTreeMap = + BTreeMap::deserialize_with_mode(&mut reader, compress, Validate::No)?; // Compute the prepared negative powers of beta_h. let prepared_negative_powers_of_beta_h: Arc::Prepared>> = @@ -202,7 +201,7 @@ impl CanonicalDeserialize for PowersOfG { impl Valid for PowersOfG { fn check(&self) -> Result<(), SerializationError> { - self.powers_of_beta_g.check()?; + self.powers_of_beta_g.read().check()?; self.powers_of_beta_times_gamma_g.check()?; self.negative_powers_of_beta_h.check()?; self.prepared_negative_powers_of_beta_h.check()?; @@ -224,7 +223,7 @@ impl ToBytes for PowersOfG { } } -#[derive(Debug, Clone, CanonicalSerialize, CanonicalDeserialize)] +#[derive(Debug, CanonicalSerialize, CanonicalDeserialize)] pub struct PowersOfBetaG { /// Group elements of form `[G, \beta * G, \beta^2 * G, ..., \beta^d G]`. powers_of_beta_g: Vec, @@ -499,11 +498,7 @@ impl PowersOfBetaG { // Deserialize the group elements. 
let additional_powers = Vec::deserialize_uncompressed_unchecked(&*additional_bytes)?; - if final_powers.is_empty() { - final_powers = additional_powers; - } else { - final_powers.extend(additional_powers); - } + final_powers.extend(additional_powers.iter()); } final_powers.extend(self.shifted_powers_of_beta_g.iter()); self.shifted_powers_of_beta_g = final_powers; diff --git a/parameters/src/testnet3/resources/block.genesis b/parameters/src/testnet3/resources/block.genesis index c33b0398b6..8d66149388 100644 Binary files a/parameters/src/testnet3/resources/block.genesis and b/parameters/src/testnet3/resources/block.genesis differ diff --git a/parameters/src/testnet3/resources/inclusion.metadata b/parameters/src/testnet3/resources/inclusion.metadata index 3532ae5ea8..48d10aca82 100644 --- a/parameters/src/testnet3/resources/inclusion.metadata +++ b/parameters/src/testnet3/resources/inclusion.metadata @@ -1,6 +1,6 @@ { - "prover_checksum": "cd85cc53639becf39b9fc927643abda23f9d385ff2cb890f5df809e7a338bff8", - "prover_size": 232051458, - "verifier_checksum": "e6f3add8fb9f911e02e1aa08b761f24cc8ae5fb70df4da47a36a5bbb83b189ec", + "prover_checksum": "2ccd040f31b1ee3a1e8ed64b046b5f6a81403d434439e40d35a62c734365c7e7", + "prover_size": 233812212, + "verifier_checksum": "cc0dbd07fea975bb869db792e62647d3a17cd3e75228a34e860665f8394424fd", "verifier_size": 665 } \ No newline at end of file diff --git a/parameters/src/testnet3/resources/inclusion.verifier b/parameters/src/testnet3/resources/inclusion.verifier index 845a9beed7..cd3fd0726c 100644 Binary files a/parameters/src/testnet3/resources/inclusion.verifier and b/parameters/src/testnet3/resources/inclusion.verifier differ diff --git a/parameters/src/testnet3/resources/shifted-powers-of-beta-16.usrs b/parameters/src/testnet3/resources/shifted-powers-of-beta-16.usrs new file mode 100644 index 0000000000..d92d3c8bc1 Binary files /dev/null and b/parameters/src/testnet3/resources/shifted-powers-of-beta-16.usrs differ diff --git 
a/synthesizer/Cargo.toml b/synthesizer/Cargo.toml index 2f9d10ddd7..9afbf50875 100644 --- a/synthesizer/Cargo.toml +++ b/synthesizer/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "snarkvm-synthesizer" -version = "0.16.3" +version = "0.16.15" authors = [ "The Aleo Team " ] description = "Synthesizer for a decentralized virtual machine" homepage = "https://aleo.org" @@ -55,6 +55,7 @@ wasm = [ "ledger-coinbase/wasm", "ledger-committee/wasm", "ledger-query/wasm", + "ledger-store/wasm", "synthesizer-process/wasm", "synthesizer-program/wasm", "synthesizer-snark/wasm" @@ -68,61 +69,61 @@ harness = false [dependencies.algorithms] package = "snarkvm-algorithms" path = "../algorithms" -version = "=0.16.3" +version = "=0.16.15" [dependencies.circuit] package = "snarkvm-circuit" path = "../circuit" -version = "=0.16.3" +version = "=0.16.15" [dependencies.console] package = "snarkvm-console" path = "../console" -version = "=0.16.3" +version = "=0.16.15" [dependencies.ledger-block] package = "snarkvm-ledger-block" path = "../ledger/block" -version = "=0.16.3" +version = "=0.16.15" [dependencies.ledger-coinbase] package = "snarkvm-ledger-coinbase" path = "../ledger/coinbase" -version = "=0.16.3" +version = "=0.16.15" [dependencies.ledger-committee] package = "snarkvm-ledger-committee" path = "../ledger/committee" -version = "=0.16.3" +version = "=0.16.15" [dependencies.ledger-query] package = "snarkvm-ledger-query" path = "../ledger/query" -version = "=0.16.3" +version = "=0.16.15" default-features = false features = [ "query" ] [dependencies.ledger-store] package = "snarkvm-ledger-store" path = "../ledger/store" -version = "=0.16.3" +version = "=0.16.15" [dependencies.synthesizer-process] package = "snarkvm-synthesizer-process" path = "./process" -version = "=0.16.3" +version = "=0.16.15" optional = true [dependencies.synthesizer-program] package = "snarkvm-synthesizer-program" path = "./program" -version = "=0.16.3" +version = "=0.16.15" optional = true 
[dependencies.synthesizer-snark] package = "snarkvm-synthesizer-snark" path = "./snark" -version = "=0.16.3" +version = "=0.16.15" optional = true [dependencies.aleo-std] diff --git a/synthesizer/process/Cargo.toml b/synthesizer/process/Cargo.toml index 742079dd72..8b3ec8171d 100644 --- a/synthesizer/process/Cargo.toml +++ b/synthesizer/process/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "snarkvm-synthesizer-process" -version = "0.16.3" +version = "0.16.15" authors = [ "The Aleo Team " ] description = "A process for a decentralized virtual machine" homepage = "https://aleo.org" @@ -48,40 +48,45 @@ timer = [ "aleo-std/timer" ] [dependencies.console] package = "snarkvm-console" path = "../../console" -version = "=0.16.3" +version = "=0.16.15" default-features = false features = [ "network", "program", "types" ] [dependencies.circuit] package = "snarkvm-circuit" path = "../../circuit" -version = "=0.16.3" +version = "=0.16.15" [dependencies.ledger-block] package = "snarkvm-ledger-block" path = "../../ledger/block" -version = "=0.16.3" +version = "=0.16.15" [dependencies.ledger-query] package = "snarkvm-ledger-query" path = "../../ledger/query" -version = "=0.16.3" +version = "=0.16.15" default-features = false [dependencies.ledger-store] package = "snarkvm-ledger-store" path = "../../ledger/store" -version = "=0.16.3" +version = "=0.16.15" [dependencies.synthesizer-program] package = "snarkvm-synthesizer-program" path = "../../synthesizer/program" -version = "=0.16.3" +version = "=0.16.15" [dependencies.synthesizer-snark] package = "snarkvm-synthesizer-snark" path = "../../synthesizer/snark" -version = "=0.16.3" +version = "=0.16.15" + +[dependencies.utilities] +package = "snarkvm-utilities" +path = "../../utilities" +version = "=0.16.15" [dependencies.aleo-std] version = "0.1.18" diff --git a/synthesizer/process/src/execute.rs b/synthesizer/process/src/execute.rs index 1e4bd132fa..7311f018be 100644 --- a/synthesizer/process/src/execute.rs +++ 
b/synthesizer/process/src/execute.rs @@ -17,9 +17,10 @@ use super::*; impl Process { /// Executes the given authorization. #[inline] - pub fn execute>( + pub fn execute, R: CryptoRng + Rng>( &self, authorization: Authorization, + rng: &mut R, ) -> Result<(Response, Trace)> { let timer = timer!("Process::execute"); @@ -40,7 +41,7 @@ impl Process { // Retrieve the stack. let stack = self.get_stack(request.program_id())?; // Execute the circuit. - let response = stack.execute_function::(call_stack, None)?; + let response = stack.execute_function::(call_stack, None, rng)?; lap!(timer, "Execute the function"); // Extract the trace. @@ -100,7 +101,7 @@ mod tests { assert!(authorization.is_fee_private(), "Authorization must be for a call to 'credits.aleo/fee_private'"); // Execute the authorization. - let (response, trace) = process.execute::(authorization).unwrap(); + let (response, trace) = process.execute::(authorization, rng).unwrap(); // Ensure the response has 1 output. assert_eq!(response.outputs().len(), 1, "Execution of 'credits.aleo/fee_private' must contain 1 output"); // Ensure the response has 1 output ID. @@ -142,7 +143,7 @@ mod tests { assert!(authorization.is_fee_public(), "Authorization must be for a call to 'credits.aleo/fee_public'"); // Execute the authorization. - let (response, trace) = process.execute::(authorization).unwrap(); + let (response, trace) = process.execute::(authorization, rng).unwrap(); // Ensure the response has 1 outputs. assert_eq!(response.outputs().len(), 1, "Execution of 'credits.aleo/fee_public' must contain 1 output"); // Ensure the response has 1 output IDs. 
diff --git a/synthesizer/process/src/finalize.rs b/synthesizer/process/src/finalize.rs index 513fb5a8e2..974ed1e158 100644 --- a/synthesizer/process/src/finalize.rs +++ b/synthesizer/process/src/finalize.rs @@ -15,6 +15,7 @@ use super::*; use console::program::{Future, Register}; use synthesizer_program::{Await, FinalizeRegistersState, Operand}; +use utilities::handle_halting; impl Process { /// Finalizes the deployment and fee. @@ -224,7 +225,7 @@ fn finalize_transition>( // Finalize the command. match &command { Command::BranchEq(branch_eq) => { - let result = std::panic::catch_unwind(std::panic::AssertUnwindSafe(|| { + let result = handle_halting!(panic::AssertUnwindSafe(|| { branch_to(counter, branch_eq, finalize, stack, ®isters) })); match result { @@ -238,7 +239,7 @@ fn finalize_transition>( } } Command::BranchNeq(branch_neq) => { - let result = std::panic::catch_unwind(std::panic::AssertUnwindSafe(|| { + let result = handle_halting!(panic::AssertUnwindSafe(|| { branch_to(counter, branch_neq, finalize, stack, ®isters) })); match result { @@ -276,7 +277,7 @@ fn finalize_transition>( None => bail!("Transition ID '{transition_id}' not found in call graph"), }; - let callee_state = match std::panic::catch_unwind(std::panic::AssertUnwindSafe(|| { + let callee_state = match handle_halting!(panic::AssertUnwindSafe(|| { // Set up the finalize state for the await. setup_await(state, await_, stack, ®isters, child_transition_id) })) { @@ -306,9 +307,8 @@ fn finalize_transition>( break; } _ => { - let result = std::panic::catch_unwind(std::panic::AssertUnwindSafe(|| { - command.finalize(stack, store, &mut registers) - })); + let result = + handle_halting!(panic::AssertUnwindSafe(|| { command.finalize(stack, store, &mut registers) })); match result { // If the evaluation succeeds with an operation, add it to the list. 
Ok(Ok(Some(finalize_operation))) => finalize_operations.push(finalize_operation), @@ -356,7 +356,7 @@ fn initialize_finalize_state<'a, N: Network>( let (finalize, stack) = match stack.program_id() == future.program_id() { true => (stack.get_function_ref(future.function_name())?.finalize_logic(), stack), false => { - let stack = stack.get_external_stack(future.program_id())?; + let stack = stack.get_external_stack(future.program_id())?.as_ref(); (stack.get_function_ref(future.function_name())?.finalize_logic(), stack) } }; diff --git a/synthesizer/process/src/lib.rs b/synthesizer/process/src/lib.rs index eaa24104e4..392574ee2b 100644 --- a/synthesizer/process/src/lib.rs +++ b/synthesizer/process/src/lib.rs @@ -75,7 +75,7 @@ pub struct Process { /// The universal SRS. universal_srs: Arc>, /// The mapping of program IDs to stacks. - stacks: IndexMap, Stack>, + stacks: IndexMap, Arc>>, } impl Process { @@ -129,7 +129,7 @@ impl Process { #[inline] pub fn add_stack(&mut self, stack: Stack) { // Add the stack to the process. - self.stacks.insert(*stack.program_id(), stack); + self.stacks.insert(*stack.program_id(), Arc::new(stack)); } } @@ -202,7 +202,7 @@ impl Process { /// Returns the stack for the given program ID. #[inline] - pub fn get_stack(&self, program_id: impl TryInto>) -> Result<&Stack> { + pub fn get_stack(&self, program_id: impl TryInto>) -> Result<&Arc>> { // Prepare the program ID. let program_id = program_id.try_into().map_err(|_| anyhow!("Invalid program ID"))?; // Retrieve the stack. @@ -216,7 +216,7 @@ impl Process { /// Returns the program for the given program ID. #[inline] pub fn get_program(&self, program_id: impl TryInto>) -> Result<&Program> { - self.get_stack(program_id).map(Stack::program) + Ok(self.get_stack(program_id)?.program()) } /// Returns the proving key for the given program ID and function name. @@ -380,7 +380,7 @@ function compute: .unwrap(); assert_eq!(authorization.len(), 1); // Execute the request. 
- let (_response, mut trace) = process.execute::(authorization).unwrap(); + let (_response, mut trace) = process.execute::(authorization, rng).unwrap(); assert_eq!(trace.transitions().len(), 1); // Prepare the trace. diff --git a/synthesizer/process/src/stack/authorization/bytes.rs b/synthesizer/process/src/stack/authorization/bytes.rs index f2aa2ee130..d9caf4dfea 100644 --- a/synthesizer/process/src/stack/authorization/bytes.rs +++ b/synthesizer/process/src/stack/authorization/bytes.rs @@ -71,9 +71,6 @@ impl ToBytes for Authorization { #[cfg(test)] mod tests { use super::*; - use console::network::Testnet3; - - type CurrentNetwork = Testnet3; #[test] fn test_bytes() -> Result<()> { @@ -85,7 +82,6 @@ mod tests { // Check the byte representation. let expected_bytes = expected.to_bytes_le()?; assert_eq!(expected, Authorization::read_le(&expected_bytes[..])?); - assert!(Authorization::::read_le(&expected_bytes[1..]).is_err()); Ok(()) } } diff --git a/synthesizer/process/src/stack/authorize.rs b/synthesizer/process/src/stack/authorize.rs index 9af4dce5c5..5cc289c185 100644 --- a/synthesizer/process/src/stack/authorize.rs +++ b/synthesizer/process/src/stack/authorize.rs @@ -42,7 +42,7 @@ impl Stack { // Construct the call stack. let call_stack = CallStack::Authorize(vec![request], *private_key, authorization.clone()); // Construct the authorization from the function. - let _response = self.execute_function::(call_stack, None)?; + let _response = self.execute_function::(call_stack, None, rng)?; finish!(timer, "Construct the authorization from the function"); // Return the authorization. diff --git a/synthesizer/process/src/stack/call/mod.rs b/synthesizer/process/src/stack/call/mod.rs index 707c8768e2..5d6578ab73 100644 --- a/synthesizer/process/src/stack/call/mod.rs +++ b/synthesizer/process/src/stack/call/mod.rs @@ -13,6 +13,7 @@ // limitations under the License. 
use crate::{CallStack, Registers, RegistersCall, StackEvaluate, StackExecute}; +use aleo_std::prelude::{finish, lap, timer}; use console::{network::prelude::*, program::Request}; use synthesizer_program::{ Call, @@ -36,7 +37,7 @@ pub trait CallTrait { ) -> Result<()>; /// Executes the instruction. - fn execute>( + fn execute, R: CryptoRng + Rng>( &self, stack: &(impl StackEvaluate + StackExecute + StackMatches + StackProgram), registers: &mut ( @@ -45,6 +46,7 @@ pub trait CallTrait { + RegistersLoadCircuit + RegistersStoreCircuit ), + rng: &mut R, ) -> Result<()>; } @@ -56,14 +58,17 @@ impl CallTrait for Call { stack: &(impl StackEvaluate + StackMatches + StackProgram), registers: &mut Registers, ) -> Result<()> { + let timer = timer!("Call::evaluate"); + // Load the operands values. - let inputs: Vec<_> = self.operands().iter().map(|operand| registers.load(stack, operand)).try_collect()?; + let inputs: Vec<_> = + self.operands().iter().map(|operand| registers.load(stack.deref(), operand)).try_collect()?; // Retrieve the substack and resource. let (substack, resource) = match self.operator() { // Retrieve the call stack and resource from the locator. CallOperator::Locator(locator) => { - (stack.get_external_stack(locator.program_id())?.clone(), locator.resource()) + (stack.get_external_stack(locator.program_id())?.as_ref(), locator.resource()) } CallOperator::Resource(resource) => { // TODO (howardwu): Revisit this decision to forbid calling internal functions. A record cannot be spent again. @@ -73,9 +78,10 @@ impl CallTrait for Call { bail!("Cannot call '{resource}'. Use a closure ('closure {resource}:') instead.") } - (stack.clone(), resource) + (stack, resource) } }; + lap!(timer, "Retrieved the substack and resource"); // If the operator is a closure, retrieve the closure and compute the output. 
let outputs = if let Ok(closure) = substack.program().get_closure(resource) { @@ -110,19 +116,21 @@ impl CallTrait for Call { else { bail!("Call operator '{}' is invalid or unsupported.", self.operator()) }; + lap!(timer, "Computed outputs"); // Assign the outputs to the destination registers. for (output, register) in outputs.into_iter().zip_eq(&self.destinations()) { // Assign the output to the register. registers.store(stack, register, output)?; } + finish!(timer); Ok(()) } /// Executes the instruction. #[inline] - fn execute>( + fn execute, R: Rng + CryptoRng>( &self, stack: &(impl StackEvaluate + StackExecute + StackMatches + StackProgram), registers: &mut ( @@ -131,7 +139,10 @@ impl CallTrait for Call { + RegistersLoadCircuit + RegistersStoreCircuit ), + rng: &mut R, ) -> Result<()> { + let timer = timer!("Call::execute"); + // Load the operands values. let inputs: Vec<_> = self.operands().iter().map(|operand| registers.load_circuit(stack, operand)).try_collect()?; @@ -150,7 +161,7 @@ impl CallTrait for Call { if is_credits_program && (is_fee_private || is_fee_public) { bail!("Cannot perform an external call to 'credits.aleo/fee_private' or 'credits.aleo/fee_public'.") } else { - (stack.get_external_stack(locator.program_id())?.clone(), locator.resource()) + (stack.get_external_stack(locator.program_id())?.as_ref(), locator.resource()) } } CallOperator::Resource(resource) => { @@ -161,12 +172,14 @@ impl CallTrait for Call { bail!("Cannot call '{resource}'. Use a closure ('closure {resource}:') instead.") } - (stack.clone(), resource) + (stack, resource) } }; + lap!(timer, "Retrieve the substack and resource"); // If the operator is a closure, retrieve the closure and compute the output. let outputs = if let Ok(closure) = substack.program().get_closure(resource) { + lap!(timer, "Execute the closure"); // Execute the closure, and load the outputs. 
substack.execute_closure( &closure, @@ -179,6 +192,7 @@ impl CallTrait for Call { } // If the operator is a function, retrieve the function and compute the output. else if let Ok(function) = substack.program().get_function(resource) { + lap!(timer, "Execute the function"); // Retrieve the number of inputs. let num_inputs = function.inputs().len(); // Ensure the number of inputs matches the number of input statements. @@ -196,9 +210,6 @@ impl CallTrait for Call { // Eject the circuit inputs. let inputs = inputs.eject_value(); - // Initialize an RNG. - let rng = &mut rand::thread_rng(); - // Set the (console) caller. let console_caller = Some(*stack.program_id()); @@ -225,7 +236,7 @@ impl CallTrait for Call { authorization.push(request.clone()); // Execute the request. - let response = substack.execute_function::(call_stack, console_caller)?; + let response = substack.execute_function::(call_stack, console_caller, rng)?; // Return the request and response. (request, response) @@ -247,7 +258,7 @@ impl CallTrait for Call { call_stack.push(request.clone())?; // Execute the request. - let response = substack.execute_function::(call_stack, console_caller)?; + let response = substack.execute_function::(call_stack, console_caller, rng)?; // Return the request and response. (request, response) } @@ -269,7 +280,8 @@ impl CallTrait for Call { let console_response = substack.evaluate_function::(registers.call_stack().replicate(), console_caller)?; // Execute the request. - let response = substack.execute_function::(registers.call_stack(), console_caller)?; + let response = + substack.execute_function::(registers.call_stack(), console_caller, rng)?; // Ensure the values are equal. if console_response.outputs() != response.outputs() { #[cfg(debug_assertions)] @@ -281,6 +293,8 @@ impl CallTrait for Call { } } }; + lap!(timer, "Computed the request and response"); + // Inject the existing circuit. 
A::inject_r1cs(r1cs); @@ -330,6 +344,7 @@ impl CallTrait for Call { None, ); A::assert(check_input_ids); + lap!(timer, "Checked the input ids"); // Inject the outputs as `Mode::Private` (with the 'tcm' and output IDs as `Mode::Public`). let outputs = circuit::Response::process_outputs_from_callback( @@ -342,6 +357,7 @@ impl CallTrait for Call { response.outputs().to_vec(), &function.output_types(), ); + lap!(timer, "Checked the outputs"); // Return the circuit outputs. outputs } @@ -355,6 +371,9 @@ impl CallTrait for Call { // Assign the output to the register. registers.store_circuit(stack, register, output)?; } + lap!(timer, "Assigned the outputs to registers"); + + finish!(timer); Ok(()) } diff --git a/synthesizer/process/src/stack/deploy.rs b/synthesizer/process/src/stack/deploy.rs index ad85387c5b..39b46f2f6f 100644 --- a/synthesizer/process/src/stack/deploy.rs +++ b/synthesizer/process/src/stack/deploy.rs @@ -14,6 +14,8 @@ use super::*; +use rand::{rngs::StdRng, SeedableRng}; + impl Stack { /// Deploys the given program ID, if it does not exist. #[inline] @@ -116,10 +118,11 @@ impl Stack { } // Verify the certificates. - cfg_iter!(call_stacks).zip_eq(deployment.verifying_keys()).try_for_each( - |((function_name, call_stack, assignments), (_, (verifying_key, certificate)))| { + let rngs = (0..call_stacks.len()).map(|_| StdRng::from_seed(rng.gen())).collect::>(); + cfg_iter!(call_stacks).zip_eq(deployment.verifying_keys()).zip_eq(rngs).try_for_each( + |(((function_name, call_stack, assignments), (_, (verifying_key, certificate))), mut rng)| { // Synthesize the circuit. - if let Err(err) = self.execute_function::(call_stack.clone(), None) { + if let Err(err) = self.execute_function::(call_stack.clone(), None, &mut rng) { bail!("Failed to synthesize the circuit for '{function_name}': {err}") } // Check the certificate. 
diff --git a/synthesizer/process/src/stack/evaluate.rs b/synthesizer/process/src/stack/evaluate.rs index 4110da8a2c..9dda9b6db2 100644 --- a/synthesizer/process/src/stack/evaluate.rs +++ b/synthesizer/process/src/stack/evaluate.rs @@ -218,8 +218,6 @@ impl StackEvaluate for Stack { .collect::>>()?; lap!(timer, "Load the outputs"); - finish!(timer); - // Map the output operands to registers. let output_registers = output_operands .iter() @@ -228,9 +226,10 @@ impl StackEvaluate for Stack { _ => None, }) .collect::>(); + lap!(timer, "Loaded the output registers"); // Compute the response. - Response::new( + let response = Response::new( request.network_id(), self.program.id(), function.name(), @@ -240,6 +239,9 @@ impl StackEvaluate for Stack { outputs, &function.output_types(), &output_registers, - ) + ); + finish!(timer); + + response } } diff --git a/synthesizer/process/src/stack/execute.rs b/synthesizer/process/src/stack/execute.rs index 45284c133a..20758e5e84 100644 --- a/synthesizer/process/src/stack/execute.rs +++ b/synthesizer/process/src/stack/execute.rs @@ -131,10 +131,11 @@ impl StackExecute for Stack { /// # Errors /// This method will halt if the given inputs are not the same length as the input statements. #[inline] - fn execute_function>( + fn execute_function, R: CryptoRng + Rng>( &self, mut call_stack: CallStack, console_caller: Option>, + rng: &mut R, ) -> Result> { let timer = timer!("Stack::execute_function"); @@ -272,7 +273,7 @@ impl StackExecute for Stack { // Execute the instruction. let result = match instruction { // If the instruction is a `call` instruction, we need to handle it separately. - Instruction::Call(call) => CallTrait::execute(call, self, &mut registers), + Instruction::Call(call) => CallTrait::execute(call, self, &mut registers, rng), // Otherwise, execute the instruction normally. 
_ => instruction.execute(self, &mut registers), }; diff --git a/synthesizer/process/src/stack/finalize_types/initialize.rs b/synthesizer/process/src/stack/finalize_types/initialize.rs index 9a7bc33440..e36fe3d552 100644 --- a/synthesizer/process/src/stack/finalize_types/initialize.rs +++ b/synthesizer/process/src/stack/finalize_types/initialize.rs @@ -21,6 +21,7 @@ use synthesizer_program::{ Contains, Get, GetOrUse, + MappingLocator, RandChaCha, Remove, Set, @@ -38,18 +39,46 @@ impl FinalizeTypes { // Initialize a map of registers to their types. let mut finalize_types = Self { inputs: IndexMap::new(), destinations: IndexMap::new() }; - // Step 1. Check the inputs are well-formed. + // Initialize a list of input futures. + let mut input_futures = Vec::new(); + + // Step 1. Check the inputs are well-formed. Store the input futures. for input in finalize.inputs() { // Check the input register type. finalize_types.check_input(stack, input.register(), input.finalize_type())?; + + // If the input is a future, add it to the list of input futures. + if let FinalizeType::Future(locator) = input.finalize_type() { + input_futures.push((input.register(), *locator)); + } } - // Step 2. Check the commands are well-formed. + // Initialize a list of consumed futures. + let mut consumed_futures = Vec::new(); + + // Step 2. Check the commands are well-formed. Store the futures consumed by the `await` commands. for command in finalize.commands() { // Check the command opcode, operands, and destinations. finalize_types.check_command(stack, finalize, command)?; + + // If the command is an `await`, add the future to the list of consumed futures. + if let Command::Await(await_) = command { + // Note: `check_command` ensures that the register is a future. This is an additional check. + let locator = match finalize_types.get_type(stack, await_.register())? { + FinalizeType::Future(locator) => locator, + FinalizeType::Plaintext(..) 
=> bail!("Expected a future in '{await_}'"), + }; + consumed_futures.push((await_.register(), locator)); + } } + // Check that the input futures are consumed in the order they are passed in. + ensure!( + input_futures == consumed_futures, + "Futures in finalize '{}' are not awaited in the order they are passed in.", + finalize.name() + ); + Ok(finalize_types) } } @@ -146,8 +175,8 @@ impl FinalizeTypes { Command::Instruction(instruction) => self.check_instruction(stack, finalize.name(), instruction)?, Command::Await(await_) => self.check_await(stack, await_)?, Command::Contains(contains) => self.check_contains(stack, finalize.name(), contains)?, - Command::Get(get) => self.check_get(stack, finalize.name(), get)?, - Command::GetOrUse(get_or_use) => self.check_get_or_use(stack, finalize.name(), get_or_use)?, + Command::Get(get) => self.check_get(stack, get)?, + Command::GetOrUse(get_or_use) => self.check_get_or_use(stack, get_or_use)?, Command::RandChaCha(rand_chacha) => self.check_rand_chacha(stack, finalize.name(), rand_chacha)?, Command::Remove(remove) => self.check_remove(stack, finalize.name(), remove)?, Command::Set(set) => self.check_set(stack, finalize.name(), set)?, @@ -259,19 +288,42 @@ impl FinalizeTypes { /// Ensures the given `get` command is well-formed. #[inline] - fn check_get( - &mut self, - stack: &(impl StackMatches + StackProgram), - finalize_name: &Identifier, - get: &Get, - ) -> Result<()> { - // Ensure the declared mapping in `get` is defined in the program. - if !stack.program().contains_mapping(get.mapping_name()) { - bail!("Mapping '{}' in '{}/{finalize_name}' is not defined.", get.mapping_name(), stack.program_id()) - } - // Retrieve the mapping from the program. - // Note that the unwrap is safe, as we have already checked the mapping exists. - let mapping = stack.program().get_mapping(get.mapping_name()).unwrap(); + fn check_get(&mut self, stack: &(impl StackMatches + StackProgram), get: &Get) -> Result<()> { + // Retrieve the mapping. 
+ let mapping = match get.mapping() { + MappingLocator::Locator(locator) => { + // Retrieve the program ID. + let program_id = locator.program_id(); + // Retrieve the mapping_name. + let mapping_name = locator.resource(); + + // Ensure the locator does not reference the current program. + if stack.program_id() == program_id { + bail!("Locator '{locator}' does not reference an external mapping."); + } + // Ensure the current program contains an import for this external program. + if !stack.program().imports().keys().contains(program_id) { + bail!("External program '{program_id}' is not imported by '{}'.", stack.program_id()); + } + // Retrieve the program. + let external = stack.get_external_program(program_id)?; + // Ensure the mapping exists in the program. + if !external.contains_mapping(mapping_name) { + bail!("Mapping '{mapping_name}' in '{program_id}' is not defined.") + } + // Retrieve the mapping from the program. + external.get_mapping(mapping_name)? + } + MappingLocator::Resource(mapping_name) => { + // Ensure the declared mapping in `get` is defined in the current program. + if !stack.program().contains_mapping(mapping_name) { + bail!("Mapping '{mapping_name}' in '{}' is not defined.", stack.program_id()) + } + // Retrieve the mapping from the program. + stack.program().get_mapping(mapping_name)? + } + }; + // Get the mapping key type. let mapping_key_type = mapping.key().plaintext_type(); // Get the mapping value type. @@ -301,16 +353,43 @@ impl FinalizeTypes { fn check_get_or_use( &mut self, stack: &(impl StackMatches + StackProgram), - finalize_name: &Identifier, get_or_use: &GetOrUse, ) -> Result<()> { - // Ensure the declared mapping in `get.or_use` is defined in the program. - if !stack.program().contains_mapping(get_or_use.mapping_name()) { - bail!("Mapping '{}' in '{}/{finalize_name}' is not defined.", get_or_use.mapping_name(), stack.program_id()) - } - // Retrieve the mapping from the program. 
- // Note that the unwrap is safe, as we have already checked the mapping exists. - let mapping = stack.program().get_mapping(get_or_use.mapping_name()).unwrap(); + // Retrieve the mapping. + let mapping = match get_or_use.mapping() { + MappingLocator::Locator(locator) => { + // Retrieve the program ID. + let program_id = locator.program_id(); + // Retrieve the mapping_name. + let mapping_name = locator.resource(); + + // Ensure the locator does not reference the current program. + if stack.program_id() == program_id { + bail!("Locator '{locator}' does not reference an external mapping."); + } + // Ensure the current program contains an import for this external program. + if !stack.program().imports().keys().contains(program_id) { + bail!("External program '{program_id}' is not imported by '{}'.", stack.program_id()); + } + // Retrieve the program. + let external = stack.get_external_program(program_id)?; + // Ensure the mapping exists in the program. + if !external.contains_mapping(mapping_name) { + bail!("Mapping '{mapping_name}' in '{program_id}' is not defined.") + } + // Retrieve the mapping from the program. + external.get_mapping(mapping_name)? + } + MappingLocator::Resource(mapping_name) => { + // Ensure the declared mapping in `get.or_use` is defined in the current program. + if !stack.program().contains_mapping(mapping_name) { + bail!("Mapping '{mapping_name}' in '{}' is not defined.", stack.program_id()) + } + // Retrieve the mapping from the program. + stack.program().get_mapping(mapping_name)? + } + }; + + // Get the mapping key type. let mapping_key_type = mapping.key().plaintext_type(); // Get the mapping value type. 
diff --git a/synthesizer/process/src/stack/helpers/initialize.rs b/synthesizer/process/src/stack/helpers/initialize.rs index 416bcc6a9a..46b6bd2465 100644 --- a/synthesizer/process/src/stack/helpers/initialize.rs +++ b/synthesizer/process/src/stack/helpers/initialize.rs @@ -58,7 +58,7 @@ impl Stack { impl Stack { /// Inserts the given external stack to the stack. #[inline] - fn insert_external_stack(&mut self, external_stack: Stack) -> Result<()> { + fn insert_external_stack(&mut self, external_stack: Arc>) -> Result<()> { // Retrieve the program ID. let program_id = *external_stack.program_id(); // Ensure the external stack is not already added. diff --git a/synthesizer/process/src/stack/helpers/matches.rs b/synthesizer/process/src/stack/helpers/matches.rs index 796608f5ab..975045943f 100644 --- a/synthesizer/process/src/stack/helpers/matches.rs +++ b/synthesizer/process/src/stack/helpers/matches.rs @@ -223,8 +223,14 @@ impl Stack { Plaintext::Array(..) => bail!("'{struct_name}' is invalid: expected struct, found array"), }; - // Ensure the number of struct members does not exceed the maximum. let num_members = members.len(); + // Ensure the number of struct members does not go below the minimum. + ensure!( + num_members >= N::MIN_STRUCT_ENTRIES, + "'{struct_name}' cannot be less than {} entries", + N::MIN_STRUCT_ENTRIES + ); + // Ensure the number of struct members does not exceed the maximum. ensure!( num_members <= N::MAX_STRUCT_ENTRIES, "'{struct_name}' cannot exceed {} entries", diff --git a/synthesizer/process/src/stack/helpers/synthesize.rs b/synthesizer/process/src/stack/helpers/synthesize.rs index 9a9cbf3f1e..c3530d0071 100644 --- a/synthesizer/process/src/stack/helpers/synthesize.rs +++ b/synthesizer/process/src/stack/helpers/synthesize.rs @@ -58,7 +58,7 @@ impl Stack { // Initialize the call stack. let call_stack = CallStack::Synthesize(vec![request], burner_private_key, authorization); // Synthesize the circuit. 
- let _response = self.execute_function::(call_stack, None)?; + let _response = self.execute_function::(call_stack, None, rng)?; // Ensure the proving key exists. ensure!(self.contains_proving_key(function_name), "Function '{function_name}' is missing a proving key."); diff --git a/synthesizer/process/src/stack/mod.rs b/synthesizer/process/src/stack/mod.rs index 4fe6d18531..2b95b46fb7 100644 --- a/synthesizer/process/src/stack/mod.rs +++ b/synthesizer/process/src/stack/mod.rs @@ -169,7 +169,7 @@ pub struct Stack { /// The program (record types, structs, functions). program: Program, /// The mapping of external stacks as `(program ID, stack)`. - external_stacks: IndexMap, Stack>, + external_stacks: IndexMap, Arc>>, /// The mapping of closure and function names to their register types. register_types: IndexMap, RegisterTypes>, /// The mapping of finalize names to their register types. @@ -235,7 +235,7 @@ impl StackProgram for Stack { /// Returns the external stack for the given program ID. #[inline] - fn get_external_stack(&self, program_id: &ProgramID) -> Result<&Stack> { + fn get_external_stack(&self, program_id: &ProgramID) -> Result<&Arc>> { // Retrieve the external stack. self.external_stacks.get(program_id).ok_or_else(|| anyhow!("External program '{program_id}' does not exist.")) } diff --git a/synthesizer/process/src/stack/register_types/initialize.rs b/synthesizer/process/src/stack/register_types/initialize.rs index 387f0e0d2e..7b3265a1f5 100644 --- a/synthesizer/process/src/stack/register_types/initialize.rs +++ b/synthesizer/process/src/stack/register_types/initialize.rs @@ -34,7 +34,7 @@ impl RegisterTypes { // Step 2. Check the instructions are well-formed. for instruction in closure.instructions() { - // Ensure the closure contains no aysnc instructions. + // Ensure the closure contains no async instructions. 
ensure!(instruction.opcode() != Opcode::Async, "An 'async' instruction is not allowed in closures"); // Ensure the closure contains no call instructions. ensure!(instruction.opcode() != Opcode::Call, "A 'call' instruction is not allowed in closures"); @@ -164,33 +164,32 @@ impl RegisterTypes { let mut future_registers = register_types .destinations .iter() - .filter_map(|(_, register_type)| match register_type { - RegisterType::Future(locator) => Some(*locator), + .filter_map(|(index, register_type)| match register_type { + RegisterType::Future(locator) => Some((Register::Locator(*index), *locator)), _ => None, }) .collect::>(); - // Remove the last locator, since this is the future created by the `async` call. - future_registers.pop(); - - // Check that all the registers were consumed by the `async` call, in order. match async_ { + // If no `async` instruction exists, then there should not be any future registers. None => { - if !future_registers.is_empty() { - bail!( - "Function '{}' contains futures, but does not contain an 'async' instruction", - function.name() - ) - } + ensure!( + future_registers.is_empty(), + "Function '{}' contains futures, but does not contain an 'async' instruction", + function.name() + ) } + // Otherwise, check that all the registers were consumed by the `async` call, in order. Some(async_) => { + // Remove the last future, since this is the future created by the `async` call. + future_registers.pop(); // Get the register operands that are `future` types. 
let async_future_operands = async_ .operands() .iter() .filter_map(|operand| match operand { Operand::Register(register) => match register_types.get_type(stack, register).ok() { - Some(RegisterType::Future(locator)) => Some(locator), + Some(RegisterType::Future(locator)) => Some((register.clone(), locator)), _ => None, }, _ => None, @@ -306,6 +305,7 @@ impl RegisterTypes { operand: &Operand, register_type: &RegisterType, ) -> Result<()> { + #[cfg(feature = "aleo-cli")] match operand { // Inform the user the output operand is an input register, to ensure this is intended behavior. Operand::Register(register) if self.is_input(register) => { @@ -461,8 +461,22 @@ impl RegisterTypes { // Retrieve the program. let external = stack.get_external_program(program_id)?; - // Ensure the function or closure exists in the program. - if !external.contains_function(resource) && !external.contains_closure(resource) { + // Check that function exists in the program. + if let Ok(child_function) = external.get_function_ref(resource) { + // If the child function contains a finalize block, then the parent function must also contain a finalize block. + let child_contains_finalize = child_function.finalize_logic().is_some(); + let parent_contains_finalize = + stack.get_function_ref(closure_or_function_name)?.finalize_logic().is_some(); + if child_contains_finalize && !parent_contains_finalize { + bail!( + "Function '{}/{closure_or_function_name}' must contain a finalize block, since it calls '{}/{resource}'.", + stack.program_id(), + program_id + ) + } + } + // Otherwise, ensure the closure exists in the program. 
+ else if !external.contains_closure(resource) { bail!("'{resource}' is not defined in '{}'.", external.id()) } } diff --git a/synthesizer/process/src/stack/register_types/matches.rs b/synthesizer/process/src/stack/register_types/matches.rs index 2082fa995b..3957726841 100644 --- a/synthesizer/process/src/stack/register_types/matches.rs +++ b/synthesizer/process/src/stack/register_types/matches.rs @@ -168,8 +168,6 @@ impl RegisterTypes { } /// Checks that the given record matches the layout of the record type. - /// Note: Ordering for `owner` **does** matter, however ordering - /// for record data does **not** matter, as long as all defined members are present. pub fn matches_record( &self, stack: &(impl StackMatches + StackProgram), diff --git a/synthesizer/process/src/tests/test_credits.rs b/synthesizer/process/src/tests/test_credits.rs index 6437e67b2f..aefae59a3c 100644 --- a/synthesizer/process/src/tests/test_credits.rs +++ b/synthesizer/process/src/tests/test_credits.rs @@ -251,7 +251,7 @@ fn execute_function>( process.authorize::(caller_private_key, "credits.aleo", function, inputs.iter(), rng)?; // Construct the trace. - let (_, mut trace) = process.execute::(authorization)?; + let (_, mut trace) = process.execute::(authorization, rng)?; // Construct the block store. let block_store = BlockStore::>::open(None)?; @@ -1527,7 +1527,7 @@ mod sanity_checks { // Initialize the call stack. let call_stack = CallStack::CheckDeployment(vec![request], *private_key, assignments.clone()); // Synthesize the circuit. - let _response = stack.execute_function::(call_stack, None).unwrap(); + let _response = stack.execute_function::(call_stack, None, rng).unwrap(); // Retrieve the assignment. 
let assignment = assignments.read().last().unwrap().0.clone(); assignment diff --git a/synthesizer/process/src/tests/test_execute.rs b/synthesizer/process/src/tests/test_execute.rs index 2af2faf97e..1837b54967 100644 --- a/synthesizer/process/src/tests/test_execute.rs +++ b/synthesizer/process/src/tests/test_execute.rs @@ -85,7 +85,7 @@ pub fn sample_fee, B: BlockStorage, P: Final .authorize_fee_public::(&private_key, base_fee_in_microcredits, priority_fee_in_microcredits, id, rng) .unwrap(); // Execute the fee. - let (_, mut trace) = process.execute::(authorization).unwrap(); + let (_, mut trace) = process.execute::(authorization, rng).unwrap(); // Prepare the assignments. trace.prepare(Query::from(block_store)).unwrap(); // Compute the proof and construct the fee. @@ -395,7 +395,7 @@ output r4 as field.private;", // Re-run to ensure state continues to work. let trace = Arc::new(RwLock::new(Trace::new())); let call_stack = CallStack::execute(authorization, trace).unwrap(); - let response = stack.execute_function::(call_stack, None).unwrap(); + let response = stack.execute_function::(call_stack, None, rng).unwrap(); let candidate = response.outputs(); assert_eq!(3, candidate.len()); assert_eq!(r2, candidate[0]); @@ -530,7 +530,7 @@ fn test_process_execute_transfer_public() { assert_eq!(authorization.len(), 1); // Execute the request. - let (response, _trace) = process.execute::(authorization).unwrap(); + let (response, _trace) = process.execute::(authorization, rng).unwrap(); let candidate = response.outputs(); assert_eq!(2, candidate.len()); assert_eq!(r2, candidate[0]); @@ -677,7 +677,7 @@ fn test_process_multirecords() { assert_eq!(authorization.len(), 1); // Execute the request. 
- let (response, _trace) = process.execute::(authorization).unwrap(); + let (response, _trace) = process.execute::(authorization, rng).unwrap(); let candidate = response.outputs(); assert_eq!(3, candidate.len()); assert_eq!(output_a, candidate[0]); @@ -758,7 +758,7 @@ fn test_process_self_caller() { assert_eq!(authorization.len(), 1); // Execute the request. - let (response, _trace) = process.execute::(authorization).unwrap(); + let (response, _trace) = process.execute::(authorization, rng).unwrap(); let candidate = response.outputs(); assert_eq!(1, candidate.len()); assert_eq!(output, candidate[0]); @@ -818,7 +818,7 @@ fn test_process_program_id() { assert_eq!(authorization.len(), 1); // Execute the request. - let (response, _trace) = process.execute::(authorization).unwrap(); + let (response, _trace) = process.execute::(authorization, rng).unwrap(); let candidate = response.outputs(); assert_eq!(1, candidate.len()); assert_eq!(output, candidate[0]); @@ -857,7 +857,7 @@ fn test_process_output_operand() { assert_eq!(authorization.len(), 1); // Execute the request. - let (response, _trace) = process.execute::(authorization).unwrap(); + let (response, _trace) = process.execute::(authorization, rng).unwrap(); let candidate = response.outputs(); assert_eq!(1, candidate.len()); assert_eq!(output, candidate[0]); @@ -880,7 +880,7 @@ fn test_process_output_operand() { ) .unwrap(); - // Initalize the RNG. + // Initialize the RNG. let rng = &mut TestRng::default(); // Initialize a new caller account. @@ -1020,7 +1020,7 @@ function compute: assert_eq!(authorization.len(), 1); // Execute the request. - let (response, _trace) = process.execute::(authorization).unwrap(); + let (response, _trace) = process.execute::(authorization, rng).unwrap(); let candidate = response.outputs(); assert_eq!(4, candidate.len()); assert_eq!(r3, candidate[0]); @@ -1170,7 +1170,7 @@ function transfer: assert_eq!(authorization.len(), 5); // Execute the request. 
- let (response, _trace) = process.execute::(authorization).unwrap(); + let (response, _trace) = process.execute::(authorization, rng).unwrap(); let candidate = response.outputs(); assert_eq!(2, candidate.len()); assert_eq!(output_a, candidate[0]); @@ -1280,7 +1280,7 @@ finalize compute: assert_eq!(authorization.len(), 1); // Execute the request. - let (response, mut trace) = process.execute::(authorization).unwrap(); + let (response, mut trace) = process.execute::(authorization, rng).unwrap(); let candidate = response.outputs(); assert_eq!(1, candidate.len()); @@ -1393,7 +1393,7 @@ finalize compute: assert_eq!(authorization.len(), 1); // Execute the request. - let (response, mut trace) = process.execute::(authorization).unwrap(); + let (response, mut trace) = process.execute::(authorization, rng).unwrap(); let candidate = response.outputs(); assert_eq!(1, candidate.len()); @@ -1524,7 +1524,7 @@ finalize mint_public: assert_eq!(authorization.len(), 1); // Execute the request. - let (response, mut trace) = process.execute::(authorization).unwrap(); + let (response, mut trace) = process.execute::(authorization, rng).unwrap(); let candidate = response.outputs(); assert_eq!(1, candidate.len()); @@ -1692,7 +1692,7 @@ finalize init: assert_eq!(authorization.len(), 2); // Execute the request. - let (response, mut trace) = process.execute::(authorization).unwrap(); + let (response, mut trace) = process.execute::(authorization, rng).unwrap(); let candidate = response.outputs(); assert_eq!(1, candidate.len()); @@ -1807,7 +1807,7 @@ finalize compute: assert_eq!(authorization.len(), 1); // Execute the request. - let (response, mut trace) = process.execute::(authorization).unwrap(); + let (response, mut trace) = process.execute::(authorization, rng).unwrap(); let candidate = response.outputs(); assert_eq!(1, candidate.len()); @@ -1919,7 +1919,7 @@ function a: assert_eq!(authorization.len(), 3); // Execute the request. 
- let (response, mut trace) = process.execute::(authorization).unwrap(); + let (response, mut trace) = process.execute::(authorization, rng).unwrap(); let candidate = response.outputs(); assert_eq!(1, candidate.len()); assert_eq!(output, candidate[0]); @@ -2101,7 +2101,7 @@ fn test_complex_execution_order() { assert_eq!(authorization.len(), 10); // Execute the request. - let (response, mut trace) = process.execute::(authorization).unwrap(); + let (response, mut trace) = process.execute::(authorization, rng).unwrap(); let candidate = response.outputs(); assert_eq!(1, candidate.len()); assert_eq!(output, candidate[0]); @@ -2235,7 +2235,7 @@ finalize compute: assert_eq!(authorization.len(), 1); // Execute the request. - let (response, mut trace) = process.execute::(authorization).unwrap(); + let (response, mut trace) = process.execute::(authorization, rng).unwrap(); let candidate = response.outputs(); assert_eq!(1, candidate.len()); @@ -2339,7 +2339,7 @@ function compute: assert_eq!(authorization.len(), 1); // Execute the request. - let (response, mut trace) = process.execute::(authorization).unwrap(); + let (response, mut trace) = process.execute::(authorization, rng).unwrap(); let candidate = response.outputs(); assert_eq!(3, candidate.len()); assert_eq!(r2, candidate[0]); @@ -2450,7 +2450,7 @@ function {function_name}: .unwrap(); // Execute the request. - let (response, mut trace) = process.execute::(authorization).unwrap(); + let (response, mut trace) = process.execute::(authorization, rng).unwrap(); assert_eq!(response.outputs().len(), 0); // Prepare the trace. @@ -2469,7 +2469,7 @@ function {function_name}: .unwrap(); // Execute the request. - let (response, mut trace) = process.execute::(authorization).unwrap(); + let (response, mut trace) = process.execute::(authorization, rng).unwrap(); assert_eq!(response.outputs().len(), 0); // Prepare the trace. 
diff --git a/synthesizer/process/src/trace/inclusion/mod.rs b/synthesizer/process/src/trace/inclusion/mod.rs index 2213586c12..b61b9d0f77 100644 --- a/synthesizer/process/src/trace/inclusion/mod.rs +++ b/synthesizer/process/src/trace/inclusion/mod.rs @@ -222,7 +222,7 @@ impl InclusionAssignment { let candidate_serial_number = circuit::Record::>::serial_number_from_gamma(&gamma, commitment.clone()); // Enforce that the candidate serial number is equal to the serial number. - A::assert_eq(&candidate_serial_number, &serial_number); + A::assert_eq(candidate_serial_number, serial_number); // Enforce the starting leaf is the claimed commitment. A::assert_eq(state_path.transition_leaf().id(), commitment); diff --git a/synthesizer/process/src/traits/mod.rs b/synthesizer/process/src/traits/mod.rs index bafbb1d8f3..c82b4ffd59 100644 --- a/synthesizer/process/src/traits/mod.rs +++ b/synthesizer/process/src/traits/mod.rs @@ -16,7 +16,7 @@ use crate::{CallStack, Closure, FinalizeTypes, RegisterTypes}; use console::{ account::Address, network::Network, - prelude::Result, + prelude::{CryptoRng, Result, Rng}, program::{Identifier, ProgramID, Response, Value}, types::Field, }; @@ -68,10 +68,11 @@ pub trait StackExecute { /// /// # Errors /// This method will halt if the given inputs are not the same length as the input statements. 
- fn execute_function>( + fn execute_function, R: CryptoRng + Rng>( &self, call_stack: CallStack, console_caller: Option>, + rng: &mut R, ) -> Result>; } diff --git a/synthesizer/program/Cargo.toml b/synthesizer/program/Cargo.toml index 08178f34c3..ae2ad7a9ba 100644 --- a/synthesizer/program/Cargo.toml +++ b/synthesizer/program/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "snarkvm-synthesizer-program" -version = "0.16.3" +version = "0.16.15" authors = [ "The Aleo Team " ] description = "Program for a decentralized virtual machine" homepage = "https://aleo.org" @@ -31,12 +31,12 @@ wasm = [ "console/wasm" ] [dependencies.circuit] package = "snarkvm-circuit" path = "../../circuit" -version = "=0.16.3" +version = "=0.16.15" [dependencies.console] package = "snarkvm-console" path = "../../console" -version = "=0.16.3" +version = "=0.16.15" default-features = false features = [ "account", "network", "program", "types" ] diff --git a/synthesizer/program/src/closure/bytes.rs b/synthesizer/program/src/closure/bytes.rs index 1afb0e623e..60d717bf30 100644 --- a/synthesizer/program/src/closure/bytes.rs +++ b/synthesizer/program/src/closure/bytes.rs @@ -23,6 +23,9 @@ impl> FromBytes for ClosureCore u16::try_from(N::MAX_INPUTS).map_err(error)? { return Err(error(format!("Failed to deserialize a closure: too many inputs ({num_inputs})"))); } @@ -33,6 +36,9 @@ impl> FromBytes for ClosureCore u32::try_from(N::MAX_INSTRUCTIONS).map_err(error)? 
{ return Err(error(format!("Failed to deserialize a closure: too many instructions ({num_instructions})"))); } @@ -70,7 +76,7 @@ impl> ToBytes for ClosureCore u16::try_from(num_inputs).map_err(error)?.write_le(&mut writer)?, false => return Err(error(format!("Failed to write {num_inputs} inputs as bytes"))), } @@ -82,7 +88,7 @@ impl> ToBytes for ClosureCore u32::try_from(num_instructions).map_err(error)?.write_le(&mut writer)?, false => return Err(error(format!("Failed to write {num_instructions} instructions as bytes"))), } diff --git a/synthesizer/program/src/closure/parse.rs b/synthesizer/program/src/closure/parse.rs index fe88ef2e1e..4d9894b0d7 100644 --- a/synthesizer/program/src/closure/parse.rs +++ b/synthesizer/program/src/closure/parse.rs @@ -32,7 +32,7 @@ impl> Parser for ClosureCore> FromBytes for FinalizeCore u16::try_from(N::MAX_COMMANDS).map_err(error)? { return Err(error(format!("Failed to deserialize finalize: too many commands ({num_commands})"))); } @@ -71,7 +74,7 @@ impl> ToBytes for FinalizeCore // Write the number of commands for the finalize. 
let num_commands = self.commands.len(); - match num_commands <= N::MAX_COMMANDS { + match 0 < num_commands && num_commands <= N::MAX_COMMANDS { true => u16::try_from(num_commands).map_err(error)?.write_le(&mut writer)?, false => return Err(error(format!("Failed to write {num_commands} commands as bytes"))), } @@ -108,7 +111,9 @@ finalize main: add r0 r1 into r8; add r0 r1 into r9; add r0 r1 into r10; - add r0 r1 into r11;"; + add r0 r1 into r11; + get accounts[r0] into r12; + get accounts[r1] into r13;"; let expected = Finalize::::from_str(finalize_string)?; let expected_bytes = expected.to_bytes_le()?; diff --git a/synthesizer/program/src/lib.rs b/synthesizer/program/src/lib.rs index a6597740b3..6e32c776f8 100644 --- a/synthesizer/program/src/lib.rs +++ b/synthesizer/program/src/lib.rs @@ -575,6 +575,7 @@ impl, Command: CommandTrait> Pro "u64", "u128", "scalar", + "signature", "string", // Boolean "true", @@ -589,6 +590,7 @@ impl, Command: CommandTrait> Pro "owner", // Program "transition", + "import", "function", "struct", "closure", diff --git a/synthesizer/program/src/logic/command/get.rs b/synthesizer/program/src/logic/command/get.rs index fc698a0969..ac56eb92c0 100644 --- a/synthesizer/program/src/logic/command/get.rs +++ b/synthesizer/program/src/logic/command/get.rs @@ -19,21 +19,143 @@ use crate::{ }; use console::{ network::prelude::*, - program::{Identifier, Register, Value}, + program::{Identifier, Locator, Register, Value}, }; +use std::io::{BufRead, BufReader}; + +/// The operator references a local or external mapping name. +#[derive(Clone, PartialEq, Eq, Hash)] +pub enum MappingLocator { + /// The reference to a non-local mapping name. + Locator(Locator), + /// The reference to a local mapping name. + Resource(Identifier), +} + +impl Parser for MappingLocator { + /// Parses a string into an operator. 
+ #[inline] + fn parse(string: &str) -> ParserResult { + alt(( + map(Locator::parse, |locator| MappingLocator::Locator(locator)), + map(Identifier::parse, |identifier| MappingLocator::Resource(identifier)), + ))(string) + } +} + +impl FromStr for MappingLocator { + type Err = Error; + + /// Parses a string into an operator. + #[inline] + fn from_str(string: &str) -> Result { + match Self::parse(string) { + Ok((remainder, object)) => { + // Ensure the remainder is empty. + ensure!(remainder.is_empty(), "Failed to parse string. Found invalid character in: \"{remainder}\""); + // Return the object. + Ok(object) + } + Err(error) => bail!("Failed to parse string. {error}"), + } + } +} + +impl Debug for MappingLocator { + /// Prints the operator as a string. + fn fmt(&self, f: &mut Formatter) -> fmt::Result { + Display::fmt(self, f) + } +} + +impl Display for MappingLocator { + /// Prints the operator to a string. + fn fmt(&self, f: &mut Formatter) -> fmt::Result { + match self { + MappingLocator::Locator(locator) => Display::fmt(locator, f), + MappingLocator::Resource(resource) => Display::fmt(resource, f), + } + } +} + +impl FromBytes for MappingLocator { + /// Reads the operation from a buffer. + fn read_le(mut reader: R) -> IoResult { + // Read the version. + let version = u8::read_le(&mut reader)?; + // Ensure the version is valid. + if version != 0 { + return Err(error("Failed to read MappingLocator. Invalid version.")); + } + // Read the variant. + let variant = u8::read_le(&mut reader)?; + // Match the variant. + match variant { + 0 => Ok(MappingLocator::Locator(Locator::read_le(&mut reader)?)), + 1 => Ok(MappingLocator::Resource(Identifier::read_le(&mut reader)?)), + _ => Err(error("Failed to read MappingLocator. Invalid variant.")), + } + } +} + +impl ToBytes for MappingLocator { + /// Writes the operation to a buffer. + fn write_le(&self, mut writer: W) -> IoResult<()> { + match self { + MappingLocator::Locator(locator) => { + // Write the version. 
+ 0u8.write_le(&mut writer)?; + // Write the variant. + 0u8.write_le(&mut writer)?; + // Write the locator. + locator.write_le(&mut writer) + } + MappingLocator::Resource(resource) => { + // Write the version. + 0u8.write_le(&mut writer)?; + // Write the variant. + 1u8.write_le(&mut writer)?; + // Write the resource. + resource.write_le(&mut writer) + } + } + } +} + /// A get command, e.g. `get accounts[r0] into r1;`. /// Gets the value stored at `operand` in `mapping` and stores the result in `destination`. -#[derive(Clone, PartialEq, Eq, Hash)] +#[derive(Clone)] pub struct Get { - /// The mapping name. - mapping: Identifier, + /// The mapping. + // TODO (howardwu): For mainnet - Use `CallOperator`, delete the above `MappingLocator`. + mapping: MappingLocator, /// The key to access the mapping. key: Operand, /// The destination register. destination: Register, } +impl PartialEq for Get { + /// Returns true if the two objects are equal. + #[inline] + fn eq(&self, other: &Self) -> bool { + self.mapping == other.mapping && self.key == other.key && self.destination == other.destination + } +} + +impl Eq for Get {} + +impl std::hash::Hash for Get { + /// Returns the hash of the object. + #[inline] + fn hash(&self, state: &mut H) { + self.mapping.hash(state); + self.key.hash(state); + self.destination.hash(state); + } +} + impl Get { /// Returns the opcode. #[inline] @@ -47,9 +169,9 @@ impl Get { vec![self.key.clone()] } - /// Returns the mapping name. + /// Returns the mapping. #[inline] - pub const fn mapping_name(&self) -> &Identifier { + pub const fn mapping(&self) -> &MappingLocator { &self.mapping } @@ -75,21 +197,27 @@ impl Get { store: &impl FinalizeStoreTrait, registers: &mut (impl RegistersLoad + RegistersStore), ) -> Result<()> { + // Determine the program ID and mapping name. 
+ let (program_id, mapping_name) = match self.mapping { + MappingLocator::Locator(locator) => (*locator.program_id(), *locator.resource()), + MappingLocator::Resource(mapping_name) => (*stack.program_id(), mapping_name), + }; + // Ensure the mapping exists in storage. - if !store.contains_mapping_confirmed(stack.program_id(), &self.mapping)? { - bail!("Mapping '{}/{}' does not exist in storage", stack.program_id(), self.mapping); + if !store.contains_mapping_confirmed(&program_id, &mapping_name)? { + bail!("Mapping '{program_id}/{mapping_name}' does not exist in storage"); } // Load the operand as a plaintext. let key = registers.load_plaintext(stack, &self.key)?; // Retrieve the value from storage as a literal. - let value = match store.get_value_speculative(*stack.program_id(), self.mapping, &key)? { + let value = match store.get_value_speculative(program_id, mapping_name, &key)? { Some(Value::Plaintext(plaintext)) => Value::Plaintext(plaintext), Some(Value::Record(..)) => bail!("Cannot 'get' a 'record'"), Some(Value::Future(..)) => bail!("Cannot 'get' a 'future'",), // If a key does not exist, then bail. - None => bail!("Key '{}' does not exist in mapping '{}/{}'", key, stack.program_id(), self.mapping), + None => bail!("Key '{key}' does not exist in mapping '{program_id}/{mapping_name}'"), }; // Assign the value to the destination register. @@ -111,7 +239,7 @@ impl Parser for Get { let (string, _) = Sanitizer::parse_whitespaces(string)?; // Parse the mapping name from the string. - let (string, mapping) = Identifier::parse(string)?; + let (string, mapping) = MappingLocator::parse(string)?; // Parse the "[" from the string. let (string, _) = tag("[")(string)?; // Parse the whitespace from the string. @@ -180,9 +308,22 @@ impl Display for Get { impl FromBytes for Get { /// Reads the command from a buffer. - fn read_le(mut reader: R) -> IoResult { - // Read the mapping name. 
- let mapping = Identifier::read_le(&mut reader)?; + fn read_le(reader: R) -> IoResult { + // Peek at the first byte. + // TODO (howardwu): For mainnet - Read a `MappingLocator`. + let mut reader = BufReader::with_capacity(1, reader); + let first_byte = { + let buffer = reader.fill_buf()?; + match buffer.first() { + Some(byte) => *byte, + None => return Err(error("Failed to read `get`. Expected byte.")), + } + }; + // If the first byte is zero, then read a `MappingLocator`, otherwise read an `Identifier`. + let mapping = match first_byte { + 0u8 => MappingLocator::read_le(&mut reader)?, + _ => MappingLocator::Resource(Identifier::read_le(&mut reader)?), + }; // Read the key operand. let key = Operand::read_le(&mut reader)?; // Read the destination register. @@ -196,7 +337,11 @@ impl ToBytes for Get { /// Writes the operation to a buffer. fn write_le(&self, mut writer: W) -> IoResult<()> { // Write the mapping name. - self.mapping.write_le(&mut writer)?; + // TODO (howardwu): For mainnet - Write `self.mapping` directly, instead of matching on the identifier case. + match &self.mapping { + MappingLocator::Locator(_) => self.mapping.write_le(&mut writer)?, + MappingLocator::Resource(identifier) => identifier.write_le(&mut writer)?, + } // Write the key operand. self.key.write_le(&mut writer)?; // Write the destination register. @@ -211,13 +356,59 @@ mod tests { type CurrentNetwork = Testnet3; + struct OldGet { + mapping: Identifier, + key: Operand, + destination: Register, + } + + impl ToBytes for OldGet { + fn write_le(&self, mut writer: W) -> IoResult<()> + where + Self: Sized, + { + // Write the mapping name. + self.mapping.write_le(&mut writer)?; + // Write the key operand. + self.key.write_le(&mut writer)?; + // Write the destination register. 
+ self.destination.write_le(&mut writer) + } + } + #[test] fn test_parse() { let (string, get) = Get::::parse("get account[r0] into r1;").unwrap(); assert!(string.is_empty(), "Parser did not consume all of the string: '{string}'"); - assert_eq!(get.mapping, Identifier::from_str("account").unwrap()); + assert_eq!(get.mapping, MappingLocator::from_str("account").unwrap()); assert_eq!(get.operands().len(), 1, "The number of operands is incorrect"); assert_eq!(get.key, Operand::Register(Register::Locator(0)), "The first operand is incorrect"); assert_eq!(get.destination, Register::Locator(1), "The second operand is incorrect"); + + let (string, get) = Get::::parse("get token.aleo/balances[r0] into r1;").unwrap(); + assert!(string.is_empty(), "Parser did not consume all of the string: '{string}'"); + assert_eq!(get.mapping, MappingLocator::from_str("token.aleo/balances").unwrap()); + assert_eq!(get.operands().len(), 1, "The number of operands is incorrect"); + assert_eq!(get.key, Operand::Register(Register::Locator(0)), "The first operand is incorrect"); + assert_eq!(get.destination, Register::Locator(1), "The second operand is incorrect"); + } + + #[test] + fn test_from_bytes() { + let (string, get) = Get::::parse("get account[r0] into r1;").unwrap(); + assert!(string.is_empty()); + + let old_get = OldGet:: { + mapping: Identifier::from_str("account").unwrap(), + key: Operand::Register(Register::Locator(0)), + destination: Register::Locator(1), + }; + + let get_bytes = get.to_bytes_le().unwrap(); + let old_get_bytes = old_get.to_bytes_le().unwrap(); + + let first = Get::::from_bytes_le(&get_bytes[..]).unwrap(); + let second = Get::::from_bytes_le(&old_get_bytes[..]).unwrap(); + assert_eq!(first, second); } } diff --git a/synthesizer/program/src/logic/command/get_or_use.rs b/synthesizer/program/src/logic/command/get_or_use.rs index 27c0e6faec..7c2ef6d7d7 100644 --- a/synthesizer/program/src/logic/command/get_or_use.rs +++ 
b/synthesizer/program/src/logic/command/get_or_use.rs @@ -14,21 +14,26 @@ use crate::{ traits::{FinalizeStoreTrait, RegistersLoad, RegistersStore, StackMatches, StackProgram}, + MappingLocator, Opcode, Operand, }; use console::{ network::prelude::*, - program::{Identifier, Register, Value}, + program::{Register, Value}, }; +use console::program::Identifier; +use std::io::{BufRead, BufReader}; + /// A get command that uses the provided default in case of failure, e.g. `get.or_use accounts[r0] r1 into r2;`. /// Gets the value stored at `operand` in `mapping` and stores the result in `destination`. /// If the key is not present, `default` is stored in `destination`. -#[derive(Clone, PartialEq, Eq, Hash)] +#[derive(Clone)] pub struct GetOrUse { - /// The mapping name. - mapping: Identifier, + /// The mapping. + // TODO (howardwu): For mainnet - Use `CallOperator`, delete the above `MappingLocator`. + mapping: MappingLocator, /// The key to access the mapping. key: Operand, /// The default value. @@ -37,6 +42,28 @@ pub struct GetOrUse { destination: Register, } +impl PartialEq for GetOrUse { + #[inline] + fn eq(&self, other: &Self) -> bool { + self.mapping == other.mapping + && self.key == other.key + && self.default == other.default + && self.destination == other.destination + } +} + +impl Eq for GetOrUse {} + +impl std::hash::Hash for GetOrUse { + #[inline] + fn hash(&self, state: &mut H) { + self.mapping.hash(state); + self.key.hash(state); + self.default.hash(state); + self.destination.hash(state); + } +} + impl GetOrUse { /// Returns the opcode. #[inline] @@ -50,9 +77,9 @@ impl GetOrUse { vec![self.key.clone(), self.default.clone()] } - /// Returns the mapping name. + /// Returns the mapping. 
#[inline] - pub const fn mapping_name(&self) -> &Identifier { + pub const fn mapping(&self) -> &MappingLocator { &self.mapping } @@ -84,16 +111,22 @@ impl GetOrUse { store: &impl FinalizeStoreTrait, registers: &mut (impl RegistersLoad + RegistersStore), ) -> Result<()> { + // Determine the program ID and mapping name. + let (program_id, mapping_name) = match self.mapping { + MappingLocator::Locator(locator) => (*locator.program_id(), *locator.resource()), + MappingLocator::Resource(mapping_name) => (*stack.program_id(), mapping_name), + }; + // Ensure the mapping exists in storage. - if !store.contains_mapping_confirmed(stack.program_id(), &self.mapping)? { - bail!("Mapping '{}/{}' does not exist in storage", stack.program_id(), self.mapping); + if !store.contains_mapping_confirmed(&program_id, &mapping_name)? { + bail!("Mapping '{program_id}/{mapping_name}' does not exist in storage"); } // Load the operand as a plaintext. let key = registers.load_plaintext(stack, &self.key)?; // Retrieve the value from storage as a literal. - let value = match store.get_value_speculative(*stack.program_id(), self.mapping, &key)? { + let value = match store.get_value_speculative(program_id, mapping_name, &key)? { Some(Value::Plaintext(plaintext)) => Value::Plaintext(plaintext), Some(Value::Record(..)) => bail!("Cannot 'get.or_use' a 'record'"), Some(Value::Future(..)) => bail!("Cannot 'get.or_use' a 'future'"), @@ -121,7 +154,7 @@ impl Parser for GetOrUse { let (string, _) = Sanitizer::parse_whitespaces(string)?; // Parse the mapping name from the string. - let (string, mapping) = Identifier::parse(string)?; + let (string, mapping) = MappingLocator::parse(string)?; // Parse the "[" from the string. let (string, _) = tag("[")(string)?; // Parse the whitespace from the string. @@ -194,9 +227,22 @@ impl Display for GetOrUse { impl FromBytes for GetOrUse { /// Reads the command from a buffer. - fn read_le(mut reader: R) -> IoResult { - // Read the mapping name. 
- let mapping = Identifier::read_le(&mut reader)?; + fn read_le(reader: R) -> IoResult { + // Peek at the first byte. + // TODO (howardwu): For mainnet - Read a `MappingLocator`. + let mut reader = BufReader::with_capacity(1, reader); + let first_byte = { + let buffer = reader.fill_buf()?; + match buffer.first() { + Some(byte) => *byte, + None => return Err(error("Failed to read `get.or_use`. Expected byte.")), + } + }; + // If the first byte is zero, then read a `MappingLocator`, otherwise read an `Identifier`. + let mapping = match first_byte { + 0u8 => MappingLocator::read_le(&mut reader)?, + _ => MappingLocator::Resource(Identifier::read_le(&mut reader)?), + }; // Read the key operand. let key = Operand::read_le(&mut reader)?; // Read the default value. @@ -212,7 +258,11 @@ impl ToBytes for GetOrUse { /// Writes the operation to a buffer. fn write_le(&self, mut writer: W) -> IoResult<()> { // Write the mapping name. - self.mapping.write_le(&mut writer)?; + // TODO (howardwu): For mainnet - Write the `self.mapping` directly, instead of matching on the identifier case. + match &self.mapping { + MappingLocator::Locator(_) => self.mapping.write_le(&mut writer)?, + MappingLocator::Resource(identifier) => identifier.write_le(&mut writer)?, + } // Write the key operand. self.key.write_le(&mut writer)?; // Write the default value. @@ -229,14 +279,63 @@ mod tests { type CurrentNetwork = Testnet3; + pub struct OldGetOrUse { + pub mapping: Identifier, + pub key: Operand, + pub default: Operand, + pub destination: Register, + } + + impl ToBytes for OldGetOrUse { + fn write_le(&self, mut writer: W) -> IoResult<()> { + // Write the mapping name. + self.mapping.write_le(&mut writer)?; + // Write the key operand. + self.key.write_le(&mut writer)?; + // Write the default value. + self.default.write_le(&mut writer)?; + // Write the destination register. 
+ self.destination.write_le(&mut writer) + } + } + #[test] fn test_parse() { let (string, get_or_use) = GetOrUse::::parse("get.or_use account[r0] r1 into r2;").unwrap(); assert!(string.is_empty(), "Parser did not consume all of the string: '{string}'"); - assert_eq!(get_or_use.mapping, Identifier::from_str("account").unwrap()); + assert_eq!(get_or_use.mapping, MappingLocator::from_str("account").unwrap()); + assert_eq!(get_or_use.operands().len(), 2, "The number of operands is incorrect"); + assert_eq!(get_or_use.key, Operand::Register(Register::Locator(0)), "The first operand is incorrect"); + assert_eq!(get_or_use.default, Operand::Register(Register::Locator(1)), "The second operand is incorrect"); + assert_eq!(get_or_use.destination, Register::Locator(2), "The second operand is incorrect"); + + let (string, get_or_use) = + GetOrUse::::parse("get.or_use token.aleo/balances[r0] r1 into r2;").unwrap(); + assert!(string.is_empty(), "Parser did not consume all of the string: '{string}'"); + assert_eq!(get_or_use.mapping, MappingLocator::from_str("token.aleo/balances").unwrap()); assert_eq!(get_or_use.operands().len(), 2, "The number of operands is incorrect"); assert_eq!(get_or_use.key, Operand::Register(Register::Locator(0)), "The first operand is incorrect"); assert_eq!(get_or_use.default, Operand::Register(Register::Locator(1)), "The second operand is incorrect"); assert_eq!(get_or_use.destination, Register::Locator(2), "The second operand is incorrect"); } + + #[test] + fn test_from_bytes() { + let (string, get_or_use) = GetOrUse::::parse("get.or_use account[r0] r1 into r2;").unwrap(); + assert!(string.is_empty()); + + let old_get_or_use = OldGetOrUse:: { + mapping: Identifier::from_str("account").unwrap(), + key: Operand::Register(Register::Locator(0)), + default: Operand::Register(Register::Locator(1)), + destination: Register::Locator(2), + }; + + let get_or_use_bytes = get_or_use.to_bytes_le().unwrap(); + let old_get_or_use_bytes = 
old_get_or_use.to_bytes_le().unwrap(); + + let first = GetOrUse::::from_bytes_le(&get_or_use_bytes[..]).unwrap(); + let second = GetOrUse::::from_bytes_le(&old_get_or_use_bytes[..]).unwrap(); + assert_eq!(first, second); + } } diff --git a/synthesizer/program/src/logic/finalize_operation/bytes.rs b/synthesizer/program/src/logic/finalize_operation/bytes.rs index 907fa3d750..a88e65a10c 100644 --- a/synthesizer/program/src/logic/finalize_operation/bytes.rs +++ b/synthesizer/program/src/logic/finalize_operation/bytes.rs @@ -134,9 +134,6 @@ impl ToBytes for FinalizeOperation { #[cfg(test)] mod tests { use super::*; - use console::network::Testnet3; - - type CurrentNetwork = Testnet3; #[test] fn test_bytes() { @@ -144,7 +141,6 @@ mod tests { // Check the byte representation. let expected_bytes = expected.to_bytes_le().unwrap(); assert_eq!(expected, FinalizeOperation::read_le(&expected_bytes[..]).unwrap()); - assert!(FinalizeOperation::::read_le(&expected_bytes[1..]).is_err()); } } } diff --git a/synthesizer/program/src/logic/finalize_operation/serialize.rs b/synthesizer/program/src/logic/finalize_operation/serialize.rs index f9f666e5de..1a128d3e94 100644 --- a/synthesizer/program/src/logic/finalize_operation/serialize.rs +++ b/synthesizer/program/src/logic/finalize_operation/serialize.rs @@ -19,41 +19,51 @@ impl Serialize for FinalizeOperation { fn serialize(&self, serializer: S) -> Result { match serializer.is_human_readable() { true => { - let mut operation = serializer.serialize_struct("FinalizeOperation", 5)?; // Serialize the components. 
match self { Self::InitializeMapping(mapping_id) => { + let mut operation = serializer.serialize_struct("FinalizeOperation", 2)?; operation.serialize_field("type", "initialize_mapping")?; operation.serialize_field("mapping_id", mapping_id)?; + operation.end() } Self::InsertKeyValue(mapping_id, key_id, value_id) => { + let mut operation = serializer.serialize_struct("FinalizeOperation", 4)?; operation.serialize_field("type", "insert_key_value")?; operation.serialize_field("mapping_id", mapping_id)?; operation.serialize_field("key_id", key_id)?; operation.serialize_field("value_id", value_id)?; + operation.end() } Self::UpdateKeyValue(mapping_id, index, key_id, value_id) => { + let mut operation = serializer.serialize_struct("FinalizeOperation", 5)?; operation.serialize_field("type", "update_key_value")?; operation.serialize_field("mapping_id", mapping_id)?; operation.serialize_field("index", index)?; operation.serialize_field("key_id", key_id)?; operation.serialize_field("value_id", value_id)?; + operation.end() } Self::RemoveKeyValue(mapping_id, index) => { + let mut operation = serializer.serialize_struct("FinalizeOperation", 3)?; operation.serialize_field("type", "remove_key_value")?; operation.serialize_field("mapping_id", mapping_id)?; operation.serialize_field("index", index)?; + operation.end() } Self::ReplaceMapping(mapping_id) => { + let mut operation = serializer.serialize_struct("FinalizeOperation", 2)?; operation.serialize_field("type", "replace_mapping")?; operation.serialize_field("mapping_id", mapping_id)?; + operation.end() } Self::RemoveMapping(mapping_id) => { + let mut operation = serializer.serialize_struct("FinalizeOperation", 2)?; operation.serialize_field("type", "remove_mapping")?; operation.serialize_field("mapping_id", mapping_id)?; + operation.end() } } - operation.end() } false => ToBytesSerializer::serialize_with_size_encoding(self, serializer), } diff --git a/synthesizer/program/src/logic/instruction/mod.rs 
b/synthesizer/program/src/logic/instruction/mod.rs index f4199eef49..930920ca5e 100644 --- a/synthesizer/program/src/logic/instruction/mod.rs +++ b/synthesizer/program/src/logic/instruction/mod.rs @@ -181,11 +181,11 @@ pub enum Instruction { RemWrapped(RemWrapped), /// Shifts `first` left by `second` bits, storing the outcome in `destination`. Shl(Shl), - /// Shifts `first` left by `second` bits, continuing past the boundary of the type, storing the outcome in `destination`. + /// Shifts `first` left by `second` bits, wrapping around at the boundary of the type, storing the outcome in `destination`. ShlWrapped(ShlWrapped), /// Shifts `first` right by `second` bits, storing the outcome in `destination`. Shr(Shr), - /// Shifts `first` right by `second` bits, continuing past the boundary of the type, storing the outcome in `destination`. + /// Shifts `first` right by `second` bits, wrapping around at the boundary of the type, storing the outcome in `destination`. ShrWrapped(ShrWrapped), /// Computes whether `signature` is valid for the given `address` and `message`. SignVerify(SignVerify), diff --git a/synthesizer/program/src/logic/instruction/operand/parse.rs b/synthesizer/program/src/logic/instruction/operand/parse.rs index 8a277ea91d..ccc699ee60 100644 --- a/synthesizer/program/src/logic/instruction/operand/parse.rs +++ b/synthesizer/program/src/logic/instruction/operand/parse.rs @@ -26,9 +26,11 @@ impl Parser for Operand { map(tag("self.signer"), |_| Self::Signer), map(tag("self.caller"), |_| Self::Caller), map(tag("block.height"), |_| Self::BlockHeight), + // Note that `Operand::ProgramID`s must be parsed before `Operand::Literal`s, since a program ID can be implicitly parsed as a literal address. + // This ensures that the string representation of a program uses the `Operand::ProgramID` variant. 
+ map(ProgramID::parse, |program_id| Self::ProgramID(program_id)), map(Literal::parse, |literal| Self::Literal(literal)), map(Register::parse, |register| Self::Register(register)), - map(ProgramID::parse, |program_id| Self::ProgramID(program_id)), ))(string) } } diff --git a/synthesizer/program/src/logic/instruction/operation/call.rs b/synthesizer/program/src/logic/instruction/operation/call.rs index aa489f5ea8..12cc42168d 100644 --- a/synthesizer/program/src/logic/instruction/operation/call.rs +++ b/synthesizer/program/src/logic/instruction/operation/call.rs @@ -544,7 +544,6 @@ mod tests { // Check the byte representation. let expected_bytes = expected.to_bytes_le().unwrap(); assert_eq!(expected, Call::read_le(&expected_bytes[..]).unwrap()); - assert!(Call::::read_le(&expected_bytes[1..]).is_err()); } } } diff --git a/synthesizer/program/src/logic/instruction/operation/hash.rs b/synthesizer/program/src/logic/instruction/operation/hash.rs index 6a320b2ab4..cd7954e887 100644 --- a/synthesizer/program/src/logic/instruction/operation/hash.rs +++ b/synthesizer/program/src/logic/instruction/operation/hash.rs @@ -107,7 +107,10 @@ fn check_number_of_operands(variant: u8, opcode: Opcode, num_operands: usize) -> fn is_valid_destination_type(destination_type: &PlaintextType) -> bool { !matches!( destination_type, - PlaintextType::Literal(LiteralType::Boolean) | PlaintextType::Literal(LiteralType::String) + PlaintextType::Literal(LiteralType::Boolean) + | PlaintextType::Literal(LiteralType::String) + | PlaintextType::Struct(..) + | PlaintextType::Array(..) ) } diff --git a/synthesizer/program/src/resources/credits.aleo b/synthesizer/program/src/resources/credits.aleo index 8e8c279dc1..534bb4ce86 100644 --- a/synthesizer/program/src/resources/credits.aleo +++ b/synthesizer/program/src/resources/credits.aleo @@ -789,11 +789,11 @@ function fee_private: input r1 as u64.public; // Input the priority fee amount. input r2 as u64.public; - // Input the deployment or execution root. 
+ // Input the deployment or execution ID. input r3 as field.public; // Ensure the amount is nonzero. assert.neq r1 0u64; - // Ensure the deployment or execution root is nonzero. + // Ensure the deployment or execution ID is nonzero. assert.neq r3 0field; // Add the fee and priority fee amounts. add r1 r2 into r4; @@ -815,11 +815,11 @@ function fee_public: input r0 as u64.public; // Input the priority fee amount. input r1 as u64.public; - // Input the deployment or execution root. + // Input the deployment or execution ID. input r2 as field.public; // Ensure the amount is nonzero. assert.neq r0 0u64; - // Ensure the deployment or execution root is nonzero. + // Ensure the deployment or execution ID is nonzero. assert.neq r2 0field; // Add the fee and priority fee amounts. add r0 r1 into r3; diff --git a/synthesizer/program/src/traits/stack_and_registers.rs b/synthesizer/program/src/traits/stack_and_registers.rs index 4a88281c4a..50418e894f 100644 --- a/synthesizer/program/src/traits/stack_and_registers.rs +++ b/synthesizer/program/src/traits/stack_and_registers.rs @@ -12,6 +12,8 @@ // See the License for the specific language governing permissions and // limitations under the License. +use std::sync::Arc; + use crate::{FinalizeGlobalState, Function, Operand, Program}; use console::{ network::Network, @@ -65,7 +67,7 @@ pub trait StackProgram { fn contains_external_record(&self, locator: &Locator) -> bool; /// Returns the external stack for the given program ID. - fn get_external_stack(&self, program_id: &ProgramID) -> Result<&Self>; + fn get_external_stack(&self, program_id: &ProgramID) -> Result<&Arc>; /// Returns the external program for the given program ID. 
fn get_external_program(&self, program_id: &ProgramID) -> Result<&Program>; diff --git a/synthesizer/snark/Cargo.toml b/synthesizer/snark/Cargo.toml index f5caaf5b46..100ce851b9 100644 --- a/synthesizer/snark/Cargo.toml +++ b/synthesizer/snark/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "snarkvm-synthesizer-snark" -version = "0.16.3" +version = "0.16.15" authors = [ "The Aleo Team " ] description = "SNARK wrappers for a decentralized virtual machine" homepage = "https://aleo.org" @@ -33,18 +33,18 @@ wasm = [ "console/wasm", "snarkvm-algorithms/wasm" ] [dependencies.circuit] package = "snarkvm-circuit" path = "../../circuit" -version = "=0.16.3" +version = "=0.16.15" [dependencies.console] package = "snarkvm-console" path = "../../console" -version = "=0.16.3" +version = "=0.16.15" default-features = false features = [ "network" ] [dependencies.snarkvm-algorithms] path = "../../algorithms" -version = "=0.16.3" +version = "=0.16.15" [dependencies.bincode] version = "1" diff --git a/synthesizer/snark/src/certificate/bytes.rs b/synthesizer/snark/src/certificate/bytes.rs index 3d21b0ad83..df0ebfecef 100644 --- a/synthesizer/snark/src/certificate/bytes.rs +++ b/synthesizer/snark/src/certificate/bytes.rs @@ -43,9 +43,6 @@ impl ToBytes for Certificate { #[cfg(test)] mod tests { use super::*; - use console::network::Testnet3; - - type CurrentNetwork = Testnet3; #[test] fn test_bytes() -> Result<()> { @@ -55,7 +52,6 @@ mod tests { // Check the byte representation. 
let expected_bytes = expected.to_bytes_le()?; assert_eq!(expected, Certificate::read_le(&expected_bytes[..])?); - assert!(Certificate::::read_le(&expected_bytes[1..]).is_err()); Ok(()) } diff --git a/synthesizer/snark/src/proof/bytes.rs b/synthesizer/snark/src/proof/bytes.rs index 88bc0f922a..e343178603 100644 --- a/synthesizer/snark/src/proof/bytes.rs +++ b/synthesizer/snark/src/proof/bytes.rs @@ -43,9 +43,6 @@ impl ToBytes for Proof { #[cfg(test)] mod tests { use super::*; - use console::network::Testnet3; - - type CurrentNetwork = Testnet3; #[test] fn test_bytes() -> Result<()> { @@ -55,7 +52,6 @@ mod tests { // Check the byte representation. let expected_bytes = expected.to_bytes_le()?; assert_eq!(expected, Proof::read_le(&expected_bytes[..])?); - assert!(Proof::::read_le(&expected_bytes[1..]).is_err()); Ok(()) } diff --git a/synthesizer/src/vm/execute.rs b/synthesizer/src/vm/execute.rs index 60b021426c..31a2111188 100644 --- a/synthesizer/src/vm/execute.rs +++ b/synthesizer/src/vm/execute.rs @@ -135,7 +135,7 @@ impl> VM { // Prepare the authorization. let authorization = cast_ref!(authorization as Authorization<$network>); // Execute the call. - let (_, mut trace) = $process.execute::<$aleo>(authorization.clone())?; + let (_, mut trace) = $process.execute::<$aleo, _>(authorization.clone(), rng)?; lap!(timer, "Execute the call"); // Prepare the assignments. @@ -180,7 +180,7 @@ impl> VM { // Prepare the authorization. let authorization = cast_ref!(authorization as Authorization<$network>); // Execute the call. - let (_, mut trace) = $process.execute::<$aleo>(authorization.clone())?; + let (_, mut trace) = $process.execute::<$aleo, _>(authorization.clone(), rng)?; lap!(timer, "Execute the call"); // Prepare the assignments. 
diff --git a/synthesizer/src/vm/finalize.rs b/synthesizer/src/vm/finalize.rs index c30166343a..8c9422d153 100644 --- a/synthesizer/src/vm/finalize.rs +++ b/synthesizer/src/vm/finalize.rs @@ -128,6 +128,14 @@ impl> VM { } impl> VM { + /// The maximum number of confirmed transactions allowed in a block. + #[cfg(not(any(test, feature = "test")))] + pub const MAXIMUM_CONFIRMED_TRANSACTIONS: usize = Transactions::::MAX_TRANSACTIONS; + /// The maximum number of confirmed transactions allowed in a block. + /// This is set to a deliberately low value (8) for testing purposes only. + #[cfg(any(test, feature = "test"))] + pub const MAXIMUM_CONFIRMED_TRANSACTIONS: usize = 8; + /// Performs atomic speculation over a list of transactions. /// /// Returns the ratifications, confirmed transactions, aborted transactions, @@ -151,6 +159,11 @@ impl> VM { Vec<(Transaction, String)>, Vec>, )> { + // Acquire the atomic lock, which is needed to ensure this function is not called concurrently + // with other `atomic_finalize!` macro calls, which will cause a `bail!` to be triggered erroneously. + // Note: This lock must be held for the entire scope of the call to `atomic_finalize!`. + let _atomic_lock = self.atomic_lock.lock(); + let timer = timer!("VM::atomic_speculate"); // Retrieve the number of transactions. @@ -159,11 +172,11 @@ impl> VM { // Perform the finalize operation on the preset finalize mode. atomic_finalize!(self.finalize_store(), FinalizeMode::DryRun, { // Ensure the number of transactions does not exceed the maximum. - if num_transactions > Transactions::::MAX_TRANSACTIONS { + if num_transactions > 2 * Transactions::::MAX_TRANSACTIONS { // Note: This will abort the entire atomic batch. 
return Err(format!( "Too many transactions in the block - {num_transactions} (max: {})", - Transactions::::MAX_TRANSACTIONS + 2 * Transactions::::MAX_TRANSACTIONS )); } @@ -204,12 +217,36 @@ impl> VM { let mut confirmed = Vec::with_capacity(num_transactions); // Initialize a list of the aborted transactions. let mut aborted = Vec::new(); + // Initialize a list of the successful deployments. + let mut deployments = IndexSet::new(); + // Initialize a counter for the confirmed transaction index. + let mut counter = 0u32; + // Initialize a list of spent input IDs. + let mut input_ids: IndexSet> = IndexSet::new(); // Finalize the transactions. - 'outer: for (index, transaction) in transactions.enumerate() { - // Convert the transaction index to a u32. - // Note: On failure, this will abort the entire atomic batch. - let index = u32::try_from(index).map_err(|_| "Failed to convert transaction index".to_string())?; + 'outer: for transaction in transactions { + // Ensure the number of confirmed transactions does not exceed the maximum. + // Upon reaching the maximum number of confirmed transactions, all remaining transactions are aborted. + if confirmed.len() >= Self::MAXIMUM_CONFIRMED_TRANSACTIONS { + // Store the aborted transaction. + aborted.push((transaction.clone(), "Exceeds block transaction limit".to_string())); + // Continue to the next transaction. + continue 'outer; + } + + // Ensure that the transaction is not double-spending an input. + for input_id in transaction.input_ids() { + // If the input ID is already spent in this block or previous blocks, abort the transaction. + if input_ids.contains(input_id) + || self.transition_store().contains_input_id(input_id).unwrap_or(true) + { + // Store the aborted transaction. + aborted.push((transaction.clone(), format!("Double-spending input {input_id}"))); + // Continue to the next transaction. + continue 'outer; + } + } // Process the transaction in an isolated atomic batch. 
// - If the transaction succeeds, the finalize operations are stored. @@ -218,25 +255,50 @@ impl> VM { // The finalize operation here involves appending the 'stack', // and adding the program to the finalize tree. Transaction::Deploy(_, program_owner, deployment, fee) => { - match process.finalize_deployment(state, store, deployment, fee) { - // Construct the accepted deploy transaction. - Ok((_, finalize)) => { - ConfirmedTransaction::accepted_deploy(index, transaction.clone(), finalize) - .map_err(|e| e.to_string()) - } - // Construct the rejected deploy transaction. - Err(_error) => { - // Finalize the fee, to ensure it is valid. - match process.finalize_fee(state, store, fee).and_then(|finalize| { - Transaction::from_fee(fee.clone()).map(|fee_tx| (fee_tx, finalize)) - }) { - Ok((fee_tx, finalize)) => { - // Construct the rejected deployment. - let rejected = Rejected::new_deployment(*program_owner, *deployment.clone()); - // Construct the rejected deploy transaction. - ConfirmedTransaction::rejected_deploy(index, fee_tx, rejected, finalize) + // Define the closure for processing a rejected deployment. + let process_rejected_deployment = + |fee: &Fee, + deployment: Deployment| + -> Result, String>> { + process + .finalize_fee(state, store, fee) + .and_then(|finalize| { + Transaction::from_fee(fee.clone()).map(|fee_tx| (fee_tx, finalize)) + }) + .map(|(fee_tx, finalize)| { + let rejected = Rejected::new_deployment(*program_owner, deployment); + ConfirmedTransaction::rejected_deploy(counter, fee_tx, rejected, finalize) .map_err(|e| e.to_string()) - } + }) + }; + + // Check if the program has already been deployed in this block. + match deployments.contains(deployment.program_id()) { + // If the program has already been deployed, construct the rejected deploy transaction. + true => match process_rejected_deployment(fee, *deployment.clone()) { + Ok(result) => result, + Err(error) => { + // Note: On failure, skip this transaction, and continue speculation. 
+ #[cfg(debug_assertions)] + eprintln!("Failed to finalize the fee in a rejected deploy - {error}"); + // Store the aborted transaction. + aborted.push((transaction.clone(), error.to_string())); + // Continue to the next transaction. + continue 'outer; + } + }, + // If the program has not yet been deployed, attempt to deploy it. + false => match process.finalize_deployment(state, store, deployment, fee) { + // Construct the accepted deploy transaction. + Ok((_, finalize)) => { + // Add the program id to the list of deployments. + deployments.insert(*deployment.program_id()); + ConfirmedTransaction::accepted_deploy(counter, transaction.clone(), finalize) + .map_err(|e| e.to_string()) + } + // Construct the rejected deploy transaction. + Err(_error) => match process_rejected_deployment(fee, *deployment.clone()) { + Ok(result) => result, Err(error) => { // Note: On failure, skip this transaction, and continue speculation. #[cfg(debug_assertions)] @@ -246,8 +308,8 @@ impl> VM { // Continue to the next transaction. continue 'outer; } - } - } + }, + }, } } // The finalize operation here involves calling 'update_key_value', @@ -256,7 +318,7 @@ impl> VM { match process.finalize_execution(state, store, execution, fee.as_ref()) { // Construct the accepted execute transaction. Ok(finalize) => { - ConfirmedTransaction::accepted_execute(index, transaction.clone(), finalize) + ConfirmedTransaction::accepted_execute(counter, transaction.clone(), finalize) .map_err(|e| e.to_string()) } // Construct the rejected execute transaction. @@ -270,7 +332,7 @@ impl> VM { // Construct the rejected execution. let rejected = Rejected::new_execution(execution.clone()); // Construct the rejected execute transaction. 
- ConfirmedTransaction::rejected_execute(index, fee_tx, rejected, finalize) + ConfirmedTransaction::rejected_execute(counter, fee_tx, rejected, finalize) .map_err(|e| e.to_string()) } Err(error) => { @@ -298,7 +360,14 @@ impl> VM { match outcome { // If the transaction succeeded, store it and continue to the next transaction. - Ok(confirmed_transaction) => confirmed.push(confirmed_transaction), + Ok(confirmed_transaction) => { + // Add the input IDs to the set of spent input IDs. + input_ids.extend(confirmed_transaction.transaction().input_ids()); + // Store the confirmed transaction. + confirmed.push(confirmed_transaction); + // Increment the transaction index counter. + counter = counter.saturating_add(1); + } // If the transaction failed, abort the entire batch. Err(error) => { eprintln!("Critical bug in speculate: {error}\n\n{transaction}"); @@ -384,6 +453,11 @@ impl> VM { solutions: Option<&CoinbaseSolution>, transactions: &Transactions, ) -> Result>> { + // Acquire the atomic lock, which is needed to ensure this function is not called concurrently + // with other `atomic_finalize!` macro calls, which will cause a `bail!` to be triggered erroneously. + // Note: This lock must be held for the entire scope of the call to `atomic_finalize!`. + let _atomic_lock = self.atomic_lock.lock(); + let timer = timer!("VM::atomic_finalize"); // Perform the finalize operation on the preset finalize mode. @@ -918,7 +992,6 @@ finalize transfer_public: // Speculate on the candidate ratifications, solutions, and transactions. let (ratifications, transactions, aborted_transaction_ids, ratified_finalize_operations) = vm.speculate(sample_finalize_state(1), None, vec![], None, transactions.iter())?; - assert!(aborted_transaction_ids.is_empty()); // Construct the metadata associated with the block. 
let metadata = Metadata::new( @@ -1028,7 +1101,7 @@ finalize transfer_public: .execute(&caller_private_key, (program_id, function_name), inputs.into_iter(), credits, 1, None, rng) .unwrap(); // Verify. - vm.check_transaction(&transaction, None).unwrap(); + vm.check_transaction(&transaction, None, rng).unwrap(); // Return the transaction. transaction @@ -1542,4 +1615,75 @@ finalize compute: let expected = Value::::from_str("3u8").unwrap(); assert_eq!(value, expected); } + + #[test] + fn test_excess_transactions_should_be_aborted() { + let rng = &mut TestRng::default(); + + // Sample a private key. + let caller_private_key = test_helpers::sample_genesis_private_key(rng); + let caller_address = Address::try_from(&caller_private_key).unwrap(); + + // Initialize the vm. + let vm = test_helpers::sample_vm_with_genesis_block(rng); + + // Deploy a new program. + let genesis = + vm.block_store().get_block(&vm.block_store().get_block_hash(0).unwrap().unwrap()).unwrap().unwrap(); + + // Get the unspent records. + let mut unspent_records = genesis + .transitions() + .cloned() + .flat_map(Transition::into_records) + .map(|(_, record)| record) + .collect::>(); + + // Construct the deployment block. + let (program_id, deployment_block) = + new_program_deployment(&vm, &caller_private_key, &genesis, &mut unspent_records, rng).unwrap(); + + // Add the deployment block to the VM. + vm.add_next_block(&deployment_block).unwrap(); + + // Generate more records to use for the next block. + let splits_block = + generate_splits(&vm, &caller_private_key, &deployment_block, &mut unspent_records, rng).unwrap(); + + // Add the splits block to the VM. + vm.add_next_block(&splits_block).unwrap(); + + // Generate more records to use for the next block. + let splits_block = generate_splits(&vm, &caller_private_key, &splits_block, &mut unspent_records, rng).unwrap(); + + // Add the splits block to the VM. + vm.add_next_block(&splits_block).unwrap(); + + // Generate the transactions. 
+ let mut transactions = Vec::new(); + let mut excess_transaction_ids = Vec::new(); + + for _ in 0..VM::>::MAXIMUM_CONFIRMED_TRANSACTIONS + 1 { + let transaction = + sample_mint_public(&vm, caller_private_key, &program_id, caller_address, 10, &mut unspent_records, rng); + // Abort the transaction if the block is full. + if transactions.len() >= VM::>::MAXIMUM_CONFIRMED_TRANSACTIONS { + excess_transaction_ids.push(transaction.id()); + } + + transactions.push(transaction); + } + + // Construct the next block. + let next_block = + sample_next_block(&vm, &caller_private_key, &transactions, &splits_block, &mut unspent_records, rng) + .unwrap(); + + // Ensure that the excess transactions were aborted. + assert_eq!(next_block.aborted_transaction_ids(), &excess_transaction_ids); + assert_eq!( + next_block.transactions().len(), + VM::>::MAXIMUM_CONFIRMED_TRANSACTIONS + ); + } } diff --git a/synthesizer/src/vm/helpers/committee.rs b/synthesizer/src/vm/helpers/committee.rs index 5153a68893..d44c4eafd1 100644 --- a/synthesizer/src/vm/helpers/committee.rs +++ b/synthesizer/src/vm/helpers/committee.rs @@ -232,14 +232,16 @@ pub fn to_next_commitee_map_and_bonded_map( #[cfg(test)] pub(crate) mod test_helpers { use super::*; + use crate::vm::TestRng; use ledger_committee::MIN_VALIDATOR_STAKE; - use rand::Rng; + use rand::{CryptoRng, Rng}; /// Returns the stakers, given the map of `(validator, (microcredits, is_open))` entries. /// This method simulates the existence of delegators for the members. - pub(crate) fn to_stakers( + pub(crate) fn to_stakers( members: &IndexMap, (u64, bool)>, + rng: &mut R, ) -> IndexMap, (Address, u64)> { members .into_iter() @@ -252,9 +254,9 @@ pub(crate) mod test_helpers { let num_iterations = (remaining_microcredits / staker_amount).saturating_sub(1); // Construct the map of stakers. 
- let mut stakers: IndexMap<_, _> = cfg_into_iter!((0..num_iterations)) - .map(|_| { - let rng = &mut rand::thread_rng(); + let rngs = (0..num_iterations).map(|_| TestRng::from_seed(rng.gen())).collect::>(); + let mut stakers: IndexMap<_, _> = cfg_into_iter!(rngs) + .map(|mut rng| { // Sample a random staker. let staker = Address::::new(rng.gen()); // Output the staker. @@ -268,7 +270,6 @@ pub(crate) mod test_helpers { // Insert the last staker. let final_amount = remaining_microcredits.saturating_sub(num_iterations * staker_amount); if final_amount > 0 { - let rng = &mut rand::thread_rng(); let staker = Address::::new(rng.gen()); stakers.insert(staker, (*validator, final_amount)); } @@ -351,7 +352,7 @@ mod tests { // Sample a committee. let committee = ledger_committee::test_helpers::sample_committee_for_round_and_size(1, 100, rng); // Convert the committee into stakers. - let expected_stakers = crate::committee::test_helpers::to_stakers(committee.members()); + let expected_stakers = crate::committee::test_helpers::to_stakers(committee.members(), rng); // Initialize the bonded map. let bonded_map = to_bonded_map(&expected_stakers); @@ -371,7 +372,7 @@ mod tests { // Sample a committee. let committee = ledger_committee::test_helpers::sample_committee_for_round_and_size(1, 100, rng); // Convert the committee into stakers. - let stakers = crate::committee::test_helpers::to_stakers(committee.members()); + let stakers = crate::committee::test_helpers::to_stakers(committee.members(), rng); // Start a timer. let timer = std::time::Instant::now(); @@ -388,7 +389,7 @@ mod tests { // Sample a committee. let committee = ledger_committee::test_helpers::sample_committee_for_round_and_size(1, 100, rng); // Convert the committee into stakers. - let stakers = crate::committee::test_helpers::to_stakers(committee.members()); + let stakers = crate::committee::test_helpers::to_stakers(committee.members(), rng); // Start a timer. 
let timer = std::time::Instant::now(); @@ -407,7 +408,7 @@ mod tests { // Sample a committee. let committee = ledger_committee::test_helpers::sample_committee(rng); // Convert the committee into stakers. - let stakers = crate::committee::test_helpers::to_stakers(committee.members()); + let stakers = crate::committee::test_helpers::to_stakers(committee.members(), rng); // Start a timer. let timer = std::time::Instant::now(); diff --git a/synthesizer/src/vm/helpers/cost.rs b/synthesizer/src/vm/helpers/cost.rs index a9e55368d2..6f349baa84 100644 --- a/synthesizer/src/vm/helpers/cost.rs +++ b/synthesizer/src/vm/helpers/cost.rs @@ -199,5 +199,9 @@ pub fn cost_in_microcredits(finalize: &Finalize) -> Result { Command::BranchEq(_) | Command::BranchNeq(_) => Ok(5_000), Command::Position(_) => Ok(1_000), }; - finalize.commands().iter().map(|command| cost(command)).sum() + finalize + .commands() + .iter() + .map(cost) + .try_fold(0u64, |acc, res| res.and_then(|x| acc.checked_add(x).ok_or(anyhow!("Finalize cost overflowed")))) } diff --git a/synthesizer/src/vm/helpers/rewards.rs b/synthesizer/src/vm/helpers/rewards.rs index d6aaf74ea2..ae5f9d4fba 100644 --- a/synthesizer/src/vm/helpers/rewards.rs +++ b/synthesizer/src/vm/helpers/rewards.rs @@ -169,7 +169,7 @@ mod tests { // Sample a committee. let committee = ledger_committee::test_helpers::sample_committee_for_round_and_size(1, 100, rng); // Convert the committee into stakers. - let stakers = crate::committee::test_helpers::to_stakers(committee.members()); + let stakers = crate::committee::test_helpers::to_stakers(committee.members(), rng); // Start a timer. 
let timer = std::time::Instant::now(); diff --git a/synthesizer/src/vm/mod.rs b/synthesizer/src/vm/mod.rs index bfcc0b6d5e..a9e29d1594 100644 --- a/synthesizer/src/vm/mod.rs +++ b/synthesizer/src/vm/mod.rs @@ -59,16 +59,23 @@ use synthesizer_process::{Authorization, Process, Trace}; use synthesizer_program::{FinalizeGlobalState, FinalizeOperation, FinalizeStoreTrait, Program}; use aleo_std::prelude::{finish, lap, timer}; -use indexmap::IndexMap; -use parking_lot::RwLock; +use indexmap::{IndexMap, IndexSet}; +use parking_lot::{Mutex, RwLock}; use std::sync::Arc; +#[cfg(not(feature = "serial"))] +use rayon::prelude::*; + #[derive(Clone)] pub struct VM> { /// The process. process: Arc>>, /// The VM store. store: ConsensusStore, + /// The lock to guarantee atomicity over calls to speculate and finalize. + atomic_lock: Arc>, + /// The lock for ensuring there is no concurrency when advancing blocks. + block_lock: Arc>, } impl> VM { @@ -88,12 +95,12 @@ impl> VM { } } - // A helper function to load the program into the process, and recursively load all imports. + // A helper function to retrieve all the deployments. fn load_deployment_and_imports>( - process: &mut Process, + process: &Process, transaction_store: &TransactionStore, transaction_id: N::TransactionID, - ) -> Result<()> { + ) -> Result, Deployment)>> { // Retrieve the deployment from the transaction id. let deployment = match transaction_store.get_deployment(&transaction_id)? { Some(deployment) => deployment, @@ -106,9 +113,12 @@ impl> VM { // Return early if the program is already loaded. if process.contains_program(program_id) { - return Ok(()); + return Ok(vec![]); } + // Prepare a vector for the deployments. + let mut deployments = vec![]; + // Iterate through the program imports. for import_program_id in program.imports().keys() { // Add the imports to the process if does not exist yet. 
@@ -120,29 +130,55 @@ impl> VM { bail!("Transaction id for '{program_id}' is not found in storage."); }; - // Recursively load the deployment and its imports. - load_deployment_and_imports(process, transaction_store, transaction_id)? + // Add the deployment and its imports found recursively. + deployments.extend_from_slice(&load_deployment_and_imports( + process, + transaction_store, + transaction_id, + )?); } } - // Load the deployment if it does not exist in the process yet. - if !process.contains_program(program_id) { - process.load_deployment(&deployment)?; - } + // Once all the imports have been included, add the parent deployment. + deployments.push((*program_id, deployment)); - Ok(()) + Ok(deployments) } // Retrieve the transaction store. let transaction_store = store.transaction_store(); + // Retrieve the list of deployment transaction IDs. + let deployment_ids = transaction_store.deployment_transaction_ids().collect::>(); // Load the deployments from the store. - for transaction_id in transaction_store.deployment_transaction_ids() { - // Load the deployment and its imports. - load_deployment_and_imports(&mut process, transaction_store, *transaction_id)?; + for (i, chunk) in deployment_ids.chunks(256).enumerate() { + debug!( + "Loading deployments {}-{} (of {})...", + i * 256, + ((i + 1) * 256).min(deployment_ids.len()), + deployment_ids.len() + ); + let deployments = cfg_iter!(chunk) + .map(|transaction_id| { + // Load the deployment and its imports. + load_deployment_and_imports(&process, transaction_store, **transaction_id) + }) + .collect::>>()?; + + for (program_id, deployment) in deployments.iter().flatten() { + // Load the deployment if it does not exist in the process yet. + if !process.contains_program(program_id) { + process.load_deployment(deployment)?; + } + } } // Return the new VM. 
- Ok(Self { process: Arc::new(RwLock::new(process)), store }) + Ok(Self { + process: Arc::new(RwLock::new(process)), + store, + atomic_lock: Arc::new(Mutex::new(())), + block_lock: Arc::new(Mutex::new(())), + }) } /// Returns `true` if a program with the given program ID exists. @@ -187,18 +223,26 @@ impl> VM { impl> VM { /// Returns a new genesis block for a beacon chain. pub fn genesis_beacon(&self, private_key: &PrivateKey, rng: &mut R) -> Result> { + let private_keys = [*private_key, PrivateKey::new(rng)?, PrivateKey::new(rng)?, PrivateKey::new(rng)?]; + // Construct the committee members. let members = indexmap::indexmap! { - Address::try_from(private_key)? => (ledger_committee::MIN_VALIDATOR_STAKE, true), - Address::try_from(PrivateKey::new(rng)?)? => (ledger_committee::MIN_VALIDATOR_STAKE, true), - Address::try_from(PrivateKey::new(rng)?)? => (ledger_committee::MIN_VALIDATOR_STAKE, true), - Address::try_from(PrivateKey::new(rng)?)? => (ledger_committee::MIN_VALIDATOR_STAKE, true), + Address::try_from(private_keys[0])? => (ledger_committee::MIN_VALIDATOR_STAKE, true), + Address::try_from(private_keys[1])? => (ledger_committee::MIN_VALIDATOR_STAKE, true), + Address::try_from(private_keys[2])? => (ledger_committee::MIN_VALIDATOR_STAKE, true), + Address::try_from(private_keys[3])? => (ledger_committee::MIN_VALIDATOR_STAKE, true), }; // Construct the committee. let committee = Committee::::new_genesis(members)?; + + // Compute the remaining supply. + let remaining_supply = N::STARTING_SUPPLY - (ledger_committee::MIN_VALIDATOR_STAKE * 4); // Construct the public balances. let public_balances = indexmap::indexmap! { - Address::try_from(private_key)? => N::STARTING_SUPPLY - (ledger_committee::MIN_VALIDATOR_STAKE * 4), + Address::try_from(private_keys[0])? => remaining_supply / 4, + Address::try_from(private_keys[1])? => remaining_supply / 4, + Address::try_from(private_keys[2])? => remaining_supply / 4, + Address::try_from(private_keys[3])? 
=> remaining_supply / 4, }; // Return the genesis block. self.genesis_quorum(private_key, committee, public_balances, rng) @@ -275,6 +319,10 @@ impl> VM { /// Adds the given block into the VM. #[inline] pub fn add_next_block(&self, block: &Block) -> Result<()> { + // Acquire the block lock, which is needed to ensure this function is not called concurrently. + // Note: This lock must be held for the entire scope of this function. + let _block_lock = self.block_lock.lock(); + // Construct the finalize state. let state = FinalizeGlobalState::new::( block.round(), @@ -292,11 +340,16 @@ impl> VM { // Next, finalize the transactions. match self.finalize(state, block.ratifications(), block.solutions(), block.transactions()) { Ok(_ratified_finalize_operations) => Ok(()), - Err(error) => { + Err(finalize_error) => { // Rollback the block. - self.block_store().remove_last_n(1)?; - // Return the error. - Err(error) + self.block_store().remove_last_n(1).map_err(|removal_error| { + // Log the finalize error. + error!("Failed to finalize block {} - {finalize_error}", block.height()); + // Return the removal error. + removal_error + })?; + // Return the finalize error. + Err(finalize_error) } } } @@ -437,7 +490,7 @@ function compute: // Deploy. let transaction = vm.deploy(&caller_private_key, &program, credits, 10, None, rng).unwrap(); // Verify. - vm.check_transaction(&transaction, None).unwrap(); + vm.check_transaction(&transaction, None, rng).unwrap(); // Return the transaction. transaction }) @@ -480,7 +533,7 @@ function compute: // Construct the execute transaction. let transaction = vm.execute_authorization(authorization, None, None, rng).unwrap(); // Verify. - vm.check_transaction(&transaction, None).unwrap(); + vm.check_transaction(&transaction, None, rng).unwrap(); // Return the transaction. transaction }) @@ -524,7 +577,7 @@ function compute: .execute(&caller_private_key, ("credits.aleo", "transfer_public"), inputs, record, 0, None, rng) .unwrap(); // Verify. 
- vm.check_transaction(&transaction, None).unwrap(); + vm.check_transaction(&transaction, None, rng).unwrap(); // Return the transaction. transaction }) @@ -576,7 +629,7 @@ function compute: // Construct the transaction. let transaction = Transaction::from_execution(execution, Some(fee)).unwrap(); // Verify. - vm.check_transaction(&transaction, None).unwrap(); + vm.check_transaction(&transaction, None, rng).unwrap(); // Return the transaction. transaction }) @@ -967,4 +1020,159 @@ function multitransfer: .unwrap(); vm.add_next_block(&sample_next_block(&vm, &caller_private_key, &[execution], rng).unwrap()).unwrap(); } + + #[test] + fn test_nested_deployment_with_assert() { + let rng = &mut TestRng::default(); + + // Initialize a private key. + let private_key = sample_genesis_private_key(rng); + + // Initialize the genesis block. + let genesis = sample_genesis_block(rng); + + // Initialize the VM. + let vm = sample_vm(); + // Update the VM. + vm.add_next_block(&genesis).unwrap(); + + // Deploy the base program. + let program = Program::from_str( + r" +program child_program.aleo; + +function check: + input r0 as field.private; + assert.eq r0 123456789123456789123456789123456789123456789123456789field; + ", + ) + .unwrap(); + + let deployment = vm.deploy(&private_key, &program, None, 0, None, rng).unwrap(); + assert!(vm.check_transaction(&deployment, None, rng).is_ok()); + vm.add_next_block(&sample_next_block(&vm, &private_key, &[deployment], rng).unwrap()).unwrap(); + + // Check that program is deployed. + assert!(vm.contains_program(&ProgramID::from_str("child_program.aleo").unwrap())); + + // Deploy the program that calls the program from the previous layer. 
+ let program = Program::from_str( + r" +import child_program.aleo; + +program parent_program.aleo; + +function check: + input r0 as field.private; + call child_program.aleo/check r0; + ", + ) + .unwrap(); + + let deployment = vm.deploy(&private_key, &program, None, 0, None, rng).unwrap(); + assert!(vm.check_transaction(&deployment, None, rng).is_ok()); + vm.add_next_block(&sample_next_block(&vm, &private_key, &[deployment], rng).unwrap()).unwrap(); + + // Check that program is deployed. + assert!(vm.contains_program(&ProgramID::from_str("parent_program.aleo").unwrap())); + } + + #[test] + #[ignore] + fn test_deployment_memory_overload() { + const NUM_DEPLOYMENTS: usize = 32; + + let rng = &mut TestRng::default(); + + // Initialize a private key. + let private_key = sample_genesis_private_key(rng); + + // Initialize a view key. + let view_key = ViewKey::try_from(&private_key).unwrap(); + + // Initialize the genesis block. + let genesis = sample_genesis_block(rng); + + // Initialize the VM. + let vm = sample_vm(); + // Update the VM. + vm.add_next_block(&genesis).unwrap(); + + // Deploy the base program. + let program = Program::from_str( + r" +program program_layer_0.aleo; + +mapping m: + key as u8.public; + value as u32.public; + +function do: + input r0 as u32.public; + async do r0 into r1; + output r1 as program_layer_0.aleo/do.future; + +finalize do: + input r0 as u32.public; + set r0 into m[0u8];", + ) + .unwrap(); + + let deployment = vm.deploy(&private_key, &program, None, 0, None, rng).unwrap(); + vm.add_next_block(&sample_next_block(&vm, &private_key, &[deployment], rng).unwrap()).unwrap(); + + // For each layer, deploy a program that calls the program from the previous layer. + for i in 1..NUM_DEPLOYMENTS { + let mut program_string = String::new(); + // Add the import statements. + for j in 0..i { + program_string.push_str(&format!("import program_layer_{}.aleo;\n", j)); + } + // Add the program body. 
+ program_string.push_str(&format!( + "program program_layer_{i}.aleo; + +mapping m: + key as u8.public; + value as u32.public; + +function do: + input r0 as u32.public; + call program_layer_{prev}.aleo/do r0 into r1; + async do r0 r1 into r2; + output r2 as program_layer_{i}.aleo/do.future; + +finalize do: + input r0 as u32.public; + input r1 as program_layer_{prev}.aleo/do.future; + await r1; + set r0 into m[0u8];", + prev = i - 1 + )); + // Construct the program. + let program = Program::from_str(&program_string).unwrap(); + + // Deploy the program. + let deployment = vm.deploy(&private_key, &program, None, 0, None, rng).unwrap(); + + vm.add_next_block(&sample_next_block(&vm, &private_key, &[deployment], rng).unwrap()).unwrap(); + } + + // Fetch the unspent records. + let records = genesis.transitions().cloned().flat_map(Transition::into_records).collect::>(); + trace!("Unspent Records:\n{:#?}", records); + + // Select a record to spend. + let record = Some(records.values().next().unwrap().decrypt(&view_key).unwrap()); + + // Prepare the inputs. + let inputs = [Value::::from_str("1u32").unwrap()].into_iter(); + + // Execute. + let transaction = + vm.execute(&private_key, ("program_layer_30.aleo", "do"), inputs, record, 0, None, rng).unwrap(); + + // Verify. + vm.check_transaction(&transaction, None, rng).unwrap(); + } } diff --git a/synthesizer/src/vm/verify.rs b/synthesizer/src/vm/verify.rs index 2a40dfbdb4..fe036b7e3a 100644 --- a/synthesizer/src/vm/verify.rs +++ b/synthesizer/src/vm/verify.rs @@ -34,15 +34,18 @@ macro_rules! ensure_is_unique { impl> VM { /// Verifies the transaction in the VM. On failure, returns an error. #[inline] - pub fn check_transaction(&self, transaction: &Transaction, rejected_id: Option>) -> Result<()> { + pub fn check_transaction( + &self, + transaction: &Transaction, + rejected_id: Option>, + rng: &mut R, + ) -> Result<()> { let timer = timer!("VM::check_transaction"); /* Transaction */ // Ensure the transaction ID is unique. 
- if self.transaction_store().contains_transaction_id(&transaction.id())? - || self.block_store().contains_rejected_or_aborted_transaction_id(&transaction.id())? - { + if self.block_store().contains_transaction_id(&transaction.id())? { bail!("Transaction '{}' already exists in the ledger", transaction.id()) } @@ -97,7 +100,7 @@ impl> VM { Transaction::Deploy(id, owner, deployment, _) => { // Compute the deployment ID. let Ok(deployment_id) = deployment.to_deployment_id() else { - bail!("Failed to compute the Merkle root for deployment transaction '{id}'") + bail!("Failed to compute the Merkle root for a deployment transaction '{id}'") }; // Verify the signature corresponds to the transaction ID. ensure!(owner.verify(deployment_id), "Invalid owner signature for deployment transaction '{id}'"); @@ -110,9 +113,17 @@ impl> VM { bail!("Program ID '{}' is already deployed", deployment.program_id()) } // Verify the deployment. - self.check_deployment_internal(deployment)?; + self.check_deployment_internal(deployment, rng)?; } - Transaction::Execute(_, execution, _) => { + Transaction::Execute(id, execution, _) => { + // Compute the execution ID. + let Ok(execution_id) = execution.to_execution_id() else { + bail!("Failed to compute the Merkle root for an execution transaction '{id}'") + }; + // Ensure the execution was not previously rejected (replay attack prevention). + if self.block_store().contains_rejected_deployment_or_execution_id(&execution_id)? { + bail!("Transaction '{id}' contains a previously rejected execution") + } // Verify the execution. self.check_execution_internal(execution)?; } @@ -196,13 +207,13 @@ impl> VM { /// Note: This is an internal check only. To ensure all components of the deployment are checked, /// use `VM::check_transaction` instead. #[inline] - fn check_deployment_internal(&self, deployment: &Deployment) -> Result<()> { + fn check_deployment_internal(&self, deployment: &Deployment, rng: &mut R) -> Result<()> { macro_rules! 
logic { ($process:expr, $network:path, $aleo:path) => {{ // Prepare the deployment. let deployment = cast_ref!(&deployment as Deployment<$network>); // Verify the deployment. - $process.verify_deployment::<$aleo, _>(&deployment, &mut rand::thread_rng()) + $process.verify_deployment::<$aleo, _>(&deployment, rng) }}; } @@ -249,7 +260,7 @@ impl> VM { // Ensure the fee does not exceed the limit. let fee_amount = fee.amount()?; - ensure!(*fee_amount < N::MAX_FEE, "Fee verification failed: fee exceeds the maximum limit"); + ensure!(*fee_amount <= N::MAX_FEE, "Fee verification failed: fee exceeds the maximum limit"); // Verify the fee. let verification = self.process.read().verify_fee(fee, deployment_or_execution_id); @@ -312,17 +323,17 @@ mod tests { // Fetch a deployment transaction. let deployment_transaction = crate::vm::test_helpers::sample_deployment_transaction(rng); // Ensure the transaction verifies. - vm.check_transaction(&deployment_transaction, None).unwrap(); + vm.check_transaction(&deployment_transaction, None, rng).unwrap(); // Fetch an execution transaction. let execution_transaction = crate::vm::test_helpers::sample_execution_transaction_with_private_fee(rng); // Ensure the transaction verifies. - vm.check_transaction(&execution_transaction, None).unwrap(); + vm.check_transaction(&execution_transaction, None, rng).unwrap(); // Fetch an execution transaction. let execution_transaction = crate::vm::test_helpers::sample_execution_transaction_with_public_fee(rng); // Ensure the transaction verifies. - vm.check_transaction(&execution_transaction, None).unwrap(); + vm.check_transaction(&execution_transaction, None, rng).unwrap(); } #[test] @@ -337,12 +348,12 @@ mod tests { let deployment = vm.deploy_raw(&program, rng).unwrap(); // Ensure the deployment is valid. - vm.check_deployment_internal(&deployment).unwrap(); + vm.check_deployment_internal(&deployment, rng).unwrap(); // Ensure that deserialization doesn't break the transaction verification. 
let serialized_deployment = deployment.to_string(); let deployment_transaction: Deployment = serde_json::from_str(&serialized_deployment).unwrap(); - vm.check_deployment_internal(&deployment_transaction).unwrap(); + vm.check_deployment_internal(&deployment_transaction, rng).unwrap(); } #[test] @@ -419,15 +430,15 @@ mod tests { // Fetch a valid execution transaction with a private fee. let valid_transaction = crate::vm::test_helpers::sample_execution_transaction_with_private_fee(rng); - vm.check_transaction(&valid_transaction, None).unwrap(); + vm.check_transaction(&valid_transaction, None, rng).unwrap(); // Fetch a valid execution transaction with a public fee. let valid_transaction = crate::vm::test_helpers::sample_execution_transaction_with_public_fee(rng); - vm.check_transaction(&valid_transaction, None).unwrap(); + vm.check_transaction(&valid_transaction, None, rng).unwrap(); // Fetch an valid execution transaction with no fee. let valid_transaction = crate::vm::test_helpers::sample_execution_transaction_without_fee(rng); - vm.check_transaction(&valid_transaction, None).unwrap(); + vm.check_transaction(&valid_transaction, None, rng).unwrap(); } #[test] @@ -523,7 +534,7 @@ mod tests { vm.execute(&caller_private_key, ("testing.aleo", "initialize"), inputs, credits, 10, None, rng).unwrap(); // Verify. 
- vm.check_transaction(&transaction, None).unwrap(); + vm.check_transaction(&transaction, None, rng).unwrap(); } #[test] diff --git a/synthesizer/tests/expectations/process/execute/hash_into_struct.out b/synthesizer/tests/expectations/process/execute/hash_into_struct.out new file mode 100644 index 0000000000..f750db7204 --- /dev/null +++ b/synthesizer/tests/expectations/process/execute/hash_into_struct.out @@ -0,0 +1,3 @@ +errors: +- Invalid destination type in 'hash' instruction +outputs: [] diff --git a/synthesizer/tests/expectations/vm/execute_and_finalize/arrays_in_finalize.out b/synthesizer/tests/expectations/vm/execute_and_finalize/arrays_in_finalize.out index 24082f1b13..2f99747752 100644 --- a/synthesizer/tests/expectations/vm/execute_and_finalize/arrays_in_finalize.out +++ b/synthesizer/tests/expectations/vm/execute_and_finalize/arrays_in_finalize.out @@ -4,15 +4,15 @@ outputs: execute: arrays_in_finalize.aleo/test_arrays: outputs: - - '{"type":"public","id":"7478840637674867348334152936830348927618689666735497694442760373406118031677field","value":"[\n [\n true,\n false,\n true,\n false\n ]\n]"}' - - '{"type":"public","id":"5497626638973564164115408441431072571251727387638555124557359358862572601150field","value":"[\n [\n false,\n true,\n false,\n true\n ]\n]"}' - - '{"type":"public","id":"6826141182633851685078476561830773249264254215146313720362967764421378578986field","value":"[\n [\n false,\n false,\n false,\n false\n ]\n]"}' - - '{"type":"private","id":"8090041184628455649123912475425661970245522643231603870408821235652148265153field","value":"ciphertext1qvq9rlleav6excsnqaep0h3cztdzpergd8pdrhnrhapvr49kfrws6zwarcw9rxh9q3g0v8e5dutwyw34qslkjeqx6jzexjt2qrthnnazqerfvaxj939casntsm62yke9djhnked6nv4mufg8rmh27macnejpzdgpy4a"}' - - '{"type":"future","id":"3563270012340942054693097512832513108146970497297480667692085200645102263926field","value":"{\n program_id: arrays_in_finalize.aleo,\n function_name: test_arrays,\n arguments: [\n [\n [\n true,\n false,\n 
true,\n false\n ]\n],\n [\n [\n false,\n true,\n false,\n true\n ]\n]\n ]\n}"}' + - '{"type":"public","id":"8423501051492945494142580898503776230777967039101310769883569628562838106961field","value":"[\n [\n true,\n false,\n true,\n false\n ]\n]"}' + - '{"type":"public","id":"6373810658910535946682884531888342268371302467625666065790785860646654788892field","value":"[\n [\n false,\n true,\n false,\n true\n ]\n]"}' + - '{"type":"public","id":"3091501445020687319877822274133758457934608796618645756988915091755405157586field","value":"[\n [\n false,\n false,\n false,\n false\n ]\n]"}' + - '{"type":"private","id":"6526736422839961003683955650152924988761580233649162767954364194151255194053field","value":"ciphertext1qvqr73dv3clq8jtx7trf8k9l3eshmh0tyvvp2ta7a0y3pedjj99eczrpwk6045wektcw7mmjzdrm8f67x7egd4dfch3slf4q6ag3lxn0p6cryuecunyl2d9ffr8ntj57dvmv9rg7fhlrtc995w2ruju7j20qgndw85u"}' + - '{"type":"future","id":"5831343183208330086568343850686282945376722092994016952161543991067519254136field","value":"{\n program_id: arrays_in_finalize.aleo,\n function_name: test_arrays,\n arguments: [\n [\n [\n true,\n false,\n true,\n false\n ]\n],\n [\n [\n false,\n true,\n false,\n true\n ]\n]\n ]\n}"}' speculate: the execution was accepted add_next_block: succeeded. 
additional: - child_outputs: - credits.aleo/fee_private: + credits.aleo/fee_public: outputs: - - '{"type":"record","id":"2876640636542307658613598456059435130048160510932350003072343205416101435931field","checksum":"5890120327829713381983131988174812000986785644250446148915207626096694862906field","value":"record1qyqspqjnxve39nks4yt3u86vm4xek0q865ggt2x8c8k3at2heq5h9aq9qyxx66trwfhkxun9v35hguerqqpqzqyand8c8jqhkwckph93y0px07mp29ntdxa39egxwxg7rh8p6dcsq83r778w0c6vrawc9c46ueepj7l6793xm98xu82fjfnd2hfsc6ususdswdj"}' + - '{"type":"future","id":"611935698929626281656171069026748974326705967123466685855354594539166326131field","value":"{\n program_id: credits.aleo,\n function_name: fee_public,\n arguments: [\n aleo1y9t0y4lvhm43qdzlfjmfzh8985vfnx9ms368p07x5lsemet5ey8qt0ssjn,\n 21758u64\n ]\n}"}' diff --git a/synthesizer/tests/expectations/vm/execute_and_finalize/async_without_finalize_fail.out b/synthesizer/tests/expectations/vm/execute_and_finalize/async_without_finalize_fail.out new file mode 100644 index 0000000000..e12989bca9 --- /dev/null +++ b/synthesizer/tests/expectations/vm/execute_and_finalize/async_without_finalize_fail.out @@ -0,0 +1,3 @@ +errors: +- 'Failed to run `VM::deploy for program child.aleo: ''child.aleo/foo'' does not have a finalize block' +outputs: [] diff --git a/synthesizer/tests/expectations/vm/execute_and_finalize/call_after_async_fail.out b/synthesizer/tests/expectations/vm/execute_and_finalize/call_after_async_fail.out new file mode 100644 index 0000000000..cf7e6921fd --- /dev/null +++ b/synthesizer/tests/expectations/vm/execute_and_finalize/call_after_async_fail.out @@ -0,0 +1,3 @@ +errors: +- 'Failed to run `VM::deploy for program parent.aleo: The ''call'' can only be invoked before an ''async'' instruction' +outputs: [] diff --git a/synthesizer/tests/expectations/vm/execute_and_finalize/child_and_parent.out b/synthesizer/tests/expectations/vm/execute_and_finalize/child_and_parent.out index a33be90307..cf83c9d485 100644 --- 
a/synthesizer/tests/expectations/vm/execute_and_finalize/child_and_parent.out +++ b/synthesizer/tests/expectations/vm/execute_and_finalize/child_and_parent.out @@ -4,30 +4,30 @@ outputs: execute: child.aleo/foo: outputs: - - '{"type":"public","id":"4279509780486643035080626777023973066084837379094817161797996960689084569794field","value":"aleo1qr2ha4pfs5l28aze88yn6fhleeythklkczrule2v838uwj65n5gqxt9djx"}' - - '{"type":"public","id":"4408001848080504344165436121252640202617322612254005793329268580869751931263field","value":"aleo1qr2ha4pfs5l28aze88yn6fhleeythklkczrule2v838uwj65n5gqxt9djx"}' + - '{"type":"public","id":"3583507900097573902692207210661581535840809808651900827750728854102720512424field","value":"aleo1qr2ha4pfs5l28aze88yn6fhleeythklkczrule2v838uwj65n5gqxt9djx"}' + - '{"type":"public","id":"476166291720572191849579987891810720100233870490756615272004665719966045283field","value":"aleo1qr2ha4pfs5l28aze88yn6fhleeythklkczrule2v838uwj65n5gqxt9djx"}' speculate: the execution was accepted add_next_block: succeeded. 
- verified: true execute: parent.aleo/foo: outputs: - - '{"type":"public","id":"2289465818952696893871478441544321507584149448701746210208386693335929733331field","value":"aleo16w8t56s7v6ud7vu33fr388ph0dq0c7yhp597cyjt88rr3nultcyqcyk9yy"}' - - '{"type":"public","id":"7559709455224489102027191317638535668729648748642786204592894303802312565285field","value":"aleo1qr2ha4pfs5l28aze88yn6fhleeythklkczrule2v838uwj65n5gqxt9djx"}' - - '{"type":"public","id":"3386671048315419571612162210626560184351583831974432673683170066362200712484field","value":"aleo1qr2ha4pfs5l28aze88yn6fhleeythklkczrule2v838uwj65n5gqxt9djx"}' - - '{"type":"public","id":"377315782189483434546519951474821036179425135984541742851362035377982852007field","value":"aleo1qr2ha4pfs5l28aze88yn6fhleeythklkczrule2v838uwj65n5gqxt9djx"}' + - '{"type":"public","id":"3761717515751581670491990458123447761370813910339791821189914541241041772398field","value":"aleo16w8t56s7v6ud7vu33fr388ph0dq0c7yhp597cyjt88rr3nultcyqcyk9yy"}' + - '{"type":"public","id":"7311055836597830429601351369404551037507421904626326632284143897137420180918field","value":"aleo1qr2ha4pfs5l28aze88yn6fhleeythklkczrule2v838uwj65n5gqxt9djx"}' + - '{"type":"public","id":"681818897459164396968079033669192296282137044899275115497211144004195398694field","value":"aleo1qr2ha4pfs5l28aze88yn6fhleeythklkczrule2v838uwj65n5gqxt9djx"}' + - '{"type":"public","id":"3417320637392048962499244658848814085037481834432707606558978225870881469204field","value":"aleo1qr2ha4pfs5l28aze88yn6fhleeythklkczrule2v838uwj65n5gqxt9djx"}' speculate: the execution was accepted add_next_block: succeeded. 
additional: - child_outputs: - credits.aleo/fee_private: + credits.aleo/fee_public: outputs: - - '{"type":"record","id":"7306036686667087636279947507997825407084203811048562802739660291735242744444field","checksum":"7132981897182131254956763451386939918007763262893522220397577775796699535598field","value":"record1qyqsqx586v4tjhcdgtvpat2cmz7ztzd7drzm5c04gyeaxqmujgv7ucq3qyxx66trwfhkxun9v35hguerqqpqzqx4pcwh0jc37snpu02y8ujwh2u327ghc6yaeeyc4k74e56uvuhqp0tpta5q5eppwa48pq9eepyuln9ct5qth57klqzf67ewyqn9hresxwalp5l"}' + - '{"type":"future","id":"5245767978479482276373144091068362056657622227760198296183689243703275814117field","value":"{\n program_id: credits.aleo,\n function_name: fee_public,\n arguments: [\n aleo1qr2ha4pfs5l28aze88yn6fhleeythklkczrule2v838uwj65n5gqxt9djx,\n 1244u64\n ]\n}"}' - child_outputs: child.aleo/foo: outputs: - - '{"type":"public","id":"994990058621570799386345097567825463488929977263218477287025824851872546446field","value":"aleo16w8t56s7v6ud7vu33fr388ph0dq0c7yhp597cyjt88rr3nultcyqcyk9yy"}' - - '{"type":"public","id":"2256104170911230762284986427287133867149405204422634231010917189963141359010field","value":"aleo1qr2ha4pfs5l28aze88yn6fhleeythklkczrule2v838uwj65n5gqxt9djx"}' - credits.aleo/fee_private: + - '{"type":"public","id":"7812033131660295289207078830719348736788767283997224928167372421466500300696field","value":"aleo16w8t56s7v6ud7vu33fr388ph0dq0c7yhp597cyjt88rr3nultcyqcyk9yy"}' + - '{"type":"public","id":"4227271453559074580761782898043117548320729393319599555165417123780466734088field","value":"aleo1qr2ha4pfs5l28aze88yn6fhleeythklkczrule2v838uwj65n5gqxt9djx"}' + credits.aleo/fee_public: outputs: - - 
'{"type":"record","id":"3845927395044855364546905308732895913294823927145229411137666358260055754822field","checksum":"1584433976273241291707799637296934525591046977180320555626848307441674321598field","value":"record1qyqsp3c590shnra6j8rc3ezq5k229gcd8srjpxpf2vn7drkccmrq8tqrqyxx66trwfhkxun9v35hguerqqpqzq9m2h242d3v2sm53yzkqkuu6v2cwaz0jpq7nseh2uxqedph6gz9pmsvcdfnpn0htvu2lvx9cpvs6sssvhj972rpakkdzjmj4n7fhwhs6fjpcap"}' + - '{"type":"future","id":"8216972065644000579816790328783540313220942313753411501386970051803831099199field","value":"{\n program_id: credits.aleo,\n function_name: fee_public,\n arguments: [\n aleo1qr2ha4pfs5l28aze88yn6fhleeythklkczrule2v838uwj65n5gqxt9djx,\n 2123u64\n ]\n}"}' diff --git a/synthesizer/tests/expectations/vm/execute_and_finalize/complex_finalization.out b/synthesizer/tests/expectations/vm/execute_and_finalize/complex_finalization.out index 62b00223bd..d413523f61 100644 --- a/synthesizer/tests/expectations/vm/execute_and_finalize/complex_finalization.out +++ b/synthesizer/tests/expectations/vm/execute_and_finalize/complex_finalization.out @@ -4,23 +4,23 @@ outputs: execute: four_program.aleo/a: outputs: - - '{"type":"future","id":"6974972465287288859193039603112863165703788123799484516917687492504306829310field","value":"{\n program_id: four_program.aleo,\n function_name: a,\n arguments: [\n {\n program_id: two_program.aleo,\n function_name: b,\n arguments: [\n {\n program_id: zero_program.aleo,\n function_name: c,\n arguments: [\n aleo1kw4knandael9qcpxs6g36rr6h7dwvjz6q25ueah6zz9v57zjlvxsx5llq8\n ]\n },\n {\n program_id: one_program.aleo,\n function_name: d,\n arguments: [\n aleo1kw4knandael9qcpxs6g36rr6h7dwvjz6q25ueah6zz9v57zjlvxsx5llq8\n ]\n },\n aleo1kw4knandael9qcpxs6g36rr6h7dwvjz6q25ueah6zz9v57zjlvxsx5llq8\n ]\n },\n {\n program_id: three_program.aleo,\n function_name: e,\n arguments: [\n {\n program_id: two_program.aleo,\n function_name: b,\n arguments: [\n {\n program_id: zero_program.aleo,\n function_name: c,\n arguments: [\n 
aleo1kw4knandael9qcpxs6g36rr6h7dwvjz6q25ueah6zz9v57zjlvxsx5llq8\n ]\n },\n {\n program_id: one_program.aleo,\n function_name: d,\n arguments: [\n aleo1kw4knandael9qcpxs6g36rr6h7dwvjz6q25ueah6zz9v57zjlvxsx5llq8\n ]\n },\n aleo1kw4knandael9qcpxs6g36rr6h7dwvjz6q25ueah6zz9v57zjlvxsx5llq8\n ]\n },\n {\n program_id: one_program.aleo,\n function_name: d,\n arguments: [\n aleo1kw4knandael9qcpxs6g36rr6h7dwvjz6q25ueah6zz9v57zjlvxsx5llq8\n ]\n },\n {\n program_id: zero_program.aleo,\n function_name: c,\n arguments: [\n aleo1kw4knandael9qcpxs6g36rr6h7dwvjz6q25ueah6zz9v57zjlvxsx5llq8\n ]\n },\n aleo1kw4knandael9qcpxs6g36rr6h7dwvjz6q25ueah6zz9v57zjlvxsx5llq8\n ]\n },\n aleo1kw4knandael9qcpxs6g36rr6h7dwvjz6q25ueah6zz9v57zjlvxsx5llq8\n ]\n}"}' + - '{"type":"future","id":"665183472055988178271814702289264025474680637838536582821282925188065775088field","value":"{\n program_id: four_program.aleo,\n function_name: a,\n arguments: [\n {\n program_id: two_program.aleo,\n function_name: b,\n arguments: [\n {\n program_id: zero_program.aleo,\n function_name: c,\n arguments: [\n aleo1kw4knandael9qcpxs6g36rr6h7dwvjz6q25ueah6zz9v57zjlvxsx5llq8\n ]\n },\n {\n program_id: one_program.aleo,\n function_name: d,\n arguments: [\n aleo1kw4knandael9qcpxs6g36rr6h7dwvjz6q25ueah6zz9v57zjlvxsx5llq8\n ]\n },\n aleo1kw4knandael9qcpxs6g36rr6h7dwvjz6q25ueah6zz9v57zjlvxsx5llq8\n ]\n },\n {\n program_id: three_program.aleo,\n function_name: e,\n arguments: [\n {\n program_id: two_program.aleo,\n function_name: b,\n arguments: [\n {\n program_id: zero_program.aleo,\n function_name: c,\n arguments: [\n aleo1kw4knandael9qcpxs6g36rr6h7dwvjz6q25ueah6zz9v57zjlvxsx5llq8\n ]\n },\n {\n program_id: one_program.aleo,\n function_name: d,\n arguments: [\n aleo1kw4knandael9qcpxs6g36rr6h7dwvjz6q25ueah6zz9v57zjlvxsx5llq8\n ]\n },\n aleo1kw4knandael9qcpxs6g36rr6h7dwvjz6q25ueah6zz9v57zjlvxsx5llq8\n ]\n },\n {\n program_id: one_program.aleo,\n function_name: d,\n arguments: [\n 
aleo1kw4knandael9qcpxs6g36rr6h7dwvjz6q25ueah6zz9v57zjlvxsx5llq8\n ]\n },\n {\n program_id: zero_program.aleo,\n function_name: c,\n arguments: [\n aleo1kw4knandael9qcpxs6g36rr6h7dwvjz6q25ueah6zz9v57zjlvxsx5llq8\n ]\n },\n aleo1kw4knandael9qcpxs6g36rr6h7dwvjz6q25ueah6zz9v57zjlvxsx5llq8\n ]\n },\n aleo1kw4knandael9qcpxs6g36rr6h7dwvjz6q25ueah6zz9v57zjlvxsx5llq8\n ]\n}"}' speculate: the execution was accepted add_next_block: succeeded. additional: - child_outputs: zero_program.aleo/c: outputs: - - '{"type":"future","id":"6952696836717992189844193836164206763601783392883300349164619423689411832957field","value":"{\n program_id: zero_program.aleo,\n function_name: c,\n arguments: [\n aleo1kw4knandael9qcpxs6g36rr6h7dwvjz6q25ueah6zz9v57zjlvxsx5llq8\n ]\n}"}' + - '{"type":"future","id":"1480766593085211098189114488792207994373740214001639330000129810788271883137field","value":"{\n program_id: zero_program.aleo,\n function_name: c,\n arguments: [\n aleo1kw4knandael9qcpxs6g36rr6h7dwvjz6q25ueah6zz9v57zjlvxsx5llq8\n ]\n}"}' one_program.aleo/d: outputs: - - '{"type":"future","id":"3375358791246667464469024643039205630228640255715459354259647324302115771724field","value":"{\n program_id: one_program.aleo,\n function_name: d,\n arguments: [\n aleo1kw4knandael9qcpxs6g36rr6h7dwvjz6q25ueah6zz9v57zjlvxsx5llq8\n ]\n}"}' + - '{"type":"future","id":"6874436412573820073608038525233877999669336342335693667065896409420053741810field","value":"{\n program_id: one_program.aleo,\n function_name: d,\n arguments: [\n aleo1kw4knandael9qcpxs6g36rr6h7dwvjz6q25ueah6zz9v57zjlvxsx5llq8\n ]\n}"}' two_program.aleo/b: outputs: - - '{"type":"future","id":"364582187421035500339943835636532028870700065252111564274460853535036892560field","value":"{\n program_id: two_program.aleo,\n function_name: b,\n arguments: [\n {\n program_id: zero_program.aleo,\n function_name: c,\n arguments: [\n aleo1kw4knandael9qcpxs6g36rr6h7dwvjz6q25ueah6zz9v57zjlvxsx5llq8\n ]\n },\n {\n program_id: one_program.aleo,\n 
function_name: d,\n arguments: [\n aleo1kw4knandael9qcpxs6g36rr6h7dwvjz6q25ueah6zz9v57zjlvxsx5llq8\n ]\n },\n aleo1kw4knandael9qcpxs6g36rr6h7dwvjz6q25ueah6zz9v57zjlvxsx5llq8\n ]\n}"}' + - '{"type":"future","id":"439911297771864655257183196286454333195715414280549536084302592849654672842field","value":"{\n program_id: two_program.aleo,\n function_name: b,\n arguments: [\n {\n program_id: zero_program.aleo,\n function_name: c,\n arguments: [\n aleo1kw4knandael9qcpxs6g36rr6h7dwvjz6q25ueah6zz9v57zjlvxsx5llq8\n ]\n },\n {\n program_id: one_program.aleo,\n function_name: d,\n arguments: [\n aleo1kw4knandael9qcpxs6g36rr6h7dwvjz6q25ueah6zz9v57zjlvxsx5llq8\n ]\n },\n aleo1kw4knandael9qcpxs6g36rr6h7dwvjz6q25ueah6zz9v57zjlvxsx5llq8\n ]\n}"}' three_program.aleo/e: outputs: - - '{"type":"future","id":"5353639949292491626228591653810953466693160316414035702266036019526362677427field","value":"{\n program_id: three_program.aleo,\n function_name: e,\n arguments: [\n {\n program_id: two_program.aleo,\n function_name: b,\n arguments: [\n {\n program_id: zero_program.aleo,\n function_name: c,\n arguments: [\n aleo1kw4knandael9qcpxs6g36rr6h7dwvjz6q25ueah6zz9v57zjlvxsx5llq8\n ]\n },\n {\n program_id: one_program.aleo,\n function_name: d,\n arguments: [\n aleo1kw4knandael9qcpxs6g36rr6h7dwvjz6q25ueah6zz9v57zjlvxsx5llq8\n ]\n },\n aleo1kw4knandael9qcpxs6g36rr6h7dwvjz6q25ueah6zz9v57zjlvxsx5llq8\n ]\n },\n {\n program_id: one_program.aleo,\n function_name: d,\n arguments: [\n aleo1kw4knandael9qcpxs6g36rr6h7dwvjz6q25ueah6zz9v57zjlvxsx5llq8\n ]\n },\n {\n program_id: zero_program.aleo,\n function_name: c,\n arguments: [\n aleo1kw4knandael9qcpxs6g36rr6h7dwvjz6q25ueah6zz9v57zjlvxsx5llq8\n ]\n },\n aleo1kw4knandael9qcpxs6g36rr6h7dwvjz6q25ueah6zz9v57zjlvxsx5llq8\n ]\n}"}' - credits.aleo/fee_private: + - '{"type":"future","id":"6324349534667114127832388996854882573236888808429320919855235417654165379333field","value":"{\n program_id: three_program.aleo,\n function_name: e,\n arguments: [\n {\n 
program_id: two_program.aleo,\n function_name: b,\n arguments: [\n {\n program_id: zero_program.aleo,\n function_name: c,\n arguments: [\n aleo1kw4knandael9qcpxs6g36rr6h7dwvjz6q25ueah6zz9v57zjlvxsx5llq8\n ]\n },\n {\n program_id: one_program.aleo,\n function_name: d,\n arguments: [\n aleo1kw4knandael9qcpxs6g36rr6h7dwvjz6q25ueah6zz9v57zjlvxsx5llq8\n ]\n },\n aleo1kw4knandael9qcpxs6g36rr6h7dwvjz6q25ueah6zz9v57zjlvxsx5llq8\n ]\n },\n {\n program_id: one_program.aleo,\n function_name: d,\n arguments: [\n aleo1kw4knandael9qcpxs6g36rr6h7dwvjz6q25ueah6zz9v57zjlvxsx5llq8\n ]\n },\n {\n program_id: zero_program.aleo,\n function_name: c,\n arguments: [\n aleo1kw4knandael9qcpxs6g36rr6h7dwvjz6q25ueah6zz9v57zjlvxsx5llq8\n ]\n },\n aleo1kw4knandael9qcpxs6g36rr6h7dwvjz6q25ueah6zz9v57zjlvxsx5llq8\n ]\n}"}' + credits.aleo/fee_public: outputs: - - '{"type":"record","id":"5283229445241909733017633528975572727346436141849413233403355092851851760389field","checksum":"799210811176292053431285281723859450131511292642705172333396010564735450437field","value":"record1qyqsqkme4g74y49w7v70p63ywtq5nate5r75dk8m05ss5zmer5zeqrggqyxx66trwfhkxun9v35hguerqqpqzqx2jzxmtf2rksjtwnl4xz5uu33vh0hdl5gys60lw5tdywm6w864qpzvzv6t35yczkzxqpeh7ckc5wtmt5sxynlcz9fy5ellqapnz8squlrzfgw"}' + - '{"type":"future","id":"2642525527921148655203140665689909230400981948693155278410222306693462629362field","value":"{\n program_id: credits.aleo,\n function_name: fee_public,\n arguments: [\n aleo1kw4knandael9qcpxs6g36rr6h7dwvjz6q25ueah6zz9v57zjlvxsx5llq8,\n 1312883u64\n ]\n}"}' diff --git a/synthesizer/tests/expectations/vm/execute_and_finalize/count_usages.out b/synthesizer/tests/expectations/vm/execute_and_finalize/count_usages.out index 3a80e7786a..bc0c5af71d 100644 --- a/synthesizer/tests/expectations/vm/execute_and_finalize/count_usages.out +++ b/synthesizer/tests/expectations/vm/execute_and_finalize/count_usages.out @@ -4,20 +4,20 @@ outputs: execute: count_usages.aleo/add_and_subtract: outputs: - - 
'{"type":"private","id":"5938418890955443794463293173052677473396802601178583117662009125510762112385field","value":"ciphertext1qyqwvmg8ce9sq5ewccjlade56hhmxk4d4j9ec48up7d3mk9q4gwtqzc5kd8jk"}' - - '{"type":"future","id":"5461514865984769926527447907140509355062580300320347542454721592100955559616field","value":"{\n program_id: count_usages.aleo,\n function_name: add_and_subtract,\n arguments: [\n {\n program_id: basic_math.aleo,\n function_name: add_and_count,\n arguments: [\n aleo1mrughg5ssadd9fc2uwve7l6u24xn76kyz24zsjtzta03vgkq4vpqggl6fg\n ]\n },\n {\n program_id: basic_math.aleo,\n function_name: sub_and_count,\n arguments: [\n aleo1mrughg5ssadd9fc2uwve7l6u24xn76kyz24zsjtzta03vgkq4vpqggl6fg\n ]\n }\n \n ]\n}"}' + - '{"type":"private","id":"3123182017432916757124403751790889348245620410758014315114714729237515860473field","value":"ciphertext1qyqwsyyjjx85zuu3rh9ujc7lt33dgqj28xcpxa5vz0uscttkelcm2yglca4ja"}' + - '{"type":"future","id":"3075184327471252279705776826446991266718083739660015565935642091055852740382field","value":"{\n program_id: count_usages.aleo,\n function_name: add_and_subtract,\n arguments: [\n {\n program_id: basic_math.aleo,\n function_name: add_and_count,\n arguments: [\n aleo1mrughg5ssadd9fc2uwve7l6u24xn76kyz24zsjtzta03vgkq4vpqggl6fg\n ]\n },\n {\n program_id: basic_math.aleo,\n function_name: sub_and_count,\n arguments: [\n aleo1mrughg5ssadd9fc2uwve7l6u24xn76kyz24zsjtzta03vgkq4vpqggl6fg\n ]\n }\n \n ]\n}"}' speculate: the execution was accepted add_next_block: succeeded. 
additional: - child_outputs: basic_math.aleo/add_and_count: outputs: - - '{"type":"private","id":"2189963205002380951731804610675645921799015755766185151265166514396381689207field","value":"ciphertext1qyqwpu5p92d4lrcfhthx0emqpj6cldsty3ghv6mhztl69wyk7j42cpqpq2uaz"}' - - '{"type":"future","id":"1762068320811124981245699286589805951610339834807816585249322967070273785765field","value":"{\n program_id: basic_math.aleo,\n function_name: add_and_count,\n arguments: [\n aleo1mrughg5ssadd9fc2uwve7l6u24xn76kyz24zsjtzta03vgkq4vpqggl6fg\n ]\n}"}' + - '{"type":"private","id":"8146495595200999887858412726015174541046002665643175756153664146614367276693field","value":"ciphertext1qyqgrgmhxyfjr3cvy50sy2hg8y56suw5wgvytygzms53mp4ms3v26rg226k3t"}' + - '{"type":"future","id":"4731072368922094762332855593966999173744758108005904730949638980529645559135field","value":"{\n program_id: basic_math.aleo,\n function_name: add_and_count,\n arguments: [\n aleo1mrughg5ssadd9fc2uwve7l6u24xn76kyz24zsjtzta03vgkq4vpqggl6fg\n ]\n}"}' basic_math.aleo/sub_and_count: outputs: - - '{"type":"private","id":"3237068809383979468892967123729721371250496145349751597456577370896562797315field","value":"ciphertext1qyqrqc0329zggejxfkdju3e5ng8cahudh6gmmwcu9kvhp2x784lrqqgfua69p"}' - - '{"type":"future","id":"4237975671848212071086918441612015189870459327089330453971374309755212325698field","value":"{\n program_id: basic_math.aleo,\n function_name: sub_and_count,\n arguments: [\n aleo1mrughg5ssadd9fc2uwve7l6u24xn76kyz24zsjtzta03vgkq4vpqggl6fg\n ]\n}"}' - credits.aleo/fee_private: + - '{"type":"private","id":"961409263208860904485872221121989324413563522578995015597420037381610399703field","value":"ciphertext1qyqv6unr5dg8tj228pl0vgv32p92mcyxxyf9368salhtfqh0hd4scqc89y5m4"}' + - '{"type":"future","id":"4663027849889985446453275863200868545411574358675486387787337708909316167962field","value":"{\n program_id: basic_math.aleo,\n function_name: sub_and_count,\n arguments: [\n 
aleo1mrughg5ssadd9fc2uwve7l6u24xn76kyz24zsjtzta03vgkq4vpqggl6fg\n ]\n}"}' + credits.aleo/fee_public: outputs: - - '{"type":"record","id":"3873686337596264307426391445386469102660995373532676522475799639542084414660field","checksum":"2174613677640073315115047311154070353095418041185484773007639715599946288810field","value":"record1qyqsq758xfnepvvywvf4tjnve5qkfr0hur7gttevq66hpvl97ulxzkgyqyxx66trwfhkxun9v35hguerqqpqzq85ufcypfdv6unklsc9m066e86drkzx32yzj9kwx7enflqv7j6xqz5v6hx0r0qp5r564yuanadwjkhvx4tj3xp407hh058jl7u0v9qq2z6nqqz"}' + - '{"type":"future","id":"3084534457419184182612834880575828543753579447019853593724174249884664876235field","value":"{\n program_id: credits.aleo,\n function_name: fee_public,\n arguments: [\n aleo1kw4knandael9qcpxs6g36rr6h7dwvjz6q25ueah6zz9v57zjlvxsx5llq8,\n 263392u64\n ]\n}"}' diff --git a/synthesizer/tests/expectations/vm/execute_and_finalize/external_read_with_local_fail.out b/synthesizer/tests/expectations/vm/execute_and_finalize/external_read_with_local_fail.out new file mode 100644 index 0000000000..8c5e3e1a9f --- /dev/null +++ b/synthesizer/tests/expectations/vm/execute_and_finalize/external_read_with_local_fail.out @@ -0,0 +1,3 @@ +errors: +- 'Failed to run `VM::deploy for program relay.aleo: Locator ''relay.aleo/users'' does not reference an external mapping.' 
+outputs: [] diff --git a/synthesizer/tests/expectations/vm/execute_and_finalize/future_out_of_order_fail.out b/synthesizer/tests/expectations/vm/execute_and_finalize/future_out_of_order_fail.out new file mode 100644 index 0000000000..0cea7546f1 --- /dev/null +++ b/synthesizer/tests/expectations/vm/execute_and_finalize/future_out_of_order_fail.out @@ -0,0 +1,3 @@ +errors: +- 'Failed to run `VM::deploy for program parent.aleo: Function ''foo'' contains futures, but the ''async'' instruction does not consume all of them in the order they were produced' +outputs: [] diff --git a/synthesizer/tests/expectations/vm/execute_and_finalize/hello.out b/synthesizer/tests/expectations/vm/execute_and_finalize/hello.out index 7c1f6e99ba..4ee4b5c467 100644 --- a/synthesizer/tests/expectations/vm/execute_and_finalize/hello.out +++ b/synthesizer/tests/expectations/vm/execute_and_finalize/hello.out @@ -4,46 +4,46 @@ outputs: execute: hello.aleo/hello: outputs: - - '{"type":"private","id":"8125348741943322686009907907811782653304283628858249042666480861925372544888field","value":"ciphertext1qyqzh0ww4d529sp24xw2c0ek9p3mlh5amc8rlzhudhw6u4e6h9ffuzc4xu63n"}' + - '{"type":"private","id":"6254379462913920094060616443827416926361791013133233839133894947200598009041field","value":"ciphertext1qyqpmnatnq3sjej6qves695qtxu5r6lqnfnx8ck87ce3pez28x90qzgzeh4qh"}' speculate: the execution was accepted add_next_block: succeeded. - verified: true execute: hello.aleo/hello: outputs: - - '{"type":"private","id":"4190523942746941151788535679482167833023294290248250731893857652327309412596field","value":"ciphertext1qyqg0dpeew6mjk7zgcw423k99k74k5nnqpshnjsjvjezfqsfel7q6pq9k0ny2"}' + - '{"type":"private","id":"4161183151518570414349285932182760288961689691043823886807644299644687930091field","value":"ciphertext1qyqreg8a27jzsgm7m5umh98nr9nwyxhkv3aus5htm6vepdkr67zh5zqc4uzpd"}' speculate: the execution was accepted add_next_block: succeeded. 
- verified: true execute: hello.aleo/goodbye: outputs: - - '{"type":"public","id":"6479967549102698786501355579286201653159194507767944415639092049192937228124field","value":"1u32"}' - - '{"type":"future","id":"3413333273447015937668991532824129740236214843152627847347248311711336145614field","value":"{\n program_id: hello.aleo,\n function_name: goodbye,\n arguments: [\n 1u32,\n 1u32\n ]\n}"}' + - '{"type":"public","id":"5242826180235740795678885917758843199455932502638973701040836238216490364326field","value":"1u32"}' + - '{"type":"future","id":"5032744372214352919665806641360511690042524830912111693095233654380228978511field","value":"{\n program_id: hello.aleo,\n function_name: goodbye,\n arguments: [\n 1u32,\n 1u32\n ]\n}"}' speculate: the execution was accepted add_next_block: succeeded. - verified: true execute: hello.aleo/goodbye: outputs: - - '{"type":"public","id":"626299501033028507774181248923989773544577830656010087029558433445349806993field","value":"1u32"}' - - '{"type":"future","id":"3565307659242500881563819123626548100763518079205528385426399960688796421686field","value":"{\n program_id: hello.aleo,\n function_name: goodbye,\n arguments: [\n 0u32,\n 1u32\n ]\n}"}' + - '{"type":"public","id":"5954748469306089505201665920330448486455515953532955699388262149494774760375field","value":"1u32"}' + - '{"type":"future","id":"4063241715105271542572794266017703704121932214929814829482077839560118863776field","value":"{\n program_id: hello.aleo,\n function_name: goodbye,\n arguments: [\n 0u32,\n 1u32\n ]\n}"}' speculate: the execution was rejected add_next_block: succeeded. 
additional: - child_outputs: - credits.aleo/fee_private: + credits.aleo/fee_public: outputs: - - '{"type":"record","id":"1563962924137405178742221846132647984424532842613103093908307558260562727180field","checksum":"5823248983705197503554751176880028590763810324036946942686008195006386919883field","value":"record1qyqsqxplm7g9hnkv0kecp0qyzpcmqssafmu3l63n3rasf5tqn9mg95qxqyxx66trwfhkxun9v35hguerqqpqzqq6096l6jlnt6yw20wxxh5vz8hnau7v7zh0lhgg58t7330fzx2jzqucjp2scympk3qapejn5adphf7vcznjr0h3ar76peu3f74d8jess2usmfv"}' + - '{"type":"future","id":"5633762070565015506869877991983247311752566772551740661334199093127666173285field","value":"{\n program_id: credits.aleo,\n function_name: fee_public,\n arguments: [\n aleo10knkelvnd55fsaarm25wch7p9suf2tqlgwy5k4nxwms6d262xyfqm2tccr,\n 1285u64\n ]\n}"}' - child_outputs: - credits.aleo/fee_private: + credits.aleo/fee_public: outputs: - - '{"type":"record","id":"7418640687872332228724490929058573679078624012230582886849419189302370026910field","checksum":"2288322295066553762968656723772741917161404143341855080238665799645378480682field","value":"record1qyqsq2qp0m80sqnsgn4fdmpwfx7tgwh4cr9nvlejg2arkrn2u43qhpq0qyxx66trwfhkxun9v35hguerqqpqzqx7606fvtdu7h2zqjjxpmn9lhux5ys0hqqlltf2w62p7ukkddhyzpflcm4xfrg65m6y0c6g3xyg5ax25zq7kl5ea269mzpd9m86fz5qk6fhd5c"}' + - '{"type":"future","id":"4168535226825329132770722118865464284070271653060849404506023921600553004505field","value":"{\n program_id: credits.aleo,\n function_name: fee_public,\n arguments: [\n aleo10knkelvnd55fsaarm25wch7p9suf2tqlgwy5k4nxwms6d262xyfqm2tccr,\n 1285u64\n ]\n}"}' - child_outputs: - credits.aleo/fee_private: + credits.aleo/fee_public: outputs: - - 
'{"type":"record","id":"3460880420858965334724667101702159478601299033919299770846136229938204873952field","checksum":"889185695775021282240580382339319659491334542311157511499897352194166769728field","value":"record1qyqsqgg555pvesq2ss8kth990hv0qsfd7d4k9td00tufe3qzufxdsmsgqyxx66trwfhkxun9v35hguerqqpqzqr95x0pv0jygxkd7txgx8h34sfeducq4vjwvcsfjvn2xe55vht8zzxn40ssvl4eyvd9y3wj2qy47alyc69m0tkftmeuzrvnh6jqupkq20gsf5e"}' + - '{"type":"future","id":"7264507997475302694771209211625280132862970630053323876707987175500028146955field","value":"{\n program_id: credits.aleo,\n function_name: fee_public,\n arguments: [\n aleo10knkelvnd55fsaarm25wch7p9suf2tqlgwy5k4nxwms6d262xyfqm2tccr,\n 5334u64\n ]\n}"}' - child_outputs: - credits.aleo/fee_private: + credits.aleo/fee_public: outputs: - - '{"type":"record","id":"1053040319405423319885581476153506668871260904872026627338166884076666293995field","checksum":"3137133192027913702130513124530902338448032734519979527965593396128027839095field","value":"record1qyqsphyrpqq85txdn28cakkhasnrc5nfw5devjsmmhj3jdgm3eg80nqyqyxx66trwfhkxun9v35hguerqqpqzqyfnegh56dljl46e5p06q86e8fv844xepww7c9rldherdj9l6r0zgvu7qj26zt906m3htewsu4plawunfac2e7vjpre58sqte2n29rs7zaue0v"}' + - '{"type":"future","id":"2296589459402270097535149680764663519350069575816165257148428557434644368237field","value":"{\n program_id: credits.aleo,\n function_name: fee_public,\n arguments: [\n aleo10knkelvnd55fsaarm25wch7p9suf2tqlgwy5k4nxwms6d262xyfqm2tccr,\n 5334u64\n ]\n}"}' diff --git a/synthesizer/tests/expectations/vm/execute_and_finalize/ignore_finalize_fail.out b/synthesizer/tests/expectations/vm/execute_and_finalize/ignore_finalize_fail.out new file mode 100644 index 0000000000..1afb6b4e21 --- /dev/null +++ b/synthesizer/tests/expectations/vm/execute_and_finalize/ignore_finalize_fail.out @@ -0,0 +1,3 @@ +errors: +- 'Failed to run `VM::deploy for program parent.aleo: Function ''parent.aleo/foo'' must contain a finalize block, since it calls ''child.aleo/foo''.' 
+outputs: [] diff --git a/synthesizer/tests/expectations/vm/execute_and_finalize/last_reg_is_not_future_fail.out b/synthesizer/tests/expectations/vm/execute_and_finalize/last_reg_is_not_future_fail.out new file mode 100644 index 0000000000..2195cccd79 --- /dev/null +++ b/synthesizer/tests/expectations/vm/execute_and_finalize/last_reg_is_not_future_fail.out @@ -0,0 +1,3 @@ +errors: +- 'Failed to run `VM::deploy for program child.aleo: The last output of function ''foo'' must be a future associated with itself' +outputs: [] diff --git a/synthesizer/tests/expectations/vm/execute_and_finalize/mapping_operations.out b/synthesizer/tests/expectations/vm/execute_and_finalize/mapping_operations.out index 4bdf331e05..6b2773e6cf 100644 --- a/synthesizer/tests/expectations/vm/execute_and_finalize/mapping_operations.out +++ b/synthesizer/tests/expectations/vm/execute_and_finalize/mapping_operations.out @@ -4,44 +4,44 @@ outputs: execute: mapping_operations.aleo/empty_remove: outputs: - - '{"type":"future","id":"7291478973311998196889109284965727070006803821552839308760042614138648979276field","value":"{\n program_id: mapping_operations.aleo,\n function_name: empty_remove,\n arguments: [\n 10u8\n ]\n}"}' + - '{"type":"future","id":"7635640739633293853436744163909014640199975942090334368682977334784204769011field","value":"{\n program_id: mapping_operations.aleo,\n function_name: empty_remove,\n arguments: [\n 10u8\n ]\n}"}' speculate: the execution was accepted add_next_block: succeeded. 
- verified: true execute: mapping_operations.aleo/insert_contains_remove: outputs: - - '{"type":"future","id":"8125199181008624196820150111927204421834678204873633176936287270742730215646field","value":"{\n program_id: mapping_operations.aleo,\n function_name: insert_contains_remove,\n arguments: [\n 0u8,\n 0u8\n ]\n}"}' + - '{"type":"future","id":"2222820579352641087756930842916349134795974577897148450258189134473958563715field","value":"{\n program_id: mapping_operations.aleo,\n function_name: insert_contains_remove,\n arguments: [\n 0u8,\n 0u8\n ]\n}"}' speculate: the execution was accepted add_next_block: succeeded. - verified: true execute: mapping_operations.aleo/insert_contains_remove: outputs: - - '{"type":"future","id":"564988137123189827911719500643390438805066632168511094429444048054438194661field","value":"{\n program_id: mapping_operations.aleo,\n function_name: insert_contains_remove,\n arguments: [\n 0u8,\n 0u8\n ]\n}"}' + - '{"type":"future","id":"7976126249407457464284575267611007374057326939931567034459595303517614384513field","value":"{\n program_id: mapping_operations.aleo,\n function_name: insert_contains_remove,\n arguments: [\n 0u8,\n 0u8\n ]\n}"}' speculate: the execution was accepted add_next_block: succeeded. - verified: true execute: mapping_operations.aleo/insert_contains_remove: outputs: - - '{"type":"future","id":"1504983850261979002191664940947948558038660291110914628083356222450379989621field","value":"{\n program_id: mapping_operations.aleo,\n function_name: insert_contains_remove,\n arguments: [\n 0u8,\n 1u8\n ]\n}"}' + - '{"type":"future","id":"7584999017838461056060100707559452462656710499900127442904227073384510302747field","value":"{\n program_id: mapping_operations.aleo,\n function_name: insert_contains_remove,\n arguments: [\n 0u8,\n 1u8\n ]\n}"}' speculate: the execution was accepted add_next_block: succeeded. 
additional: - child_outputs: - credits.aleo/fee_private: + credits.aleo/fee_public: outputs: - - '{"type":"record","id":"2982975415914840620507158319258878944269906877410766878798891082463802016242field","checksum":"3862090753639563234029083871874631115872354449378325601192947833837837297501field","value":"record1qyqsp3p5w845k0nmwmlwau7zavphxns6rh9nw8tg83ds0cjx2hnmj6qdqyxx66trwfhkxun9v35hguerqqpqzqzfd6xh2zdl25cwy9evykj9s6p90xys775fwzrqws8qn9g44lzfqn3e4cam6huahmz49k20vnssfjs25f4v0huxlpklc0wnyvredep3y4w6r94"}' + - '{"type":"future","id":"3701652920213282423281084685169254121653756184853703028504071363673532942725field","value":"{\n program_id: credits.aleo,\n function_name: fee_public,\n arguments: [\n aleo1eakarna6a70pg9r0l9qal20faejwctgur5xt7lnc2a42wj2yssfqc89rk8,\n 11245u64\n ]\n}"}' - child_outputs: - credits.aleo/fee_private: + credits.aleo/fee_public: outputs: - - '{"type":"record","id":"48072890610857571956763071582832119607215592326496047104336441787173630868field","checksum":"7467290380261150018279273531628821954702770589524221500078644428102363349374field","value":"record1qyqsqh45sake5ld7mknnvq6gqz4fnpx5trzu6lmhsfxejf69pm8hfaqpqyxx66trwfhkxun9v35hguerqqpqzqxdhy98teyfl6uz9j434zhapesfd8s9zggu6wmcw8cjuyscgnq9px7a3r4uf4udc98zwvfksj4nqv8ckf0wv8lj6l35zdtm0l7gx6vswyvq76a"}' + - '{"type":"future","id":"767852843394263062723803642109980847781175859546892255347263461730875281315field","value":"{\n program_id: credits.aleo,\n function_name: fee_public,\n arguments: [\n aleo1eakarna6a70pg9r0l9qal20faejwctgur5xt7lnc2a42wj2yssfqc89rk8,\n 154810u64\n ]\n}"}' - child_outputs: - credits.aleo/fee_private: + credits.aleo/fee_public: outputs: - - 
'{"type":"record","id":"4110917071309386297883198571995053247696762203282665069271948556795502494816field","checksum":"5208736060122533898058377411292933064659147009969289502970426241558725961052field","value":"record1qyqspe5yr7j2nej3pg4jr9gd6fn9uzu27gfy29aj3g5spc32tr2dqksqqyxx66trwfhkxun9v35hguerqqpqzq8tq90g30z3r0ll9ffn36cwtwvwmugnh4yak6afr264lzzw3lg4p7dfeqfwzqzuuw084ym2km0um206l4s9lkmw29w4499r422uvy3q6dvnd32"}' + - '{"type":"future","id":"5831877738131393412656336446760308055442933218729238434398743654782252530700field","value":"{\n program_id: credits.aleo,\n function_name: fee_public,\n arguments: [\n aleo1eakarna6a70pg9r0l9qal20faejwctgur5xt7lnc2a42wj2yssfqc89rk8,\n 154810u64\n ]\n}"}' - child_outputs: - credits.aleo/fee_private: + credits.aleo/fee_public: outputs: - - '{"type":"record","id":"127003451731600147843337554003537999361398270748193639437833213734975434703field","checksum":"6199041391391566355775171236268799936285239125918398783413624959865612791864field","value":"record1qyqsq3cmtcjcws6hmecvz7wswnhgulm8clv7tjvsnjdrgncamj4sqcqpqyxx66trwfhkxun9v35hguerqqpqzq993fr433kwql98qu7wh083fwed69wt9pk5ftg9cldvymz2eyk9pg8366sccu0as9vjnt0y4nxuw6mzm3g3f4ckjf2q2mkaasy0w0cs634pjhl"}' + - '{"type":"future","id":"3924911723698610328779426182874581891939297874519691347932588542992608923909field","value":"{\n program_id: credits.aleo,\n function_name: fee_public,\n arguments: [\n aleo1eakarna6a70pg9r0l9qal20faejwctgur5xt7lnc2a42wj2yssfqc89rk8,\n 154810u64\n ]\n}"}' diff --git a/synthesizer/tests/expectations/vm/execute_and_finalize/mint_and_split.out b/synthesizer/tests/expectations/vm/execute_and_finalize/mint_and_split.out index 91264a79a7..bd63f8cfca 100644 --- a/synthesizer/tests/expectations/vm/execute_and_finalize/mint_and_split.out +++ b/synthesizer/tests/expectations/vm/execute_and_finalize/mint_and_split.out @@ -4,15 +4,15 @@ outputs: execute: mint_and_split.aleo/mint: outputs: - - 
'{"type":"record","id":"7636508513753843688255477046613755926717864376364314808857555307422986366583field","checksum":"2724303349197854838397688958932042996013299317300510410551052639216931686578field","value":"record1qyqspznunw4usdtcqwznvpm3f73dlfxcmg07emews5rxsvfp8jelaqs3qyxx66trwfhkxun9v35hguerqqpqzqqjmj4kl3w0fh0dhqy67c47xgw7ftfdkhplztykwwr7mgnn8cm3q50aufvdjh84e3325vgsq98uzgdxyscfrymgz6j5p6txj0u06ngskkug8kd"}' + - '{"type":"record","id":"7334558502140616765536611609313499148179717945955456292257315684677266501449field","checksum":"281154250604636828435706866945909260040573817788279288153963030037316744168field","value":"record1qyqsp9pr7qlgedcywvyk5lsnpuqpepyuv9hseec63eacjpn8d0rpmwgsqyxx66trwfhkxun9v35hguerqqpqzq9ql0hu0da6wuzrm0a7ms3kfvj5279e43rulstm9plhzs0z0xe6p52cn7pdz7x5hkwhz905kp56f4fuq08k9maf57qteekdv9yddk8sqxq4d98"}' speculate: the execution was accepted add_next_block: succeeded. - execute: Commitment '1266307482263846358970326041806201638141701138269282465033372005968041137990field' does not exist - execute: Input record for 'mint_and_split.aleo' must belong to the signer additional: - child_outputs: - credits.aleo/fee_private: + credits.aleo/fee_public: outputs: - - '{"type":"record","id":"6034384869866160676195114971378149013303703779101829835678240523345757814679field","checksum":"685010203971891455492083289280654319079811728620135209391715861377156619685field","value":"record1qyqspmum93lsptda6cxy2934329jud5j5d2w974qj980uzacuurkteq0qyxx66trwfhkxun9v35hguerqqpqzq95n3clh78n7alksrkrcwvf0kc5qeej4mmudqf57t7mqt08jd97p4nq3txxlzvtf3hsf093raynhzylqxz8vscq22pc2qaryw7r3qms7wmxm6s"}' + - '{"type":"future","id":"2224596965693604846363254284152512550549945382876106932610187515185636813504field","value":"{\n program_id: credits.aleo,\n function_name: fee_public,\n arguments: [\n aleo19e6k5ferx3k8a9k79xtj4uuaztt2jl4eza7k43pygsu977yazypqqwdmw6,\n 1414u64\n ]\n}"}' - {} - {} diff --git a/synthesizer/tests/expectations/vm/execute_and_finalize/multiple_async_fail.out 
b/synthesizer/tests/expectations/vm/execute_and_finalize/multiple_async_fail.out new file mode 100644 index 0000000000..2c841f10f6 --- /dev/null +++ b/synthesizer/tests/expectations/vm/execute_and_finalize/multiple_async_fail.out @@ -0,0 +1,3 @@ +errors: +- 'Failed to run `VM::deploy for program parent.aleo: Function ''foo'' can contain at most one ''async'' instruction' +outputs: [] diff --git a/synthesizer/tests/expectations/vm/execute_and_finalize/no_import_external_read_fail.out b/synthesizer/tests/expectations/vm/execute_and_finalize/no_import_external_read_fail.out new file mode 100644 index 0000000000..817f9c7e2f --- /dev/null +++ b/synthesizer/tests/expectations/vm/execute_and_finalize/no_import_external_read_fail.out @@ -0,0 +1,3 @@ +errors: +- 'Failed to run `VM::deploy for program relay.aleo: External program ''registry.aleo'' is not imported by ''relay.aleo''.' +outputs: [] diff --git a/synthesizer/tests/expectations/vm/execute_and_finalize/out_of_order_await_fail.out b/synthesizer/tests/expectations/vm/execute_and_finalize/out_of_order_await_fail.out new file mode 100644 index 0000000000..7d4109cee6 --- /dev/null +++ b/synthesizer/tests/expectations/vm/execute_and_finalize/out_of_order_await_fail.out @@ -0,0 +1,3 @@ +errors: +- 'Failed to run `VM::deploy for program parent.aleo: Futures in finalize ''foo'' are not awaited in the order they are passed in.' +outputs: [] diff --git a/synthesizer/tests/expectations/vm/execute_and_finalize/output_child_without_async_fail.out b/synthesizer/tests/expectations/vm/execute_and_finalize/output_child_without_async_fail.out new file mode 100644 index 0000000000..1afb6b4e21 --- /dev/null +++ b/synthesizer/tests/expectations/vm/execute_and_finalize/output_child_without_async_fail.out @@ -0,0 +1,3 @@ +errors: +- 'Failed to run `VM::deploy for program parent.aleo: Function ''parent.aleo/foo'' must contain a finalize block, since it calls ''child.aleo/foo''.' 
+outputs: [] diff --git a/synthesizer/tests/expectations/vm/execute_and_finalize/program_callable.out b/synthesizer/tests/expectations/vm/execute_and_finalize/program_callable.out index 150420a0fa..8bb0637e68 100644 --- a/synthesizer/tests/expectations/vm/execute_and_finalize/program_callable.out +++ b/synthesizer/tests/expectations/vm/execute_and_finalize/program_callable.out @@ -5,10 +5,10 @@ outputs: execute: parent.aleo/foo: outputs: - - '{"type":"public","id":"232678181145630788399983360361981778077854318297598920122679715355556018707field","value":"aleo16w8t56s7v6ud7vu33fr388ph0dq0c7yhp597cyjt88rr3nultcyqcyk9yy"}' - - '{"type":"public","id":"307885753616111794499278513815755325774421253222438014088047291626146246158field","value":"aleo1qr2ha4pfs5l28aze88yn6fhleeythklkczrule2v838uwj65n5gqxt9djx"}' - - '{"type":"public","id":"1260075966251215032940540134753397347255605556303289668063307322367655594483field","value":"aleo1qr2ha4pfs5l28aze88yn6fhleeythklkczrule2v838uwj65n5gqxt9djx"}' - - '{"type":"public","id":"6132019643538827223159216821616255691562939708927702036224235366852272625285field","value":"aleo1qr2ha4pfs5l28aze88yn6fhleeythklkczrule2v838uwj65n5gqxt9djx"}' + - '{"type":"public","id":"7957417389566842019333476383015223465797041221984916169491225413765492389707field","value":"aleo16w8t56s7v6ud7vu33fr388ph0dq0c7yhp597cyjt88rr3nultcyqcyk9yy"}' + - '{"type":"public","id":"5660332966063165816193998255057769236492104556419359071777110117631685203432field","value":"aleo1qr2ha4pfs5l28aze88yn6fhleeythklkczrule2v838uwj65n5gqxt9djx"}' + - '{"type":"public","id":"119590126009840588550727571915536854356547100414781210870229193044716803923field","value":"aleo1qr2ha4pfs5l28aze88yn6fhleeythklkczrule2v838uwj65n5gqxt9djx"}' + - '{"type":"public","id":"3955648008862663886784245642019540732803638576128766893102159939289033551109field","value":"aleo1qr2ha4pfs5l28aze88yn6fhleeythklkczrule2v838uwj65n5gqxt9djx"}' speculate: the execution was accepted add_next_block: succeeded. 
additional: @@ -16,8 +16,8 @@ additional: - child_outputs: child.aleo/foo: outputs: - - '{"type":"public","id":"5601132905719769837487481041811167147031721408864816226961174115045250639394field","value":"aleo16w8t56s7v6ud7vu33fr388ph0dq0c7yhp597cyjt88rr3nultcyqcyk9yy"}' - - '{"type":"public","id":"4866955403610457225216600968672882719581841491915839063024906456611968828355field","value":"aleo1qr2ha4pfs5l28aze88yn6fhleeythklkczrule2v838uwj65n5gqxt9djx"}' - credits.aleo/fee_private: + - '{"type":"public","id":"5860672233404277218165914850445330702482897594856038385529876412918074003384field","value":"aleo16w8t56s7v6ud7vu33fr388ph0dq0c7yhp597cyjt88rr3nultcyqcyk9yy"}' + - '{"type":"public","id":"7032360703707892728582573080419248600535906704593225990910797295396231122684field","value":"aleo1qr2ha4pfs5l28aze88yn6fhleeythklkczrule2v838uwj65n5gqxt9djx"}' + credits.aleo/fee_public: outputs: - - '{"type":"record","id":"3405674025766329369434309421947987188809020226610822755338621799001328840366field","checksum":"2290293865028149494249499535633241867489656506924610383724277827943521435010field","value":"record1qyqsq8u5n9rn77xzqqemfr60lr5gd0kl3kmcr8y4kvf34rtxkn5jzhsyqyxx66trwfhkxun9v35hguerqqpqzqyhetu9jy8vsyeaqsq5thyrj7nldd626s0fgatwpffv5j3hecq6p7pkx5xqz4xfcrlu8u2tz8hew0wuagx3cc0wgzlq7uhr2lrc35nqy7dgy47"}' + - '{"type":"future","id":"3173434315503739455207432985984041544966817001490874630397863189982314711932field","value":"{\n program_id: credits.aleo,\n function_name: fee_public,\n arguments: [\n aleo1qr2ha4pfs5l28aze88yn6fhleeythklkczrule2v838uwj65n5gqxt9djx,\n 2123u64\n ]\n}"}' diff --git a/synthesizer/tests/expectations/vm/execute_and_finalize/public_wallet.out b/synthesizer/tests/expectations/vm/execute_and_finalize/public_wallet.out index 83bd95ab4c..cbb7f2d77c 100644 --- a/synthesizer/tests/expectations/vm/execute_and_finalize/public_wallet.out +++ b/synthesizer/tests/expectations/vm/execute_and_finalize/public_wallet.out @@ -4,14 +4,14 @@ outputs: execute: 
public_wallet.aleo/init: outputs: - - '{"type":"future","id":"643313357591837098160410683436104757275790983408765054797072865390269356548field","value":"{\n program_id: public_wallet.aleo,\n function_name: init,\n arguments: [\n {\n program_id: token.aleo,\n function_name: mint_public,\n arguments: [\n aleo1sry3pke49ykrf0aeshf889tr98r4c86p5f4ms766795ssdwfdyqq9jdg0j,\n 10u64\n ]\n }\n \n ]\n}"}' + - '{"type":"future","id":"2740109864087873652477151933781698204925175410187376817867987810696050546048field","value":"{\n program_id: public_wallet.aleo,\n function_name: init,\n arguments: [\n {\n program_id: token.aleo,\n function_name: mint_public,\n arguments: [\n aleo1sry3pke49ykrf0aeshf889tr98r4c86p5f4ms766795ssdwfdyqq9jdg0j,\n 10u64\n ]\n }\n \n ]\n}"}' speculate: the execution was accepted add_next_block: succeeded. additional: - child_outputs: token.aleo/mint_public: outputs: - - '{"type":"future","id":"4537627763763327286502315223758961709385837795977680378235435937164895010454field","value":"{\n program_id: token.aleo,\n function_name: mint_public,\n arguments: [\n aleo1sry3pke49ykrf0aeshf889tr98r4c86p5f4ms766795ssdwfdyqq9jdg0j,\n 10u64\n ]\n}"}' - credits.aleo/fee_private: + - '{"type":"future","id":"2095235103073153862497986952383880687050623273703041876358116424903602929020field","value":"{\n program_id: token.aleo,\n function_name: mint_public,\n arguments: [\n aleo1sry3pke49ykrf0aeshf889tr98r4c86p5f4ms766795ssdwfdyqq9jdg0j,\n 10u64\n ]\n}"}' + credits.aleo/fee_public: outputs: - - '{"type":"record","id":"5907104904483017944370886525724321520913422122645092211228240860024172441663field","checksum":"5028009166132212399635589181092346921835371600420834084268447856042439491field","value":"record1qyqspshxjatwcdkrde0m2rspsm338q8p7s4eru9m8ftjqzf0ttxpkag0qyxx66trwfhkxun9v35hguerqqpqzqqzvfyzxfn5qlq5aewnafp8z576qvqjjx9pup92f6dxywga9qdupchalpjgkqrwva74ssnqjqgewu8xwj285f0c08t6czss5lcftjhqjuazgdk"}' + - 
'{"type":"future","id":"4373249435479943424484888940718424132561120812144078253060284512525421799293field","value":"{\n program_id: credits.aleo,\n function_name: fee_public,\n arguments: [\n aleo1d3e2je2m2hsxwdsvntvf4jnnlj459ywfry6ch2qwrpy6l6r6yvpq8e88h5,\n 131201u64\n ]\n}"}' diff --git a/synthesizer/tests/expectations/vm/execute_and_finalize/read_external_mapping.out b/synthesizer/tests/expectations/vm/execute_and_finalize/read_external_mapping.out new file mode 100644 index 0000000000..a729b3163e --- /dev/null +++ b/synthesizer/tests/expectations/vm/execute_and_finalize/read_external_mapping.out @@ -0,0 +1,73 @@ +errors: [] +outputs: +- verified: true + execute: + relay.aleo/send: + outputs: + - '{"type":"record","id":"5505341694097720023583674648027312667621444458172921945164834002648638744768field","checksum":"4170712463954366904268628656227022271867279479485549214633981747772705648157field","value":"record1qyqsp358e054av498aavwel28wr36tg0ay27k4fc539ffmwz2nddl8gqqyzxgct5vy3sqqspqpfgwnp3rnwprhd2q3h8gmxcnldlczrvszade4vzxlu7dmfeg6j3rd8mwuzysqtgl6603ey2zzry8hjwmn3pt3twclpkkvssc4l4jzsvd6lxar"}' + - '{"type":"future","id":"5336913895922947334887041593466841136470735988519588898509306662059714980450field","value":"{\n program_id: relay.aleo,\n function_name: send,\n arguments: [\n aleo1f6eg623knp66cwx0926w3plgdgzcmfpgyrzgnjz90mucgs3z7s9qls4upm\n ]\n}"}' + speculate: the execution was rejected + add_next_block: succeeded. 
+- verified: true + execute: + relay.aleo/send_without_check: + outputs: + - '{"type":"record","id":"4755207731349921544198839760105069860415948248486655350742993041864954064196field","checksum":"7848435433502532569425287419063381736913355859517668180377091558079541996646field","value":"record1qyqsp83ncqrtrev57v03h3j8qcysfgef256zh7pmh7zgj83h6g7tfkq0qyzxgct5vy3sqqspqzx4ww05zz3grf6hxgr46csu2vmzr2lgq0f48kxp4j383l68ufqsq45f8wqk6jxfnkm6v92cq48xea0tfrg0fwwr249m95t4eka6jkgv0c5y7k"}' + - '{"type":"future","id":"1027044606530325120447980237911983680107621060206232306337126914234987187002field","value":"{\n program_id: relay.aleo,\n function_name: send_without_check,\n arguments: [\n aleo1f6eg623knp66cwx0926w3plgdgzcmfpgyrzgnjz90mucgs3z7s9qls4upm\n ]\n}"}' + speculate: the execution was accepted + add_next_block: succeeded. +- verified: true + execute: + registry.aleo/register: + outputs: + - '{"type":"future","id":"4059159583881077685368973757192878822018897618745592372395499886263264340961field","value":"{\n program_id: registry.aleo,\n function_name: register,\n arguments: [\n aleo1f6eg623knp66cwx0926w3plgdgzcmfpgyrzgnjz90mucgs3z7s9qls4upm\n ]\n}"}' + speculate: the execution was accepted + add_next_block: succeeded. 
+- verified: true + execute: + relay.aleo/send: + outputs: + - '{"type":"record","id":"2277384653342632398532359071690090462344215994043547853708800775056671259572field","checksum":"3071210942562837171924171313096615835242397071199450951002063969440885822680field","value":"record1qyqspfwaru0f2lj0s2k6p9jfmmkzyvkzl5qpagt00edyuf9qn3gnu5g9qyzxgct5vy3sqqspqrncgctd3wfmz2ggx0v7l5cggxxad49wcmtlyrjnk8fqulmkg3h3rleuqh8nmwn5d9z8cpf6z75sy880xenua6hu9wk6ptzwh9vnzps3l7743a"}' + - '{"type":"future","id":"6015012441862221318333000102440691156905727650418067306609473392233853855381field","value":"{\n program_id: relay.aleo,\n function_name: send,\n arguments: [\n aleo1f6eg623knp66cwx0926w3plgdgzcmfpgyrzgnjz90mucgs3z7s9qls4upm\n ]\n}"}' + speculate: the execution was accepted + add_next_block: succeeded. +- verified: true + execute: + registry.aleo/unregister: + outputs: + - '{"type":"future","id":"621057053984946494815874859056940220465065220086041076777338967969133345871field","value":"{\n program_id: registry.aleo,\n function_name: unregister,\n arguments: [\n aleo1f6eg623knp66cwx0926w3plgdgzcmfpgyrzgnjz90mucgs3z7s9qls4upm\n ]\n}"}' + speculate: the execution was accepted + add_next_block: succeeded. 
+- verified: true + execute: + relay.aleo/send: + outputs: + - '{"type":"record","id":"6497977440830787207175874226764101265608813002804421333613230199582364410758field","checksum":"319323911748946858530605909565888788506340329996151513367076865761846915611field","value":"record1qyqsqnajqear5neee3l8fykp4vcq35sgwreyz7hz3png3cn2yyljdscfqyzxgct5vy3sqqspqzu6lezptk9xjpx35xdrv5tztz0v9qs9xx803pyqury2j47x2d5seymhf3xa2wefz7mkas7r7m3uf4kte7fdwm00ral53q2mhclx95qte8mpvc"}' + - '{"type":"future","id":"2216932771373637316148105432054544027092193801977530259105019952220093166242field","value":"{\n program_id: relay.aleo,\n function_name: send,\n arguments: [\n aleo1f6eg623knp66cwx0926w3plgdgzcmfpgyrzgnjz90mucgs3z7s9qls4upm\n ]\n}"}' + speculate: the execution was rejected + add_next_block: succeeded. +additional: +- child_outputs: + credits.aleo/fee_public: + outputs: + - '{"type":"future","id":"2373837014611692049497129045871775574464197133932453792739782919776486496194field","value":"{\n program_id: credits.aleo,\n function_name: fee_public,\n arguments: [\n aleo1xe2fps8f9xpdas2q0fqy22uraenk84tvvzetrsyxgnwy6445h59s6wv78x,\n 28479u64\n ]\n}"}' +- child_outputs: + credits.aleo/fee_public: + outputs: + - '{"type":"future","id":"6963949699870804211203514659901328830518734684604845622658837353595728006898field","value":"{\n program_id: credits.aleo,\n function_name: fee_public,\n arguments: [\n aleo1xe2fps8f9xpdas2q0fqy22uraenk84tvvzetrsyxgnwy6445h59s6wv78x,\n 28507u64\n ]\n}"}' +- child_outputs: + credits.aleo/fee_public: + outputs: + - '{"type":"future","id":"786151097471386478439918490898626420968604200995134718973623517242949574field","value":"{\n program_id: credits.aleo,\n function_name: fee_public,\n arguments: [\n aleo1f6eg623knp66cwx0926w3plgdgzcmfpgyrzgnjz90mucgs3z7s9qls4upm,\n 101210u64\n ]\n}"}' +- child_outputs: + credits.aleo/fee_public: + outputs: + - 
'{"type":"future","id":"7962216909726487379370954492051267200961073450060603523391007597642835489177field","value":"{\n program_id: credits.aleo,\n function_name: fee_public,\n arguments: [\n aleo1xe2fps8f9xpdas2q0fqy22uraenk84tvvzetrsyxgnwy6445h59s6wv78x,\n 28479u64\n ]\n}"}' +- child_outputs: + credits.aleo/fee_public: + outputs: + - '{"type":"future","id":"5340444358291789813118792762028331134214990505407681376089964565359528622453field","value":"{\n program_id: credits.aleo,\n function_name: fee_public,\n arguments: [\n aleo1f6eg623knp66cwx0926w3plgdgzcmfpgyrzgnjz90mucgs3z7s9qls4upm,\n 101214u64\n ]\n}"}' +- child_outputs: + credits.aleo/fee_public: + outputs: + - '{"type":"future","id":"7708776674386621879381619680665250794376507748822342974632850445134733330595field","value":"{\n program_id: credits.aleo,\n function_name: fee_public,\n arguments: [\n aleo1xe2fps8f9xpdas2q0fqy22uraenk84tvvzetrsyxgnwy6445h59s6wv78x,\n 28479u64\n ]\n}"}' diff --git a/synthesizer/tests/expectations/vm/execute_and_finalize/test_branch.out b/synthesizer/tests/expectations/vm/execute_and_finalize/test_branch.out index a41aa6a4f5..7c414c0ce3 100644 --- a/synthesizer/tests/expectations/vm/execute_and_finalize/test_branch.out +++ b/synthesizer/tests/expectations/vm/execute_and_finalize/test_branch.out @@ -4,33 +4,33 @@ outputs: execute: test_branch.aleo/run_test: outputs: - - '{"type":"future","id":"4807227403819588684031853679423924290019569164203178974904495218971849247520field","value":"{\n program_id: test_branch.aleo,\n function_name: run_test,\n arguments: [\n 1u8,\n 1u8\n ]\n}"}' + - '{"type":"future","id":"6853588955800014673009987953241306389090845327747907984198222141108269232573field","value":"{\n program_id: test_branch.aleo,\n function_name: run_test,\n arguments: [\n 1u8,\n 1u8\n ]\n}"}' speculate: the execution was rejected add_next_block: succeeded. 
- verified: true execute: test_branch.aleo/run_test: outputs: - - '{"type":"future","id":"6578563227874053328775488720373016284038873634029366303301076699226711812107field","value":"{\n program_id: test_branch.aleo,\n function_name: run_test,\n arguments: [\n 0u8,\n 1u8\n ]\n}"}' + - '{"type":"future","id":"7316910653703512796159979382480893246542312648132879967453276886284034879075field","value":"{\n program_id: test_branch.aleo,\n function_name: run_test,\n arguments: [\n 0u8,\n 1u8\n ]\n}"}' speculate: the execution was rejected add_next_block: succeeded. - verified: true execute: test_branch.aleo/run_test: outputs: - - '{"type":"future","id":"7508008554951635493629769098645178330945961002302110561994738513868095644109field","value":"{\n program_id: test_branch.aleo,\n function_name: run_test,\n arguments: [\n 0u8,\n 0u8\n ]\n}"}' + - '{"type":"future","id":"6402417637041760480107523094357167265585878714433227566435547816691472198663field","value":"{\n program_id: test_branch.aleo,\n function_name: run_test,\n arguments: [\n 0u8,\n 0u8\n ]\n}"}' speculate: the execution was accepted add_next_block: succeeded. 
additional: - child_outputs: - credits.aleo/fee_private: + credits.aleo/fee_public: outputs: - - '{"type":"record","id":"1798938114974546684903209797369438296308320213665197846673649863916852114678field","checksum":"934572607592461135140890365283940117058051001959725525910576582297797621269field","value":"record1qyqspa9fxepth5l9c2u7gj6quuvfpq8wqc9q73p4xqcrmj2nurd7cfq3qyxx66trwfhkxun9v35hguerqqpqzqx9w072uwlffgvu0he36lya458pluymlds8kpv3shjc4asc3wy2zp3rk0fv9mhq6ja45tux30ps2x433ekqdsx967z8ysqp9fk37epqvu520r9"}' + - '{"type":"future","id":"8176559465483810586872674176090912007328770812617215482809916166686904238834field","value":"{\n program_id: credits.aleo,\n function_name: fee_public,\n arguments: [\n aleo1x3r205zqql5ywy0cqqt74k0r0htuusn0d037ycxe8ftt9ep8hyzsmqz4dh,\n 17268u64\n ]\n}"}' - child_outputs: - credits.aleo/fee_private: + credits.aleo/fee_public: outputs: - - '{"type":"record","id":"1039543979177875951661037350337722240996003141461618829150227721443832233782field","checksum":"4293043628731843171082705656149326581514713699817730020543476948781408422008field","value":"record1qyqsqaarvryvc8vfqer4jcngz5k9wd6m4dfvwsr4tc43v8ynmc5ny2q0qyxx66trwfhkxun9v35hguerqqpqzqx2mm3lh6vu0f9pmpne0qz5rkr7g26s3et0trj673gff8klmlpqq0r5sjjjncqlvgazec63a0enrf5dfr3n7jczarf0eshvu6d0hx3sztrtu4z"}' + - '{"type":"future","id":"885516194992930770292437059317184478627651975125735363573325083543887608918field","value":"{\n program_id: credits.aleo,\n function_name: fee_public,\n arguments: [\n aleo1x3r205zqql5ywy0cqqt74k0r0htuusn0d037ycxe8ftt9ep8hyzsmqz4dh,\n 17268u64\n ]\n}"}' - child_outputs: - credits.aleo/fee_private: + credits.aleo/fee_public: outputs: - - 
'{"type":"record","id":"145788756097299829231591915301155878379901156732628535292899269362119878286field","checksum":"2449429819506214589213294593178366793060498781467568604016514039430651546570field","value":"record1qyqspttnaxurc3z4pdve7697588gxr5lvrj04m93shwzhr4znh49k9gwqyxx66trwfhkxun9v35hguerqqpqzqrc35k4y2kzu7kvhksvv2yu4t5m680w5u6ykv8cnrprtvfrda27qqt3y37y87fw5crd0efl8elqad36xdqekqr3w020krmj00enuy5pzqylmsv"}' + - '{"type":"future","id":"6884773732489674095657820682011451719106395760464004244662697484163781295818field","value":"{\n program_id: credits.aleo,\n function_name: fee_public,\n arguments: [\n aleo1x3r205zqql5ywy0cqqt74k0r0htuusn0d037ycxe8ftt9ep8hyzsmqz4dh,\n 17268u64\n ]\n}"}' diff --git a/synthesizer/tests/expectations/vm/execute_and_finalize/test_rand.out b/synthesizer/tests/expectations/vm/execute_and_finalize/test_rand.out index f31addd98f..d753dcf2b4 100644 --- a/synthesizer/tests/expectations/vm/execute_and_finalize/test_rand.out +++ b/synthesizer/tests/expectations/vm/execute_and_finalize/test_rand.out @@ -4,44 +4,44 @@ outputs: execute: test_rand.aleo/rand_chacha_with_literals: outputs: - - '{"type":"future","id":"856859213363286889141859059264203066675648340843625883729021378909143418037field","value":"{\n program_id: test_rand.aleo,\n function_name: rand_chacha_with_literals,\n arguments: [\n 0scalar,\n 0group,\n 0u8,\n 2i16,\n 4u32,\n 7i64,\n 8u128,\n 10field\n ]\n}"}' + - '{"type":"future","id":"859791478012828215720348494076914719205244104520150752280307504054509554398field","value":"{\n program_id: test_rand.aleo,\n function_name: rand_chacha_with_literals,\n arguments: [\n 0scalar,\n 0group,\n 0u8,\n 2i16,\n 4u32,\n 7i64,\n 8u128,\n 10field\n ]\n}"}' speculate: the execution was accepted add_next_block: succeeded. 
- verified: true execute: test_rand.aleo/rand_chacha_with_struct: outputs: - - '{"type":"future","id":"2863690152286397600239869811553134445139434415556812678934084238826545926808field","value":"{\n program_id: test_rand.aleo,\n function_name: rand_chacha_with_struct,\n arguments: [\n {\n first: 0field,\n second: 0field,\n third: 0field,\n fourth: 0field,\n fifth: 0field\n}\n ]\n}"}' + - '{"type":"future","id":"1067916594854496467910772380664552622025523070198727493475463622599909707249field","value":"{\n program_id: test_rand.aleo,\n function_name: rand_chacha_with_struct,\n arguments: [\n {\n first: 0field,\n second: 0field,\n third: 0field,\n fourth: 0field,\n fifth: 0field\n}\n ]\n}"}' speculate: the execution was accepted add_next_block: succeeded. - verified: true execute: test_rand.aleo/rand_chacha_check: outputs: - - '{"type":"future","id":"5111075340000719619409954898038080805237607157543987731445343150082162501419field","value":"{\n program_id: test_rand.aleo,\n function_name: rand_chacha_check,\n arguments: [\n 0field,\n false\n ]\n}"}' + - '{"type":"future","id":"3721325135151760660773959530505944451747681933722462808964783147996869797702field","value":"{\n program_id: test_rand.aleo,\n function_name: rand_chacha_check,\n arguments: [\n 0field,\n false\n ]\n}"}' speculate: the execution was rejected add_next_block: succeeded. - verified: true execute: test_rand.aleo/rand_chacha_check: outputs: - - '{"type":"future","id":"6632578043065250671900699630486713707508511783056688582055933257632278653469field","value":"{\n program_id: test_rand.aleo,\n function_name: rand_chacha_check,\n arguments: [\n 1field,\n true\n ]\n}"}' + - '{"type":"future","id":"887371549615679800380522845098080464570119184210350810479392117984911457950field","value":"{\n program_id: test_rand.aleo,\n function_name: rand_chacha_check,\n arguments: [\n 1field,\n true\n ]\n}"}' speculate: the execution was rejected add_next_block: succeeded. 
additional: - child_outputs: - credits.aleo/fee_private: + credits.aleo/fee_public: outputs: - - '{"type":"record","id":"4381229306463692442497149670614588062764798743790588146804215330229884792339field","checksum":"2138639167265992670566199833999883133869484105738196279078647847384925584310field","value":"record1qyqsq96nqu434vyta43jksy548fdck37rc7kc34494ndwd9xfg9l7zsfqyxx66trwfhkxun9v35hguerqqpqzqp47n5hcfmt2twwyz0wm37mh898yfhr6rw79z026vpnc4v6h5sap3wxhvupx7k4p2jhrwtx5zw6r5t3vuarmsq8n4c4p987z586n6yqzjd7p7w"}' + - '{"type":"future","id":"6314628133780265670801554258125814886017405269745792760682853845340140460175field","value":"{\n program_id: credits.aleo,\n function_name: fee_public,\n arguments: [\n aleo1uchf7kruskpp8thlnfeya9qeklcjss27j6rtu74zz7ch559neqystgslsp,\n 601806u64\n ]\n}"}' - child_outputs: - credits.aleo/fee_private: + credits.aleo/fee_public: outputs: - - '{"type":"record","id":"6586316285692512365232256556210792622636286450707813931421762885414931353832field","checksum":"1586190359159798393415585130912439326237257065208437997708789530351085560464field","value":"record1qyqsq4tq08wj70zje3dth93xhcaac25nc7szhtt8egu4h42tln3nh0crqyxx66trwfhkxun9v35hguerqqpqzq96k0cukn8schvjlnjdyf0z0r28yvnlnmpuuj4qhxpwqc8qktc8z9e93akjxefm88vhqxpv7ppqrxp6q8hpvzee72az4mx6k4553tmss23hwgr"}' + - '{"type":"future","id":"3725939744341267737290273038160661629630343114766507174134842826652488781816field","value":"{\n program_id: credits.aleo,\n function_name: fee_public,\n arguments: [\n aleo1uchf7kruskpp8thlnfeya9qeklcjss27j6rtu74zz7ch559neqystgslsp,\n 26679u64\n ]\n}"}' - child_outputs: - credits.aleo/fee_private: + credits.aleo/fee_public: outputs: - - 
'{"type":"record","id":"70907903135321440406346995966772705528708353149493963813138922918859409499field","checksum":"1758155264387920852695791077636953465936690916033386407170194003924917340917field","value":"record1qyqspxy8g4khtakgjt7dk7p3ych5zhqysfp78uh7740jyr4j2q4xu3qxqyxx66trwfhkxun9v35hguerqqpqzqqfgux5pw80dc9xzk0vzklz8v7kt3szhr2h70tt8tj9c2ddgyptppsgj9uj7g0spx3uvw5l7c4ue7qgp948gjcwen4ql5u8fhj3333qjccs5hk"}' + - '{"type":"future","id":"5227003534986816857285932061757797688706802206018964764622184983760566708322field","value":"{\n program_id: credits.aleo,\n function_name: fee_public,\n arguments: [\n aleo1uchf7kruskpp8thlnfeya9qeklcjss27j6rtu74zz7ch559neqystgslsp,\n 28344u64\n ]\n}"}' - child_outputs: - credits.aleo/fee_private: + credits.aleo/fee_public: outputs: - - '{"type":"record","id":"5402941757199163131329664269602668871633752908147903083826863058623466909547field","checksum":"6299858438509874984288393356100492859126380436511534635530655092194846486258field","value":"record1qyqsparxz7rjgu5qwj29fp5xspylvxuxumjzpjwp7yahvn09szx0l6g3qyxx66trwfhkxun9v35hguerqqpqzq9vq9pcmuhpxvvrax2n3nht8f9jgdpzt9h9l7w7k5geycwqtypupsuh2lrxq4l7ts96haxgvxkdjvwd42cqpt2l96j9xhw5zlxuu2ssx0gfq8x"}' + - '{"type":"future","id":"5806769723479332130567002952494928256138310337461654699762319212831997850826field","value":"{\n program_id: credits.aleo,\n function_name: fee_public,\n arguments: [\n aleo1uchf7kruskpp8thlnfeya9qeklcjss27j6rtu74zz7ch559neqystgslsp,\n 28344u64\n ]\n}"}' diff --git a/synthesizer/tests/expectations/vm/execute_and_finalize/timelock.out b/synthesizer/tests/expectations/vm/execute_and_finalize/timelock.out index ade542ee2f..425f80af2e 100644 --- a/synthesizer/tests/expectations/vm/execute_and_finalize/timelock.out +++ b/synthesizer/tests/expectations/vm/execute_and_finalize/timelock.out @@ -4,22 +4,22 @@ outputs: execute: timelock.aleo/lock: outputs: - - 
'{"type":"future","id":"578152375660960413620944529176838111130558819753119775638549496005056051167field","value":"{\n program_id: timelock.aleo,\n function_name: lock,\n arguments: []\n}"}' + - '{"type":"future","id":"5726101227699718662507291026879175619949633046158707589853378418659241463316field","value":"{\n program_id: timelock.aleo,\n function_name: lock,\n arguments: []\n}"}' speculate: the execution was rejected add_next_block: succeeded. - verified: true execute: timelock.aleo/lock: outputs: - - '{"type":"future","id":"1153321566575392305875708931221831574430034867281515788213221373067783340857field","value":"{\n program_id: timelock.aleo,\n function_name: lock,\n arguments: []\n}"}' + - '{"type":"future","id":"5825781590715337627504208073275179158827587281138872289977731167576414664969field","value":"{\n program_id: timelock.aleo,\n function_name: lock,\n arguments: []\n}"}' speculate: the execution was rejected add_next_block: succeeded. additional: - child_outputs: - credits.aleo/fee_private: + credits.aleo/fee_public: outputs: - - '{"type":"record","id":"5473322261890369096710391500775021877134453013698547035599192539986067358037field","checksum":"6980374149233505129565181437471791515647159093892921261741246653396495143051field","value":"record1qyqsq40tapm7w0zll08x28yu8ufzs3jqk53ec0pen7vfaeppl2n84jcyqyxx66trwfhkxun9v35hguerqqpqzqzc6xphegrmzxkw8clqe5fsxs7xnqp9f57usrmmyv3gu4ey5d3aplsdq2hkpwzylgwne67j769p77754yqdcam28mh68huzlz74npxsqjwwdtx"}' + - '{"type":"future","id":"2868527388214006275127069563021857572887489216649877337285946162120321568912field","value":"{\n program_id: credits.aleo,\n function_name: fee_public,\n arguments: [\n aleo12tksdptp7hvxly8tkm3um08fvf53qpehsgdgqfvy9pe3sewcq5ysjg5myy,\n 5164u64\n ]\n}"}' - child_outputs: - credits.aleo/fee_private: + credits.aleo/fee_public: outputs: - - 
'{"type":"record","id":"3500478305510861468600974780645788091574034938191756692997832413371764035285field","checksum":"5270087208881412083504176599993145162545711447694009649032440534082007001776field","value":"record1qyqsqk3tweyvy3x4rsk06umpl3j6ucy7fpk5n8ypafp4vga25asnf0grqyxx66trwfhkxun9v35hguerqqpqzqx0kky3nlgn23x8m0khjmsgh8hmye9m0mpre68zgvysvlz82szuq3vy0ualeusvypmafl9gp97gwv067ljhn9we058mvua74ych7hgqvnj9lc4"}' + - '{"type":"future","id":"3666380379303443004933801395245329857516145915761366182794264005536589963556field","value":"{\n program_id: credits.aleo,\n function_name: fee_public,\n arguments: [\n aleo12tksdptp7hvxly8tkm3um08fvf53qpehsgdgqfvy9pe3sewcq5ysjg5myy,\n 5164u64\n ]\n}"}' diff --git a/synthesizer/tests/expectations/vm/execute_and_finalize/unawaited_future_fail.out b/synthesizer/tests/expectations/vm/execute_and_finalize/unawaited_future_fail.out new file mode 100644 index 0000000000..7d4109cee6 --- /dev/null +++ b/synthesizer/tests/expectations/vm/execute_and_finalize/unawaited_future_fail.out @@ -0,0 +1,3 @@ +errors: +- 'Failed to run `VM::deploy for program parent.aleo: Futures in finalize ''foo'' are not awaited in the order they are passed in.' +outputs: [] diff --git a/synthesizer/tests/expectations/vm/execute_and_finalize/unknown_external_mapping_fail.out b/synthesizer/tests/expectations/vm/execute_and_finalize/unknown_external_mapping_fail.out new file mode 100644 index 0000000000..65ce813937 --- /dev/null +++ b/synthesizer/tests/expectations/vm/execute_and_finalize/unknown_external_mapping_fail.out @@ -0,0 +1,3 @@ +errors: +- 'Failed to run `VM::deploy for program relay.aleo: Mapping ''foo'' in ''registry.aleo'' is not defined.' 
+outputs: [] diff --git a/synthesizer/tests/expectations/vm/execute_and_finalize/unknown_mapping_fail.out b/synthesizer/tests/expectations/vm/execute_and_finalize/unknown_mapping_fail.out new file mode 100644 index 0000000000..eb8a147f5c --- /dev/null +++ b/synthesizer/tests/expectations/vm/execute_and_finalize/unknown_mapping_fail.out @@ -0,0 +1,3 @@ +errors: +- 'Failed to run `VM::deploy for program registry.aleo: Mapping ''foo'' in ''registry.aleo/register'' is not defined.' +outputs: [] diff --git a/synthesizer/tests/expectations/vm/execute_and_finalize/unused_future_fail.out b/synthesizer/tests/expectations/vm/execute_and_finalize/unused_future_fail.out new file mode 100644 index 0000000000..0cea7546f1 --- /dev/null +++ b/synthesizer/tests/expectations/vm/execute_and_finalize/unused_future_fail.out @@ -0,0 +1,3 @@ +errors: +- 'Failed to run `VM::deploy for program parent.aleo: Function ''foo'' contains futures, but the ''async'' instruction does not consume all of them in the order they were produced' +outputs: [] diff --git a/synthesizer/tests/expectations/vm/execute_and_finalize/unused_position.out b/synthesizer/tests/expectations/vm/execute_and_finalize/unused_position.out index eb604e54d4..c5e300b026 100644 --- a/synthesizer/tests/expectations/vm/execute_and_finalize/unused_position.out +++ b/synthesizer/tests/expectations/vm/execute_and_finalize/unused_position.out @@ -4,11 +4,11 @@ outputs: execute: unused_position.aleo/foo: outputs: - - '{"type":"future","id":"754819646549188005033265189873387370723612282538036088693249266044696644507field","value":"{\n program_id: unused_position.aleo,\n function_name: foo,\n arguments: []\n}"}' + - '{"type":"future","id":"4435915382452600913825742955271157728527943603774006701552876898718102875463field","value":"{\n program_id: unused_position.aleo,\n function_name: foo,\n arguments: []\n}"}' speculate: the execution was accepted add_next_block: succeeded. 
additional: - child_outputs: - credits.aleo/fee_private: + credits.aleo/fee_public: outputs: - - '{"type":"record","id":"3540562477020683716064470691350628391302941512915750897951711495334005821133field","checksum":"7993988099268814368872113132611442146154220017865766620056128915944022458704field","value":"record1qyqsq40tapm7w0zll08x28yu8ufzs3jqk53ec0pen7vfaeppl2n84jcyqyxx66trwfhkxun9v35hguerqqpqzqzc6xpjenmmzxkw8clqe5fsxs7xnqp9f57usrmmyv3gu4ey5d3aplsdq2hkpwzylgwne67j769p77754yqdcam28mh68huzlz74npxsqylrxra"}' + - '{"type":"future","id":"5889749875317192883762347751185109427367185401929794748301981981444845203330field","value":"{\n program_id: credits.aleo,\n function_name: fee_public,\n arguments: [\n aleo12tksdptp7hvxly8tkm3um08fvf53qpehsgdgqfvy9pe3sewcq5ysjg5myy,\n 2176u64\n ]\n}"}' diff --git a/synthesizer/tests/expectations/vm/execute_and_finalize/user_callable.out b/synthesizer/tests/expectations/vm/execute_and_finalize/user_callable.out index 58f4be97a8..85f3df23fb 100644 --- a/synthesizer/tests/expectations/vm/execute_and_finalize/user_callable.out +++ b/synthesizer/tests/expectations/vm/execute_and_finalize/user_callable.out @@ -4,14 +4,14 @@ outputs: execute: child.aleo/foo: outputs: - - '{"type":"public","id":"4279509780486643035080626777023973066084837379094817161797996960689084569794field","value":"aleo1qr2ha4pfs5l28aze88yn6fhleeythklkczrule2v838uwj65n5gqxt9djx"}' - - '{"type":"public","id":"4408001848080504344165436121252640202617322612254005793329268580869751931263field","value":"aleo1qr2ha4pfs5l28aze88yn6fhleeythklkczrule2v838uwj65n5gqxt9djx"}' + - '{"type":"public","id":"3583507900097573902692207210661581535840809808651900827750728854102720512424field","value":"aleo1qr2ha4pfs5l28aze88yn6fhleeythklkczrule2v838uwj65n5gqxt9djx"}' + - '{"type":"public","id":"476166291720572191849579987891810720100233870490756615272004665719966045283field","value":"aleo1qr2ha4pfs5l28aze88yn6fhleeythklkczrule2v838uwj65n5gqxt9djx"}' speculate: the execution was accepted 
add_next_block: succeeded. - execute: 'Failed to evaluate instruction (call child.aleo/foo into r0 r1;): Failed to evaluate instruction (assert.eq self.caller self.signer ;): ''assert.eq'' failed: ''aleo16w8t56s7v6ud7vu33fr388ph0dq0c7yhp597cyjt88rr3nultcyqcyk9yy'' is not equal to ''aleo1qr2ha4pfs5l28aze88yn6fhleeythklkczrule2v838uwj65n5gqxt9djx'' (should be equal)' additional: - child_outputs: - credits.aleo/fee_private: + credits.aleo/fee_public: outputs: - - '{"type":"record","id":"7306036686667087636279947507997825407084203811048562802739660291735242744444field","checksum":"7132981897182131254956763451386939918007763262893522220397577775796699535598field","value":"record1qyqsqx586v4tjhcdgtvpat2cmz7ztzd7drzm5c04gyeaxqmujgv7ucq3qyxx66trwfhkxun9v35hguerqqpqzqx4pcwh0jc37snpu02y8ujwh2u327ghc6yaeeyc4k74e56uvuhqp0tpta5q5eppwa48pq9eepyuln9ct5qth57klqzf67ewyqn9hresxwalp5l"}' + - '{"type":"future","id":"5245767978479482276373144091068362056657622227760198296183689243703275814117field","value":"{\n program_id: credits.aleo,\n function_name: fee_public,\n arguments: [\n aleo1qr2ha4pfs5l28aze88yn6fhleeythklkczrule2v838uwj65n5gqxt9djx,\n 1244u64\n ]\n}"}' - {} diff --git a/synthesizer/tests/test_process_execute.rs b/synthesizer/tests/test_process_execute.rs index a8d710234e..d57b5b91cf 100644 --- a/synthesizer/tests/test_process_execute.rs +++ b/synthesizer/tests/test_process_execute.rs @@ -35,110 +35,129 @@ fn test_process_execute() { // Run each test and compare it against its corresponding expectation. tests.par_iter().for_each(|test| { - // Add the programs into the process. - let mut process = process.clone(); - for program in test.programs() { - process.add_program(program).unwrap() + // Run the test. + let output = run_test(process.clone(), test); + // Check against the expected output. + test.check(&output).unwrap(); + // Save the output. 
+ test.save(&output).unwrap(); + }); +} + +// A helper function to run the test and extract the outputs as YAML, to be compared against the expectation. +fn run_test(process: Process, test: &ProgramTest) -> serde_yaml::Mapping { + // Initialize the output. + let mut output = serde_yaml::Mapping::new(); + output.insert( + serde_yaml::Value::String("errors".to_string()), + serde_yaml::Value::Sequence(serde_yaml::Sequence::new()), + ); + + // Add the programs into the process. + let mut process = process.clone(); + for program in test.programs() { + if let Err(err) = process.add_program(program) { + output + .get_mut(&serde_yaml::Value::String("errors".to_string())) + .unwrap() + .as_sequence_mut() + .unwrap() + .push(serde_yaml::Value::String(err.to_string())); + output.insert( + serde_yaml::Value::String("outputs".to_string()), + serde_yaml::Value::Sequence(serde_yaml::Sequence::new()), + ); + return output; } + } - // Initialize the RNG. - let rng = &mut match test.randomness() { - None => TestRng::default(), - Some(randomness) => TestRng::fixed(randomness), - }; + // Initialize the RNG. + let rng = &mut match test.randomness() { + None => TestRng::default(), + Some(randomness) => TestRng::fixed(randomness), + }; - let mut output = serde_yaml::Mapping::new(); - output.insert( - serde_yaml::Value::String("errors".to_string()), - serde_yaml::Value::Sequence(serde_yaml::Sequence::new()), - ); - output.insert( - serde_yaml::Value::String("outputs".to_string()), - serde_yaml::Value::Sequence( - test.cases() - .iter() - .map(|value| { - // Extract the function name, inputs, and optional private key. 
- let value = value.as_mapping().expect("expected mapping for test case"); - let program_id = ProgramID::::from_str( - value - .get("program") - .expect("expected program name for test case") - .as_str() - .expect("expected string for program name"), - ) - .expect("unable to parse program name"); - let function_name = Identifier::::from_str( - value - .get("function") - .expect("expected function name for test case") - .as_str() - .expect("expected string for function name"), - ) - .expect("unable to parse function name"); - let inputs = value - .get("inputs") - .expect("expected inputs for test case") - .as_sequence() - .expect("expected sequence for inputs") - .iter() - .map(|input| match &input { - serde_yaml::Value::Bool(bool) => { - Value::::from(Literal::Boolean(Boolean::new(*bool))) - } - _ => Value::::from_str( - input.as_str().expect("expected string for input"), - ) + output.insert( + serde_yaml::Value::String("outputs".to_string()), + serde_yaml::Value::Sequence( + test.cases() + .iter() + .map(|value| { + // Extract the function name, inputs, and optional private key. 
+ let value = value.as_mapping().expect("expected mapping for test case"); + let program_id = ProgramID::::from_str( + value + .get("program") + .expect("expected program name for test case") + .as_str() + .expect("expected string for program name"), + ) + .expect("unable to parse program name"); + let function_name = Identifier::::from_str( + value + .get("function") + .expect("expected function name for test case") + .as_str() + .expect("expected string for function name"), + ) + .expect("unable to parse function name"); + let inputs = value + .get("inputs") + .expect("expected inputs for test case") + .as_sequence() + .expect("expected sequence for inputs") + .iter() + .map(|input| match &input { + serde_yaml::Value::Bool(bool) => { + Value::::from(Literal::Boolean(Boolean::new(*bool))) + } + _ => Value::::from_str(input.as_str().expect("expected string for input")) .expect("unable to parse input"), - }) - .collect_vec(); - let private_key = match value.get("private_key") { - Some(private_key) => PrivateKey::::from_str( - private_key.as_str().expect("expected string for private key"), - ) - .expect("unable to parse private key"), - None => PrivateKey::new(rng).unwrap(), - }; + }) + .collect_vec(); + let private_key = match value.get("private_key") { + Some(private_key) => PrivateKey::::from_str( + private_key.as_str().expect("expected string for private key"), + ) + .expect("unable to parse private key"), + None => PrivateKey::new(rng).unwrap(), + }; - let mut run_test = || -> serde_yaml::Value { - // Authorize the execution. - let authorization = match process.authorize::( - &private_key, - program_id, - function_name, - inputs.iter(), - rng, - ) { - Ok(authorization) => authorization, - Err(err) => return serde_yaml::Value::String(err.to_string()), - }; - // Execute the authorization and extract the output as YAML. 
- std::panic::catch_unwind(AssertUnwindSafe(|| { - match process.execute::(authorization) { - Ok((response, _)) => serde_yaml::Value::Sequence( - response - .outputs() - .iter() - .cloned() - .map(|output| serde_yaml::Value::String(output.to_string())) - .collect_vec(), - ), - Err(err) => serde_yaml::Value::String(err.to_string()), - } - })) - .unwrap_or(serde_yaml::Value::String( - "Compiler panicked when calling `Process::execute`".to_string(), - )) + let mut run_test = || -> serde_yaml::Value { + // Authorize the execution. + let authorization = match process.authorize::( + &private_key, + program_id, + function_name, + inputs.iter(), + rng, + ) { + Ok(authorization) => authorization, + Err(err) => return serde_yaml::Value::String(err.to_string()), }; - run_test() - }) - .collect::(), - ), - ); + // Execute the authorization and extract the output as YAML. + std::panic::catch_unwind(AssertUnwindSafe(|| { + match process.execute::(authorization, rng) { + Ok((response, _)) => serde_yaml::Value::Sequence( + response + .outputs() + .iter() + .cloned() + .map(|output| serde_yaml::Value::String(output.to_string())) + .collect_vec(), + ), + Err(err) => serde_yaml::Value::String(err.to_string()), + } + })) + .unwrap_or(serde_yaml::Value::String( + "Compiler panicked when calling `Process::execute`".to_string(), + )) + }; + run_test() + }) + .collect::(), + ), + ); - // Check against the expected output. - test.check(&output).unwrap(); - // Save the output. 
- test.save(&output).unwrap(); - }); + output } diff --git a/synthesizer/tests/test_vm_execute_and_finalize.rs b/synthesizer/tests/test_vm_execute_and_finalize.rs index ce7ff4a93e..21a5b4e234 100644 --- a/synthesizer/tests/test_vm_execute_and_finalize.rs +++ b/synthesizer/tests/test_vm_execute_and_finalize.rs @@ -35,6 +35,7 @@ use snarkvm_synthesizer::{program::FinalizeOperation, VM}; use synthesizer_program::FinalizeGlobalState; use anyhow::Result; +use console::account::Address; use indexmap::IndexMap; use rayon::prelude::*; use std::borrow::Borrow; @@ -69,34 +70,61 @@ fn run_test(test: &ProgramTest) -> serde_yaml::Mapping { let genesis_private_key = PrivateKey::::new(rng).unwrap(); // Initialize the VM. - let (vm, records) = initialize_vm(&genesis_private_key, rng); + let (vm, _) = initialize_vm(&genesis_private_key, rng); + + // Fund the additional keys. + for key in test.keys() { + // Transfer 1_000_000_000_000 + let transaction = vm + .execute( + &genesis_private_key, + ("credits.aleo", "transfer_public"), + vec![ + Value::Plaintext(Plaintext::from(Literal::Address(Address::try_from(key).unwrap()))), + Value::Plaintext(Plaintext::from(Literal::U64(U64::new(1_000_000_000_000)))), + ] + .iter(), + None, + 0, + None, + rng, + ) + .unwrap(); + let (ratifications, transactions, aborted_transaction_ids, ratified_finalize_operations) = + vm.speculate(construct_finalize_global_state(&vm), Some(0u64), vec![], None, [transaction].iter()).unwrap(); + assert!(aborted_transaction_ids.is_empty()); - // Pre-construct the necessary fee records. - let num_fee_records = test.programs().len() + test.cases().len(); - let mut fee_records = construct_fee_records(&vm, &genesis_private_key, records, num_fee_records, rng); + let block = construct_next_block( + &vm, + &genesis_private_key, + ratifications, + transactions, + aborted_transaction_ids, + ratified_finalize_operations, + rng, + ); + vm.add_next_block(&block.unwrap()).unwrap(); + } // Deploy the programs. 
for program in test.programs() { - let transaction = - match vm.deploy(&genesis_private_key, program, Some(fee_records.pop().unwrap().0), 0, None, rng) { - Ok(transaction) => transaction, - Err(error) => { - let mut output = serde_yaml::Mapping::new(); - output.insert( - serde_yaml::Value::String("errors".to_string()), - serde_yaml::Value::Sequence(vec![serde_yaml::Value::String(format!( - "Failed to run `VM::deploy for program {}: {}", - program.id(), - error - ))]), - ); - output.insert( - serde_yaml::Value::String("outputs".to_string()), - serde_yaml::Value::Sequence(Vec::new()), - ); - return output; - } - }; + let transaction = match vm.deploy(&genesis_private_key, program, None, 0, None, rng) { + Ok(transaction) => transaction, + Err(error) => { + let mut output = serde_yaml::Mapping::new(); + output.insert( + serde_yaml::Value::String("errors".to_string()), + serde_yaml::Value::Sequence(vec![serde_yaml::Value::String(format!( + "Failed to run `VM::deploy for program {}: {}", + program.id(), + error + ))]), + ); + output + .insert(serde_yaml::Value::String("outputs".to_string()), serde_yaml::Value::Sequence(Vec::new())); + return output; + } + }; let (ratifications, transactions, aborted_transaction_ids, ratified_finalize_operations) = vm.speculate(construct_finalize_global_state(&vm), Some(0u64), vec![], None, [transaction].iter()).unwrap(); @@ -168,28 +196,21 @@ fn run_test(test: &ProgramTest) -> serde_yaml::Mapping { let mut other = serde_yaml::Mapping::new(); // Execute the function, extracting the transaction. - let transaction = match vm.execute( - &private_key, - (program_id, function_name), - inputs.iter(), - Some(fee_records.pop().unwrap().0), - 0u64, - None, - rng, - ) { - Ok(transaction) => transaction, - // If the execution fails, return the error. 
- Err(err) => { - result.insert( - serde_yaml::Value::String("execute".to_string()), - serde_yaml::Value::String(err.to_string()), - ); - return (serde_yaml::Value::Mapping(result), serde_yaml::Value::Mapping(Default::default())); - } - }; + let transaction = + match vm.execute(&private_key, (program_id, function_name), inputs.iter(), None, 0u64, None, rng) { + Ok(transaction) => transaction, + // If the execution fails, return the error. + Err(err) => { + result.insert( + serde_yaml::Value::String("execute".to_string()), + serde_yaml::Value::String(err.to_string()), + ); + return (serde_yaml::Value::Mapping(result), serde_yaml::Value::Mapping(Default::default())); + } + }; // Attempt to verify the transaction. - let verified = vm.check_transaction(&transaction, None).is_ok(); + let verified = vm.check_transaction(&transaction, None, rng).is_ok(); // Store the verification result. result.insert(serde_yaml::Value::String("verified".to_string()), serde_yaml::Value::Bool(verified)); @@ -333,6 +354,7 @@ fn initialize_vm( } // A helper function construct the desired number of fee records from an initial record, all owned by the same key. +#[allow(unused)] fn construct_fee_records, R: Rng + CryptoRng>( vm: &VM, private_key: &PrivateKey, @@ -454,7 +476,7 @@ fn construct_next_block, R: Rng + CryptoRng> } // A helper function to invoke `credits.aleo/split`. 
-#[allow(clippy::type_complexity)] +#[allow(clippy::type_complexity, unused)] fn split, R: Rng + CryptoRng>( vm: &VM, private_key: &PrivateKey, diff --git a/synthesizer/tests/tests/process/execute/hash_into_struct.aleo b/synthesizer/tests/tests/process/execute/hash_into_struct.aleo new file mode 100644 index 0000000000..c00bdc039a --- /dev/null +++ b/synthesizer/tests/tests/process/execute/hash_into_struct.aleo @@ -0,0 +1,24 @@ +/* +randomness: 9045282 +cases: [] +*/ + +program hash_into_struct.aleo; + +struct arjQITzQAw: + qcoZ_qzzye as boolean; + ynh0UgMeBB as address; + uLpRnfnsg9 as u8; + D4j4T0s8Kc as boolean; + MaDEhMWsdd as u8; + HpHdqmODmR as i64; + rnbm6ct6qw as u16; + +closure kmNSyjzeaZ: + input r0 as boolean; + hash.bhp768 5372560835405554868775356124803982999725684598186573362469277964967083823207field into r1 as arjQITzQAw; + output r1.qcoZ_qzzye as boolean; + +function uftFRlQzED: + input r0 as boolean.private; + output r0 as boolean.private; diff --git a/synthesizer/tests/tests/vm/execute_and_finalize/async_without_finalize_fail.aleo b/synthesizer/tests/tests/vm/execute_and_finalize/async_without_finalize_fail.aleo new file mode 100644 index 0000000000..caf9ff0c22 --- /dev/null +++ b/synthesizer/tests/tests/vm/execute_and_finalize/async_without_finalize_fail.aleo @@ -0,0 +1,17 @@ +/* +randomness: 45791624 +cases: [] +*/ + +program child.aleo; + +function foo: + input r0 as field.private; + input r1 as field.private; + async foo self.caller into r2; + add r0 r1 into r3; + output r3 as field.private; + + + + diff --git a/synthesizer/tests/tests/vm/execute_and_finalize/call_after_async_fail.aleo b/synthesizer/tests/tests/vm/execute_and_finalize/call_after_async_fail.aleo new file mode 100644 index 0000000000..272bc951e0 --- /dev/null +++ b/synthesizer/tests/tests/vm/execute_and_finalize/call_after_async_fail.aleo @@ -0,0 +1,37 @@ +/* +randomness: 45791624 +cases: [] +*/ + +program child.aleo; + +mapping count: + key as address.public; + value as 
field.public; + +function foo: + async foo self.caller into r0; + output r0 as child.aleo/foo.future; + +finalize foo: + input r0 as address.public; + get.or_use count[r0] 0field into r1; + add r1 1field into r2; + set r2 into count[r0]; + +///////////////////////////////////////////////// + +import child.aleo; + +program parent.aleo; + +function foo: + call child.aleo/foo into r0; + async foo r0 into r1; + call child.aleo/foo into r2; + output r1 as parent.aleo/foo.future; + +finalize foo: + input r0 as child.aleo/foo.future; + await r0; + diff --git a/synthesizer/tests/tests/vm/execute_and_finalize/external_read_with_local_fail.aleo b/synthesizer/tests/tests/vm/execute_and_finalize/external_read_with_local_fail.aleo new file mode 100644 index 0000000000..56a3c02c9b --- /dev/null +++ b/synthesizer/tests/tests/vm/execute_and_finalize/external_read_with_local_fail.aleo @@ -0,0 +1,38 @@ +/* +cases: [] +*/ + +program registry.aleo; + +mapping users: + key as address.public; + value as boolean.public; + +function register: + async register self.caller into r0; + output r0 as registry.aleo/register.future; + +finalize register: + input r0 as address.public; + set true into users[r0]; + + +///////////////////////////////////////////////// + +import registry.aleo; + +program relay.aleo; + +mapping users: + key as address.public; + value as boolean.public; + +function send: + input r0 as address.public; + async send r0 into r1; + output r1 as relay.aleo/send.future; + +finalize send: + input r0 as address.public; + get relay.aleo/users[r0] into r1; + assert.eq r1 true; diff --git a/synthesizer/tests/tests/vm/execute_and_finalize/future_out_of_order_fail.aleo b/synthesizer/tests/tests/vm/execute_and_finalize/future_out_of_order_fail.aleo new file mode 100644 index 0000000000..f2b6225a55 --- /dev/null +++ b/synthesizer/tests/tests/vm/execute_and_finalize/future_out_of_order_fail.aleo @@ -0,0 +1,39 @@ +/* +randomness: 45791624 +cases: [] +*/ + +program child.aleo; + +mapping 
count: + key as address.public; + value as field.public; + +function foo: + async foo self.caller into r0; + output r0 as child.aleo/foo.future; + +finalize foo: + input r0 as address.public; + get.or_use count[r0] 0field into r1; + add r1 1field into r2; + set r2 into count[r0]; + +///////////////////////////////////////////////// + +import child.aleo; + +program parent.aleo; + +function foo: + call child.aleo/foo into r0; + call child.aleo/foo into r1; + async foo r1 r0 into r2; + output r2 as parent.aleo/foo.future; + +finalize foo: + input r0 as child.aleo/foo.future; + input r1 as child.aleo/foo.future; + await r0; + await r1; + diff --git a/synthesizer/tests/tests/vm/execute_and_finalize/ignore_finalize_fail.aleo b/synthesizer/tests/tests/vm/execute_and_finalize/ignore_finalize_fail.aleo new file mode 100644 index 0000000000..7514c7773c --- /dev/null +++ b/synthesizer/tests/tests/vm/execute_and_finalize/ignore_finalize_fail.aleo @@ -0,0 +1,33 @@ +/* +randomness: 45791624 +cases: [] +*/ + +program child.aleo; + +mapping count: + key as address.public; + value as field.public; + +function foo: + async foo self.caller into r0; + output r0 as child.aleo/foo.future; + +finalize foo: + input r0 as address.public; + get.or_use count[r0] 0field into r1; + add r1 1field into r2; + set r2 into count[r0]; + +///////////////////////////////////////////////// + +import child.aleo; + +program parent.aleo; + +function foo: + input r0 as field.private; + input r1 as field.private; + call child.aleo/foo into r2; + add r0 r1 into r3; + output r3 as field.private; diff --git a/synthesizer/tests/tests/vm/execute_and_finalize/last_reg_is_not_future_fail.aleo b/synthesizer/tests/tests/vm/execute_and_finalize/last_reg_is_not_future_fail.aleo new file mode 100644 index 0000000000..0a0f2ea01b --- /dev/null +++ b/synthesizer/tests/tests/vm/execute_and_finalize/last_reg_is_not_future_fail.aleo @@ -0,0 +1,25 @@ +/* +randomness: 45791624 +cases: [] +*/ + +program child.aleo; + +mapping 
count: + key as address.public; + value as field.public; + +function foo: + input r0 as field.private; + input r1 as field.private; + async foo self.caller into r2; + add r0 r1 into r3; + output r2 as child.aleo/foo.future; + output r3 as field.private; + +finalize foo: + input r0 as address.public; + get.or_use count[r0] 0field into r1; + add r1 1field into r2; + set r2 into count[r0]; + diff --git a/synthesizer/tests/tests/vm/execute_and_finalize/mint_and_split.aleo b/synthesizer/tests/tests/vm/execute_and_finalize/mint_and_split.aleo index 3d69f3c955..c8163d2990 100644 --- a/synthesizer/tests/tests/vm/execute_and_finalize/mint_and_split.aleo +++ b/synthesizer/tests/tests/vm/execute_and_finalize/mint_and_split.aleo @@ -1,5 +1,8 @@ /* randomness: 1337 +keys: + - APrivateKey1zkpFbGDx4znwxo1zrxfUscfGn1Vy3My3ia5gRHx3XwaLtCR + - APrivateKey1zkpJhviKDvvm7yu7SZuhSudVR7zjCRG2HznuAHwuGYc1xqN cases: - program: mint_and_split.aleo function: mint diff --git a/synthesizer/tests/tests/vm/execute_and_finalize/multiple_async_fail.aleo b/synthesizer/tests/tests/vm/execute_and_finalize/multiple_async_fail.aleo new file mode 100644 index 0000000000..7a186c0f02 --- /dev/null +++ b/synthesizer/tests/tests/vm/execute_and_finalize/multiple_async_fail.aleo @@ -0,0 +1,38 @@ +/* +randomness: 45791624 +cases: [] +*/ + +program child.aleo; + +mapping count: + key as address.public; + value as field.public; + +function foo: + async foo self.caller into r0; + output r0 as child.aleo/foo.future; + +finalize foo: + input r0 as address.public; + get.or_use count[r0] 0field into r1; + add r1 1field into r2; + set r2 into count[r0]; + +///////////////////////////////////////////////// + +import child.aleo; + +program parent.aleo; + +function foo: + call child.aleo/foo into r0; + call child.aleo/foo into r1; + async foo r0 into r2; + async foo r1 into r3; + output r2 as parent.aleo/foo.future; + +finalize foo: + input r0 as child.aleo/foo.future; + await r0; + diff --git 
a/synthesizer/tests/tests/vm/execute_and_finalize/no_import_external_read_fail.aleo b/synthesizer/tests/tests/vm/execute_and_finalize/no_import_external_read_fail.aleo new file mode 100644 index 0000000000..3a64c123d1 --- /dev/null +++ b/synthesizer/tests/tests/vm/execute_and_finalize/no_import_external_read_fail.aleo @@ -0,0 +1,32 @@ +/* +cases: [] +*/ + +program registry.aleo; + +mapping users: + key as address.public; + value as boolean.public; + +function register: + async register self.caller into r0; + output r0 as registry.aleo/register.future; + +finalize register: + input r0 as address.public; + set true into users[r0]; + + +///////////////////////////////////////////////// + +program relay.aleo; + +function send: + input r0 as address.public; + async send r0 into r1; + output r1 as relay.aleo/send.future; + +finalize send: + input r0 as address.public; + get registry.aleo/users[r0] into r1; + assert.eq r1 true; diff --git a/synthesizer/tests/tests/vm/execute_and_finalize/out_of_order_await_fail.aleo b/synthesizer/tests/tests/vm/execute_and_finalize/out_of_order_await_fail.aleo new file mode 100644 index 0000000000..b86f29b8b5 --- /dev/null +++ b/synthesizer/tests/tests/vm/execute_and_finalize/out_of_order_await_fail.aleo @@ -0,0 +1,39 @@ +/* +randomness: 45791624 +cases: [] +*/ + +program child.aleo; + +mapping count: + key as address.public; + value as field.public; + +function foo: + async foo self.caller into r0; + output r0 as child.aleo/foo.future; + +finalize foo: + input r0 as address.public; + get.or_use count[r0] 0field into r1; + add r1 1field into r2; + set r2 into count[r0]; + +///////////////////////////////////////////////// + +import child.aleo; + +program parent.aleo; + +function foo: + call child.aleo/foo into r0; + call child.aleo/foo into r1; + async foo r0 r1 into r2; + output r2 as parent.aleo/foo.future; + +finalize foo: + input r0 as child.aleo/foo.future; + input r1 as child.aleo/foo.future; + await r1; + await r0; + diff --git 
a/synthesizer/tests/tests/vm/execute_and_finalize/output_child_without_async_fail.aleo b/synthesizer/tests/tests/vm/execute_and_finalize/output_child_without_async_fail.aleo new file mode 100644 index 0000000000..7d34f0c060 --- /dev/null +++ b/synthesizer/tests/tests/vm/execute_and_finalize/output_child_without_async_fail.aleo @@ -0,0 +1,32 @@ +/* +randomness: 45791624 +cases: [] +*/ + +program child.aleo; + +mapping count: + key as address.public; + value as field.public; + +function foo: + async foo self.caller into r0; + output r0 as child.aleo/foo.future; + +finalize foo: + input r0 as address.public; + get.or_use count[r0] 0field into r1; + add r1 1field into r2; + set r2 into count[r0]; + +///////////////////////////////////////////////// + +import child.aleo; + +program parent.aleo; + +function foo: + call child.aleo/foo into r0; + output r2 as child.aleo/foo.future; + + diff --git a/synthesizer/tests/tests/vm/execute_and_finalize/read_external_mapping.aleo b/synthesizer/tests/tests/vm/execute_and_finalize/read_external_mapping.aleo new file mode 100644 index 0000000000..20ad427064 --- /dev/null +++ b/synthesizer/tests/tests/vm/execute_and_finalize/read_external_mapping.aleo @@ -0,0 +1,84 @@ +/* +randomness: 9086185409 +keys: + - APrivateKey1zkpABon5degxuW8JnBniSXgN1C4eAGKfDH8qRPZe1geHpWp +cases: + - program: relay.aleo + function: send + inputs: [aleo1f6eg623knp66cwx0926w3plgdgzcmfpgyrzgnjz90mucgs3z7s9qls4upm, 0u8] + - program: relay.aleo + function: send_without_check + inputs: [aleo1f6eg623knp66cwx0926w3plgdgzcmfpgyrzgnjz90mucgs3z7s9qls4upm, 0u8] + - program: registry.aleo + function: register + inputs: [] + private_key: APrivateKey1zkpABon5degxuW8JnBniSXgN1C4eAGKfDH8qRPZe1geHpWp + - program: relay.aleo + function: send + inputs: [aleo1f6eg623knp66cwx0926w3plgdgzcmfpgyrzgnjz90mucgs3z7s9qls4upm, 1u8] + - program: registry.aleo + function: unregister + inputs: [] + private_key: APrivateKey1zkpABon5degxuW8JnBniSXgN1C4eAGKfDH8qRPZe1geHpWp + - program: 
relay.aleo + function: send + inputs: [aleo1f6eg623knp66cwx0926w3plgdgzcmfpgyrzgnjz90mucgs3z7s9qls4upm, 2u8] +*/ + +program registry.aleo; + +mapping users: + key as address.public; + value as boolean.public; + +function register: + async register self.caller into r0; + output r0 as registry.aleo/register.future; + +finalize register: + input r0 as address.public; + set true into users[r0]; + +function unregister: + async unregister self.caller into r0; + output r0 as registry.aleo/unregister.future; + +finalize unregister: + input r0 as address.public; + set false into users[r0]; + +///////////////////////////////////////////////// + +import registry.aleo; + +program relay.aleo; + +record message: + owner as address.private; + data as u8.private; + +function send: + input r0 as address.public; + input r1 as u8.public; + cast r0 r1 into r2 as message.record; + async send r0 into r3; + output r2 as message.record; + output r3 as relay.aleo/send.future; + +finalize send: + input r0 as address.public; + get registry.aleo/users[r0] into r1; + assert.eq r1 true; + +function send_without_check: + input r0 as address.public; + input r1 as u8.public; + cast r0 r1 into r2 as message.record; + async send_without_check r0 into r3; + output r2 as message.record; + output r3 as relay.aleo/send_without_check.future; + +finalize send_without_check: + input r0 as address.public; + get.or_use registry.aleo/users[r0] true into r1; + assert.eq r1 true; diff --git a/synthesizer/tests/tests/vm/execute_and_finalize/unawaited_future_fail.aleo b/synthesizer/tests/tests/vm/execute_and_finalize/unawaited_future_fail.aleo new file mode 100644 index 0000000000..26736dca73 --- /dev/null +++ b/synthesizer/tests/tests/vm/execute_and_finalize/unawaited_future_fail.aleo @@ -0,0 +1,38 @@ +/* +randomness: 45791624 +cases: [] +*/ + +program child.aleo; + +mapping count: + key as address.public; + value as field.public; + +function foo: + async foo self.caller into r0; + output r0 as 
child.aleo/foo.future; + +finalize foo: + input r0 as address.public; + get.or_use count[r0] 0field into r1; + add r1 1field into r2; + set r2 into count[r0]; + +///////////////////////////////////////////////// + +import child.aleo; + +program parent.aleo; + +function foo: + call child.aleo/foo into r0; + call child.aleo/foo into r1; + async foo r0 r1 into r2; + output r2 as parent.aleo/foo.future; + +finalize foo: + input r0 as child.aleo/foo.future; + input r1 as child.aleo/foo.future; + await r0; + diff --git a/synthesizer/tests/tests/vm/execute_and_finalize/unknown_external_mapping_fail.aleo b/synthesizer/tests/tests/vm/execute_and_finalize/unknown_external_mapping_fail.aleo new file mode 100644 index 0000000000..05397274d7 --- /dev/null +++ b/synthesizer/tests/tests/vm/execute_and_finalize/unknown_external_mapping_fail.aleo @@ -0,0 +1,34 @@ +/* +cases: [] +*/ + +program registry.aleo; + +mapping users: + key as address.public; + value as boolean.public; + +function register: + async register self.caller into r0; + output r0 as registry.aleo/register.future; + +finalize register: + input r0 as address.public; + set true into users[r0]; + + +///////////////////////////////////////////////// + +import registry.aleo; + +program relay.aleo; + +function send: + input r0 as address.public; + async send r0 into r1; + output r1 as relay.aleo/send.future; + +finalize send: + input r0 as address.public; + get registry.aleo/foo[r0] into r1; + assert.eq r1 true; diff --git a/synthesizer/tests/tests/vm/execute_and_finalize/unknown_mapping_fail.aleo b/synthesizer/tests/tests/vm/execute_and_finalize/unknown_mapping_fail.aleo new file mode 100644 index 0000000000..f344a08a36 --- /dev/null +++ b/synthesizer/tests/tests/vm/execute_and_finalize/unknown_mapping_fail.aleo @@ -0,0 +1,18 @@ +/* +cases: [] +*/ + +program registry.aleo; + +mapping users: + key as address.public; + value as boolean.public; + +function register: + async register self.caller into r0; + output r0 as 
registry.aleo/register.future; + +finalize register: + input r0 as address.public; + set true into foo[r0]; + diff --git a/synthesizer/tests/tests/vm/execute_and_finalize/unused_future_fail.aleo b/synthesizer/tests/tests/vm/execute_and_finalize/unused_future_fail.aleo new file mode 100644 index 0000000000..9277b9a3dd --- /dev/null +++ b/synthesizer/tests/tests/vm/execute_and_finalize/unused_future_fail.aleo @@ -0,0 +1,37 @@ +/* +randomness: 45791624 +cases: [] +*/ + +program child.aleo; + +mapping count: + key as address.public; + value as field.public; + +function foo: + async foo self.caller into r0; + output r0 as child.aleo/foo.future; + +finalize foo: + input r0 as address.public; + get.or_use count[r0] 0field into r1; + add r1 1field into r2; + set r2 into count[r0]; + +///////////////////////////////////////////////// + +import child.aleo; + +program parent.aleo; + +function foo: + call child.aleo/foo into r0; + call child.aleo/foo into r1; + async foo r0 into r2; + output r2 as parent.aleo/foo.future; + +finalize foo: + input r0 as child.aleo/foo.future; + await r0; + diff --git a/synthesizer/tests/utilities/tests/program_test.rs b/synthesizer/tests/utilities/tests/program_test.rs index 77a031aa92..71290b0a0b 100644 --- a/synthesizer/tests/utilities/tests/program_test.rs +++ b/synthesizer/tests/utilities/tests/program_test.rs @@ -43,6 +43,8 @@ pub struct ProgramTest { rewrite: bool, /// The seed for the RNG. randomness: Option, + /// Additional keys for the test. + keys: Vec>, } impl ProgramTest { @@ -60,6 +62,11 @@ impl ProgramTest { pub fn randomness(&self) -> Option { self.randomness } + + /// Returns the additional keys for the test. + pub fn keys(&self) -> &[PrivateKey] { + &self.keys + } } impl ExpectedTest for ProgramTest { @@ -79,12 +86,25 @@ impl ExpectedTest for ProgramTest { let comment = &source[first_comment_start + 2..first_comment_start + 2 + end_first_comment]; // Parse the comment into the test configuration. 
- println!("comment: {}", comment); let test_config = serde_yaml::from_str::(comment).expect("invalid test configuration"); // If the `randomness` field is present in the config, parse it as a `u64`. let randomness = test_config.get("randomness").map(|value| value.as_u64().expect("`randomness` must be a u64")); + // If the `keys` field is present in the config, parse it as a sequence of `PrivateKey`s. + let keys = match test_config.get("keys") { + None => Vec::new(), + Some(value) => value + .as_sequence() + .expect("`keys` must be a sequence") + .iter() + .map(|value| { + PrivateKey::::from_str(value.as_str().expect("private key must be a string")) + .expect("invalid private key") + }) + .collect::>(), + }; + // Extract the test cases from the config. let cases = test_config .get("cases") @@ -110,7 +130,7 @@ impl ExpectedTest for ProgramTest { } }; - Self { programs, cases, expected, path, rewrite, randomness } + Self { programs, cases, expected, path, rewrite, randomness, keys } } fn check(&self, output: &Self::Output) -> Result<()> { diff --git a/utilities/Cargo.toml b/utilities/Cargo.toml index a0e48fee79..bbced72fca 100644 --- a/utilities/Cargo.toml +++ b/utilities/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "snarkvm-utilities" -version = "0.16.3" +version = "0.16.15" authors = [ "The Aleo Team " ] description = "Utilities for a decentralized virtual machine" homepage = "https://aleo.org" @@ -25,7 +25,7 @@ edition = "2021" [dependencies.snarkvm-utilities-derives] path = "./derives" -version = "=0.16.3" +version = "=0.16.15" optional = true [dependencies.aleo-std] diff --git a/utilities/derives/Cargo.toml b/utilities/derives/Cargo.toml index 707bebc048..29d966060b 100644 --- a/utilities/derives/Cargo.toml +++ b/utilities/derives/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "snarkvm-utilities-derives" -version = "0.16.3" +version = "0.16.15" authors = [ "The Aleo Team " ] description = "Canonical serialization for a decentralized virtual machine" homepage = 
"https://aleo.org" diff --git a/utilities/src/biginteger/mod.rs b/utilities/src/biginteger/mod.rs index 21e68fdbad..34fe2e1d4b 100644 --- a/utilities/src/biginteger/mod.rs +++ b/utilities/src/biginteger/mod.rs @@ -75,10 +75,10 @@ pub trait BigInteger: /// Returns true iff this number is odd. fn is_odd(&self) -> bool; - /// Returns true iff this number is even. + /// Returns true if this number is even. fn is_even(&self) -> bool; - /// Returns true iff this number is zero. + /// Returns true if this number is zero. fn is_zero(&self) -> bool; /// Compute the number of bits needed to encode this number. Always a diff --git a/utilities/src/bits.rs b/utilities/src/bits.rs index 4523ec50c4..53e9a04c44 100644 --- a/utilities/src/bits.rs +++ b/utilities/src/bits.rs @@ -41,17 +41,22 @@ pub trait ToBits: Sized { /// Returns `self` as a boolean array in little-endian order. fn to_bits_le(&self) -> Vec { - let mut bits = Vec::with_capacity(32); + let mut bits = Vec::new(); self.write_bits_le(&mut bits); bits } /// Returns `self` as a boolean array in big-endian order. fn to_bits_be(&self) -> Vec { - let mut bits = Vec::with_capacity(32); + let mut bits = Vec::new(); self.write_bits_be(&mut bits); bits } + + /// An optional indication of how many bits an object can be represented with. + fn num_bits() -> Option { + None + } } pub trait FromBits: Sized { @@ -144,7 +149,6 @@ macro_rules! impl_bits_for_integer { /// Returns `self` as a boolean array in little-endian order. #[inline] fn write_bits_le(&self, vec: &mut Vec) { - vec.reserve(<$int>::BITS as usize); let mut value = *self; for _ in 0..<$int>::BITS { vec.push(value & 1 == 1); @@ -158,6 +162,10 @@ macro_rules! impl_bits_for_integer { let reversed = self.reverse_bits(); reversed.write_bits_le(vec); } + + fn num_bits() -> Option { + Some(<$int>::BITS as usize) + } } impl FromBits for $int { @@ -266,6 +274,10 @@ impl ToBits for &[C] { /// A helper method to return a concatenated list of little-endian bits. 
#[inline] fn write_bits_le(&self, vec: &mut Vec) { + if let Some(num_bits) = C::num_bits() { + vec.reserve(num_bits * self.len()); + } + for elem in self.iter() { elem.write_bits_le(vec); } @@ -274,6 +286,10 @@ impl ToBits for &[C] { /// A helper method to return a concatenated list of big-endian bits. #[inline] fn write_bits_be(&self, vec: &mut Vec) { + if let Some(num_bits) = C::num_bits() { + vec.reserve(num_bits * self.len()); + } + for elem in self.iter() { elem.write_bits_be(vec); } diff --git a/utilities/src/bytes.rs b/utilities/src/bytes.rs index 2a9938dccf..9a7ba1d824 100644 --- a/utilities/src/bytes.rs +++ b/utilities/src/bytes.rs @@ -145,6 +145,11 @@ impl<'de, T: FromBytes> FromBytesDeserializer { false => (size_b, size_a), }; + // Ensure 'size_b' is within bounds. + if size_b > i32::MAX as usize { + return Err(D::Error::custom(format!("size_b ({size_b}) exceeds maximum"))); + } + // Reserve a new `Vec` with the larger size capacity. let mut buffer = Vec::with_capacity(size_b); diff --git a/utilities/src/error.rs b/utilities/src/error.rs index 2914264eb4..97ab38bc65 100644 --- a/utilities/src/error.rs +++ b/utilities/src/error.rs @@ -37,3 +37,32 @@ impl Error for crate::String {} #[cfg(not(feature = "std"))] impl Error for crate::io::Error {} + +/// This purpose of this macro is to catch the instances of halting +/// without producing logs looking like unexpected panics. It prints +/// to stderr using the format: "Halted at : ". +#[macro_export] +macro_rules! handle_halting { + ($e:expr) => {{ + use std::panic; + + // Set a custom hook before calling catch_unwind to + // indicate that the panic was expected and handled. + panic::set_hook(Box::new(|e| { + let msg = e.to_string(); + let msg = msg.split_ascii_whitespace().skip_while(|&word| word != "panicked").collect::>(); + let mut msg = msg.join(" "); + msg = msg.replacen("panicked", "Halted", 1); + eprintln!("{msg}"); + })); + + // Perform the operation that may panic. 
+ let result = panic::catch_unwind($e); + + // Restore the standard panic hook. + let _ = panic::take_hook(); + + // Return the result, allowing regular error-handling. + result + }}; +} diff --git a/utilities/src/rand.rs b/utilities/src/rand.rs index 818aa969fe..b92f0e95da 100644 --- a/utilities/src/rand.rs +++ b/utilities/src/rand.rs @@ -55,6 +55,13 @@ impl TestRng { println!("\nInitializing 'TestRng' with seed '{seed}'\n"); // Use the seed to initialize a fast, non-cryptographic Rng. + Self::from_seed(seed) + } + + // This is the preferred method to use once the main instance of TestRng had already + // been initialized in a test or benchmark and an auxiliary one is desired without + // spamming the stdout. + pub fn from_seed(seed: u64) -> Self { Self(XorShiftRng::seed_from_u64(seed)) } diff --git a/utilities/src/serialize/impls.rs b/utilities/src/serialize/impls.rs index f163180df1..dccc02981e 100644 --- a/utilities/src/serialize/impls.rs +++ b/utilities/src/serialize/impls.rs @@ -306,7 +306,7 @@ impl CanonicalSerialize for Rc { // } // } -impl CanonicalSerialize for Arc { +impl CanonicalSerialize for Arc { #[inline] fn serialize_with_mode(&self, mut writer: W, compress: Compress) -> Result<(), SerializationError> { self.as_ref().serialize_with_mode(&mut writer, compress) @@ -334,7 +334,7 @@ impl Valid for Arc { } } -impl CanonicalDeserialize for Arc { +impl CanonicalDeserialize for Arc { #[inline] fn deserialize_with_mode( reader: R, @@ -424,6 +424,7 @@ impl CanonicalDeserialize for Vec { ) -> Result { let len = u64::deserialize_with_mode(&mut reader, compress, validate)?; let mut values = Vec::new(); + let _ = values.try_reserve(len as usize); for _ in 0..len { values.push(T::deserialize_with_mode(&mut reader, compress, Validate::No)?); } diff --git a/vm/cli/commands/run.rs b/vm/cli/commands/run.rs index db13d5e741..285b46034b 100644 --- a/vm/cli/commands/run.rs +++ b/vm/cli/commands/run.rs @@ -106,12 +106,16 @@ mod tests { #[test] fn clap_snarkvm_run() { - 
let arg_vec = vec!["snarkvm", "run", "hello", "1u32", "2u32"]; + let arg_vec = vec!["snarkvm", "run", "hello", "1u32", "2u32", "foo.aleo"]; let cli = CLI::parse_from(&arg_vec); if let Command::Run(run) = cli.command { assert_eq!(run.function, Identifier::try_from(arg_vec[2]).unwrap()); - assert_eq!(run.inputs, vec![Value::try_from(arg_vec[3]).unwrap(), Value::try_from(arg_vec[4]).unwrap()]); + assert_eq!(run.inputs, vec![ + Value::try_from(arg_vec[3]).unwrap(), + Value::try_from(arg_vec[4]).unwrap(), + Value::try_from(arg_vec[5]).unwrap() + ]); } else { panic!("Unexpected result of clap parsing!"); } diff --git a/vm/lib.rs b/vm/lib.rs index cc5d1cfdaa..60d18da0df 100644 --- a/vm/lib.rs +++ b/vm/lib.rs @@ -37,6 +37,8 @@ pub use snarkvm_curves as curves; pub use snarkvm_fields as fields; #[cfg(feature = "ledger")] pub use snarkvm_ledger as ledger; +#[cfg(feature = "metrics")] +pub use snarkvm_metrics as metrics; #[cfg(feature = "parameters")] pub use snarkvm_parameters as parameters; #[cfg(feature = "synthesizer")] diff --git a/vm/package/execute.rs b/vm/package/execute.rs index f134c0f8bc..e075e8de86 100644 --- a/vm/package/execute.rs +++ b/vm/package/execute.rs @@ -96,7 +96,7 @@ impl Package { process.insert_verifying_key(program_id, &function_name, verifier.verifying_key().clone())?; // Execute the circuit. - let (response, mut trace) = process.execute::(authorization)?; + let (response, mut trace) = process.execute::(authorization, rng)?; // Retrieve the call metrics. let call_metrics = trace.call_metrics().to_vec(); diff --git a/vm/package/mod.rs b/vm/package/mod.rs index f9083cc6a9..87f383df83 100644 --- a/vm/package/mod.rs +++ b/vm/package/mod.rs @@ -273,6 +273,60 @@ function transfer: sample_package_with_program_and_imports(&main_program, &[imported_program]) } + /// Samples a (temporary) package containing a `grandparent.aleo` program which imports `parent.aleo` which imports `child.aleo`. 
+ pub(crate) fn sample_nested_package() -> (PathBuf, Package) { + // Initialize the child program. + let child_program = Program::::from_str( + " +program child.aleo; + +record A: + owner as address.private; + val as u32.private; + +function mint: + input r0 as address.private; + input r1 as u32.private; + cast r0 r1 into r2 as A.record; + output r2 as A.record;", + ) + .unwrap(); + + // Initialize the parent program. + let parent_program = Program::::from_str( + " +import child.aleo; + +program parent.aleo; + +function wrapper_mint: + input r0 as address.private; + input r1 as u32.private; + call child.aleo/mint r0 r1 into r2; + output r2 as child.aleo/A.record;", + ) + .unwrap(); + + // Initialize the grandparent program. + let grandparent_program = Program::::from_str( + " +import child.aleo; +import parent.aleo; + +program grandparent.aleo; + +function double_wrapper_mint: + input r0 as address.private; + input r1 as u32.private; + call parent.aleo/wrapper_mint r0 r1 into r2; + output r2 as child.aleo/A.record;", + ) + .unwrap(); + + // Sample the package using the main program and imported program. + sample_package_with_program_and_imports(&grandparent_program, &[child_program, parent_program]) + } + /// Samples a (temporary) package containing a `transfer.aleo` program which imports `credits.aleo`. pub(crate) fn sample_transfer_package() -> (PathBuf, Package) { // Initialize the imported program. @@ -386,6 +440,23 @@ function main: (caller0_private_key, function_name, vec![r0, r1, r2]) } + "grandparent.aleo" => { + // Initialize caller 0. + let caller0_private_key = crate::cli::helpers::dotenv_private_key().unwrap(); + + // Initialize caller 1. + let caller1_private_key = PrivateKey::::new(rng).unwrap(); + let caller1 = Address::try_from(&caller1_private_key).unwrap(); + + // Declare the function name. + let function_name = Identifier::from_str("double_wrapper_mint").unwrap(); + + // Initialize the function inputs. 
+ let r0 = Value::::from_str(&caller1.to_string()).unwrap(); + let r1 = Value::::from_str("1u32").unwrap(); + + (caller0_private_key, function_name, vec![r0, r1]) + } _ => panic!("Invalid program ID for sample package (while testing)"), } } diff --git a/vm/package/run.rs b/vm/package/run.rs index e3c5d8e69a..54b1520ead 100644 --- a/vm/package/run.rs +++ b/vm/package/run.rs @@ -53,7 +53,7 @@ impl Package { // Initialize the call stack. let call_stack = CallStack::PackageRun(vec![request], *private_key, assignments.clone()); // Synthesize the circuit. - let response = stack.execute_function::(call_stack, None)?; + let response = stack.execute_function::(call_stack, None, rng)?; // Retrieve the call metrics. let call_metrics = assignments.read().iter().map(|(_, metrics)| *metrics).collect::>(); // Return the response and call metrics. @@ -116,6 +116,30 @@ mod tests { std::fs::remove_dir_all(directory).unwrap(); } + #[test] + fn test_run_with_nested_imports() { + // Samples a new package at a temporary directory. + let (directory, package) = crate::package::test_helpers::sample_nested_package(); + + // Ensure the build directory does *not* exist. + assert!(!package.build_directory().exists()); + // Build the package. + package.build::(None).unwrap(); + // Ensure the build directory exists. + assert!(package.build_directory().exists()); + + // Initialize an RNG. + let rng = &mut TestRng::default(); + // Sample the function inputs. + let (private_key, function_name, inputs) = + crate::package::test_helpers::sample_package_run(package.program_id()); + // Run the program function. + let (_response, _metrics) = package.run::(&private_key, function_name, &inputs, rng).unwrap(); + + // Proactively remove the temporary directory (to conserve space). + std::fs::remove_dir_all(directory).unwrap(); + } + /// Use `cargo test profiler --features timer` to run this test. 
#[ignore] #[test] diff --git a/wasm/Cargo.toml b/wasm/Cargo.toml index e5ed286abf..0ab51f4d5a 100644 --- a/wasm/Cargo.toml +++ b/wasm/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "snarkvm-wasm" -version = "0.16.3" +version = "0.16.15" authors = [ "The Aleo Team " ] description = "WASM for a decentralized virtual machine" homepage = "https://aleo.org" @@ -26,60 +26,85 @@ edition = "2021" [lib] crate-type = [ "cdylib", "rlib" ] +[features] +default = [ "full" ] +full = [ + "circuit", + "console", + "curves", + "fields", + "ledger", + "synthesizer", + "utilities" +] +circuit = [ "snarkvm-circuit-network" ] +console = [ "snarkvm-console" ] +curves = [ "snarkvm-curves" ] +fields = [ "snarkvm-fields" ] +ledger = [ + "snarkvm-ledger-block", + "snarkvm-ledger-query", + "snarkvm-ledger-store" +] +synthesizer = [ "snarkvm-synthesizer" ] +utilities = [ "snarkvm-utilities" ] + +[dependencies.snarkvm-circuit-network] +path = "../circuit/network" +version = "=0.16.15" +features = [ "wasm" ] +optional = true + [dependencies.snarkvm-console] path = "../console" -version = "=0.16.3" +version = "=0.16.15" features = [ "wasm" ] optional = true [dependencies.snarkvm-curves] path = "../curves" -version = "=0.16.3" +version = "=0.16.15" optional = true [dependencies.snarkvm-fields] path = "../fields" -version = "=0.16.3" +version = "=0.16.15" +optional = true + +[dependencies.snarkvm-ledger-block] +path = "../ledger/block" +version = "=0.16.15" +features = [ "wasm" ] +optional = true + +[dependencies.snarkvm-ledger-query] +path = "../ledger/query" +version = "=0.16.15" +features = [ "async", "wasm" ] +optional = true + +[dependencies.snarkvm-ledger-store] +path = "../ledger/store" +version = "=0.16.15" +features = [ "wasm" ] optional = true [dependencies.snarkvm-synthesizer] path = "../synthesizer" -version = "=0.16.3" +version = "=0.16.15" default-features = false -features = [ "wasm" ] +features = [ "async", "wasm" ] optional = true [dependencies.snarkvm-utilities] path = 
"../utilities" -version = "=0.16.3" +version = "=0.16.15" features = [ "wasm" ] optional = true -[dependencies.rand] -version = "0.8" -default-features = false - -[dependencies.serde] -version = "1.0.188" -default-features = false -features = [ "derive" ] - -[dependencies.wasm-bindgen] -version = "0.2" -features = [ "serde-serialize" ] - [dependencies.getrandom] version = "0.2" features = [ "js" ] [dev-dependencies.wasm-bindgen-test] version = "0.3.37" - -[features] -default = [ "full" ] -full = [ "console", "curves", "fields", "synthesizer", "utilities" ] -console = [ "snarkvm-console" ] -curves = [ "snarkvm-curves" ] -fields = [ "snarkvm-fields" ] -synthesizer = [ "snarkvm-synthesizer" ] -utilities = [ "snarkvm-utilities" ] diff --git a/wasm/src/lib.rs b/wasm/src/lib.rs index 6105b7c352..2d9e723294 100644 --- a/wasm/src/lib.rs +++ b/wasm/src/lib.rs @@ -12,12 +12,22 @@ // See the License for the specific language governing permissions and // limitations under the License. +#[cfg(feature = "network")] +pub use snarkvm_circuit_network as circuit_network; #[cfg(feature = "console")] pub use snarkvm_console as console; +#[cfg(feature = "network")] +pub use snarkvm_console::network as console_network; #[cfg(feature = "curves")] pub use snarkvm_curves as curves; #[cfg(feature = "fields")] pub use snarkvm_fields as fields; +#[cfg(feature = "ledger")] +pub use snarkvm_ledger_block as ledger_block; +#[cfg(feature = "ledger")] +pub use snarkvm_ledger_query as ledger_query; +#[cfg(feature = "ledger")] +pub use snarkvm_ledger_store as ledger_store; #[cfg(feature = "synthesizer")] pub use snarkvm_synthesizer as synthesizer; #[cfg(feature = "utilities")]