diff --git a/.cargo/release-version b/.cargo/release-version
index 90787b183e..ef9b3919c6 100644
--- a/.cargo/release-version
+++ b/.cargo/release-version
@@ -1 +1 @@
-v0.16.3
\ No newline at end of file
+v0.16.15
\ No newline at end of file
diff --git a/.circleci/config.yml b/.circleci/config.yml
index c78cf5f13b..2402f7faa9 100644
--- a/.circleci/config.yml
+++ b/.circleci/config.yml
@@ -480,6 +480,16 @@ jobs:
workspace_member: ledger
cache_key: snarkvm-ledger-cache
+ ledger-with-rocksdb:
+ docker:
+ - image: cimg/rust:1.71.1
+ resource_class: 2xlarge
+ steps:
+ - run_serial:
+ flags: --features=rocks
+ workspace_member: ledger
+ cache_key: snarkvm-ledger-with-rocksdb-cache
+
ledger-authority:
docker:
- image: cimg/rust:1.71.1
@@ -854,6 +864,8 @@ workflows:
- curves
- fields
- ledger
+# TODO (howardwu) - Implement `open_testing` on all storage, update to `CurrentConsensusStore::open_testing`, then re-enable.
+# - ledger-with-rocksdb
- ledger-authority
- ledger-block
- ledger-coinbase
diff --git a/.github/workflows/benchmarks.yml b/.github/workflows/benchmarks.yml
index f33dfcd481..bd6e7ad390 100644
--- a/.github/workflows/benchmarks.yml
+++ b/.github/workflows/benchmarks.yml
@@ -60,7 +60,6 @@ jobs:
run: |
cd console/collections
cargo bench --bench merkle_tree -- --output-format bencher | tee -a ../../output.txt
- cargo bench --bench kary_merkle_tree -- --output-format bencher | tee -a ../../output.txt
cd ../..
- name: Benchmark curves
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
index 9dda0af373..b0233c6c71 100644
--- a/CONTRIBUTING.md
+++ b/CONTRIBUTING.md
@@ -20,7 +20,7 @@ snarkVM is a big project, so (non-)adherence to best practices related to perfor
### Memory handling
- if the final size is known, pre-allocate the collections (`Vec`, `HashMap` etc.) using `with_capacity` or `reserve` - this ensures that there are both fewer allocations (which involve system calls) and that the final allocated capacity is as close to the required size as possible
- create the collections right before they are populated/used, as opposed to e.g. creating a few big ones at the beginning of a function and only using them later on; this reduces the amount of time they occupy memory
-- if an intermediate vector is avoidable, use an `Iterator` instead; most of the time this just amounts to omitting the call to `.collect()` if a single-pass iteraton follows afterwards, or returning an `impl Iterator` from a function when the caller only needs to iterate over that result once
+- if an intermediate vector is avoidable, use an `Iterator` instead; most of the time this just amounts to omitting the call to `.collect()` if a single-pass iteration follows afterwards, or returning an `impl Iterator` from a function when the caller only needs to iterate over that result once
- when possible, fill/resize collections "in bulk" instead of pushing a single element in a loop; this is usually (but not always) detected by `clippy`, suggesting to create vectors containing a repeated value with `vec![x; N]` or extending them with `.resize(N, x)`
- when a value is to eventually be consumed in a chain of function calls, pass it by value instead of by reference; this has the following benefits:
* it makes the fact that the value is needed by value clear to the caller, who can then potentially reclaim it from the object afterwards if it is "heavy", limiting allocations
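As an illustrative aside (not part of the diff above), here is a minimal Rust sketch of the memory-handling guidance from this CONTRIBUTING.md hunk: pre-allocating with `with_capacity`, returning an `impl Iterator` instead of collecting an intermediate vector, and bulk-filling with `vec![x; N]`. The helper functions are hypothetical and exist only for demonstration.

```rust
// Hypothetical helpers illustrating the CONTRIBUTING.md memory-handling guidance.

/// Final size is known, so pre-allocate instead of growing the vector repeatedly.
fn squares_plus_one(inputs: &[u64]) -> Vec<u64> {
    let mut out = Vec::with_capacity(inputs.len());
    out.extend(inputs.iter().map(|x| x * x + 1));
    out
}

/// Returning an iterator avoids materializing an intermediate vector when the
/// caller only needs a single pass over the result.
fn squares(inputs: &[u64]) -> impl Iterator<Item = u64> + '_ {
    inputs.iter().map(|x| x * x)
}

fn main() {
    assert_eq!(squares_plus_one(&[1, 2, 3]), vec![2, 5, 10]);
    assert_eq!(squares(&[1, 2, 3]).sum::<u64>(), 14);
    // Bulk initialization instead of pushing a single element in a loop.
    let zeros = vec![0u8; 32];
    assert_eq!(zeros.len(), 32);
}
```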
diff --git a/Cargo.lock b/Cargo.lock
index 40342afae2..d5dfef8577 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -1163,6 +1163,15 @@ version = "0.12.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8a9ee70c43aaf417c914396645a0fa852624801b24ebb7ae78fe8272889ac888"
+[[package]]
+name = "hashbrown"
+version = "0.13.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "33ff8ae62cd3a9102e5637afc8452c55acf3844001bd5374e0b0bd7b6616c038"
+dependencies = [
+ "ahash",
+]
+
[[package]]
name = "hashbrown"
version = "0.14.1"
@@ -1477,6 +1486,15 @@ dependencies = [
"libc",
]
+[[package]]
+name = "mach2"
+version = "0.4.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6d0d1830bcd151a6fc4aea1369af235b36c1528fe976b8ff678683c9995eade8"
+dependencies = [
+ "libc",
+]
+
[[package]]
name = "matchers"
version = "0.1.0"
@@ -1501,6 +1519,61 @@ dependencies = [
"autocfg",
]
+[[package]]
+name = "metrics"
+version = "0.21.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "fde3af1a009ed76a778cb84fdef9e7dbbdf5775ae3e4cc1f434a6a307f6f76c5"
+dependencies = [
+ "ahash",
+ "metrics-macros",
+ "portable-atomic",
+]
+
+[[package]]
+name = "metrics-exporter-prometheus"
+version = "0.12.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8a4964177ddfdab1e3a2b37aec7cf320e14169abb0ed73999f558136409178d5"
+dependencies = [
+ "base64",
+ "hyper",
+ "indexmap 1.9.3",
+ "ipnet",
+ "metrics",
+ "metrics-util",
+ "quanta",
+ "thiserror",
+ "tokio",
+ "tracing",
+]
+
+[[package]]
+name = "metrics-macros"
+version = "0.7.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ddece26afd34c31585c74a4db0630c376df271c285d682d1e55012197830b6df"
+dependencies = [
+ "proc-macro2",
+ "quote 1.0.33",
+ "syn 2.0.38",
+]
+
+[[package]]
+name = "metrics-util"
+version = "0.15.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "4de2ed6e491ed114b40b732e4d1659a9d53992ebd87490c44a6ffe23739d973e"
+dependencies = [
+ "crossbeam-epoch",
+ "crossbeam-utils",
+ "hashbrown 0.13.1",
+ "metrics",
+ "num_cpus",
+ "quanta",
+ "sketches-ddsketch",
+]
+
[[package]]
name = "mime"
version = "0.3.17"
@@ -1879,6 +1952,22 @@ dependencies = [
"unarray",
]
+[[package]]
+name = "quanta"
+version = "0.11.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a17e662a7a8291a865152364c20c7abc5e60486ab2001e8ec10b24862de0b9ab"
+dependencies = [
+ "crossbeam-utils",
+ "libc",
+ "mach2",
+ "once_cell",
+ "raw-cpuid",
+ "wasi",
+ "web-sys",
+ "winapi",
+]
+
[[package]]
name = "quick-error"
version = "1.2.3"
@@ -1958,6 +2047,15 @@ dependencies = [
"rand_core",
]
+[[package]]
+name = "raw-cpuid"
+version = "10.7.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6c297679cb867470fa8c9f67dbba74a78d78e3e98d7cf2b08d6d71540f797332"
+dependencies = [
+ "bitflags 1.3.2",
+]
+
[[package]]
name = "rayon"
version = "1.8.0"
@@ -2447,6 +2545,12 @@ version = "1.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a7cee0529a6d40f580e7a5e6c495c8fbfe21b7b52795ed4bb5e62cdf92bc6380"
+[[package]]
+name = "sketches-ddsketch"
+version = "0.2.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "68a406c1882ed7f29cd5e248c9848a80e7cb6ae0fea82346d2746f2f941c07e1"
+
[[package]]
name = "slab"
version = "0.4.9"
@@ -2473,7 +2577,7 @@ dependencies = [
[[package]]
name = "snarkvm"
-version = "0.16.3"
+version = "0.16.15"
dependencies = [
"anstyle",
"anyhow",
@@ -2496,6 +2600,7 @@ dependencies = [
"snarkvm-curves",
"snarkvm-fields",
"snarkvm-ledger",
+ "snarkvm-metrics",
"snarkvm-parameters",
"snarkvm-synthesizer",
"snarkvm-utilities",
@@ -2508,7 +2613,7 @@ dependencies = [
[[package]]
name = "snarkvm-algorithms"
-version = "0.16.3"
+version = "0.16.15"
dependencies = [
"aleo-std",
"anyhow",
@@ -2547,7 +2652,7 @@ dependencies = [
[[package]]
name = "snarkvm-algorithms-cuda"
-version = "0.16.3"
+version = "0.16.15"
dependencies = [
"blst",
"cc",
@@ -2557,7 +2662,7 @@ dependencies = [
[[package]]
name = "snarkvm-circuit"
-version = "0.16.3"
+version = "0.16.15"
dependencies = [
"snarkvm-circuit-account",
"snarkvm-circuit-algorithms",
@@ -2570,7 +2675,7 @@ dependencies = [
[[package]]
name = "snarkvm-circuit-account"
-version = "0.16.3"
+version = "0.16.15"
dependencies = [
"anyhow",
"snarkvm-circuit-algorithms",
@@ -2582,7 +2687,7 @@ dependencies = [
[[package]]
name = "snarkvm-circuit-algorithms"
-version = "0.16.3"
+version = "0.16.15"
dependencies = [
"anyhow",
"snarkvm-circuit-types",
@@ -2594,7 +2699,7 @@ dependencies = [
[[package]]
name = "snarkvm-circuit-collections"
-version = "0.16.3"
+version = "0.16.15"
dependencies = [
"anyhow",
"snarkvm-circuit-algorithms",
@@ -2608,7 +2713,7 @@ dependencies = [
[[package]]
name = "snarkvm-circuit-environment"
-version = "0.16.3"
+version = "0.16.15"
dependencies = [
"criterion",
"indexmap 2.0.2",
@@ -2629,11 +2734,11 @@ dependencies = [
[[package]]
name = "snarkvm-circuit-environment-witness"
-version = "0.16.3"
+version = "0.16.15"
[[package]]
name = "snarkvm-circuit-network"
-version = "0.16.3"
+version = "0.16.15"
dependencies = [
"snarkvm-circuit-algorithms",
"snarkvm-circuit-collections",
@@ -2644,7 +2749,7 @@ dependencies = [
[[package]]
name = "snarkvm-circuit-program"
-version = "0.16.3"
+version = "0.16.15"
dependencies = [
"anyhow",
"paste",
@@ -2662,7 +2767,7 @@ dependencies = [
[[package]]
name = "snarkvm-circuit-types"
-version = "0.16.3"
+version = "0.16.15"
dependencies = [
"snarkvm-circuit-environment",
"snarkvm-circuit-types-address",
@@ -2677,7 +2782,7 @@ dependencies = [
[[package]]
name = "snarkvm-circuit-types-address"
-version = "0.16.3"
+version = "0.16.15"
dependencies = [
"snarkvm-circuit-environment",
"snarkvm-circuit-types-boolean",
@@ -2689,7 +2794,7 @@ dependencies = [
[[package]]
name = "snarkvm-circuit-types-boolean"
-version = "0.16.3"
+version = "0.16.15"
dependencies = [
"criterion",
"snarkvm-circuit-environment",
@@ -2698,7 +2803,7 @@ dependencies = [
[[package]]
name = "snarkvm-circuit-types-field"
-version = "0.16.3"
+version = "0.16.15"
dependencies = [
"snarkvm-circuit-environment",
"snarkvm-circuit-types-boolean",
@@ -2707,7 +2812,7 @@ dependencies = [
[[package]]
name = "snarkvm-circuit-types-group"
-version = "0.16.3"
+version = "0.16.15"
dependencies = [
"snarkvm-circuit-environment",
"snarkvm-circuit-types-boolean",
@@ -2719,7 +2824,7 @@ dependencies = [
[[package]]
name = "snarkvm-circuit-types-integers"
-version = "0.16.3"
+version = "0.16.15"
dependencies = [
"paste",
"snarkvm-circuit-environment",
@@ -2732,7 +2837,7 @@ dependencies = [
[[package]]
name = "snarkvm-circuit-types-scalar"
-version = "0.16.3"
+version = "0.16.15"
dependencies = [
"snarkvm-circuit-environment",
"snarkvm-circuit-types-boolean",
@@ -2742,7 +2847,7 @@ dependencies = [
[[package]]
name = "snarkvm-circuit-types-string"
-version = "0.16.3"
+version = "0.16.15"
dependencies = [
"rand",
"snarkvm-circuit-environment",
@@ -2755,7 +2860,7 @@ dependencies = [
[[package]]
name = "snarkvm-console"
-version = "0.16.3"
+version = "0.16.15"
dependencies = [
"snarkvm-console-account",
"snarkvm-console-algorithms",
@@ -2767,7 +2872,7 @@ dependencies = [
[[package]]
name = "snarkvm-console-account"
-version = "0.16.3"
+version = "0.16.15"
dependencies = [
"bincode",
"bs58",
@@ -2780,7 +2885,7 @@ dependencies = [
[[package]]
name = "snarkvm-console-algorithms"
-version = "0.16.3"
+version = "0.16.15"
dependencies = [
"blake2s_simd",
"criterion",
@@ -2798,7 +2903,7 @@ dependencies = [
[[package]]
name = "snarkvm-console-collections"
-version = "0.16.3"
+version = "0.16.15"
dependencies = [
"aleo-std",
"criterion",
@@ -2811,7 +2916,7 @@ dependencies = [
[[package]]
name = "snarkvm-console-network"
-version = "0.16.3"
+version = "0.16.15"
dependencies = [
"anyhow",
"indexmap 2.0.2",
@@ -2833,7 +2938,7 @@ dependencies = [
[[package]]
name = "snarkvm-console-network-environment"
-version = "0.16.3"
+version = "0.16.15"
dependencies = [
"anyhow",
"bech32",
@@ -2850,7 +2955,7 @@ dependencies = [
[[package]]
name = "snarkvm-console-program"
-version = "0.16.3"
+version = "0.16.15"
dependencies = [
"bincode",
"enum_index",
@@ -2871,7 +2976,7 @@ dependencies = [
[[package]]
name = "snarkvm-console-types"
-version = "0.16.3"
+version = "0.16.15"
dependencies = [
"snarkvm-console-network-environment",
"snarkvm-console-types-address",
@@ -2885,7 +2990,7 @@ dependencies = [
[[package]]
name = "snarkvm-console-types-address"
-version = "0.16.3"
+version = "0.16.15"
dependencies = [
"bincode",
"serde_json",
@@ -2897,7 +3002,7 @@ dependencies = [
[[package]]
name = "snarkvm-console-types-boolean"
-version = "0.16.3"
+version = "0.16.15"
dependencies = [
"bincode",
"serde_json",
@@ -2906,7 +3011,7 @@ dependencies = [
[[package]]
name = "snarkvm-console-types-field"
-version = "0.16.3"
+version = "0.16.15"
dependencies = [
"bincode",
"serde_json",
@@ -2917,7 +3022,7 @@ dependencies = [
[[package]]
name = "snarkvm-console-types-group"
-version = "0.16.3"
+version = "0.16.15"
dependencies = [
"bincode",
"serde_json",
@@ -2929,7 +3034,7 @@ dependencies = [
[[package]]
name = "snarkvm-console-types-integers"
-version = "0.16.3"
+version = "0.16.15"
dependencies = [
"bincode",
"serde_json",
@@ -2941,7 +3046,7 @@ dependencies = [
[[package]]
name = "snarkvm-console-types-scalar"
-version = "0.16.3"
+version = "0.16.15"
dependencies = [
"bincode",
"serde_json",
@@ -2953,7 +3058,7 @@ dependencies = [
[[package]]
name = "snarkvm-console-types-string"
-version = "0.16.3"
+version = "0.16.15"
dependencies = [
"bincode",
"serde_json",
@@ -2965,7 +3070,7 @@ dependencies = [
[[package]]
name = "snarkvm-curves"
-version = "0.16.3"
+version = "0.16.15"
dependencies = [
"bincode",
"criterion",
@@ -2980,7 +3085,7 @@ dependencies = [
[[package]]
name = "snarkvm-fields"
-version = "0.16.3"
+version = "0.16.15"
dependencies = [
"aleo-std",
"anyhow",
@@ -2997,7 +3102,7 @@ dependencies = [
[[package]]
name = "snarkvm-ledger"
-version = "0.16.3"
+version = "0.16.15"
dependencies = [
"aleo-std",
"anyhow",
@@ -3024,7 +3129,7 @@ dependencies = [
[[package]]
name = "snarkvm-ledger-authority"
-version = "0.16.3"
+version = "0.16.15"
dependencies = [
"anyhow",
"bincode",
@@ -3037,7 +3142,7 @@ dependencies = [
[[package]]
name = "snarkvm-ledger-block"
-version = "0.16.3"
+version = "0.16.15"
dependencies = [
"bincode",
"indexmap 2.0.2",
@@ -3049,6 +3154,7 @@ dependencies = [
"snarkvm-ledger-authority",
"snarkvm-ledger-coinbase",
"snarkvm-ledger-committee",
+ "snarkvm-ledger-narwhal-batch-header",
"snarkvm-ledger-narwhal-subdag",
"snarkvm-ledger-narwhal-transmission-id",
"snarkvm-ledger-query",
@@ -3060,7 +3166,7 @@ dependencies = [
[[package]]
name = "snarkvm-ledger-coinbase"
-version = "0.16.3"
+version = "0.16.15"
dependencies = [
"aleo-std",
"anyhow",
@@ -3081,7 +3187,7 @@ dependencies = [
[[package]]
name = "snarkvm-ledger-committee"
-version = "0.16.3"
+version = "0.16.15"
dependencies = [
"anyhow",
"bincode",
@@ -3095,12 +3201,14 @@ dependencies = [
"serde_json",
"snarkvm-console",
"snarkvm-ledger-committee",
+ "snarkvm-ledger-narwhal-batch-header",
+ "snarkvm-metrics",
"test-strategy",
]
[[package]]
name = "snarkvm-ledger-narwhal"
-version = "0.16.3"
+version = "0.16.15"
dependencies = [
"snarkvm-ledger-narwhal",
"snarkvm-ledger-narwhal-batch-certificate",
@@ -3113,21 +3221,21 @@ dependencies = [
[[package]]
name = "snarkvm-ledger-narwhal-batch-certificate"
-version = "0.16.3"
+version = "0.16.15"
dependencies = [
"bincode",
"indexmap 2.0.2",
+ "rayon",
"serde_json",
"snarkvm-console",
"snarkvm-ledger-narwhal-batch-certificate",
"snarkvm-ledger-narwhal-batch-header",
"snarkvm-ledger-narwhal-transmission-id",
- "time",
]
[[package]]
name = "snarkvm-ledger-narwhal-batch-header"
-version = "0.16.3"
+version = "0.16.15"
dependencies = [
"bincode",
"indexmap 2.0.2",
@@ -3140,7 +3248,7 @@ dependencies = [
[[package]]
name = "snarkvm-ledger-narwhal-data"
-version = "0.16.3"
+version = "0.16.15"
dependencies = [
"bytes",
"serde_json",
@@ -3150,7 +3258,7 @@ dependencies = [
[[package]]
name = "snarkvm-ledger-narwhal-subdag"
-version = "0.16.3"
+version = "0.16.15"
dependencies = [
"bincode",
"indexmap 2.0.2",
@@ -3158,13 +3266,14 @@ dependencies = [
"serde_json",
"snarkvm-console",
"snarkvm-ledger-narwhal-batch-certificate",
+ "snarkvm-ledger-narwhal-batch-header",
"snarkvm-ledger-narwhal-subdag",
"snarkvm-ledger-narwhal-transmission-id",
]
[[package]]
name = "snarkvm-ledger-narwhal-transmission"
-version = "0.16.3"
+version = "0.16.15"
dependencies = [
"bincode",
"bytes",
@@ -3177,7 +3286,7 @@ dependencies = [
[[package]]
name = "snarkvm-ledger-narwhal-transmission-id"
-version = "0.16.3"
+version = "0.16.15"
dependencies = [
"bincode",
"serde_json",
@@ -3187,7 +3296,7 @@ dependencies = [
[[package]]
name = "snarkvm-ledger-query"
-version = "0.16.3"
+version = "0.16.15"
dependencies = [
"async-trait",
"reqwest",
@@ -3199,7 +3308,7 @@ dependencies = [
[[package]]
name = "snarkvm-ledger-store"
-version = "0.16.3"
+version = "0.16.15"
dependencies = [
"aleo-std",
"anyhow",
@@ -3227,7 +3336,7 @@ dependencies = [
[[package]]
name = "snarkvm-ledger-test-helpers"
-version = "0.16.3"
+version = "0.16.15"
dependencies = [
"once_cell",
"snarkvm-circuit",
@@ -3239,9 +3348,17 @@ dependencies = [
"snarkvm-synthesizer-program",
]
+[[package]]
+name = "snarkvm-metrics"
+version = "0.16.15"
+dependencies = [
+ "metrics",
+ "metrics-exporter-prometheus",
+]
+
[[package]]
name = "snarkvm-parameters"
-version = "0.16.3"
+version = "0.16.15"
dependencies = [
"aleo-std",
"anyhow",
@@ -3255,6 +3372,7 @@ dependencies = [
"itertools 0.11.0",
"js-sys",
"lazy_static",
+ "parking_lot",
"paste",
"rand",
"serde_json",
@@ -3273,7 +3391,7 @@ dependencies = [
[[package]]
name = "snarkvm-synthesizer"
-version = "0.16.3"
+version = "0.16.15"
dependencies = [
"aleo-std",
"anyhow",
@@ -3304,7 +3422,7 @@ dependencies = [
[[package]]
name = "snarkvm-synthesizer-process"
-version = "0.16.3"
+version = "0.16.15"
dependencies = [
"aleo-std",
"bincode",
@@ -3324,12 +3442,13 @@ dependencies = [
"snarkvm-ledger-test-helpers",
"snarkvm-synthesizer-program",
"snarkvm-synthesizer-snark",
+ "snarkvm-utilities",
"tempfile",
]
[[package]]
name = "snarkvm-synthesizer-program"
-version = "0.16.3"
+version = "0.16.15"
dependencies = [
"bincode",
"criterion",
@@ -3345,7 +3464,7 @@ dependencies = [
[[package]]
name = "snarkvm-synthesizer-snark"
-version = "0.16.3"
+version = "0.16.15"
dependencies = [
"bincode",
"colored",
@@ -3358,7 +3477,7 @@ dependencies = [
[[package]]
name = "snarkvm-utilities"
-version = "0.16.3"
+version = "0.16.15"
dependencies = [
"aleo-std",
"anyhow",
@@ -3378,7 +3497,7 @@ dependencies = [
[[package]]
name = "snarkvm-utilities-derives"
-version = "0.16.3"
+version = "0.16.15"
dependencies = [
"proc-macro2",
"quote 1.0.33",
@@ -3387,17 +3506,18 @@ dependencies = [
[[package]]
name = "snarkvm-wasm"
-version = "0.16.3"
+version = "0.16.15"
dependencies = [
"getrandom",
- "rand",
- "serde",
+ "snarkvm-circuit-network",
"snarkvm-console",
"snarkvm-curves",
"snarkvm-fields",
+ "snarkvm-ledger-block",
+ "snarkvm-ledger-query",
+ "snarkvm-ledger-store",
"snarkvm-synthesizer",
"snarkvm-utilities",
- "wasm-bindgen",
"wasm-bindgen-test",
]
@@ -3962,8 +4082,6 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7706a72ab36d8cb1f80ffbf0e071533974a60d0a308d01a5d0375bf60499a342"
dependencies = [
"cfg-if",
- "serde",
- "serde_json",
"wasm-bindgen-macro",
]
diff --git a/Cargo.toml b/Cargo.toml
index da49a99dd2..2932649638 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -1,6 +1,6 @@
[package]
name = "snarkvm"
-version = "0.16.3"
+version = "0.16.15"
authors = [ "The Aleo Team " ]
description = "A decentralized virtual machine"
homepage = "https://aleo.org"
@@ -76,6 +76,7 @@ members = [
"ledger/query",
"ledger/store",
"ledger/test-helpers",
+ "metrics",
"parameters",
"synthesizer",
"synthesizer/process",
@@ -123,7 +124,8 @@ cli = [
"rand",
"self_update",
"serde_json",
- "thiserror"
+ "thiserror",
+ "ureq"
]
aleo-cli = [ "snarkvm-synthesizer/aleo-cli" ]
async = [ "snarkvm-ledger/async", "snarkvm-synthesizer/async" ]
@@ -131,6 +133,7 @@ cuda = [ "snarkvm-algorithms/cuda" ]
parameters_no_std_out = [ "snarkvm-parameters/no_std_out" ]
noconfig = [ ]
rocks = [ "snarkvm-ledger/rocks" ]
+test = [ "snarkvm-ledger/test" ]
test-helpers = [ "snarkvm-ledger/test-helpers" ]
timer = [ "snarkvm-ledger/timer" ]
algorithms = [ "snarkvm-algorithms" ]
@@ -139,6 +142,7 @@ console = [ "snarkvm-console" ]
curves = [ "snarkvm-curves" ]
fields = [ "snarkvm-fields" ]
ledger = [ "snarkvm-ledger" ]
+metrics = [ "snarkvm-metrics", "snarkvm-ledger/metrics" ]
parameters = [ "snarkvm-parameters" ]
synthesizer = [ "snarkvm-synthesizer" ]
utilities = [ "snarkvm-utilities" ]
@@ -146,53 +150,58 @@ wasm = [ "snarkvm-wasm" ]
[dependencies.snarkvm-algorithms]
path = "./algorithms"
-version = "=0.16.3"
+version = "=0.16.15"
optional = true
[dependencies.snarkvm-circuit]
path = "./circuit"
-version = "=0.16.3"
+version = "=0.16.15"
optional = true
[dependencies.snarkvm-console]
path = "./console"
-version = "=0.16.3"
+version = "=0.16.15"
optional = true
[dependencies.snarkvm-curves]
path = "./curves"
-version = "=0.16.3"
+version = "=0.16.15"
optional = true
[dependencies.snarkvm-fields]
path = "./fields"
-version = "=0.16.3"
+version = "=0.16.15"
optional = true
[dependencies.snarkvm-ledger]
path = "./ledger"
-version = "=0.16.3"
+version = "=0.16.15"
+optional = true
+
+[dependencies.snarkvm-metrics]
+path = "./metrics"
+version = "=0.16.15"
optional = true
[dependencies.snarkvm-parameters]
path = "./parameters"
-version = "=0.16.3"
+version = "=0.16.15"
optional = true
[dependencies.snarkvm-synthesizer]
path = "./synthesizer"
-version = "=0.16.3"
+version = "=0.16.15"
default-features = false
optional = true
[dependencies.snarkvm-utilities]
path = "./utilities"
-version = "=0.16.3"
+version = "=0.16.15"
optional = true
[dependencies.snarkvm-wasm]
path = "./wasm"
-version = "=0.16.3"
+version = "=0.16.15"
optional = true
[dependencies.anstyle]
@@ -251,6 +260,7 @@ optional = true
[dependencies.ureq]
version = "2.7"
features = [ "json" ]
+optional = true
[dev-dependencies.bincode]
version = "1.3"
diff --git a/README.md b/README.md
index dbd1bf0af6..f496a6c566 100644
--- a/README.md
+++ b/README.md
@@ -6,7 +6,8 @@
-
+
+
## Table of Contents
@@ -85,7 +86,7 @@ snarkvm
## 4. Contributors
-Thank you for helping make SnarkVM better!
+Thank you for helping make snarkVM better!
[π§ What do the emojis mean?](https://allcontributors.org/docs/en/emoji-key)
@@ -104,21 +105,21 @@ Thank you for helping make SnarkVM better!
 Collin Chin π» π π |
-  Alessandro Coglio π» π β οΈ |
+  Alessandro Coglio π» π β οΈ |
 Niklas Long π» |
 jules π» |
 Ali Mousa π» |
 Weikeng Chen π» |
-  Max Bruce π» |
+  Evan Schott π» |
+  Max Bruce π» |
 zhiqiangxu π» |
 Javier RodrΓguez Chatruc π» |
 Eduardo Morais π» |
 Maciej ZwoliΕski π» |
 Ivan Litteri π» |
 Francisco Strambini π» |
-  swift-mx π» |
 Haruka π π» |
@@ -130,11 +131,20 @@ Thank you for helping make SnarkVM better!
 Psi Vesely π» |
+  swift-mx π» |
 Nacho Avecilla π» |
 qy3u π» |
 Yt π» |
 Kostyan π» |
+  stanlagermin π» |
+  Sukey π» |
+
+
+  Alex Zhao π» |
+  ghost ant π» |
+  Psi Vesely π» |
 Dependabot π» |
+  Dependabot Preview π» |
 All Contributors π |
diff --git a/algorithms/Cargo.toml b/algorithms/Cargo.toml
index d6cdf4e8e9..af5a76e19b 100644
--- a/algorithms/Cargo.toml
+++ b/algorithms/Cargo.toml
@@ -1,6 +1,6 @@
[package]
name = "snarkvm-algorithms"
-version = "0.16.3"
+version = "0.16.15"
authors = [ "The Aleo Team " ]
description = "Algorithms for a decentralized virtual machine"
homepage = "https://aleo.org"
@@ -47,27 +47,27 @@ required-features = [ "test" ]
[dependencies.snarkvm-curves]
path = "../curves"
-version = "=0.16.3"
+version = "=0.16.15"
default-features = false
[dependencies.snarkvm-fields]
path = "../fields"
-version = "=0.16.3"
+version = "=0.16.15"
default-features = false
[dependencies.snarkvm-parameters]
path = "../parameters"
-version = "=0.16.3"
+version = "=0.16.15"
optional = true
[dependencies.snarkvm-utilities]
path = "../utilities"
-version = "=0.16.3"
+version = "=0.16.15"
default-features = false
[dependencies.snarkvm-algorithms-cuda]
path = "./cuda"
-version = "=0.16.3"
+version = "=0.16.15"
optional = true
[dependencies.aleo-std]
diff --git a/algorithms/benches/snark/varuna.rs b/algorithms/benches/snark/varuna.rs
index da095e49eb..ea23d8d4ed 100644
--- a/algorithms/benches/snark/varuna.rs
+++ b/algorithms/benches/snark/varuna.rs
@@ -25,7 +25,7 @@ use snarkvm_curves::bls12_377::{Bls12_377, Fq, Fr};
use snarkvm_utilities::{CanonicalDeserialize, CanonicalSerialize, TestRng};
use criterion::Criterion;
-use std::collections::BTreeMap;
+use std::{collections::BTreeMap, time::Duration};
type VarunaInst = VarunaSNARK;
type FS = PoseidonSponge;
@@ -58,11 +58,12 @@ fn snark_circuit_setup(c: &mut Criterion) {
}
fn snark_prove(c: &mut Criterion) {
+ let rng = &mut TestRng::default();
+
c.bench_function("snark_prove", move |b| {
let num_constraints = 100;
let num_variables = 25;
let mul_depth = 1;
- let rng = &mut TestRng::default();
let max_degree = AHPForR1CS::::max_degree(1000, 1000, 1000).unwrap();
let universal_srs = VarunaInst::universal_setup(max_degree).unwrap();
@@ -77,11 +78,12 @@ fn snark_prove(c: &mut Criterion) {
}
fn snark_batch_prove(c: &mut Criterion) {
+ let rng = &mut TestRng::default();
+
c.bench_function("snark_batch_prove", move |b| {
let num_constraints_base = 100;
let num_variables_base = 25;
let mul_depth_base = 1;
- let rng = &mut TestRng::default();
let max_degree = AHPForR1CS::::max_degree(1000000, 1000000, 1000000).unwrap();
let universal_srs = VarunaInst::universal_setup(max_degree).unwrap();
@@ -119,11 +121,12 @@ fn snark_batch_prove(c: &mut Criterion) {
}
fn snark_verify(c: &mut Criterion) {
+ let rng = &mut TestRng::default();
+
c.bench_function("snark_verify", move |b| {
let num_constraints = 100;
let num_variables = 25;
let mul_depth = 1;
- let rng = &mut TestRng::default();
let max_degree = AHPForR1CS::::max_degree(100, 100, 100).unwrap();
let universal_srs = VarunaInst::universal_setup(max_degree).unwrap();
@@ -145,10 +148,11 @@ fn snark_verify(c: &mut Criterion) {
}
fn snark_batch_verify(c: &mut Criterion) {
+ let rng = &mut TestRng::default();
+
c.bench_function("snark_batch_verify", move |b| {
let num_constraints_base = 100;
let num_variables_base = 25;
- let rng = &mut TestRng::default();
let max_degree = AHPForR1CS::::max_degree(1000, 1000, 100).unwrap();
let universal_srs = VarunaInst::universal_setup(max_degree).unwrap();
@@ -200,6 +204,7 @@ fn snark_batch_verify(c: &mut Criterion) {
fn snark_vk_serialize(c: &mut Criterion) {
use snarkvm_utilities::serialize::Compress;
let mut group = c.benchmark_group("snark_vk_serialize");
+ let rng = &mut TestRng::default();
for mode in [Compress::Yes, Compress::No] {
let name = match mode {
Compress::No => "uncompressed",
@@ -208,7 +213,6 @@ fn snark_vk_serialize(c: &mut Criterion) {
let num_constraints = 100;
let num_variables = 25;
let mul_depth = 1;
- let rng = &mut TestRng::default();
let max_degree = AHPForR1CS::::max_degree(100, 100, 100).unwrap();
let universal_srs = VarunaInst::universal_setup(max_degree).unwrap();
@@ -229,6 +233,7 @@ fn snark_vk_serialize(c: &mut Criterion) {
fn snark_vk_deserialize(c: &mut Criterion) {
use snarkvm_utilities::serialize::{Compress, Validate};
let mut group = c.benchmark_group("snark_vk_deserialize");
+ let rng = &mut TestRng::default();
for compress in [Compress::Yes, Compress::No] {
let compress_name = match compress {
Compress::No => "uncompressed",
@@ -243,7 +248,6 @@ fn snark_vk_deserialize(c: &mut Criterion) {
let num_constraints = 100;
let num_variables = 25;
let mul_depth = 1;
- let rng = &mut TestRng::default();
let max_degree = AHPForR1CS::::max_degree(100, 100, 100).unwrap();
let universal_srs = VarunaInst::universal_setup(max_degree).unwrap();
@@ -273,13 +277,13 @@ fn snark_certificate_prove(c: &mut Criterion) {
let fs_p = &fs_parameters;
for size in [100, 1_000, 10_000, 100_000] {
- c.bench_function(&format!("snark_certificate_prove_{size}"), |b| {
- let num_constraints = size;
- let num_variables = size;
- let mul_depth = 1;
- let (circuit, _) = TestCircuit::gen_rand(mul_depth, num_constraints, num_variables, rng);
- let (pk, vk) = VarunaInst::circuit_setup(&universal_srs, &circuit).unwrap();
+ let num_constraints = size;
+ let num_variables = size;
+ let mul_depth = 1;
+ let (circuit, _) = TestCircuit::gen_rand(mul_depth, num_constraints, num_variables, rng);
+ let (pk, vk) = VarunaInst::circuit_setup(&universal_srs, &circuit).unwrap();
+ c.bench_function(&format!("snark_certificate_prove_{size}"), |b| {
b.iter(|| VarunaInst::prove_vk(universal_prover, fs_p, &vk, &pk).unwrap())
});
}
@@ -296,14 +300,14 @@ fn snark_certificate_verify(c: &mut Criterion) {
let fs_p = &fs_parameters;
for size in [100, 1_000, 10_000, 100_000] {
- c.bench_function(&format!("snark_certificate_verify_{size}"), |b| {
- let num_constraints = size;
- let num_variables = size;
- let mul_depth = 1;
- let (circuit, _) = TestCircuit::gen_rand(mul_depth, num_constraints, num_variables, rng);
- let (pk, vk) = VarunaInst::circuit_setup(&universal_srs, &circuit).unwrap();
- let certificate = VarunaInst::prove_vk(universal_prover, fs_p, &vk, &pk).unwrap();
+ let num_constraints = size;
+ let num_variables = size;
+ let mul_depth = 1;
+ let (circuit, _) = TestCircuit::gen_rand(mul_depth, num_constraints, num_variables, rng);
+ let (pk, vk) = VarunaInst::circuit_setup(&universal_srs, &circuit).unwrap();
+ let certificate = VarunaInst::prove_vk(universal_prover, fs_p, &vk, &pk).unwrap();
+ c.bench_function(&format!("snark_certificate_verify_{size}"), |b| {
b.iter(|| VarunaInst::verify_vk(universal_verifier, fs_p, &circuit, &vk, &certificate).unwrap())
});
}
@@ -311,7 +315,7 @@ fn snark_certificate_verify(c: &mut Criterion) {
criterion_group! {
name = varuna_snark;
- config = Criterion::default().sample_size(10);
+ config = Criterion::default().measurement_time(Duration::from_secs(10));
targets = snark_universal_setup, snark_circuit_setup, snark_prove, snark_verify, snark_batch_prove, snark_batch_verify, snark_vk_serialize, snark_vk_deserialize, snark_certificate_prove, snark_certificate_verify,
}
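As an illustrative aside (not part of the diff), a minimal Criterion sketch of the pattern the benchmark changes above apply: hoist one-time setup (such as the `TestRng`) out of the closure passed to `bench_function`, so only the closure given to `b.iter` is timed, and raise the group's `measurement_time` rather than relying on a small `sample_size`. The benchmark name and workload here are hypothetical; the `criterion` dependency is already used by these benches.

```rust
use criterion::{criterion_group, criterion_main, Criterion};
use std::time::Duration;

fn bench_square_sum(c: &mut Criterion) {
    // One-time setup lives outside the benchmark registration, mirroring how the
    // diff moves `TestRng::default()` out of each `bench_function` call.
    let input: Vec<u64> = (0..1_000).collect();
    c.bench_function("square_sum", |b| {
        // Only the closure passed to `iter` is measured.
        b.iter(|| input.iter().map(|x| x * x).sum::<u64>())
    });
}

criterion_group! {
    name = demo;
    config = Criterion::default().measurement_time(Duration::from_secs(10));
    targets = bench_square_sum
}
criterion_main!(demo);
```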
diff --git a/algorithms/cuda/Cargo.toml b/algorithms/cuda/Cargo.toml
index 1e79a92768..f57066d0f1 100644
--- a/algorithms/cuda/Cargo.toml
+++ b/algorithms/cuda/Cargo.toml
@@ -1,6 +1,6 @@
[package]
name = "snarkvm-algorithms-cuda"
-version = "0.16.3"
+version = "0.16.15"
authors = [ "The Aleo Team " ]
description = "Cuda optimizations for a decentralized virtual machine"
homepage = "https://aleo.org"
diff --git a/algorithms/cuda/build.rs b/algorithms/cuda/build.rs
index 8b2036d54a..8e85744b6c 100644
--- a/algorithms/cuda/build.rs
+++ b/algorithms/cuda/build.rs
@@ -20,7 +20,7 @@ fn main() {
// account for cross-compilation [by examining environment variable]
let target_arch = env::var("CARGO_CFG_TARGET_ARCH").unwrap();
- // Set CC environment variable to choose alternative C compiler.
+ // Set CC environment variable to choose an alternative C compiler.
// Optimization level depends on whether or not --release is passed
// or implied.
let mut cc = cc::Build::new();
diff --git a/algorithms/cuda/cuda/polynomial.cuh b/algorithms/cuda/cuda/polynomial.cuh
index e74fa7bad3..745075accc 100644
--- a/algorithms/cuda/cuda/polynomial.cuh
+++ b/algorithms/cuda/cuda/polynomial.cuh
@@ -116,7 +116,7 @@ public:
size_t pcur = 0;
size_t ecur = 0;
- // Set up the first polynomail / evaluation in dmem0
+ // Set up the first polynomial / evaluation in dmem0
if (pcount > 0) {
mul_copy_poly(hmem0, dmem0, polynomials[0], plens[0], stream, lg_domain_size);
// Perform NTT on the input data
diff --git a/algorithms/src/crypto_hash/poseidon.rs b/algorithms/src/crypto_hash/poseidon.rs
index 3476c271b8..77a28d8d47 100644
--- a/algorithms/src/crypto_hash/poseidon.rs
+++ b/algorithms/src/crypto_hash/poseidon.rs
@@ -18,6 +18,7 @@ use snarkvm_utilities::{BigInteger, FromBits, ToBits};
use smallvec::SmallVec;
use std::{
+ iter::Peekable,
ops::{Index, IndexMut},
sync::Arc,
};
@@ -40,7 +41,7 @@ impl State impl Iterator- {
self.capacity_state.iter_mut().chain(self.rate_state.iter_mut())
}
@@ -332,7 +333,11 @@ impl PoseidonSponge {
/// Compress every two elements if possible.
/// Provides a vector of (limb, num_of_additions), both of which are F.
- pub fn compress_elements(&self, src_limbs: &[(F, F)], ty: OptimizationType) -> Vec {
+ pub fn compress_elements>(
+ &self,
+ mut src_limbs: Peekable,
+ ty: OptimizationType,
+ ) -> Vec {
let capacity = F::size_in_bits() - 1;
let mut dest_limbs = Vec::::new();
@@ -341,11 +346,8 @@ impl PoseidonSponge {
// Prepare a reusable vector to be used in overhead calculation.
let mut num_bits = Vec::new();
- let mut i = 0;
- let src_len = src_limbs.len();
- while i < src_len {
- let first = &src_limbs[i];
- let second = if i + 1 < src_len { Some(&src_limbs[i + 1]) } else { None };
+ while let Some(first) = src_limbs.next() {
+ let second = src_limbs.peek();
let first_max_bits_per_limb = params.bits_per_limb + crate::overhead!(first.1 + F::one(), &mut num_bits);
let second_max_bits_per_limb = if let Some(second) = second {
@@ -359,14 +361,12 @@ impl PoseidonSponge {
let adjustment_factor = &self.adjustment_factor_lookup_table[second_max_bits_per_limb];
dest_limbs.push(first.0 * adjustment_factor + second.0);
- i += 2;
+ src_limbs.next();
} else {
dest_limbs.push(first.0);
- i += 1;
}
} else {
dest_limbs.push(first.0);
- i += 1;
}
}
@@ -405,7 +405,7 @@ impl PoseidonSponge {
cur_bits.clear();
}
- // then we reserve, so that the limbs are ``big limb first''
+ // then we reverse, so that the limbs are ``big limb first''
limbs.reverse();
limbs
@@ -417,17 +417,16 @@ impl PoseidonSponge {
 src: impl IntoIterator,
ty: OptimizationType,
) {
- let mut src_limbs = Vec::<(F, F)>::new();
-
- for elem in src {
- let limbs = Self::get_limbs_representations(&elem, ty);
- for limb in limbs.iter() {
- src_limbs.push((*limb, F::one()));
+ let src_limbs = src
+ .into_iter()
+ .flat_map(|elem| {
+ let limbs = Self::get_limbs_representations(&elem, ty);
+ limbs.into_iter().map(|limb| (limb, F::one()))
// specifically set to one, since most gadgets in the constraint world would not have zero noise (due to the relatively weak normal form testing in `alloc`)
- }
- }
+ })
+ .peekable();
- let dest_limbs = self.compress_elements::(&src_limbs, ty);
+ let dest_limbs = self.compress_elements::(src_limbs, ty);
self.absorb_native_field_elements(&dest_limbs);
}
@@ -465,7 +464,7 @@ impl PoseidonSponge {
};
let bits = self.get_bits(num_bits_per_nonnative * num_elements);
- let mut lookup_table = Vec::::new();
+ let mut lookup_table = Vec::::with_capacity(num_bits_per_nonnative);
let mut cur = TargetField::one();
for _ in 0..num_bits_per_nonnative {
lookup_table.push(cur);
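As an illustrative aside (not part of the diff), the reworked `compress_elements` above replaces index arithmetic with the `Peekable` pairing pattern: consume one item with `next()`, look at its potential partner with `peek()`, and only consume the partner when the pair can actually be combined. A minimal, self-contained sketch of that pattern (with a hypothetical "fits" condition) looks like this:

```rust
/// Pair up adjacent values whenever their sum stays within a bound; otherwise
/// emit the value on its own. Mirrors the next()/peek() flow of `compress_elements`.
fn pair_sums(values: impl IntoIterator<Item = u64>) -> Vec<u64> {
    let mut iter = values.into_iter().peekable();
    let mut out = Vec::new();
    while let Some(first) = iter.next() {
        if let Some(&second) = iter.peek() {
            if first + second <= 100 {
                // The pair "fits": combine it and consume the peeked element too.
                out.push(first + second);
                iter.next();
                continue;
            }
        }
        // No partner, or the pair does not fit: emit the element alone.
        out.push(first);
    }
    out
}

fn main() {
    assert_eq!(pair_sums([10, 20, 90, 30, 5]), vec![30, 90, 35]);
}
```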
diff --git a/algorithms/src/errors.rs b/algorithms/src/errors.rs
index 0713953dc7..a1dedf9f12 100644
--- a/algorithms/src/errors.rs
+++ b/algorithms/src/errors.rs
@@ -43,9 +43,6 @@ pub enum SNARKError {
#[error("Circuit not found")]
CircuitNotFound,
-
- #[error("terminated")]
- Terminated,
}
impl From for SNARKError {
@@ -53,12 +50,3 @@ impl From for SNARKError {
SNARKError::Crate("AHPError", format!("{err:?}"))
}
}
-
-impl From for SNARKError {
- fn from(err: crate::polycommit::PCError) -> Self {
- match err {
- crate::polycommit::PCError::Terminated => SNARKError::Terminated,
- err => SNARKError::Crate("PCError", format!("{err:?}")),
- }
- }
-}
diff --git a/algorithms/src/fft/domain.rs b/algorithms/src/fft/domain.rs
index 93e7d1ed12..c121e9bfe2 100644
--- a/algorithms/src/fft/domain.rs
+++ b/algorithms/src/fft/domain.rs
@@ -345,13 +345,13 @@ impl EvaluationDomain {
/// Perform O(n) multiplication of two polynomials that are presented by their
/// evaluations in the domain.
/// Returns the evaluations of the product over the domain.
- #[must_use]
- pub fn mul_polynomials_in_evaluation_domain(&self, self_evals: Vec, other_evals: &[F]) -> Vec {
+ pub fn mul_polynomials_in_evaluation_domain(&self, self_evals: Vec, other_evals: &[F]) -> Result> {
let mut result = self_evals;
+ ensure!(result.len() == other_evals.len());
cfg_iter_mut!(result).zip_eq(other_evals).for_each(|(a, b)| *a *= b);
- result
+ Ok(result)
}
}
diff --git a/algorithms/src/fft/evaluations.rs b/algorithms/src/fft/evaluations.rs
index f58925a02a..532e06756b 100644
--- a/algorithms/src/fft/evaluations.rs
+++ b/algorithms/src/fft/evaluations.rs
@@ -38,7 +38,9 @@ pub struct Evaluations {
impl Evaluations {
/// Construct `Self` from evaluations and a domain.
- pub fn from_vec_and_domain(evaluations: Vec, domain: EvaluationDomain) -> Self {
+ pub fn from_vec_and_domain(mut evaluations: Vec, domain: EvaluationDomain) -> Self {
+ // Pad evaluations to ensure we can always evaluate
+ evaluations.resize(domain.size(), F::zero());
Self { evaluations, domain }
}
diff --git a/algorithms/src/fft/polynomial/dense.rs b/algorithms/src/fft/polynomial/dense.rs
index 07454c9b5a..80b9658fc1 100644
--- a/algorithms/src/fft/polynomial/dense.rs
+++ b/algorithms/src/fft/polynomial/dense.rs
@@ -14,10 +14,12 @@
//! A polynomial represented in coefficient form.
+use super::PolyMultiplier;
use crate::fft::{EvaluationDomain, Evaluations, Polynomial};
use snarkvm_fields::{Field, PrimeField};
use snarkvm_utilities::{cfg_iter_mut, serialize::*};
+use anyhow::Result;
use num_traits::CheckedDiv;
use rand::Rng;
use std::{
@@ -25,14 +27,11 @@ use std::{
ops::{Add, AddAssign, Deref, DerefMut, Div, Mul, MulAssign, Neg, Sub, SubAssign},
};
-#[cfg(feature = "serial")]
use itertools::Itertools;
#[cfg(not(feature = "serial"))]
use rayon::prelude::*;
-use super::PolyMultiplier;
-
/// Stores a polynomial in coefficient form.
#[derive(Clone, PartialEq, Eq, Hash, Default, CanonicalSerialize, CanonicalDeserialize)]
#[must_use]
@@ -75,10 +74,10 @@ impl DensePolynomial {
/// Constructs a new polynomial from a list of coefficients.
pub fn from_coefficients_vec(mut coeffs: Vec) -> Self {
// While there are zeros at the end of the coefficient vector, pop them off.
- while coeffs.last().map_or(false, |c| c.is_zero()) {
+ while let Some(true) = coeffs.last().map(|c| c.is_zero()) {
coeffs.pop();
}
- // Check that either the coefficients vec is empty or that the last coeff is non-zero.
+ // Check that either the coefficients vec are empty or that the last coeff is non-zero.
assert!(coeffs.last().map_or(true, |coeff| !coeff.is_zero()));
Self { coeffs }
@@ -113,10 +112,16 @@ impl DensePolynomial {
crate::cfg_reduce!(mapping, || zero, |a, b| a + b)
}
- /// Outputs a polynomial of degree `d` where each coefficient is sampled uniformly at random
- /// from the field `F`.
+ /// Outputs a univariate polynomial of degree `d` where each non-leading
+ /// coefficient is sampled uniformly at random from R and the leading
+ /// coefficient is sampled uniformly at random from among the non-zero
+ /// elements of R.
pub fn rand(d: usize, rng: &mut R) -> Self {
- let random_coeffs = (0..(d + 1)).map(|_| F::rand(rng)).collect();
+ let mut random_coeffs = (0..(d + 1)).map(|_| F::rand(rng)).collect_vec();
+ while random_coeffs[d].is_zero() {
+ // In the extremely unlikely event, sample again.
+ random_coeffs[d] = F::rand(rng);
+ }
Self::from_coefficients_vec(random_coeffs)
}
@@ -156,7 +161,7 @@ impl DensePolynomial {
pub fn divide_by_vanishing_poly(
&self,
domain: EvaluationDomain,
- ) -> Option<(DensePolynomial, DensePolynomial)> {
+ ) -> Result<(DensePolynomial, DensePolynomial)> {
let self_poly = Polynomial::from(self);
let vanishing_poly = Polynomial::from(domain.vanishing_polynomial());
self_poly.divide_with_q_and_r(&vanishing_poly)
@@ -189,7 +194,7 @@ impl<'a, 'b, F: Field> Add<&'a DensePolynomial> for &'b DensePolynomial {
type Output = DensePolynomial;
fn add(self, other: &'a DensePolynomial) -> DensePolynomial {
- if self.is_zero() {
+ let mut result = if self.is_zero() {
other.clone()
} else if other.is_zero() {
self.clone()
@@ -202,12 +207,13 @@ impl<'a, 'b, F: Field> Add<&'a DensePolynomial> for &'b DensePolynomial {
let mut result = other.clone();
// Zip safety: `result` and `other` could have different lengths.
cfg_iter_mut!(result.coeffs).zip(&self.coeffs).for_each(|(a, b)| *a += b);
- // If the leading coefficient ends up being zero, pop it off.
- while let Some(true) = self.coeffs.last().map(|c| c.is_zero()) {
- result.coeffs.pop();
- }
result
+ };
+ // If the leading coefficient ends up being zero, pop it off.
+ while let Some(true) = result.coeffs.last().map(|c| c.is_zero()) {
+ result.coeffs.pop();
}
+ result
}
}
@@ -273,10 +279,10 @@ impl<'a, F: Field> AddAssign<(F, &'a DensePolynomial)> for DensePolynomial
cfg_iter_mut!(self.coeffs).zip(&other.coeffs).for_each(|(a, b)| {
*a += f * b;
});
- // If the leading coefficient ends up being zero, pop it off.
- while let Some(true) = self.coeffs.last().map(|c| c.is_zero()) {
- self.coeffs.pop();
- }
+ }
+ // If the leading coefficient ends up being zero, pop it off.
+ while let Some(true) = self.coeffs.last().map(|c| c.is_zero()) {
+ self.coeffs.pop();
}
}
}
@@ -298,7 +304,7 @@ impl<'a, 'b, F: Field> Sub<&'a DensePolynomial> for &'b DensePolynomial {
#[inline]
fn sub(self, other: &'a DensePolynomial) -> DensePolynomial {
- if self.is_zero() {
+ let mut result = if self.is_zero() {
let mut result = other.clone();
for coeff in &mut result.coeffs {
*coeff = -(*coeff);
@@ -318,15 +324,13 @@ impl<'a, 'b, F: Field> Sub<&'a DensePolynomial> for &'b DensePolynomial {
cfg_iter_mut!(result.coeffs).zip(&other.coeffs).for_each(|(a, b)| {
*a -= b;
});
- if !result.is_zero() {
- // If the leading coefficient ends up being zero, pop it off.
- while result.coeffs.last().map(|c| c.is_zero()) == Some(true) {
- result.coeffs.pop();
- }
- }
-
result
+ };
+ // If the leading coefficient ends up being zero, pop it off.
+ while let Some(true) = result.coeffs.last().map(|c| c.is_zero()) {
+ result.coeffs.pop();
}
+ result
}
}
@@ -348,10 +352,10 @@ impl<'a, F: Field> SubAssign<&'a DensePolynomial> for DensePolynomial {
self.coeffs.resize(other.coeffs.len(), F::zero());
// Zip safety: self and other have the same length after the resize.
cfg_iter_mut!(self.coeffs).zip(&other.coeffs).for_each(|(a, b)| *a -= b);
- // If the leading coefficient ends up being zero, pop it off.
- while let Some(true) = self.coeffs.last().map(|c| c.is_zero()) {
- self.coeffs.pop();
- }
+ }
+ // If the leading coefficient ends up being zero, pop it off.
+ while let Some(true) = self.coeffs.last().map(|c| c.is_zero()) {
+ self.coeffs.pop();
}
}
}
@@ -421,14 +425,14 @@ impl CheckedDiv for DensePolynomial {
let a: Polynomial<_> = self.into();
let b: Polynomial<_> = divisor.into();
match a.divide_with_q_and_r(&b) {
- Some((divisor, remainder)) => {
+ Ok((divisor, remainder)) => {
if remainder.is_zero() {
Some(divisor)
} else {
None
}
}
- None => None,
+ Err(_) => None,
}
}
}
@@ -592,11 +596,9 @@ mod tests {
for b_degree in 0..70 {
let dividend = DensePolynomial::::rand(a_degree, rng);
let divisor = DensePolynomial::::rand(b_degree, rng);
- if let Some((quotient, remainder)) =
- Polynomial::divide_with_q_and_r(&(&dividend).into(), &(&divisor).into())
- {
- assert_eq!(dividend, &(&divisor * &quotient) + &remainder)
- }
+ let (quotient, remainder) =
+ Polynomial::divide_with_q_and_r(&(&dividend).into(), &(&divisor).into()).unwrap();
+ assert_eq!(dividend, &(&divisor * &quotient) + &remainder)
}
}
}
@@ -615,6 +617,13 @@ mod tests {
}
}
+ #[test]
+ fn divide_poly_by_zero() {
+ let a = Polynomial::::zero();
+ let b = Polynomial::::zero();
+ assert!(a.divide_with_q_and_r(&b).is_err());
+ }
+
#[test]
fn mul_polynomials_random() {
let rng = &mut TestRng::default();
@@ -691,7 +700,7 @@ mod tests {
multiplier.add_polynomial(a.clone(), "a");
assert_eq!(multiplier.multiply().unwrap(), a);
- // Note PolyMultiplier doesn't support a evluations with no polynomials
+ // Note PolyMultiplier doesn't support evaluations with no polynomials
}
#[test]
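As an illustrative aside (not part of the diff), two invariants the dense-polynomial changes above maintain are: trailing zero coefficients are always trimmed after arithmetic, and `rand` resamples the leading coefficient until it is non-zero so the result really has degree `d`. A small sketch of both, using plain `i64` coefficients in place of a field element and assuming the `rand` crate this workspace already depends on:

```rust
use rand::Rng;

/// While there are zeros at the end of the coefficient vector, pop them off.
fn trim_trailing_zeros(mut coeffs: Vec<i64>) -> Vec<i64> {
    while let Some(true) = coeffs.last().map(|c| *c == 0) {
        coeffs.pop();
    }
    coeffs
}

/// Sample `d + 1` coefficients and resample the leading one until it is non-zero.
fn rand_coeffs(d: usize, rng: &mut impl Rng) -> Vec<i64> {
    let mut coeffs: Vec<i64> = (0..=d).map(|_| rng.gen_range(-5..=5)).collect();
    while coeffs[d] == 0 {
        // In the (unlikely) event the leading coefficient is zero, sample again.
        coeffs[d] = rng.gen_range(-5..=5);
    }
    coeffs
}

fn main() {
    assert_eq!(trim_trailing_zeros(vec![1, 2, 0, 0]), vec![1, 2]);
    let coeffs = rand_coeffs(3, &mut rand::thread_rng());
    assert_ne!(coeffs[3], 0);
}
```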
diff --git a/algorithms/src/fft/polynomial/mod.rs b/algorithms/src/fft/polynomial/mod.rs
index fa24d0c72e..8fcb907408 100644
--- a/algorithms/src/fft/polynomial/mod.rs
+++ b/algorithms/src/fft/polynomial/mod.rs
@@ -17,11 +17,11 @@
use crate::fft::{EvaluationDomain, Evaluations};
use snarkvm_fields::{Field, PrimeField};
use snarkvm_utilities::{cfg_iter_mut, serialize::*, SerializationError};
+use Polynomial::*;
+use anyhow::{ensure, Result};
use std::{borrow::Cow, convert::TryInto};
-use Polynomial::*;
-
#[cfg(not(feature = "serial"))]
use rayon::prelude::*;
@@ -67,7 +67,16 @@ impl<'a, F: Field> CanonicalSerialize for Polynomial<'a, F> {
impl<'a, F: Field> Valid for Polynomial<'a, F> {
fn check(&self) -> Result<(), SerializationError> {
- Ok(())
+ // Check that the polynomial contains a trailing zero coefficient.
+ let has_trailing_zero = match self {
+ Sparse(p) => p.coeffs().last().map(|(_, c)| c.is_zero()),
+ Dense(p) => p.coeffs.last().map(|c| c.is_zero()),
+ };
+ // Fail if the trailing coefficient is zero.
+ match has_trailing_zero {
+ Some(true) => Err(SerializationError::InvalidData),
+ Some(false) | None => Ok(()),
+ }
}
}
@@ -209,13 +218,13 @@ impl<'a, F: Field> Polynomial<'a, F> {
}
/// Divide self by another (sparse or dense) polynomial, and returns the quotient and remainder.
- pub fn divide_with_q_and_r(&self, divisor: &Self) -> Option<(DensePolynomial, DensePolynomial)> {
+ pub fn divide_with_q_and_r(&self, divisor: &Self) -> Result<(DensePolynomial, DensePolynomial)> {
+ ensure!(!divisor.is_zero(), "Dividing by zero polynomial is undefined");
+
if self.is_zero() {
- Some((DensePolynomial::zero(), DensePolynomial::zero()))
- } else if divisor.is_zero() {
- panic!("Dividing by zero polynomial")
+ Ok((DensePolynomial::zero(), DensePolynomial::zero()))
} else if self.degree() < divisor.degree() {
- Some((DensePolynomial::zero(), self.clone().into()))
+ Ok((DensePolynomial::zero(), self.clone().into()))
} else {
// Now we know that self.degree() >= divisor.degree();
let mut quotient = vec![F::zero(); self.degree() - divisor.degree() + 1];
@@ -241,7 +250,7 @@ impl<'a, F: Field> Polynomial<'a, F> {
remainder.coeffs.pop();
}
}
- Some((DensePolynomial::from_coefficients_vec(quotient), remainder))
+ Ok((DensePolynomial::from_coefficients_vec(quotient), remainder))
}
}
}
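As an illustrative aside (not part of the diff), the `divide_with_q_and_r` change above follows a simple refactor pattern: an `Option` return plus an internal `panic!` becomes an `anyhow::Result`, with the invalid input rejected up front via `ensure!`. A minimal sketch of the same pattern on integer division (hypothetical function, same crates as the diff):

```rust
use anyhow::{ensure, Result};

/// Return (quotient, remainder), rejecting a zero divisor instead of panicking.
fn integer_div_rem(dividend: i64, divisor: i64) -> Result<(i64, i64)> {
    ensure!(divisor != 0, "Dividing by zero is undefined");
    Ok((dividend / divisor, dividend % divisor))
}

fn main() -> Result<()> {
    assert_eq!(integer_div_rem(7, 2)?, (3, 1));
    assert!(integer_div_rem(7, 0).is_err());
    Ok(())
}
```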
diff --git a/algorithms/src/fft/polynomial/multiplier.rs b/algorithms/src/fft/polynomial/multiplier.rs
index 66d7a82c9e..c93aa4de76 100644
--- a/algorithms/src/fft/polynomial/multiplier.rs
+++ b/algorithms/src/fft/polynomial/multiplier.rs
@@ -102,7 +102,7 @@ impl<'a, F: PrimeField> PolyMultiplier<'a, F> {
}
let fft_pc = &self.fft_precomputation.unwrap();
let ifft_pc = &self.ifft_precomputation.unwrap();
- let mut pool = ExecutionPool::new();
+ let mut pool = ExecutionPool::with_capacity(self.polynomials.len() + self.evaluations.len());
for (_, p) in self.polynomials {
pool.add_job(move || {
let mut p = p.clone().into_owned().coeffs;
@@ -146,7 +146,7 @@ impl<'a, F: PrimeField> PolyMultiplier<'a, F> {
Some(Cow::Owned(self.fft_precomputation.as_ref().unwrap().to_ifft_precomputation()));
}
let fft_pc = self.fft_precomputation.as_ref().unwrap();
- let mut pool = ExecutionPool::new();
+ let mut pool = ExecutionPool::with_capacity(self.polynomials.len() + self.evaluations.len());
for (l, p) in self.polynomials {
pool.add_job(move || {
let mut p = p.clone().into_owned().coeffs;
diff --git a/algorithms/src/fft/polynomial/sparse.rs b/algorithms/src/fft/polynomial/sparse.rs
index 23b2b1f0df..f38967f99f 100644
--- a/algorithms/src/fft/polynomial/sparse.rs
+++ b/algorithms/src/fft/polynomial/sparse.rs
@@ -93,7 +93,7 @@ impl SparsePolynomial {
total
}
- /// Perform a naive n^2 multiplicatoin of `self` by `other`.
+ /// Perform a naive n^2 multiplication of `self` by `other`.
pub fn mul(&self, other: &Self) -> Self {
if self.is_zero() || other.is_zero() {
SparsePolynomial::zero()
diff --git a/algorithms/src/msm/variable_base/batched.rs b/algorithms/src/msm/variable_base/batched.rs
index 18fe9e9b8e..b1f172d8a4 100644
--- a/algorithms/src/msm/variable_base/batched.rs
+++ b/algorithms/src/msm/variable_base/batched.rs
@@ -48,7 +48,7 @@ impl PartialOrd for BucketPosition {
}
}
-/// Returns a batch size of sufficient size to amortise the cost of an inversion,
+/// Returns a batch size of sufficient size to amortize the cost of an inversion,
/// while attempting to reduce strain to the CPU cache.
#[inline]
const fn batch_size(msm_size: usize) -> usize {
@@ -57,7 +57,7 @@ const fn batch_size(msm_size: usize) -> usize {
// L1 and L2 cache sizes and dividing them by the size of group elements (i.e. 96 bytes).
//
// As the algorithm itself requires caching additional values beyond the group elements,
- // the ideal batch size is less than expectations, to accommodate those values.
+ // the ideal batch size is less than expected, to accommodate those values.
// In general, it was found that undershooting is better than overshooting this heuristic.
if cfg!(target_arch = "x86_64") && msm_size < 500_000 {
// Assumes an L1 cache size of 32KiB. Note that larger cache sizes
@@ -192,7 +192,7 @@ pub(super) fn batch_add(
let mut number_of_bases_in_batch = 0;
let mut instr = Vec::<(u32, u32)>::with_capacity(batch_size);
- let mut new_bases = Vec::with_capacity(bases.len() * 3 / 8);
+ let mut new_bases = Vec::with_capacity(bases.len());
let mut scratch_space = Vec::with_capacity(batch_size / 2);
// In the first loop, copy the results of the first in-place addition tree to the vector `new_bases`.
diff --git a/algorithms/src/polycommit/error.rs b/algorithms/src/polycommit/error.rs
index 2be41f7f8e..f101d30224 100644
--- a/algorithms/src/polycommit/error.rs
+++ b/algorithms/src/polycommit/error.rs
@@ -13,33 +13,33 @@
// limitations under the License.
/// The error type for `PolynomialCommitment`.
-#[derive(Debug)]
+#[derive(Debug, Error)]
pub enum PCError {
- AnyhowError(anyhow::Error),
+ #[error("{0}")]
+ AnyhowError(#[from] anyhow::Error),
- /// The query set contains a label for a polynomial that was not provided as
- /// input to the `PC::open`.
+ #[error("QuerySet` refers to polynomial \"{label}\", but it was not provided.")]
MissingPolynomial {
- /// The label of the missing polynomial.
+ /// The label of the missing polynomial
label: String,
},
- /// `Evaluations` does not contain an evaluation for the polynomial labelled
- /// `label` at a particular query.
+ #[error("`QuerySet` refers to polynomial \"{label}\", but `Evaluations` does not contain an evaluation for it.")]
MissingEvaluation {
/// The label of the missing polynomial.
label: String,
},
- /// The provided polynomial was meant to be hiding, but `rng` was `None`.
+ #[error("The provided polynomial was meant to be hiding, but `rng` was `None`.")]
MissingRng,
- /// The degree provided in setup was too small; degree 0 polynomials
- /// are not supported.
+ #[error("The degree provided in setup was too small; degree 0 polynomials are not supported.")]
DegreeIsZero,
- /// The degree of the polynomial passed to `commit` or `open`
- /// was too large.
+ #[error(
+ "the number of coefficients in the polynomial ({num_coefficients:?}) is greater than \
+ the maximum number of powers in `Powers` ({num_powers:?})"
+ )]
TooManyCoefficients {
/// The number of coefficients in the polynomial.
num_coefficients: usize,
@@ -47,10 +47,12 @@ pub enum PCError {
num_powers: usize,
},
- /// The hiding bound was not `None`, but the hiding bound was zero.
+ #[error("The hiding bound was not `None`, but the hiding bound was zero.")]
HidingBoundIsZero,
- /// The hiding bound was too large for the given `Powers`.
+ #[error(
+ "the degree of the hiding poly ({hiding_poly_degree:?}) is not less than the maximum number of powers in `Powers` ({num_powers:?})"
+ )]
HidingBoundToolarge {
/// The hiding bound
hiding_poly_degree: usize,
@@ -58,29 +60,28 @@ pub enum PCError {
num_powers: usize,
},
- /// The lagrange basis is not a power of two.
+ #[error("The lagrange basis is not a power of two.")]
LagrangeBasisSizeIsNotPowerOfTwo,
- /// The lagrange basis is larger than the supported degree,
+ #[error("The lagrange basis is larger than the supported degree.")]
LagrangeBasisSizeIsTooLarge,
- /// The degree provided to `trim` was too large.
+ #[error("The degree provided to `trim` was too large.")]
TrimmingDegreeTooLarge,
- /// The provided equation contained multiple polynomials, of which least one
- /// had a strict degree bound.
+ #[error("the equation \"{0}\" contained degree-bounded polynomials")]
EquationHasDegreeBounds(String),
- /// The required degree bound is not supported by ck/vk
+ #[error("the degree bound ({0}) is not supported by the parameters")]
UnsupportedDegreeBound(usize),
- /// The provided equation contained multiple polynomials, of which least one
- /// had a strict degree bound.
+ #[error("the Lagrange basis size ({0}) is not supported by the parameters")]
UnsupportedLagrangeBasisSize(usize),
- /// The degree bound for the `index`-th polynomial passed to `commit`, `open`
- /// or `check` was incorrect, that is, `degree_bound >= poly_degree` or
- /// `degree_bound <= max_degree`.
+ #[error(
+ "the degree bound ({degree_bound}) for the polynomial {label} \
+ (having degree {poly_degree}) is greater than the maximum degree ({max_degree})"
+ )]
IncorrectDegreeBound {
/// Degree of the polynomial.
poly_degree: usize,
@@ -91,63 +92,4 @@ pub enum PCError {
/// Index of the offending polynomial.
label: String,
},
-
- Terminated,
-}
-
-impl snarkvm_utilities::error::Error for PCError {}
-
-impl From for PCError {
- fn from(other: anyhow::Error) -> Self {
- Self::AnyhowError(other)
- }
-}
-
-impl core::fmt::Display for PCError {
- fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
- match self {
- Self::AnyhowError(error) => write!(f, "{error}"),
- Self::MissingPolynomial { label } => {
- write!(f, "`QuerySet` refers to polynomial \"{label}\", but it was not provided.")
- }
- Self::MissingEvaluation { label } => write!(
- f,
- "`QuerySet` refers to polynomial \"{label}\", but `Evaluations` does not contain an evaluation for it."
- ),
- Self::MissingRng => write!(f, "hiding commitments require `Some(rng)`"),
- Self::DegreeIsZero => write!(f, "this scheme does not support committing to degree 0 polynomials"),
- Self::TooManyCoefficients { num_coefficients, num_powers } => write!(
- f,
- "the number of coefficients in the polynomial ({num_coefficients:?}) is greater than\
- the maximum number of powers in `Powers` ({num_powers:?})"
- ),
- Self::HidingBoundIsZero => write!(f, "this scheme does not support non-`None` hiding bounds that are 0"),
- Self::HidingBoundToolarge { hiding_poly_degree, num_powers } => write!(
- f,
- "the degree of the hiding poly ({hiding_poly_degree:?}) is not less than the maximum number of powers in `Powers` ({num_powers:?})"
- ),
- Self::TrimmingDegreeTooLarge => write!(f, "the degree provided to `trim` was too large"),
- Self::EquationHasDegreeBounds(e) => {
- write!(f, "the eqaution \"{e}\" contained degree-bounded polynomials")
- }
- Self::UnsupportedDegreeBound(bound) => {
- write!(f, "the degree bound ({bound:?}) is not supported by the parameters")
- }
- Self::LagrangeBasisSizeIsNotPowerOfTwo => {
- write!(f, "the Lagrange Basis size is not a power of two")
- }
- Self::UnsupportedLagrangeBasisSize(size) => {
- write!(f, "the Lagrange basis size ({size:?}) is not supported by the parameters")
- }
- Self::LagrangeBasisSizeIsTooLarge => {
- write!(f, "the Lagrange Basis size larger than max supported degree")
- }
- Self::IncorrectDegreeBound { poly_degree, degree_bound, max_degree, label } => write!(
- f,
- "the degree bound ({degree_bound}) for the polynomial {label} \
- (having degree {poly_degree}) is greater than the maximum degree ({max_degree})"
- ),
- Self::Terminated => write!(f, "terminated"),
- }
- }
}
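As an illustrative aside (not part of the diff), the `PCError` hunk above swaps a hand-written `core::fmt::Display` implementation and manual `From` conversion for `thiserror` derive attributes. A minimal, self-contained sketch of that pattern (hypothetical `DemoError` type; the `thiserror` dependency is assumed, as the diff itself relies on it):

```rust
use thiserror::Error;

#[derive(Debug, Error)]
pub enum DemoError {
    // `#[error(...)]` generates the `Display` impl that used to be written by hand.
    #[error("`QuerySet` refers to polynomial \"{label}\", but it was not provided.")]
    MissingPolynomial { label: String },

    // `#[from]` generates the `From` conversion and wires up `source()`; the diff
    // uses the same attribute for `anyhow::Error`.
    #[error("i/o error: {0}")]
    Io(#[from] std::io::Error),
}

fn main() {
    let err = DemoError::MissingPolynomial { label: "w_poly".to_string() };
    assert_eq!(err.to_string(), "`QuerySet` refers to polynomial \"w_poly\", but it was not provided.");
}
```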
diff --git a/algorithms/src/polycommit/kzg10/data_structures.rs b/algorithms/src/polycommit/kzg10/data_structures.rs
index c13328439c..495e817788 100644
--- a/algorithms/src/polycommit/kzg10/data_structures.rs
+++ b/algorithms/src/polycommit/kzg10/data_structures.rs
@@ -31,7 +31,6 @@ use snarkvm_utilities::{
use crate::srs::{UniversalProver, UniversalVerifier};
use anyhow::Result;
use core::ops::{Add, AddAssign};
-use parking_lot::RwLock;
use rand_core::RngCore;
use std::{collections::BTreeMap, io, ops::Range, sync::Arc};
@@ -42,7 +41,7 @@ pub struct UniversalParams {
/// and group elements of the form `{ \beta^i \gamma G }`, where `i` ranges from 0 to `degree`.
/// This struct provides an abstraction over the powers which are located on-disk
/// to reduce memory usage.
- powers: Arc>>,
+ powers: Arc>,
/// The generator of G2.
pub h: E::G2Affine,
/// The generator of G2, prepared for use in pairings.
@@ -53,16 +52,16 @@ pub struct UniversalParams {
impl UniversalParams {
pub fn load() -> Result {
- let powers = Arc::new(RwLock::new(PowersOfG::::load()?));
+ let powers = Arc::new(PowersOfG::::load()?);
let h = E::G2Affine::prime_subgroup_generator();
let prepared_h = h.prepare();
- let prepared_beta_h = powers.read().beta_h().prepare();
+ let prepared_beta_h = powers.beta_h().prepare();
Ok(Self { powers, h, prepared_h, prepared_beta_h })
}
pub fn download_powers_for(&self, range: Range) -> Result<()> {
- self.powers.write().download_powers_for(range)
+ self.powers.download_powers_for(range)
}
pub fn lagrange_basis(&self, domain: EvaluationDomain) -> Result> {
@@ -72,23 +71,23 @@ impl UniversalParams {
}
pub fn power_of_beta_g(&self, index: usize) -> Result {
- self.powers.write().power_of_beta_g(index)
+ self.powers.power_of_beta_g(index)
}
pub fn powers_of_beta_g(&self, lower: usize, upper: usize) -> Result> {
- Ok(self.powers.write().powers_of_beta_g(lower..upper)?.to_vec())
+ self.powers.powers_of_beta_g(lower..upper)
}
- pub fn powers_of_beta_times_gamma_g(&self) -> Arc<BTreeMap<usize, E::G1Affine>> {
- self.powers.read().powers_of_beta_gamma_g()
+ pub fn powers_of_beta_times_gamma_g(&self) -> &BTreeMap<usize, E::G1Affine> {
+ self.powers.powers_of_beta_gamma_g()
}
pub fn beta_h(&self) -> E::G2Affine {
- self.powers.read().beta_h()
+ self.powers.beta_h()
}
pub fn max_degree(&self) -> usize {
- self.powers.read().max_num_powers() - 1
+ self.powers.max_num_powers() - 1
}
pub fn to_universal_prover(&self) -> Result> {
@@ -105,7 +104,7 @@ impl UniversalParams {
Ok(UniversalVerifier {
vk: VerifierKey:: { g, gamma_g, h, beta_h, prepared_h, prepared_beta_h },
- prepared_negative_powers_of_beta_h: self.powers.read().prepared_negative_powers_of_beta_h(),
+ prepared_negative_powers_of_beta_h: self.powers.prepared_negative_powers_of_beta_h(),
})
}
}
@@ -113,7 +112,7 @@ impl UniversalParams {
impl FromBytes for UniversalParams {
fn read_le(mut reader: R) -> io::Result {
// Deserialize `powers`.
- let powers = Arc::new(RwLock::new(PowersOfG::read_le(&mut reader)?));
+ let powers = Arc::new(PowersOfG::read_le(&mut reader)?);
// Deserialize `h`.
let h: E::G2Affine = FromBytes::read_le(&mut reader)?;
@@ -131,7 +130,7 @@ impl FromBytes for UniversalParams {
impl ToBytes for UniversalParams {
fn write_le(&self, mut writer: W) -> io::Result<()> {
// Serialize powers.
- self.powers.read().write_le(&mut writer)?;
+ self.powers.write_le(&mut writer)?;
// Serialize `h`.
self.h.write_le(&mut writer)?;
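The hunks above drop the outer `parking_lot::RwLock` around `powers`, so `UniversalParams` methods take `&self` on a plain `Arc`; that only works if the wrapped type synchronizes itself internally. A minimal, hypothetical sketch of that pattern (the type and methods below are illustrative stand-ins, not the real `PowersOfG` API):

```rust
use std::collections::BTreeMap;
use std::sync::{Arc, RwLock};

/// Illustrative stand-in for a type like `PowersOfG`: the lock lives *inside*
/// the struct, so shared `Arc` handles can call `&self` methods that still mutate.
struct Powers {
    cache: RwLock<BTreeMap<usize, u64>>,
}

impl Powers {
    fn new() -> Self {
        Self { cache: RwLock::new(BTreeMap::new()) }
    }

    /// Read-or-fill accessor: callers only need `&self`, mirroring how the
    /// params can now call `self.powers.power_of_beta_g(i)` without first
    /// taking an external `write()` lock.
    fn power(&self, index: usize) -> u64 {
        if let Some(p) = self.cache.read().unwrap().get(&index) {
            return *p;
        }
        let value = (index as u64).pow(2); // placeholder for the real on-demand load
        self.cache.write().unwrap().insert(index, value);
        value
    }
}

fn main() {
    let powers = Arc::new(Powers::new()); // no outer RwLock needed
    assert_eq!(powers.power(3), 9);
    assert_eq!(powers.power(3), 9); // second call hits the internal cache
}
```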
diff --git a/algorithms/src/polycommit/kzg10/mod.rs b/algorithms/src/polycommit/kzg10/mod.rs
index 8ddaf0f79c..e6fe95d534 100644
--- a/algorithms/src/polycommit/kzg10/mod.rs
+++ b/algorithms/src/polycommit/kzg10/mod.rs
@@ -24,11 +24,11 @@ use crate::{
msm::VariableBase,
polycommit::PCError,
};
-use anyhow::anyhow;
use snarkvm_curves::traits::{AffineCurve, PairingCurve, PairingEngine, ProjectiveCurve};
use snarkvm_fields::{One, PrimeField, Zero};
use snarkvm_utilities::{cfg_iter, cfg_iter_mut, rand::Uniform, BitIteratorBE};
+use anyhow::{anyhow, ensure, Result};
use core::{marker::PhantomData, ops::Mul};
use itertools::Itertools;
use rand_core::RngCore;
@@ -276,7 +276,7 @@ impl KZG10 {
evaluations: &[E::Fr],
point: E::Fr,
evaluation_at_point: E::Fr,
- ) -> Result, PCError> {
+ ) -> Result> {
Self::check_degree_is_too_large(evaluations.len() - 1, lagrange_basis.size())?;
// Ensure that the point is not in the domain
if lagrange_basis.domain.evaluate_vanishing_polynomial(point).is_zero() {
@@ -290,6 +290,7 @@ impl KZG10 {
let mut divisor_evals = cfg_iter!(domain_elements).map(|&e| e - point).collect::>();
snarkvm_fields::batch_inversion(&mut divisor_evals);
+ ensure!(divisor_evals.len() == evaluations.len());
cfg_iter_mut!(divisor_evals).zip_eq(evaluations).for_each(|(divisor_eval, &eval)| {
*divisor_eval *= eval - evaluation_at_point;
});
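`zip_eq` panics if its two sides have different lengths, so the added `ensure!` turns a would-be panic into an `Err` before the zip runs. A small standalone sketch of the pattern, under the same `anyhow`/`itertools` assumptions:

```rust
use anyhow::{ensure, Result};
use itertools::Itertools;

/// Multiplies two slices element-wise, reporting a length mismatch as an
/// error instead of letting `zip_eq` panic mid-iteration.
fn pointwise_mul(a: &[u64], b: &[u64]) -> Result<Vec<u64>> {
    ensure!(a.len() == b.len(), "length mismatch: {} vs {}", a.len(), b.len());
    Ok(a.iter().zip_eq(b).map(|(x, y)| x * y).collect())
}

fn main() -> Result<()> {
    assert_eq!(pointwise_mul(&[1, 2, 3], &[4, 5, 6])?, vec![4, 10, 18]);
    assert!(pointwise_mul(&[1, 2], &[1]).is_err()); // caught by `ensure!`, no panic
    Ok(())
}
```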
@@ -351,7 +352,7 @@ impl KZG10 {
values: &[E::Fr],
proofs: &[KZGProof],
rng: &mut R,
- ) -> Result {
+ ) -> Result {
let check_time = start_timer!(|| format!("Checking {} evaluation proofs", commitments.len()));
let g = vk.g.to_projective();
let gamma_g = vk.gamma_g.to_projective();
@@ -365,6 +366,9 @@ impl KZG10 {
// their coefficients and perform a final multiplication at the end.
let mut g_multiplier = E::Fr::zero();
let mut gamma_g_multiplier = E::Fr::zero();
+ ensure!(commitments.len() == points.len());
+ ensure!(commitments.len() == values.len());
+ ensure!(commitments.len() == proofs.len());
for (((c, z), v), proof) in commitments.iter().zip_eq(points).zip_eq(values).zip_eq(proofs) {
let w = proof.w;
let mut temp = w.mul(*z);
@@ -451,11 +455,12 @@ fn skip_leading_zeros_and_convert_to_bigints(p: &DensePolynomial<
if p.coeffs.is_empty() {
(0, vec![])
} else {
- let mut num_leading_zeros = 0;
- while p.coeffs[num_leading_zeros].is_zero() && num_leading_zeros < p.coeffs.len() {
- num_leading_zeros += 1;
- }
- let coeffs = convert_to_bigints(&p.coeffs[num_leading_zeros..]);
+ let num_leading_zeros = p.coeffs.iter().take_while(|c| c.is_zero()).count();
+ let coeffs = if num_leading_zeros == p.coeffs.len() {
+ vec![]
+ } else {
+ convert_to_bigints(&p.coeffs[num_leading_zeros..])
+ };
(num_leading_zeros, coeffs)
}
}
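The last hunk replaces a manual index loop, which could read one element past the end when every coefficient is zero, with `take_while().count()` plus an explicit all-zero branch. A standalone sketch of that counting pattern, using plain integers in place of `E::Fr`:

```rust
/// Counts leading zeros and returns the trimmed tail, mirroring the rewritten
/// `skip_leading_zeros_and_convert_to_bigints` (integers stand in for field elements).
fn skip_leading_zeros(coeffs: &[u64]) -> (usize, Vec<u64>) {
    let num_leading_zeros = coeffs.iter().take_while(|c| **c == 0).count();
    let tail = if num_leading_zeros == coeffs.len() {
        Vec::new() // all-zero input: nothing left to convert
    } else {
        coeffs[num_leading_zeros..].to_vec()
    };
    (num_leading_zeros, tail)
}

fn main() {
    assert_eq!(skip_leading_zeros(&[0, 0, 7, 8]), (2, vec![7, 8]));
    // The old `while` loop indexed `coeffs[num_leading_zeros]` before checking the
    // bound, so this all-zero case is exactly what the `take_while` form hardens.
    assert_eq!(skip_leading_zeros(&[0, 0, 0]), (3, vec![]));
}
```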
diff --git a/algorithms/src/polycommit/sonic_pc/data_structures.rs b/algorithms/src/polycommit/sonic_pc/data_structures.rs
index 25fbbd8292..6504d198fa 100644
--- a/algorithms/src/polycommit/sonic_pc/data_structures.rs
+++ b/algorithms/src/polycommit/sonic_pc/data_structures.rs
@@ -36,7 +36,7 @@ pub type Randomness = kzg10::KZGRandomness;
pub type Commitment = kzg10::KZGCommitment;
/// `CommitterKey` is used to commit to, and create evaluation proofs for, a given polynomial.
-#[derive(Clone, Debug, Default, Hash, CanonicalSerialize, CanonicalDeserialize, PartialEq, Eq)]
+#[derive(Debug)]
pub struct CommitterKey {
/// The key used to commit to polynomials.
pub powers_of_beta_g: Vec,
@@ -271,7 +271,7 @@ impl CommitterKey {
}
/// `CommitterUnionKey` is a union of `CommitterKey`s, useful for multi-circuit batch proofs.
-#[derive(Clone, Debug, Hash, PartialEq, Eq)]
+#[derive(Debug)]
pub struct CommitterUnionKey<'a, E: PairingEngine> {
/// The key used to commit to polynomials.
pub powers_of_beta_g: Option<&'a Vec>,
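Trimming `CommitterKey`'s derives down to `Debug` removes, among others, `Clone`; for a struct whose fields are long vectors of group elements, a derived `Clone` lets an innocuous-looking `.clone()` copy megabytes of key material. A toy illustration of the cost being opted out of (the type and sizes below are made up):

```rust
/// Stand-in for a committer key holding many "group elements" (here just u64s).
#[derive(Debug, Clone)]
struct BigKey {
    powers_of_beta_g: Vec<u64>,
}

fn main() {
    let key = BigKey { powers_of_beta_g: vec![0u64; 1 << 20] };
    // With `Clone` derived, this one line silently allocates and copies ~8 MB.
    let copy = key.clone();
    assert_eq!(copy.powers_of_beta_g.len(), key.powers_of_beta_g.len());
    // Dropping the derive, as the hunk above does for `CommitterKey`, turns such
    // accidental deep copies into compile errors and nudges callers toward borrows.
}
```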
diff --git a/algorithms/src/polycommit/sonic_pc/mod.rs b/algorithms/src/polycommit/sonic_pc/mod.rs
index 3b1858f6e2..847483ca69 100644
--- a/algorithms/src/polycommit/sonic_pc/mod.rs
+++ b/algorithms/src/polycommit/sonic_pc/mod.rs
@@ -24,7 +24,7 @@ use itertools::Itertools;
use snarkvm_curves::traits::{AffineCurve, PairingCurve, PairingEngine, ProjectiveCurve};
use snarkvm_fields::{One, Zero};
-use anyhow::{bail, Result};
+use anyhow::{bail, ensure, Result};
use core::{convert::TryInto, marker::PhantomData, ops::Mul};
use rand_core::{RngCore, SeedableRng};
use std::{
@@ -95,18 +95,17 @@ impl> SonicKZG10 {
max_degree - lowest_shift_degree + 1
));
- let shifted_powers_of_beta_g = pp.powers_of_beta_g(lowest_shift_degree, pp.max_degree() + 1)?.to_vec();
+ let shifted_powers_of_beta_g = pp.powers_of_beta_g(lowest_shift_degree, pp.max_degree() + 1)?;
let mut shifted_powers_of_beta_times_gamma_g = BTreeMap::new();
// Also add degree 0.
for degree_bound in enforced_degree_bounds {
let shift_degree = max_degree - degree_bound;
- let mut powers_for_degree_bound = Vec::with_capacity((max_degree + 2).saturating_sub(shift_degree));
- for i in 0..=supported_hiding_bound + 1 {
- // We have an additional degree in `powers_of_beta_times_gamma_g` beyond `powers_of_beta_g`.
- if shift_degree + i < max_degree + 2 {
- powers_for_degree_bound.push(pp.powers_of_beta_times_gamma_g()[&(shift_degree + i)]);
- }
- }
+ // We have an additional degree in `powers_of_beta_times_gamma_g` beyond `powers_of_beta_g`.
+ let powers_for_degree_bound = pp
+ .powers_of_beta_times_gamma_g()
+ .range(shift_degree..max_degree.min(shift_degree + supported_hiding_bound) + 2)
+ .map(|(_k, v)| *v)
+ .collect();
shifted_powers_of_beta_times_gamma_g.insert(*degree_bound, powers_for_degree_bound);
}
@@ -118,15 +117,17 @@ impl> SonicKZG10 {
(None, None)
};
- let powers_of_beta_g = pp.powers_of_beta_g(0, supported_degree + 1)?.to_vec();
- let powers_of_beta_times_gamma_g = (0..=(supported_hiding_bound + 1))
- .map(|i| {
- pp.powers_of_beta_times_gamma_g()
- .get(&i)
- .copied()
- .ok_or(PCError::HidingBoundToolarge { hiding_poly_degree: supported_hiding_bound, num_powers: 0 })
- })
- .collect::, _>>()?;
+ let powers_of_beta_g = pp.powers_of_beta_g(0, supported_degree + 1)?;
+ let powers_of_beta_times_gamma_g = pp
+ .powers_of_beta_times_gamma_g()
+ .range(0..=(supported_hiding_bound + 1))
+ .map(|(_k, v)| *v)
+ .collect::>();
+ if powers_of_beta_times_gamma_g.len() != supported_hiding_bound + 2 {
+ return Err(
+ PCError::HidingBoundToolarge { hiding_poly_degree: supported_hiding_bound, num_powers: 0 }.into()
+ );
+ }
let mut lagrange_bases_at_beta_g = BTreeMap::new();
for size in supported_lagrange_sizes {
@@ -159,7 +160,7 @@ impl> SonicKZG10 {
Ok((ck, vk))
}
- /// Outputs a commitments to `polynomials`.
+ /// Outputs commitments to `polynomials`.
///
/// If `polynomials[i].is_hiding()`, then the `i`-th commitment is hiding
/// up to `polynomials.hiding_bound()` queries.
@@ -180,8 +181,6 @@ impl> SonicKZG10 {
) -> Result<(Vec>>, Vec>), PCError> {
let rng = &mut OptionalRng(rng);
let commit_time = start_timer!(|| "Committing to polynomials");
- let mut labeled_comms: Vec>> = Vec::new();
- let mut randomness: Vec> = Vec::new();
let mut pool = snarkvm_utilities::ExecutionPool::>::new();
for p in polynomials {
@@ -210,49 +209,42 @@ impl> SonicKZG10 {
hiding_bound,
));
- let (comm, rand) = p
- .sum()
- .map(move |p| {
- let rng_ref = rng.as_mut().map(|s| s as _);
- match p {
- PolynomialWithBasis::Lagrange { evaluations } => {
- let domain = crate::fft::EvaluationDomain::new(evaluations.evaluations.len()).unwrap();
- let lagrange_basis = ck
- .lagrange_basis(domain)
- .ok_or(PCError::UnsupportedLagrangeBasisSize(domain.size()))?;
- assert!(domain.size().is_power_of_two());
- assert!(lagrange_basis.size().is_power_of_two());
- kzg10::KZG10::commit_lagrange(
- &lagrange_basis,
- &evaluations.evaluations,
- hiding_bound,
- rng_ref,
- )
- }
- PolynomialWithBasis::Monomial { polynomial, degree_bound } => {
- let powers = if let Some(degree_bound) = degree_bound {
- ck.shifted_powers_of_beta_g(degree_bound).unwrap()
- } else {
- ck.powers()
- };
-
- kzg10::KZG10::commit(&powers, &polynomial, hiding_bound, rng_ref)
- }
+ let (comm, rand) = {
+ let rng_ref = rng.as_mut().map(|s| s as _);
+ match p.polynomial {
+ PolynomialWithBasis::Lagrange { evaluations } => {
+ let domain = crate::fft::EvaluationDomain::new(evaluations.evaluations.len()).unwrap();
+ let lagrange_basis = ck
+ .lagrange_basis(domain)
+ .ok_or(PCError::UnsupportedLagrangeBasisSize(domain.size()))?;
+ assert!(domain.size().is_power_of_two());
+ assert!(lagrange_basis.size().is_power_of_two());
+ kzg10::KZG10::commit_lagrange(
+ &lagrange_basis,
+ &evaluations.evaluations,
+ hiding_bound,
+ rng_ref,
+ )?
+ }
+ PolynomialWithBasis::Monomial { polynomial, degree_bound } => {
+ let powers = if let Some(degree_bound) = degree_bound {
+ ck.shifted_powers_of_beta_g(degree_bound).unwrap()
+ } else {
+ ck.powers()
+ };
+
+ kzg10::KZG10::commit(&powers, &polynomial, hiding_bound, rng_ref)?
}
- })
- .collect::, _>>()?
- .into_iter()
- .fold((E::G1Projective::zero(), Randomness::empty()), |mut a, b| {
- a.0.add_assign_mixed(&b.0.0);
- a.1 += (E::Fr::one(), &b.1);
- a
- });
- let comm = kzg10::KZGCommitment(comm.to_affine());
+ }
+ };
Ok((LabeledCommitment::new(label.to_string(), comm, degree_bound), rand))
});
}
let results: Vec> = pool.execute_all();
+
+ let mut labeled_comms = Vec::with_capacity(results.len());
+ let mut randomness = Vec::with_capacity(results.len());
for result in results {
let (comm, rand) = result?;
labeled_comms.push(comm);
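After this refactor the per-polynomial commitments are still produced by the job pool, but the output vectors are only created once the number of results is known, so they can be sized exactly. A minimal sketch of that collect-then-preallocate shape, with plain `Result` values standing in for the pool jobs:

```rust
use anyhow::Result;

fn main() -> Result<()> {
    // Stand-ins for the pool results: each job yields a (commitment, randomness) pair.
    let results: Vec<Result<(u64, u64)>> = (0u64..4).map(|i| Ok((i, i * 10))).collect();

    // Size the output vectors exactly, as the hunk does with
    // `Vec::with_capacity(results.len())`, instead of growing two empty Vecs
    // declared long before the lengths are known.
    let mut comms = Vec::with_capacity(results.len());
    let mut rands = Vec::with_capacity(results.len());
    for result in results {
        let (comm, rand) = result?; // the first failed job aborts the whole batch
        comms.push(comm);
        rands.push(rand);
    }
    assert_eq!((comms.len(), rands.len()), (4, 4));
    Ok(())
}
```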
@@ -266,22 +258,26 @@ impl> SonicKZG10 {
pub fn combine_for_open<'a>(
universal_prover: &UniversalProver,
ck: &CommitterUnionKey,
- labeled_polynomials: impl IntoIterator
- >,
- rands: impl IntoIterator
- >,
+ labeled_polynomials: impl ExactSizeIterator
- >,
+ rands: impl ExactSizeIterator
- >,
fs_rng: &mut S,
- ) -> Result<(DensePolynomial, Randomness), PCError>
+ ) -> Result<(DensePolynomial, Randomness)>
where
Randomness: 'a,
Commitment: 'a,
{
- Ok(Self::combine_polynomials(labeled_polynomials.into_iter().zip_eq(rands).map(|(p, r)| {
+ ensure!(labeled_polynomials.len() == rands.len());
+ let mut to_combine = Vec::with_capacity(labeled_polynomials.len());
+
+ for (p, r) in labeled_polynomials.zip_eq(rands) {
let enforced_degree_bounds: Option<&[usize]> = ck.enforced_degree_bounds.as_deref();
- kzg10::KZG10::::check_degrees_and_bounds(universal_prover.max_degree, enforced_degree_bounds, p)
- .unwrap();
+ kzg10::KZG10::::check_degrees_and_bounds(universal_prover.max_degree, enforced_degree_bounds, p)?;
let challenge = fs_rng.squeeze_short_nonnative_field_element::();
- (challenge, p.polynomial().to_dense(), r)
- })))
+ to_combine.push((challenge, p.polynomial().to_dense(), r));
+ }
+
+ Ok(Self::combine_polynomials(to_combine))
}
/// On input a list of labeled polynomials and a query set, `open` outputs a proof of evaluation
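Switching the parameters from `IntoIterator` to `ExactSizeIterator` is what makes the new `ensure!(labeled_polynomials.len() == rands.len())` possible: lengths can be compared before anything is consumed by `zip_eq`. A hedged sketch of that signature change in isolation (the function below is illustrative, not the real API):

```rust
use anyhow::{ensure, Result};
use itertools::Itertools;

/// Pairs up polynomials and their randomness, rejecting mismatched inputs up
/// front; `ExactSizeIterator` exposes `.len()` without consuming the iterators.
fn pair_up<P, R>(
    polys: impl ExactSizeIterator<Item = P>,
    rands: impl ExactSizeIterator<Item = R>,
) -> Result<Vec<(P, R)>> {
    ensure!(polys.len() == rands.len(), "got {} polynomials but {} randomness values", polys.len(), rands.len());
    Ok(polys.zip_eq(rands).collect())
}

fn main() -> Result<()> {
    let pairs = pair_up(["p0", "p1"].into_iter(), [7u64, 8].into_iter())?;
    assert_eq!(pairs, vec![("p0", 7), ("p1", 8)]);
    // Callers now pass `query_polys.into_iter()`-style arguments, as the later
    // hunk at the `combine_for_open` call site shows.
    assert!(pair_up([1].into_iter(), [1, 2].into_iter()).is_err());
    Ok(())
}
```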
@@ -289,15 +285,16 @@ impl> SonicKZG10 {
pub fn batch_open<'a>(
universal_prover: &UniversalProver,
ck: &CommitterUnionKey,
- labeled_polynomials: impl IntoIterator
- >,
+ labeled_polynomials: impl ExactSizeIterator
- >,
query_set: &QuerySet,
- rands: impl IntoIterator
- >,
+ rands: impl ExactSizeIterator
- >,
fs_rng: &mut S,
- ) -> Result, PCError>
+ ) -> Result>
where
Randomness: 'a,
Commitment: 'a,
{
+ ensure!(labeled_polynomials.len() == rands.len());
let poly_rand: HashMap<_, _> =
labeled_polynomials.into_iter().zip_eq(rands).map(|(poly, r)| (poly.label(), (poly, r))).collect();
@@ -326,7 +323,8 @@ impl> SonicKZG10 {
query_polys.push(*polynomial);
query_rands.push(*rand);
}
- let (polynomial, rand) = Self::combine_for_open(universal_prover, ck, query_polys, query_rands, fs_rng)?;
+ let (polynomial, rand) =
+ Self::combine_for_open(universal_prover, ck, query_polys.into_iter(), query_rands.into_iter(), fs_rng)?;
let _randomizer = fs_rng.squeeze_short_nonnative_field_element::();
pool.add_job(move || {
@@ -336,7 +334,7 @@ impl> SonicKZG10 {
proof
});
}
- let batch_proof = pool.execute_all().into_iter().collect::>().map(BatchProof);
+ let batch_proof = pool.execute_all().into_iter().collect::>().map(BatchProof).map_err(Into::into);
end_timer!(open_time);
batch_proof
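Several hunks in this file switch return types from `Result<_, PCError>` to `anyhow::Result<_>`; concrete `PCError` values are then raised with `bail!` or converted at the boundary with `.map_err(Into::into)`. A compact sketch of both conversions, assuming the domain error implements `std::error::Error` so it converts into `anyhow::Error`:

```rust
use anyhow::{bail, Result};
use std::fmt;

/// Toy stand-in for a domain error such as `PCError`.
#[derive(Debug)]
enum DemoError {
    DegreeIsZero,
}

impl fmt::Display for DemoError {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            Self::DegreeIsZero => write!(f, "this scheme does not support degree-0 polynomials"),
        }
    }
}

impl std::error::Error for DemoError {}

/// Old style: a concrete error type in the signature.
fn check_degree_concrete(degree: usize) -> Result<(), DemoError> {
    if degree == 0 { Err(DemoError::DegreeIsZero) } else { Ok(()) }
}

/// New style: `anyhow::Result`, raising the domain error with `bail!` and
/// converting concrete results at the boundary with `.map_err(Into::into)`.
fn check_degree_anyhow(degree: usize) -> Result<()> {
    if degree == 0 {
        bail!(DemoError::DegreeIsZero);
    }
    check_degree_concrete(degree).map_err(Into::into)
}

fn main() {
    assert!(check_degree_anyhow(0).is_err());
    assert!(check_degree_anyhow(3).is_ok());
}
```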
@@ -349,7 +347,7 @@ impl> SonicKZG10 {
values: &Evaluations,
proof: &BatchProof,
fs_rng: &mut S,
- ) -> Result
+ ) -> Result
where
Commitment: 'a,
{
@@ -374,6 +372,7 @@ impl> SonicKZG10 {
let mut combined_witness = E::G1Projective::zero();
let mut combined_adjusted_witness = E::G1Projective::zero();
+ ensure!(query_to_labels_map.len() == proof.0.len());
for ((_query_name, (query, labels)), p) in query_to_labels_map.into_iter().zip_eq(&proof.0) {
let mut comms_to_combine: Vec<&'_ LabeledCommitment<_>> = Vec::new();
let mut values_to_combine = Vec::new();
@@ -400,14 +399,14 @@ impl> SonicKZG10 {
p,
Some(randomizer),
fs_rng,
- );
+ )?;
randomizer = fs_rng.squeeze_short_nonnative_field_element::();
}
let result = Self::check_elems(vk, combined_comms, combined_witness, combined_adjusted_witness);
end_timer!(batch_check_time);
- result
+ result.map_err(Into::into)
}
pub fn open_combinations<'a>(
@@ -418,7 +417,7 @@ impl> SonicKZG10 {
rands: impl IntoIterator
- >,
query_set: &QuerySet,
fs_rng: &mut S,
- ) -> Result, PCError>
+ ) -> Result>
where
Randomness: 'a,
Commitment: 'a,
@@ -445,7 +444,7 @@ impl> SonicKZG10 {
label_map.get(label as &str).ok_or(PCError::MissingPolynomial { label: label.to_string() })?;
if let Some(cur_degree_bound) = cur_poly.degree_bound() {
if num_polys != 1 {
- return Err(PCError::EquationHasDegreeBounds(lc_label));
+ bail!(PCError::EquationHasDegreeBounds(lc_label));
}
assert!(coeff.is_one(), "Coefficient must be one for degree-bounded equations");
if let Some(old_degree_bound) = degree_bound {
@@ -482,7 +481,7 @@ impl> SonicKZG10 {
evaluations: &Evaluations,
proof: &BatchLCProof,
fs_rng: &mut S,
- ) -> Result
+ ) -> Result
where
Commitment: 'a,
{
@@ -515,10 +514,9 @@ impl> SonicKZG10 {
.ok_or(PCError::MissingPolynomial { label: label.to_string() })?;
if cur_comm.degree_bound().is_some() {
- if num_polys != 1 {
- return Err(PCError::EquationHasDegreeBounds(lc_label));
+ if num_polys != 1 || !coeff.is_one() {
+ bail!(PCError::EquationHasDegreeBounds(lc_label));
}
- assert!(coeff.is_one(), "Coefficient must be one for degree-bounded equations");
degree_bound = cur_comm.degree_bound();
}
coeffs_and_comms.push((*coeff, cur_comm.commitment()));
@@ -533,6 +531,7 @@ impl> SonicKZG10 {
let combined_comms_norm_time = start_timer!(|| "Normalizing commitments");
let comms = Self::normalize_commitments(lc_commitments);
+ ensure!(lc_info.len() == comms.len());
let lc_commitments = lc_info
.into_iter()
.zip_eq(comms)
@@ -571,7 +570,7 @@ impl> SonicKZG10 {
VariableBase::msm(&bases, &scalars)
}
- fn normalize_commitments(commitments: Vec) -> impl Iterator
- > {
+ fn normalize_commitments(commitments: Vec) -> impl ExactSizeIterator
- > {
let comms = E::G1Projective::batch_normalization_into_affine(commitments);
comms.into_iter().map(|c| kzg10::KZGCommitment(c))
}
@@ -584,18 +583,19 @@ impl> SonicKZG10 {
combined_witness: &mut E::G1Projective,
combined_adjusted_witness: &mut E::G1Projective,
vk: &UniversalVerifier,
- commitments: impl IntoIterator
- >>,
+ commitments: impl ExactSizeIterator
- >>,
point: E::Fr,
- values: impl IntoIterator